//===-- BasicBlockSections.cpp --------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// BasicBlockSections implementation.
//
// The purpose of this pass is to assign sections to basic blocks when the
// -fbasic-block-sections= option is used. Further, with profile information,
// only the subset of basic blocks with profiles are placed in separate
// sections and the rest are grouped in a cold section. The exception handling
// blocks are treated specially to ensure they are all in one section.
//
// Basic Block Sections
// ====================
//
// With the option -fbasic-block-sections=list, every function may be split
// into clusters of basic blocks. Every cluster will be emitted into a separate
// section with its basic blocks sequenced in the given order. To get the best
// performance, the clusters must form an optimal BB layout for the function.
// We insert a symbol at the beginning of every cluster's section to allow the
// linker to reorder the sections in any arbitrary sequence. A global order of
// these sections would encapsulate the function layout.
// For example, consider the following clusters for a function foo (consisting
// of 6 basic blocks 0, 1, ..., 5).
//
// 0 2
// 1 3 5
//
// * Basic blocks 0 and 2 are placed in one section with symbol `foo`
//   referencing the beginning of this section.
// * Basic blocks 1, 3, 5 are placed in a separate section. A new symbol
//   `foo.__part.1` will reference the beginning of this section.
// * Basic block 4 (note that it is not referenced in the list) is placed in
//   one section, and a new symbol `foo.cold` will point to it.
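//
// For illustration, a clustering like the one above is typically conveyed to
// the compiler through a basic block sections profile, e.g. via
// -fbasic-block-sections=list=<file>. A minimal sketch, assuming the legacy
// '!'-prefixed list encoding accepted by BasicBlockSectionsProfileReader (the
// exact, versioned syntax is defined there):
//
//   !foo
//   !!0 2
//   !!1 3 5
//
// Each '!!' line describes one cluster of basic block IDs; block 4 is absent
// from the list and therefore lands in the cold section.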
//
// There are a couple of challenges to be addressed:
//
// 1. The last basic block of every cluster should not have any implicit
//    fallthrough to its next basic block, as the next block can be reordered
//    by the linker. The compiler should make these fallthroughs explicit by
//    adding unconditional jumps (see the sketch after this list).
//
// 2. All inter-cluster branch targets would now need to be resolved by the
//    linker as they cannot be calculated during compile time. This is done
//    using static relocations. Further, the compiler tries to use short branch
//    instructions on some ISAs for small branch offsets. This is not possible
//    for inter-cluster branches as the offset is not determined at compile
//    time, and therefore, long branch instructions have to be used for those.
//
// 3. Debug Information (DebugInfo) and Call Frame Information (CFI) emission
//    needs special handling with basic block sections. DebugInfo needs to be
//    emitted with more relocations as basic block sections can break a
//    function into potentially several disjoint pieces, and CFI needs to be
//    emitted per cluster. This also bloats the object file and binary sizes.
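//
// As a rough illustration of point 1 (hypothetical x86-64 assembly; names
// reuse the foo example above): if the last block of `foo.__part.1`
// originally fell through into the block that now lives in `foo.cold`, the
// compiler materializes the fallthrough as an explicit branch:
//
//   foo.__part.1:
//     ...
//     jmp foo.cold        # was an implicit fallthrough before splitting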
//
// Basic Block Address Map
// =======================
//
// With -fbasic-block-address-map, we emit the offsets of BB addresses of
// every function into the .llvm_bb_addr_map section. Along with the function
// symbols, this allows for mapping of virtual addresses in PMU profiles back to
// the corresponding basic blocks. This logic is implemented in AsmPrinter. This
// pass only assigns the BBSectionType of every function to ``labels``.
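//
// For example, the map can be requested from clang with (illustrative
// invocation):
//
//   clang -fbasic-block-address-map -c foo.cc
//
// and the resulting .llvm_bb_addr_map section can be inspected with
// llvm-readobj --bb-addr-map foo.o.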
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/CodeGen/BasicBlockSectionUtils.h"
#include "llvm/CodeGen/BasicBlockSectionsProfileReader.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/InitializePasses.h"
#include "llvm/Target/TargetMachine.h"
#include <optional>

using namespace llvm;

// Placing the cold clusters in a separate section mitigates against poor
// profiles and allows optimizations such as hugepage mapping to be applied at a
// section granularity. Defaults to ".text.split." which is recognized by lld
// via the `-z keep-text-section-prefix` flag.
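// For instance, with the default prefix the cold cluster of a function foo
// typically ends up in a section named ".text.split.foo" (illustrative; the
// exact name is formed by the target's section selection logic).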
cl::opt<std::string> llvm::BBSectionsColdTextPrefix(
    "bbsections-cold-text-prefix",
    cl::desc("The text prefix to use for cold basic block clusters"),
    cl::init(".text.split."), cl::Hidden);

static cl::opt<bool> BBSectionsDetectSourceDrift(
    "bbsections-detect-source-drift",
    cl::desc("This checks if there is an FDO instrumentation profile hash "
             "mismatch for this function"),
    cl::init(true), cl::Hidden);

namespace {

class BasicBlockSections : public MachineFunctionPass {
public:
  static char ID;

  BasicBlockSectionsProfileReaderWrapperPass *BBSectionsProfileReader = nullptr;

  BasicBlockSections() : MachineFunctionPass(ID) {
    initializeBasicBlockSectionsPass(*PassRegistry::getPassRegistry());
  }

  StringRef getPassName() const override {
    return "Basic Block Sections Analysis";
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override;

  /// Identify basic blocks that need separate sections and prepare to emit
  /// them accordingly.
  bool runOnMachineFunction(MachineFunction &MF) override;

private:
  bool handleBBSections(MachineFunction &MF);
  bool handleBBAddrMap(MachineFunction &MF);
};

} // end anonymous namespace

char BasicBlockSections::ID = 0;
INITIALIZE_PASS_BEGIN(
    BasicBlockSections, "bbsections-prepare",
    "Prepares for basic block sections, by splitting functions "
    "into clusters of basic blocks.",
    false, false)
INITIALIZE_PASS_DEPENDENCY(BasicBlockSectionsProfileReaderWrapperPass)
INITIALIZE_PASS_END(BasicBlockSections, "bbsections-prepare",
                    "Prepares for basic block sections, by splitting functions "
                    "into clusters of basic blocks.",
                    false, false)

// This function updates and optimizes the branching instructions of every
// basic block in a given function to account for changes in the layout.
static void
updateBranches(MachineFunction &MF,
               const SmallVector<MachineBasicBlock *> &PreLayoutFallThroughs) {
  const TargetInstrInfo *TII = MF.getSubtarget().getInstrInfo();
  SmallVector<MachineOperand, 4> Cond;
  for (auto &MBB : MF) {
    auto NextMBBI = std::next(MBB.getIterator());
    auto *FTMBB = PreLayoutFallThroughs[MBB.getNumber()];
    // If this block had a fallthrough before, we need an explicit
    // unconditional branch to that block if either
    //  1- the block ends a section, which means its next block may be
    //     reordered by the linker, or
    //  2- the fallthrough block is not adjacent to the block in the new
    //     order.
    if (FTMBB && (MBB.isEndSection() || &*NextMBBI != FTMBB))
      TII->insertUnconditionalBranch(MBB, FTMBB, MBB.findBranchDebugLoc());

    // We do not optimize branches for machine basic blocks ending sections, as
    // their adjacent block might be reordered by the linker.
    if (MBB.isEndSection())
      continue;

    // It might be possible to optimize branches by flipping the branch
    // condition.
    Cond.clear();
    MachineBasicBlock *TBB = nullptr, *FBB = nullptr; // For analyzeBranch.
    if (TII->analyzeBranch(MBB, TBB, FBB, Cond))
      continue;
    MBB.updateTerminator(FTMBB);
  }
}

// This function sorts basic blocks according to the cluster information.
// All explicitly specified clusters of basic blocks will be ordered
// accordingly. All non-specified BBs go into a separate "Cold" section.
// Additionally, if exception handling landing pads end up in more than one
// cluster, they are moved into a single "Exception" section. Finally, clusters
// are ordered in increasing order of their IDs, with the "Exception" and
// "Cold" sections succeeding all other clusters.
// FuncClusterInfo represents the cluster information for basic blocks. It
// maps from the BBID of basic blocks to their cluster information. If this is
// empty, every basic block in the function gets a unique section.
static void
assignSections(MachineFunction &MF,
               const DenseMap<UniqueBBID, BBClusterInfo> &FuncClusterInfo) {
  assert(MF.hasBBSections() && "BB Sections is not set for function.");
  // This variable stores the section ID of the cluster containing eh_pads (if
  // all eh_pads are in one cluster). If more than one cluster contains
  // eh_pads, we set it equal to ExceptionSectionID.
  std::optional<MBBSectionID> EHPadsSectionID;

  for (auto &MBB : MF) {
    // With the 'all' option, every basic block is placed in a unique section.
    // With the 'list' option, every basic block is placed in a section
    // associated with its cluster, unless we want individual unique sections
    // for every basic block in this function (if FuncClusterInfo is empty).
    if (MF.getTarget().getBBSectionsType() == llvm::BasicBlockSection::All ||
        FuncClusterInfo.empty()) {
      // If unique sections are desired for all basic blocks of the function,
      // we set every basic block's section ID equal to its original position
      // in the layout (which is equal to its number). This ensures that basic
      // blocks are ordered canonically.
      MBB.setSectionID(MBB.getNumber());
    } else {
      auto I = FuncClusterInfo.find(*MBB.getBBID());
      if (I != FuncClusterInfo.end()) {
        MBB.setSectionID(I->second.ClusterID);
      } else {
        const TargetInstrInfo &TII =
            *MBB.getParent()->getSubtarget().getInstrInfo();

        if (TII.isMBBSafeToSplitToCold(MBB)) {
          // The BB goes into the special cold section if it is not specified
          // in the cluster info map.
          MBB.setSectionID(MBBSectionID::ColdSectionID);
        }
      }
    }

    if (MBB.isEHPad() && EHPadsSectionID != MBB.getSectionID() &&
        EHPadsSectionID != MBBSectionID::ExceptionSectionID) {
      // If we already have one cluster containing eh_pads, this must be
      // updated to ExceptionSectionID. Otherwise, we set it equal to the
      // current section ID.
      EHPadsSectionID = EHPadsSectionID ? MBBSectionID::ExceptionSectionID
                                        : MBB.getSectionID();
    }
  }

  // If EHPads are in more than one section, this places all of them in the
  // special exception section.
  if (EHPadsSectionID == MBBSectionID::ExceptionSectionID)
    for (auto &MBB : MF)
      if (MBB.isEHPad())
        MBB.setSectionID(*EHPadsSectionID);
}

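// Sorts the basic blocks of MF with the given comparator and then repairs the
// layout: section begin/end markers are recomputed and branches are updated
// for the new block order. A minimal usage sketch (hypothetical caller; any
// strict weak ordering over blocks of the same function that keeps the entry
// block first is acceptable):
//
//   sortBasicBlocksAndUpdateBranches(
//       MF, [](const MachineBasicBlock &A, const MachineBasicBlock &B) {
//         return A.getNumber() < B.getNumber(); // keep the current numbering
//       });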
void llvm::sortBasicBlocksAndUpdateBranches(
    MachineFunction &MF, MachineBasicBlockComparator MBBCmp) {
  [[maybe_unused]] const MachineBasicBlock *EntryBlock = &MF.front();
  SmallVector<MachineBasicBlock *> PreLayoutFallThroughs(MF.getNumBlockIDs());
  for (auto &MBB : MF)
    PreLayoutFallThroughs[MBB.getNumber()] =
        MBB.getFallThrough(/*JumpToFallThrough=*/false);

  MF.sort(MBBCmp);
  assert(&MF.front() == EntryBlock &&
         "Entry block should not be displaced by basic block sections");

  // Set IsBeginSection and IsEndSection according to the assigned section IDs.
  MF.assignBeginEndSections();

  // After reordering basic blocks, we must update basic block branches to
  // insert explicit fallthrough branches when required and optimize branches
  // when possible.
  updateBranches(MF, PreLayoutFallThroughs);
}

// If the exception section begins with a landing pad, that landing pad will
// assume a zero offset (relative to @LPStart) in the LSDA. However, a value of
// zero implies "no landing pad." This function inserts a NOP just before the EH
// pad label to ensure a nonzero offset.
void llvm::avoidZeroOffsetLandingPad(MachineFunction &MF) {
  for (auto &MBB : MF) {
    if (MBB.isBeginSection() && MBB.isEHPad()) {
      MachineBasicBlock::iterator MI = MBB.begin();
      while (!MI->isEHLabel())
        ++MI;
      MF.getSubtarget().getInstrInfo()->insertNoop(MBB, MI);
    }
  }
}

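// Returns true (unless source drift detection is disabled) if the function is
// annotated as having an instrumentation profile hash mismatch, i.e. the
// source has drifted from the profile that was used to derive the basic block
// clusters. For reference, such a function typically carries IR metadata
// along these lines (illustrative sketch; only the "instr_prof_hash_mismatch"
// string is what the code below checks for):
//
//   define void @foo() !annotation !42 { ... }
//   !42 = !{!"instr_prof_hash_mismatch"}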
bool llvm::hasInstrProfHashMismatch(MachineFunction &MF) {
  if (!BBSectionsDetectSourceDrift)
    return false;

  const char MetadataName[] = "instr_prof_hash_mismatch";
  auto *Existing = MF.getFunction().getMetadata(LLVMContext::MD_annotation);
  if (Existing) {
    MDTuple *Tuple = cast<MDTuple>(Existing);
    for (const auto &N : Tuple->operands())
      if (N.equalsStr(MetadataName))
        return true;
  }

  return false;
}

// Identify, arrange, and modify basic blocks which need separate sections
// according to the specification provided by the -fbasic-block-sections flag.
bool BasicBlockSections::handleBBSections(MachineFunction &MF) {
  auto BBSectionsType = MF.getTarget().getBBSectionsType();
  if (BBSectionsType == BasicBlockSection::None)
    return false;

  // Check for source drift. If the source has changed since the profiles
  // were obtained, optimizing basic blocks might be sub-optimal.
  // This only applies to BasicBlockSection::List as it creates
  // clusters of basic blocks using basic block ids. Source drift can
  // invalidate these groupings, leading to sub-optimal code generation with
  // regard to performance.
  if (BBSectionsType == BasicBlockSection::List &&
      hasInstrProfHashMismatch(MF))
    return false;
  // Renumber blocks before sorting them. This is useful for accessing the
  // original layout positions and finding the original fallthroughs.
  MF.RenumberBlocks();

  if (BBSectionsType == BasicBlockSection::Labels) {
    MF.setBBSectionsType(BBSectionsType);
    return true;
  }

  DenseMap<UniqueBBID, BBClusterInfo> FuncClusterInfo;
  if (BBSectionsType == BasicBlockSection::List) {
    auto [HasProfile, ClusterInfo] =
        getAnalysis<BasicBlockSectionsProfileReaderWrapperPass>()
            .getClusterInfoForFunction(MF.getName());
    if (!HasProfile)
      return false;
    for (auto &BBClusterInfo : ClusterInfo) {
      FuncClusterInfo.try_emplace(BBClusterInfo.BBID, BBClusterInfo);
    }
  }

  MF.setBBSectionsType(BBSectionsType);
  assignSections(MF, FuncClusterInfo);

  const MachineBasicBlock &EntryBB = MF.front();
  auto EntryBBSectionID = EntryBB.getSectionID();

  // Helper function for ordering BB sections as follows:
  // * Entry section (section including the entry block).
  // * Regular sections (in increasing order of their Number).
  // ...
  // * Exception section
  // * Cold section
  auto MBBSectionOrder = [EntryBBSectionID](const MBBSectionID &LHS,
                                            const MBBSectionID &RHS) {
    // We make sure that the section containing the entry block precedes all
    // the other sections.
    if (LHS == EntryBBSectionID || RHS == EntryBBSectionID)
      return LHS == EntryBBSectionID;
    return LHS.Type == RHS.Type ? LHS.Number < RHS.Number : LHS.Type < RHS.Type;
  };

  // We sort all basic blocks to make sure the basic blocks of every cluster are
  // contiguous and ordered accordingly. Furthermore, clusters are ordered in
  // increasing order of their section IDs, with the exception and the
  // cold section placed at the end of the function.
  // Also, we force the entry block of the function to be placed at the
  // beginning of the function, regardless of the requested order.
  auto Comparator = [&](const MachineBasicBlock &X,
                        const MachineBasicBlock &Y) {
    auto XSectionID = X.getSectionID();
    auto YSectionID = Y.getSectionID();
    if (XSectionID != YSectionID)
      return MBBSectionOrder(XSectionID, YSectionID);
    // Make sure that the entry block is placed at the beginning.
    if (&X == &EntryBB || &Y == &EntryBB)
      return &X == &EntryBB;
    // If the two basic blocks are in the same section, the order is decided by
    // their position within the section.
    if (XSectionID.Type == MBBSectionID::SectionType::Default)
      return FuncClusterInfo.lookup(*X.getBBID()).PositionInCluster <
             FuncClusterInfo.lookup(*Y.getBBID()).PositionInCluster;
    return X.getNumber() < Y.getNumber();
  };

  sortBasicBlocksAndUpdateBranches(MF, Comparator);
  avoidZeroOffsetLandingPad(MF);
  return true;
}

// When the BB address map needs to be generated, this renumbers basic blocks to
// make them appear in increasing order of their IDs in the function. This
// avoids the need to store basic block IDs in the BB address map section, since
// they can be determined implicitly.
bool BasicBlockSections::handleBBAddrMap(MachineFunction &MF) {
  if (MF.getTarget().getBBSectionsType() == BasicBlockSection::Labels)
    return false;
  if (!MF.getTarget().Options.BBAddrMap)
    return false;
  MF.RenumberBlocks();
  return true;
}

bool BasicBlockSections::runOnMachineFunction(MachineFunction &MF) {
  // First handle the basic block sections.
  auto R1 = handleBBSections(MF);
  // Handle basic block address map after basic block sections are finalized.
  auto R2 = handleBBAddrMap(MF);
  return R1 || R2;
}

void BasicBlockSections::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequired<BasicBlockSectionsProfileReaderWrapperPass>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

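// Factory for the legacy pass. For reference, TargetPassConfig schedules this
// pass near the end of addMachinePasses(), roughly as in this sketch (the
// exact guarding condition lives in TargetPassConfig.cpp):
//
//   if (TM->getBBSectionsType() != BasicBlockSection::None ||
//       TM->Options.BBAddrMap)
//     addPass(createBasicBlockSectionsPass());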
MachineFunctionPass *llvm::createBasicBlockSectionsPass() {
  return new BasicBlockSections();
}