xref: /freebsd/contrib/llvm-project/llvm/lib/CodeGen/BasicBlockSections.cpp (revision 770cf0a5f02dc8983a89c6568d741fbc25baa999)
1 //===-- BasicBlockSections.cpp ---=========--------------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // BasicBlockSections implementation.
10 //
11 // The purpose of this pass is to assign sections to basic blocks when
12 // -fbasic-block-sections= option is used. Further, with profile information
13 // only the subset of basic blocks with profiles are placed in separate sections
14 // and the rest are grouped in a cold section. The exception handling blocks are
15 // treated specially to ensure they are all in one section.
16 //
17 // Basic Block Sections
18 // ====================
19 //
20 // With option, -fbasic-block-sections=list, every function may be split into
21 // clusters of basic blocks. Every cluster will be emitted into a separate
22 // section with its basic blocks sequenced in the given order. To get the
23 // optimized performance, the clusters must form an optimal BB layout for the
24 // function. We insert a symbol at the beginning of every cluster's section to
25 // allow the linker to reorder the sections in any arbitrary sequence. A global
26 // order of these sections would encapsulate the function layout.
27 // For example, consider the following clusters for a function foo (consisting
28 // of 6 basic blocks 0, 1, ..., 5).
29 //
30 // 0 2
31 // 1 3 5
32 //
33 // * Basic blocks 0 and 2 are placed in one section with symbol `foo`
34 //   referencing the beginning of this section.
35 // * Basic blocks 1, 3, 5 are placed in a separate section. A new symbol
36 //   `foo.__part.1` will reference the beginning of this section.
37 // * Basic block 4 (note that it is not referenced in the list) is placed in
38 //   one section, and a new symbol `foo.cold` will point to it.
39 //
40 // There are a couple of challenges to be addressed:
41 //
42 // 1. The last basic block of every cluster should not have any implicit
43 //    fallthrough to its next basic block, as it can be reordered by the linker.
44 //    The compiler should make these fallthroughs explicit by adding
45 //    unconditional jumps.
46 //
47 // 2. All inter-cluster branch targets would now need to be resolved by the
48 //    linker as they cannot be calculated during compile time. This is done
49 //    using static relocations. Further, the compiler tries to use short branch
50 //    instructions on some ISAs for small branch offsets. This is not possible
51 //    for inter-cluster branches as the offset is not determined at compile
52 //    time, and therefore, long branch instructions have to be used for those.
53 //
54 // 3. Debug Information (DebugInfo) and Call Frame Information (CFI) emission
55 //    needs special handling with basic block sections. DebugInfo needs to be
56 //    emitted with more relocations as basic block sections can break a
57 //    function into potentially several disjoint pieces, and CFI needs to be
58 //    emitted per cluster. This also bloats the object file and binary sizes.
59 //
60 // Basic Block Address Map
61 // =======================
62 //
63 // With -fbasic-block-address-map, we emit the offsets of BB addresses of
64 // every function into the .llvm_bb_addr_map section. Along with the function
65 // symbols, this allows for mapping of virtual addresses in PMU profiles back to
66 // the corresponding basic blocks. This logic is implemented in AsmPrinter. This
67 // pass only assigns the BBSectionType of every function to ``labels``.
68 //
69 //===----------------------------------------------------------------------===//
70 
71 #include "llvm/ADT/SmallVector.h"
72 #include "llvm/ADT/StringRef.h"
73 #include "llvm/CodeGen/BasicBlockSectionUtils.h"
74 #include "llvm/CodeGen/BasicBlockSectionsProfileReader.h"
75 #include "llvm/CodeGen/MachineDominators.h"
76 #include "llvm/CodeGen/MachineFunction.h"
77 #include "llvm/CodeGen/MachineFunctionPass.h"
78 #include "llvm/CodeGen/MachinePostDominators.h"
79 #include "llvm/CodeGen/Passes.h"
80 #include "llvm/CodeGen/TargetInstrInfo.h"
81 #include "llvm/InitializePasses.h"
82 #include "llvm/Support/UniqueBBID.h"
83 #include "llvm/Target/TargetMachine.h"
84 #include <optional>
85 
86 using namespace llvm;
87 
// Placing the cold clusters in a separate section mitigates against poor
// profiles and allows optimizations such as hugepage mapping to be applied at a
// section granularity. Defaults to ".text.split." which is recognized by lld
// via the `-z keep-text-section-prefix` flag.
// NOTE(review): defined in the llvm namespace (qualified name), so it is
// presumably declared extern in a shared header — confirm against
// BasicBlockSectionUtils.h before relocating.
cl::opt<std::string> llvm::BBSectionsColdTextPrefix(
    "bbsections-cold-text-prefix",
    cl::desc("The text prefix to use for cold basic block clusters"),
    cl::init(".text.split."), cl::Hidden);
96 
// When enabled (the default), functions whose FDO instrumentation-profile hash
// no longer matches the source are detected by hasInstrProfHashMismatch() and
// skipped by the List sections mode, since drifted profiles would produce
// stale clusterings.
static cl::opt<bool> BBSectionsDetectSourceDrift(
    "bbsections-detect-source-drift",
    cl::desc("This checks if there is a fdo instr. profile hash "
             "mismatch for this function"),
    cl::init(true), cl::Hidden);
102 
namespace {

/// Legacy-pass-manager machine function pass that assigns sections to basic
/// blocks and reorders them according to the -fbasic-block-sections option,
/// and renumbers blocks when the BB address map is requested.
class BasicBlockSections : public MachineFunctionPass {
public:
  static char ID;

  // Pointer to the profile reader wrapper pass.
  // NOTE(review): never assigned or read in this file — the analysis is
  // fetched directly via getAnalysis<> in handleBBSections; confirm whether
  // this member is still needed.
  BasicBlockSectionsProfileReaderWrapperPass *BBSectionsProfileReader = nullptr;

  BasicBlockSections() : MachineFunctionPass(ID) {
    initializeBasicBlockSectionsPass(*PassRegistry::getPassRegistry());
  }

  StringRef getPassName() const override {
    return "Basic Block Sections Analysis";
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override;

  /// Identify basic blocks that need separate sections and prepare to emit them
  /// accordingly.
  bool runOnMachineFunction(MachineFunction &MF) override;

private:
  // Assigns section IDs and reorders blocks per -fbasic-block-sections.
  bool handleBBSections(MachineFunction &MF);
  // Renumbers blocks for implicit-ID emission of the BB address map.
  bool handleBBAddrMap(MachineFunction &MF);
};

} // end anonymous namespace
131 
char BasicBlockSections::ID = 0;
// Register the pass with the legacy pass manager under the internal name
// "bbsections-prepare" and declare its dependency on the cluster-profile
// reader analysis.
INITIALIZE_PASS_BEGIN(
    BasicBlockSections, "bbsections-prepare",
    "Prepares for basic block sections, by splitting functions "
    "into clusters of basic blocks.",
    false, false)
INITIALIZE_PASS_DEPENDENCY(BasicBlockSectionsProfileReaderWrapperPass)
INITIALIZE_PASS_END(BasicBlockSections, "bbsections-prepare",
                    "Prepares for basic block sections, by splitting functions "
                    "into clusters of basic blocks.",
                    false, false)
143 
144 // This function updates and optimizes the branching instructions of every basic
145 // block in a given function to account for changes in the layout.
146 static void
147 updateBranches(MachineFunction &MF,
148                const SmallVector<MachineBasicBlock *> &PreLayoutFallThroughs) {
149   const TargetInstrInfo *TII = MF.getSubtarget().getInstrInfo();
150   SmallVector<MachineOperand, 4> Cond;
151   for (auto &MBB : MF) {
152     auto NextMBBI = std::next(MBB.getIterator());
153     auto *FTMBB = PreLayoutFallThroughs[MBB.getNumber()];
154     // If this block had a fallthrough before we need an explicit unconditional
155     // branch to that block if either
156     //     1- the block ends a section, which means its next block may be
157     //        reorderd by the linker, or
158     //     2- the fallthrough block is not adjacent to the block in the new
159     //        order.
160     if (FTMBB && (MBB.isEndSection() || &*NextMBBI != FTMBB))
161       TII->insertUnconditionalBranch(MBB, FTMBB, MBB.findBranchDebugLoc());
162 
163     // We do not optimize branches for machine basic blocks ending sections, as
164     // their adjacent block might be reordered by the linker.
165     if (MBB.isEndSection())
166       continue;
167 
168     // It might be possible to optimize branches by flipping the branch
169     // condition.
170     Cond.clear();
171     MachineBasicBlock *TBB = nullptr, *FBB = nullptr; // For analyzeBranch.
172     if (TII->analyzeBranch(MBB, TBB, FBB, Cond))
173       continue;
174     MBB.updateTerminator(FTMBB);
175   }
176 }
177 
// This function sorts basic blocks according to the cluster's information.
// All explicitly specified clusters of basic blocks will be ordered
// accordingly. All non-specified BBs go into a separate "Cold" section.
// Additionally, if exception handling landing pads end up in more than one
// clusters, they are moved into a single "Exception" section. Eventually,
// clusters are ordered in increasing order of their IDs, with the "Exception"
// and "Cold" succeeding all other clusters.
// FuncClusterInfo represents the cluster information for basic blocks. It
// maps from BBID of basic blocks to their cluster information. If this is
// empty, it means unique sections for all basic blocks in the function.
static void
assignSections(MachineFunction &MF,
               const DenseMap<UniqueBBID, BBClusterInfo> &FuncClusterInfo) {
  assert(MF.hasBBSections() && "BB Sections is not set for function.");
  // This variable stores the section ID of the cluster containing eh_pads (if
  // all eh_pads are one cluster). If more than one cluster contain eh_pads, we
  // set it equal to ExceptionSectionID.
  std::optional<MBBSectionID> EHPadsSectionID;

  for (auto &MBB : MF) {
    // With the 'all' option, every basic block is placed in a unique section.
    // With the 'list' option, every basic block is placed in a section
    // associated with its cluster, unless we want individual unique sections
    // for every basic block in this function (if FuncClusterInfo is empty).
    if (MF.getTarget().getBBSectionsType() == llvm::BasicBlockSection::All ||
        FuncClusterInfo.empty()) {
      // If unique sections are desired for all basic blocks of the function, we
      // set every basic block's section ID equal to its original position in
      // the layout (which is equal to its number). This ensures that basic
      // blocks are ordered canonically.
      MBB.setSectionID(MBB.getNumber());
    } else {
      auto I = FuncClusterInfo.find(*MBB.getBBID());
      if (I != FuncClusterInfo.end()) {
        // The block is listed in a cluster: adopt that cluster's section.
        MBB.setSectionID(I->second.ClusterID);
      } else {
        const TargetInstrInfo &TII =
            *MBB.getParent()->getSubtarget().getInstrInfo();

        if (TII.isMBBSafeToSplitToCold(MBB)) {
          // BB goes into the special cold section if it is not specified in the
          // cluster info map.
          // (Blocks the target deems unsafe to split keep their default
          // section ID instead.)
          MBB.setSectionID(MBBSectionID::ColdSectionID);
        }
      }
    }

    // Track where EH pads land. The two guards skip the update when this pad
    // is in the same section as the previously seen pads, or when we have
    // already decided on the Exception section (the state is then final).
    if (MBB.isEHPad() && EHPadsSectionID != MBB.getSectionID() &&
        EHPadsSectionID != MBBSectionID::ExceptionSectionID) {
      // If we already have one cluster containing eh_pads, this must be updated
      // to ExceptionSectionID. Otherwise, we set it equal to the current
      // section ID.
      EHPadsSectionID = EHPadsSectionID ? MBBSectionID::ExceptionSectionID
                                        : MBB.getSectionID();
    }
  }

  // If EHPads are in more than one section, this places all of them in the
  // special exception section.
  if (EHPadsSectionID == MBBSectionID::ExceptionSectionID)
    for (auto &MBB : MF)
      if (MBB.isEHPad())
        MBB.setSectionID(*EHPadsSectionID);
}
242 
243 void llvm::sortBasicBlocksAndUpdateBranches(
244     MachineFunction &MF, MachineBasicBlockComparator MBBCmp) {
245   [[maybe_unused]] const MachineBasicBlock *EntryBlock = &MF.front();
246   SmallVector<MachineBasicBlock *> PreLayoutFallThroughs(MF.getNumBlockIDs());
247   for (auto &MBB : MF)
248     PreLayoutFallThroughs[MBB.getNumber()] =
249         MBB.getFallThrough(/*JumpToFallThrough=*/false);
250 
251   MF.sort(MBBCmp);
252   assert(&MF.front() == EntryBlock &&
253          "Entry block should not be displaced by basic block sections");
254 
255   // Set IsBeginSection and IsEndSection according to the assigned section IDs.
256   MF.assignBeginEndSections();
257 
258   // After reordering basic blocks, we must update basic block branches to
259   // insert explicit fallthrough branches when required and optimize branches
260   // when possible.
261   updateBranches(MF, PreLayoutFallThroughs);
262 }
263 
264 // If the exception section begins with a landing pad, that landing pad will
265 // assume a zero offset (relative to @LPStart) in the LSDA. However, a value of
266 // zero implies "no landing pad." This function inserts a NOP just before the EH
267 // pad label to ensure a nonzero offset.
268 void llvm::avoidZeroOffsetLandingPad(MachineFunction &MF) {
269   for (auto &MBB : MF) {
270     if (MBB.isBeginSection() && MBB.isEHPad()) {
271       MachineBasicBlock::iterator MI = MBB.begin();
272       while (!MI->isEHLabel())
273         ++MI;
274       MF.getSubtarget().getInstrInfo()->insertNoop(MBB, MI);
275     }
276   }
277 }
278 
279 bool llvm::hasInstrProfHashMismatch(MachineFunction &MF) {
280   if (!BBSectionsDetectSourceDrift)
281     return false;
282 
283   const char MetadataName[] = "instr_prof_hash_mismatch";
284   auto *Existing = MF.getFunction().getMetadata(LLVMContext::MD_annotation);
285   if (Existing) {
286     MDTuple *Tuple = cast<MDTuple>(Existing);
287     for (const auto &N : Tuple->operands())
288       if (N.equalsStr(MetadataName))
289         return true;
290   }
291 
292   return false;
293 }
294 
// Identify, arrange, and modify basic blocks which need separate sections
// according to the specification provided by the -fbasic-block-sections flag.
// Returns true if the function's block layout was changed.
bool BasicBlockSections::handleBBSections(MachineFunction &MF) {
  auto BBSectionsType = MF.getTarget().getBBSectionsType();
  if (BBSectionsType == BasicBlockSection::None)
    return false;

  // Check for source drift. If the source has changed since the profiles
  // were obtained, optimizing basic blocks might be sub-optimal.
  // This only applies to BasicBlockSection::List as it creates
  // clusters of basic blocks using basic block ids. Source drift can
  // invalidate these groupings leading to sub-optimal code generation with
  // regards to performance.
  if (BBSectionsType == BasicBlockSection::List &&
      hasInstrProfHashMismatch(MF))
    return false;
  // Renumber blocks before sorting them. This is useful for accessing the
  // original layout positions and finding the original fallthroughs.
  MF.RenumberBlocks();

  // For 'list' mode, fetch this function's cluster layout from the profile
  // reader; functions without a profile entry are left untouched.
  DenseMap<UniqueBBID, BBClusterInfo> FuncClusterInfo;
  if (BBSectionsType == BasicBlockSection::List) {
    auto [HasProfile, ClusterInfo] =
        getAnalysis<BasicBlockSectionsProfileReaderWrapperPass>()
            .getClusterInfoForFunction(MF.getName());
    if (!HasProfile)
      return false;
    for (auto &BBClusterInfo : ClusterInfo) {
      FuncClusterInfo.try_emplace(BBClusterInfo.BBID, BBClusterInfo);
    }
  }

  MF.setBBSectionsType(BBSectionsType);
  assignSections(MF, FuncClusterInfo);

  const MachineBasicBlock &EntryBB = MF.front();
  auto EntryBBSectionID = EntryBB.getSectionID();

  // Helper function for ordering BB sections as follows:
  //   * Entry section (section including the entry block).
  //   * Regular sections (in increasing order of their Number).
  //     ...
  //   * Exception section
  //   * Cold section
  auto MBBSectionOrder = [EntryBBSectionID](const MBBSectionID &LHS,
                                            const MBBSectionID &RHS) {
    // We make sure that the section containing the entry block precedes all the
    // other sections.
    if (LHS == EntryBBSectionID || RHS == EntryBBSectionID)
      return LHS == EntryBBSectionID;
    // Otherwise order by section type first (Exception/Cold sort after
    // Default), then by section number within a type.
    return LHS.Type == RHS.Type ? LHS.Number < RHS.Number : LHS.Type < RHS.Type;
  };

  // We sort all basic blocks to make sure the basic blocks of every cluster are
  // contiguous and ordered accordingly. Furthermore, clusters are ordered in
  // increasing order of their section IDs, with the exception and the
  // cold section placed at the end of the function.
  // Also, we force the entry block of the function to be placed at the
  // beginning of the function, regardless of the requested order.
  auto Comparator = [&](const MachineBasicBlock &X,
                        const MachineBasicBlock &Y) {
    auto XSectionID = X.getSectionID();
    auto YSectionID = Y.getSectionID();
    if (XSectionID != YSectionID)
      return MBBSectionOrder(XSectionID, YSectionID);
    // Make sure that the entry block is placed at the beginning.
    if (&X == &EntryBB || &Y == &EntryBB)
      return &X == &EntryBB;
    // If the two basic block are in the same section, the order is decided by
    // their position within the section.
    // Blocks in a Default (cluster) section follow the profile-specified
    // in-cluster position; all other sections keep the original layout order.
    if (XSectionID.Type == MBBSectionID::SectionType::Default)
      return FuncClusterInfo.lookup(*X.getBBID()).PositionInCluster <
             FuncClusterInfo.lookup(*Y.getBBID()).PositionInCluster;
    return X.getNumber() < Y.getNumber();
  };

  sortBasicBlocksAndUpdateBranches(MF, Comparator);
  avoidZeroOffsetLandingPad(MF);
  return true;
}
375 
376 // When the BB address map needs to be generated, this renumbers basic blocks to
377 // make them appear in increasing order of their IDs in the function. This
378 // avoids the need to store basic block IDs in the BB address map section, since
379 // they can be determined implicitly.
380 bool BasicBlockSections::handleBBAddrMap(MachineFunction &MF) {
381   if (!MF.getTarget().Options.BBAddrMap)
382     return false;
383   MF.RenumberBlocks();
384   return true;
385 }
386 
387 bool BasicBlockSections::runOnMachineFunction(MachineFunction &MF) {
388   // First handle the basic block sections.
389   auto R1 = handleBBSections(MF);
390   // Handle basic block address map after basic block sections are finalized.
391   auto R2 = handleBBAddrMap(MF);
392 
393   // We renumber blocks, so update the dominator tree we want to preserve.
394   if (auto *WP = getAnalysisIfAvailable<MachineDominatorTreeWrapperPass>())
395     WP->getDomTree().updateBlockNumbers();
396   if (auto *WP = getAnalysisIfAvailable<MachinePostDominatorTreeWrapperPass>())
397     WP->getPostDomTree().updateBlockNumbers();
398 
399   return R1 || R2;
400 }
401 
// Declares the analyses this pass uses and preserves: all analyses are
// preserved; the profile reader is required (it supplies cluster info for the
// 'list' mode); the (post-)dominator trees are consumed only if already
// computed, so their block numbering can be refreshed after renumbering.
void BasicBlockSections::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequired<BasicBlockSectionsProfileReaderWrapperPass>();
  AU.addUsedIfAvailable<MachineDominatorTreeWrapperPass>();
  AU.addUsedIfAvailable<MachinePostDominatorTreeWrapperPass>();
  MachineFunctionPass::getAnalysisUsage(AU);
}
409 
410 MachineFunctionPass *llvm::createBasicBlockSectionsPass() {
411   return new BasicBlockSections();
412 }
413