//===-- BasicBlockSections.cpp ---=========--------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// BasicBlockSections implementation.
//
// The purpose of this pass is to assign sections to basic blocks when the
// -fbasic-block-sections= option is used. Further, with profile information,
// only the subset of basic blocks with profiles are placed in separate
// sections and the rest are grouped in a cold section. The exception handling
// blocks are treated specially to ensure they are all in one section.
//
// Basic Block Sections
// ====================
//
// With the option -fbasic-block-sections=list, every function may be split
// into clusters of basic blocks. Every cluster will be emitted into a separate
// section with its basic blocks sequenced in the given order. To get the
// optimized performance, the clusters must form an optimal BB layout for the
// function. We insert a symbol at the beginning of every cluster's section to
// allow the linker to reorder the sections in any arbitrary sequence. A global
// order of these sections would encapsulate the function layout.
// For example, consider the following clusters for a function foo (consisting
// of 6 basic blocks 0, 1, ..., 5).
//
// 0 2
// 1 3 5
//
// * Basic blocks 0 and 2 are placed in one section with symbol `foo`
//   referencing the beginning of this section.
// * Basic blocks 1, 3, 5 are placed in a separate section. A new symbol
//   `foo.__part.1` will reference the beginning of this section.
// * Basic block 4 (note that it is not referenced in the list) is placed in
//   one section, and a new symbol `foo.cold` will point to it.
//
// There are a couple of challenges to be addressed:
//
// 1. The last basic block of every cluster should not have any implicit
//    fallthrough to its next basic block, as it can be reordered by the
//    linker. The compiler should make these fallthroughs explicit by adding
//    unconditional jumps.
//
// 2. All inter-cluster branch targets would now need to be resolved by the
//    linker as they cannot be calculated during compile time. This is done
//    using static relocations. Further, the compiler tries to use short branch
//    instructions on some ISAs for small branch offsets. This is not possible
//    for inter-cluster branches as the offset is not determined at compile
//    time, and therefore, long branch instructions have to be used for those.
//
// 3. Debug Information (DebugInfo) and Call Frame Information (CFI) emission
//    needs special handling with basic block sections. DebugInfo needs to be
//    emitted with more relocations as basic block sections can break a
//    function into potentially several disjoint pieces, and CFI needs to be
//    emitted per cluster. This also bloats the object file and binary sizes.
//
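// As an illustration, the clustering in the example above could be conveyed
// through a cluster profile passed via -fbasic-block-sections=list=<file>.
// The exact syntax is owned by BasicBlockSectionsProfileReader; a minimal
// sketch of such a profile (one '!' line naming the function, one '!!' line
// per cluster listing its basic block ids in order) is:
//
//   !foo
//   !!0 2
//   !!1 3 5
//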
// Basic Block Labels
// ==================
//
// With -fbasic-block-sections=labels, we encode the offsets of BB addresses of
// every function into the .llvm_bb_addr_map section. Along with the function
// symbols, this allows for mapping of virtual addresses in PMU profiles back
// to the corresponding basic blocks. This logic is implemented in AsmPrinter.
// This pass only assigns the BBSectionType of every function to ``labels``.
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/CodeGen/BasicBlockSectionUtils.h"
#include "llvm/CodeGen/BasicBlockSectionsProfileReader.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/InitializePasses.h"
#include "llvm/Target/TargetMachine.h"
#include <optional>

using namespace llvm;

// Placing the cold clusters in a separate section mitigates the impact of poor
// profiles and allows optimizations such as hugepage mapping to be applied at
// a section granularity. Defaults to ".text.split." which is recognized by lld
// via the `-z keep-text-section-prefix` flag.
cl::opt<std::string> llvm::BBSectionsColdTextPrefix(
    "bbsections-cold-text-prefix",
    cl::desc("The text prefix to use for cold basic block clusters"),
    cl::init(".text.split."), cl::Hidden);

static cl::opt<bool> BBSectionsDetectSourceDrift(
    "bbsections-detect-source-drift",
    cl::desc("This checks if there is an FDO instr. profile hash "
             "mismatch for this function"),
    cl::init(true), cl::Hidden);

namespace {

class BasicBlockSections : public MachineFunctionPass {
public:
  static char ID;

  BasicBlockSectionsProfileReader *BBSectionsProfileReader = nullptr;

  BasicBlockSections() : MachineFunctionPass(ID) {
    initializeBasicBlockSectionsPass(*PassRegistry::getPassRegistry());
  }

  StringRef getPassName() const override {
    return "Basic Block Sections Analysis";
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override;

  /// Identify basic blocks that need separate sections and prepare to emit
  /// them accordingly.
  bool runOnMachineFunction(MachineFunction &MF) override;
};

} // end anonymous namespace

char BasicBlockSections::ID = 0;
INITIALIZE_PASS_BEGIN(
    BasicBlockSections, "bbsections-prepare",
    "Prepares for basic block sections, by splitting functions "
    "into clusters of basic blocks.",
    false, false)
INITIALIZE_PASS_DEPENDENCY(BasicBlockSectionsProfileReader)
INITIALIZE_PASS_END(BasicBlockSections, "bbsections-prepare",
                    "Prepares for basic block sections, by splitting functions "
                    "into clusters of basic blocks.",
                    false, false)

// This function updates and optimizes the branching instructions of every
// basic block in a given function to account for changes in the layout.
static void
updateBranches(MachineFunction &MF,
               const SmallVector<MachineBasicBlock *> &PreLayoutFallThroughs) {
  const TargetInstrInfo *TII = MF.getSubtarget().getInstrInfo();
  SmallVector<MachineOperand, 4> Cond;
  for (auto &MBB : MF) {
    auto NextMBBI = std::next(MBB.getIterator());
    auto *FTMBB = PreLayoutFallThroughs[MBB.getNumber()];
    // If this block had a fallthrough before, we need an explicit
    // unconditional branch to that block if either
    // 1- the block ends a section, which means its next block may be
    //    reordered by the linker, or
    // 2- the fallthrough block is not adjacent to the block in the new
    //    order.
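    // For example (hypothetical block names, purely illustrative): if bb.0
    // previously fell through into bb.1 but bb.1 is no longer laid out right
    // after bb.0, or bb.0 now ends its section, an explicit unconditional
    // branch to bb.1 has to be inserted here.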
    if (FTMBB && (MBB.isEndSection() || &*NextMBBI != FTMBB))
      TII->insertUnconditionalBranch(MBB, FTMBB, MBB.findBranchDebugLoc());

    // We do not optimize branches for machine basic blocks ending sections, as
    // their adjacent block might be reordered by the linker.
    if (MBB.isEndSection())
      continue;

    // It might be possible to optimize branches by flipping the branch
    // condition.
    Cond.clear();
    MachineBasicBlock *TBB = nullptr, *FBB = nullptr; // For analyzeBranch.
    if (TII->analyzeBranch(MBB, TBB, FBB, Cond))
      continue;
    MBB.updateTerminator(FTMBB);
  }
}

// This function provides the BBCluster information associated with a
// function. Returns true if a valid association exists and false otherwise.
bool getBBClusterInfoForFunction(
    const MachineFunction &MF,
    BasicBlockSectionsProfileReader *BBSectionsProfileReader,
    DenseMap<unsigned, BBClusterInfo> &V) {

  // Find the associated cluster information.
  std::pair<bool, SmallVector<BBClusterInfo, 4>> P =
      BBSectionsProfileReader->getBBClusterInfoForFunction(MF.getName());
  if (!P.first)
    return false;

  if (P.second.empty()) {
    // This indicates that sections are desired for all basic blocks of this
    // function. We clear the BBClusterInfo vector to denote this.
    V.clear();
    return true;
  }

  for (const BBClusterInfo &BBCI : P.second)
    V[BBCI.BBID] = BBCI;
  return true;
}

// This function sorts basic blocks according to the cluster's information.
// All explicitly specified clusters of basic blocks will be ordered
// accordingly. All non-specified BBs go into a separate "Cold" section.
// Additionally, if exception handling landing pads end up in more than one
// cluster, they are moved into a single "Exception" section. Finally, clusters
// are ordered in increasing order of their IDs, with the "Exception" and
// "Cold" sections succeeding all other clusters.
// FuncBBClusterInfo represents the cluster information for basic blocks. It
// maps from the BBID of basic blocks to their cluster information. If this is
// empty, it means unique sections are desired for all basic blocks in the
// function.
static void
assignSections(MachineFunction &MF,
               const DenseMap<unsigned, BBClusterInfo> &FuncBBClusterInfo) {
  assert(MF.hasBBSections() && "BB Sections is not set for function.");
  // This variable stores the section ID of the cluster containing eh_pads (if
  // all eh_pads are in one cluster). If more than one cluster contains
  // eh_pads, we set it equal to ExceptionSectionID.
  std::optional<MBBSectionID> EHPadsSectionID;

  for (auto &MBB : MF) {
    // With the 'all' option, every basic block is placed in a unique section.
    // With the 'list' option, every basic block is placed in a section
    // associated with its cluster, unless we want individual unique sections
    // for every basic block in this function (if FuncBBClusterInfo is empty).
    if (MF.getTarget().getBBSectionsType() == llvm::BasicBlockSection::All ||
        FuncBBClusterInfo.empty()) {
      // If unique sections are desired for all basic blocks of the function,
      // we set every basic block's section ID equal to its original position
      // in the layout (which is equal to its number). This ensures that basic
      // blocks are ordered canonically.
      MBB.setSectionID(MBB.getNumber());
    } else {
      // TODO: Replace `getBBIDOrNumber` with `getBBID` once version 1 is
      // deprecated.
      auto I = FuncBBClusterInfo.find(MBB.getBBIDOrNumber());
      if (I != FuncBBClusterInfo.end()) {
        MBB.setSectionID(I->second.ClusterID);
      } else {
        // The BB goes into the special cold section if it is not specified in
        // the cluster info map.
        MBB.setSectionID(MBBSectionID::ColdSectionID);
      }
    }

    if (MBB.isEHPad() && EHPadsSectionID != MBB.getSectionID() &&
        EHPadsSectionID != MBBSectionID::ExceptionSectionID) {
      // If we already have one cluster containing eh_pads, this must be
      // updated to ExceptionSectionID. Otherwise, we set it equal to the
      // current section ID.
      EHPadsSectionID = EHPadsSectionID ? MBBSectionID::ExceptionSectionID
                                        : MBB.getSectionID();
    }
  }

  // If EHPads are in more than one section, this places all of them in the
  // special exception section.
  if (EHPadsSectionID == MBBSectionID::ExceptionSectionID)
    for (auto &MBB : MF)
      if (MBB.isEHPad())
        MBB.setSectionID(*EHPadsSectionID);
}

void llvm::sortBasicBlocksAndUpdateBranches(
    MachineFunction &MF, MachineBasicBlockComparator MBBCmp) {
  [[maybe_unused]] const MachineBasicBlock *EntryBlock = &MF.front();
  SmallVector<MachineBasicBlock *> PreLayoutFallThroughs(MF.getNumBlockIDs());
  for (auto &MBB : MF)
    PreLayoutFallThroughs[MBB.getNumber()] = MBB.getFallThrough();

  MF.sort(MBBCmp);
  assert(&MF.front() == EntryBlock &&
         "Entry block should not be displaced by basic block sections");

  // Set IsBeginSection and IsEndSection according to the assigned section IDs.
  MF.assignBeginEndSections();

  // After reordering basic blocks, we must update basic block branches to
  // insert explicit fallthrough branches when required and optimize branches
  // when possible.
  updateBranches(MF, PreLayoutFallThroughs);
}

// If the exception section begins with a landing pad, that landing pad will
// assume a zero offset (relative to @LPStart) in the LSDA. However, a value of
// zero implies "no landing pad." This function inserts a NOP just before the
// EH pad label to ensure a nonzero offset.
void llvm::avoidZeroOffsetLandingPad(MachineFunction &MF) {
  for (auto &MBB : MF) {
    if (MBB.isBeginSection() && MBB.isEHPad()) {
      MachineBasicBlock::iterator MI = MBB.begin();
      while (!MI->isEHLabel())
        ++MI;
      MCInst Nop = MF.getSubtarget().getInstrInfo()->getNop();
      BuildMI(MBB, MI, DebugLoc(),
              MF.getSubtarget().getInstrInfo()->get(Nop.getOpcode()));
    }
  }
}

// This checks if the source of this function has drifted since this binary was
// profiled previously. For now, we are piggybacking on what PGO does to detect
// this with instrumented profiles. PGO emits a hash of the IR and checks if
// the hash has changed. Advanced basic block layout is usually done on top of
// PGO optimized binaries and hence this check works well in practice.
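// For illustration only (the exact IR shape below is an assumption; this pass
// only reads the annotation), a function whose profile no longer matches its
// source typically carries metadata along the lines of:
//
//   define void @foo() !annotation !0 { ... }
//   !0 = !{!"instr_prof_hash_mismatch"}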
static bool hasInstrProfHashMismatch(MachineFunction &MF) {
  if (!BBSectionsDetectSourceDrift)
    return false;

  const char MetadataName[] = "instr_prof_hash_mismatch";
  auto *Existing = MF.getFunction().getMetadata(LLVMContext::MD_annotation);
  if (Existing) {
    MDTuple *Tuple = cast<MDTuple>(Existing);
    for (const auto &N : Tuple->operands())
      if (N.equalsStr(MetadataName))
        return true;
  }

  return false;
}

bool BasicBlockSections::runOnMachineFunction(MachineFunction &MF) {
  auto BBSectionsType = MF.getTarget().getBBSectionsType();
  assert(BBSectionsType != BasicBlockSection::None &&
         "BB Sections not enabled!");

  // Check for source drift. If the source has changed since the profiles
  // were obtained, optimizing basic blocks might be sub-optimal.
  // This only applies to BasicBlockSection::List as it creates
  // clusters of basic blocks using basic block ids. Source drift can
  // invalidate these groupings, leading to sub-optimal code generation with
  // regard to performance.
  if (BBSectionsType == BasicBlockSection::List &&
      hasInstrProfHashMismatch(MF))
    return true;
  // Renumber blocks before sorting them. This is useful during sorting,
  // because basic blocks in the same section will retain the default order.
  // This renumbering should also be done for basic block labels to match the
  // profiles with the correct blocks.
  // For LLVM_BB_ADDR_MAP versions 2 and higher, this renumbering serves
  // the different purpose of accessing the original layout positions and
  // finding the original fallthroughs.
  // TODO: Change the above comment accordingly when version 1 is deprecated.
  MF.RenumberBlocks();

  if (BBSectionsType == BasicBlockSection::Labels) {
    MF.setBBSectionsType(BBSectionsType);
    return true;
  }

  BBSectionsProfileReader = &getAnalysis<BasicBlockSectionsProfileReader>();

  // Map from the BBID of blocks to their cluster information.
  DenseMap<unsigned, BBClusterInfo> FuncBBClusterInfo;
  if (BBSectionsType == BasicBlockSection::List &&
      !getBBClusterInfoForFunction(MF, BBSectionsProfileReader,
                                   FuncBBClusterInfo))
    return true;
  MF.setBBSectionsType(BBSectionsType);
  assignSections(MF, FuncBBClusterInfo);

  // We make sure that the cluster including the entry basic block precedes all
  // other clusters.
  auto EntryBBSectionID = MF.front().getSectionID();

  // Helper function for ordering BB sections as follows:
  // * Entry section (section including the entry block).
  // * Regular sections (in increasing order of their Number).
  //   ...
  // * Exception section
  // * Cold section
  auto MBBSectionOrder = [EntryBBSectionID](const MBBSectionID &LHS,
                                            const MBBSectionID &RHS) {
    // We make sure that the section containing the entry block precedes all
    // the other sections.
    if (LHS == EntryBBSectionID || RHS == EntryBBSectionID)
      return LHS == EntryBBSectionID;
    return LHS.Type == RHS.Type ? LHS.Number < RHS.Number : LHS.Type < RHS.Type;
  };

  // We sort all basic blocks to make sure the basic blocks of every cluster
  // are contiguous and ordered accordingly. Furthermore, clusters are ordered
  // in increasing order of their section IDs, with the exception and the cold
  // section placed at the end of the function.
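  // For instance, with the clusters from the example in the file header
  // ({0, 2} and {1, 3, 5}, with block 4 left unprofiled), this yields the
  // final layout 0 2 1 3 5 4: the entry cluster first, then the other
  // cluster, and finally the cold section containing block 4.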
  auto Comparator = [&](const MachineBasicBlock &X,
                        const MachineBasicBlock &Y) {
    auto XSectionID = X.getSectionID();
    auto YSectionID = Y.getSectionID();
    if (XSectionID != YSectionID)
      return MBBSectionOrder(XSectionID, YSectionID);
    // If the two basic blocks are in the same section, the order is decided by
    // their position within the section.
    if (XSectionID.Type == MBBSectionID::SectionType::Default)
      return FuncBBClusterInfo.lookup(X.getBBIDOrNumber()).PositionInCluster <
             FuncBBClusterInfo.lookup(Y.getBBIDOrNumber()).PositionInCluster;
    return X.getNumber() < Y.getNumber();
  };

  sortBasicBlocksAndUpdateBranches(MF, Comparator);
  avoidZeroOffsetLandingPad(MF);
  return true;
}

void BasicBlockSections::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequired<BasicBlockSectionsProfileReader>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

MachineFunctionPass *llvm::createBasicBlockSectionsPass() {
  return new BasicBlockSections();
}