//===- BranchProbabilityInfo.cpp - Branch Probability Analysis ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Loops should be simplified before this analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SCCIterator.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/PostDominators.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/BranchProbability.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <cstdint>
#include <iterator>
#include <map>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "branch-prob"

static cl::opt<bool> PrintBranchProb(
    "print-bpi", cl::init(false), cl::Hidden,
    cl::desc("Print the branch probability info."));

cl::opt<std::string> PrintBranchProbFuncName(
    "print-bpi-func-name", cl::Hidden,
    cl::desc("The option to specify the name of the function "
             "whose branch probability info is printed."));

INITIALIZE_PASS_BEGIN(BranchProbabilityInfoWrapperPass, "branch-prob",
                      "Branch Probability Analysis", false, true)
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(PostDominatorTreeWrapperPass)
INITIALIZE_PASS_END(BranchProbabilityInfoWrapperPass, "branch-prob",
                    "Branch Probability Analysis", false, true)

BranchProbabilityInfoWrapperPass::BranchProbabilityInfoWrapperPass()
    : FunctionPass(ID) {
  initializeBranchProbabilityInfoWrapperPassPass(
      *PassRegistry::getPassRegistry());
}

char BranchProbabilityInfoWrapperPass::ID = 0;

// Weights are for internal use only. They are used by heuristics to help to
// estimate edges' probability. Example:
//
// Using "Loop Branch Heuristics" we predict weights of edges for the
// block BB2.
//   ...
//    |
//    V
//  BB1<-+
//   |   |
//   |   | (Weight = 124)
//   V   |
//  BB2--+
//   |
//   | (Weight = 4)
//   V
//  BB3
//
// Probability of the edge BB2->BB1 = 124 / (124 + 4) = 0.96875
// Probability of the edge BB2->BB3 = 4 / (124 + 4) = 0.03125
static const uint32_t LBH_TAKEN_WEIGHT = 124;
static const uint32_t LBH_NONTAKEN_WEIGHT = 4;

/// Unreachable-terminating branch taken probability.
///
/// This is the probability for a branch being taken to a block that terminates
/// (eventually) in unreachable. These are predicted as unlikely as possible.
/// The reachable successors proportionally share the remaining probability.
static const BranchProbability UR_TAKEN_PROB = BranchProbability::getRaw(1);

/// Heuristics and lookup tables for non-loop branches:
/// Pointer Heuristics (PH)
static const uint32_t PH_TAKEN_WEIGHT = 20;
static const uint32_t PH_NONTAKEN_WEIGHT = 12;
static const BranchProbability
    PtrTakenProb(PH_TAKEN_WEIGHT, PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);
static const BranchProbability
    PtrUntakenProb(PH_NONTAKEN_WEIGHT, PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);

using ProbabilityList = SmallVector<BranchProbability>;
using ProbabilityTable = std::map<CmpInst::Predicate, ProbabilityList>;

/// Pointer comparisons:
static const ProbabilityTable PointerTable{
    {ICmpInst::ICMP_NE, {PtrTakenProb, PtrUntakenProb}}, /// p != q -> Likely
    {ICmpInst::ICMP_EQ, {PtrUntakenProb, PtrTakenProb}}, /// p == q -> Unlikely
};

/// Zero Heuristics (ZH)
static const uint32_t ZH_TAKEN_WEIGHT = 20;
static const uint32_t ZH_NONTAKEN_WEIGHT = 12;
static const BranchProbability
    ZeroTakenProb(ZH_TAKEN_WEIGHT, ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT);
static const BranchProbability
    ZeroUntakenProb(ZH_NONTAKEN_WEIGHT, ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT);

/// Integer compares with 0:
static const ProbabilityTable ICmpWithZeroTable{
    {CmpInst::ICMP_EQ, {ZeroUntakenProb, ZeroTakenProb}},  /// X == 0 -> Unlikely
    {CmpInst::ICMP_NE, {ZeroTakenProb, ZeroUntakenProb}},  /// X != 0 -> Likely
    {CmpInst::ICMP_SLT, {ZeroUntakenProb, ZeroTakenProb}}, /// X < 0 -> Unlikely
    {CmpInst::ICMP_SGT, {ZeroTakenProb, ZeroUntakenProb}}, /// X > 0 -> Likely
};

/// Integer compares with -1:
static const ProbabilityTable ICmpWithMinusOneTable{
    {CmpInst::ICMP_EQ, {ZeroUntakenProb, ZeroTakenProb}}, /// X == -1 -> Unlikely
    {CmpInst::ICMP_NE, {ZeroTakenProb, ZeroUntakenProb}}, /// X != -1 -> Likely
    // InstCombine canonicalizes X >= 0 into X > -1
    {CmpInst::ICMP_SGT, {ZeroTakenProb, ZeroUntakenProb}}, /// X >= 0 -> Likely
};

/// Integer compares with 1:
static const ProbabilityTable ICmpWithOneTable{
    // InstCombine canonicalizes X <= 0 into X < 1
    {CmpInst::ICMP_SLT, {ZeroUntakenProb, ZeroTakenProb}}, /// X <= 0 -> Unlikely
};

/// strcmp and similar functions return zero, a negative, or a positive value
/// depending on whether the first string is equal to, less than, or greater
/// than the second. We consider it likely that the strings are not equal, so a
/// comparison with zero is probably false, and a comparison with any other
/// number is also probably false given that what exactly is returned for
/// nonzero values is not specified. We know nothing about any kind of
/// comparison other than equality.
static const ProbabilityTable ICmpWithLibCallTable{
    {CmpInst::ICMP_EQ, {ZeroUntakenProb, ZeroTakenProb}},
    {CmpInst::ICMP_NE, {ZeroTakenProb, ZeroUntakenProb}},
};

// Floating-Point Heuristics (FPH)
static const uint32_t FPH_TAKEN_WEIGHT = 20;
static const uint32_t FPH_NONTAKEN_WEIGHT = 12;

/// This is the probability for an ordered floating point comparison.
static const uint32_t FPH_ORD_WEIGHT = 1024 * 1024 - 1;
/// This is the probability for an unordered floating point comparison, meaning
/// one or both of the operands are NaN. Usually it is used to test for an
/// exceptional case, so the result is unlikely.
static const uint32_t FPH_UNO_WEIGHT = 1;

static const BranchProbability FPOrdTakenProb(FPH_ORD_WEIGHT,
                                              FPH_ORD_WEIGHT + FPH_UNO_WEIGHT);
static const BranchProbability
    FPOrdUntakenProb(FPH_UNO_WEIGHT, FPH_ORD_WEIGHT + FPH_UNO_WEIGHT);
static const BranchProbability
    FPTakenProb(FPH_TAKEN_WEIGHT, FPH_TAKEN_WEIGHT + FPH_NONTAKEN_WEIGHT);
static const BranchProbability
    FPUntakenProb(FPH_NONTAKEN_WEIGHT, FPH_TAKEN_WEIGHT + FPH_NONTAKEN_WEIGHT);

/// Floating-Point compares:
static const ProbabilityTable FCmpTable{
    {FCmpInst::FCMP_ORD, {FPOrdTakenProb, FPOrdUntakenProb}}, /// !isnan -> Likely
    {FCmpInst::FCMP_UNO, {FPOrdUntakenProb, FPOrdTakenProb}}, /// isnan -> Unlikely
};

/// Set of dedicated "absolute" execution weights for a block. These weights are
/// meaningful relative to each other and their derivatives only.
enum class BlockExecWeight : std::uint32_t {
  /// Special weight used for cases with exact zero probability.
  ZERO = 0x0,
  /// Minimal possible non zero weight.
  LOWEST_NON_ZERO = 0x1,
  /// Weight to an 'unreachable' block.
  UNREACHABLE = ZERO,
  /// Weight to a block containing a non-returning call.
  NORETURN = LOWEST_NON_ZERO,
  /// Weight to the 'unwind' block of an invoke instruction.
  UNWIND = LOWEST_NON_ZERO,
  /// Weight to a 'cold' block. Cold blocks are the ones containing calls marked
  /// with attribute 'cold'.
  COLD = 0xffff,
  /// Default weight is used in cases when there is no dedicated execution
  /// weight set. It is not propagated through the domination line either.
  DEFAULT = 0xfffff
};

BranchProbabilityInfo::SccInfo::SccInfo(const Function &F) {
  // Record SCC numbers of blocks in the CFG to identify irreducible loops.
  // FIXME: We could only calculate this if the CFG is known to be irreducible
  // (perhaps cache this info in LoopInfo if we can easily calculate it there?).
  int SccNum = 0;
  for (scc_iterator<const Function *> It = scc_begin(&F); !It.isAtEnd();
       ++It, ++SccNum) {
    // Ignore single-block SCCs since they either aren't loops or LoopInfo will
    // catch them.
    const std::vector<const BasicBlock *> &Scc = *It;
    if (Scc.size() == 1)
      continue;

    LLVM_DEBUG(dbgs() << "BPI: SCC " << SccNum << ":");
    for (const auto *BB : Scc) {
      LLVM_DEBUG(dbgs() << " " << BB->getName());
      SccNums[BB] = SccNum;
      calculateSccBlockType(BB, SccNum);
    }
    LLVM_DEBUG(dbgs() << "\n");
  }
}

int BranchProbabilityInfo::SccInfo::getSCCNum(const BasicBlock *BB) const {
  auto SccIt = SccNums.find(BB);
  if (SccIt == SccNums.end())
    return -1;
  return SccIt->second;
}

void BranchProbabilityInfo::SccInfo::getSccEnterBlocks(
    int SccNum, SmallVectorImpl<BasicBlock *> &Enters) const {

  for (auto MapIt : SccBlocks[SccNum]) {
    const auto *BB = MapIt.first;
    if (isSCCHeader(BB, SccNum))
      for (const auto *Pred : predecessors(BB))
        if (getSCCNum(Pred) != SccNum)
          Enters.push_back(const_cast<BasicBlock *>(BB));
  }
}

void BranchProbabilityInfo::SccInfo::getSccExitBlocks(
    int SccNum, SmallVectorImpl<BasicBlock *> &Exits) const {
  for (auto MapIt : SccBlocks[SccNum]) {
    const auto *BB = MapIt.first;
    if (isSCCExitingBlock(BB, SccNum))
      for (const auto *Succ : successors(BB))
        if (getSCCNum(Succ) != SccNum)
          Exits.push_back(const_cast<BasicBlock *>(Succ));
  }
}

uint32_t BranchProbabilityInfo::SccInfo::getSccBlockType(const BasicBlock *BB,
                                                         int SccNum) const {
  assert(getSCCNum(BB) == SccNum);

  assert(SccBlocks.size() > static_cast<unsigned>(SccNum) && "Unknown SCC");
  const auto &SccBlockTypes = SccBlocks[SccNum];

  auto It = SccBlockTypes.find(BB);
  if (It != SccBlockTypes.end()) {
    return It->second;
  }
  return Inner;
}

void BranchProbabilityInfo::SccInfo::calculateSccBlockType(const BasicBlock *BB,
                                                           int SccNum) {
  assert(getSCCNum(BB) == SccNum);
  uint32_t BlockType = Inner;

  if (llvm::any_of(predecessors(BB), [&](const BasicBlock *Pred) {
        // Consider any block that is an entry point to the SCC as
        // a header.
        return getSCCNum(Pred) != SccNum;
      }))
    BlockType |= Header;

  if (llvm::any_of(successors(BB), [&](const BasicBlock *Succ) {
        return getSCCNum(Succ) != SccNum;
      }))
    BlockType |= Exiting;

  // Lazily compute the set of headers for a given SCC and cache the results
  // in the SccHeaderMap.
  if (SccBlocks.size() <= static_cast<unsigned>(SccNum))
    SccBlocks.resize(SccNum + 1);
  auto &SccBlockTypes = SccBlocks[SccNum];

  if (BlockType != Inner) {
    bool IsInserted;
    std::tie(std::ignore, IsInserted) =
        SccBlockTypes.insert(std::make_pair(BB, BlockType));
    assert(IsInserted && "Duplicated block in SCC");
  }
}

BranchProbabilityInfo::LoopBlock::LoopBlock(const BasicBlock *BB,
                                            const LoopInfo &LI,
                                            const SccInfo &SccI)
    : BB(BB) {
  LD.first = LI.getLoopFor(BB);
  if (!LD.first) {
    LD.second = SccI.getSCCNum(BB);
  }
}

bool BranchProbabilityInfo::isLoopEnteringEdge(const LoopEdge &Edge) const {
  const auto &SrcBlock = Edge.first;
  const auto &DstBlock = Edge.second;
  return (DstBlock.getLoop() &&
          !DstBlock.getLoop()->contains(SrcBlock.getLoop())) ||
         // Assume that SCCs can't be nested.
         (DstBlock.getSccNum() != -1 &&
          SrcBlock.getSccNum() != DstBlock.getSccNum());
}

bool BranchProbabilityInfo::isLoopExitingEdge(const LoopEdge &Edge) const {
  return isLoopEnteringEdge({Edge.second, Edge.first});
}

bool BranchProbabilityInfo::isLoopEnteringExitingEdge(
    const LoopEdge &Edge) const {
  return isLoopEnteringEdge(Edge) || isLoopExitingEdge(Edge);
}

bool BranchProbabilityInfo::isLoopBackEdge(const LoopEdge &Edge) const {
  const auto &SrcBlock = Edge.first;
  const auto &DstBlock = Edge.second;
  return SrcBlock.belongsToSameLoop(DstBlock) &&
         ((DstBlock.getLoop() &&
           DstBlock.getLoop()->getHeader() == DstBlock.getBlock()) ||
          (DstBlock.getSccNum() != -1 &&
           SccI->isSCCHeader(DstBlock.getBlock(), DstBlock.getSccNum())));
}

void BranchProbabilityInfo::getLoopEnterBlocks(
    const LoopBlock &LB, SmallVectorImpl<BasicBlock *> &Enters) const {
  if (LB.getLoop()) {
    auto *Header = LB.getLoop()->getHeader();
    Enters.append(pred_begin(Header), pred_end(Header));
  } else {
    assert(LB.getSccNum() != -1 && "LB doesn't belong to any loop?");
    SccI->getSccEnterBlocks(LB.getSccNum(), Enters);
  }
}

void BranchProbabilityInfo::getLoopExitBlocks(
    const LoopBlock &LB, SmallVectorImpl<BasicBlock *> &Exits) const {
  if (LB.getLoop()) {
    LB.getLoop()->getExitBlocks(Exits);
  } else {
    assert(LB.getSccNum() != -1 && "LB doesn't belong to any loop?");
    SccI->getSccExitBlocks(LB.getSccNum(), Exits);
  }
}

// Propagate existing explicit probabilities from either profile data or
// 'expect' intrinsic processing. Examine the metadata against the unreachable
// heuristic. The probability of an edge leading to an unreachable block is set
// to the minimum of the metadata weight and the unreachable heuristic.
bool BranchProbabilityInfo::calcMetadataWeights(const BasicBlock *BB) {
  const Instruction *TI = BB->getTerminator();
  assert(TI->getNumSuccessors() > 1 && "expected more than one successor!");
  if (!(isa<BranchInst>(TI) || isa<SwitchInst>(TI) || isa<IndirectBrInst>(TI) ||
        isa<InvokeInst>(TI)))
    return false;

  MDNode *WeightsNode = TI->getMetadata(LLVMContext::MD_prof);
  if (!WeightsNode)
    return false;

  // Check that the number of successors is manageable.
  assert(TI->getNumSuccessors() < UINT32_MAX && "Too many successors");

  // Ensure there are weights for all of the successors. Note that the first
  // operand to the metadata node is a name, not a weight.
  if (WeightsNode->getNumOperands() != TI->getNumSuccessors() + 1)
    return false;

  // Build up the final weights that will be used in a temporary buffer.
  // Compute the sum of all weights to later decide whether they need to
  // be scaled to fit in 32 bits.
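  // For illustration (metadata assumed, not taken from any particular input):
  // a two-successor branch carrying !prof !{!"branch_weights", i32 8, i32 2}
  // yields Weights = {8, 2} and WeightSum = 10, so, absent the unreachable
  // adjustment below, the edge probabilities become 8/10 and 2/10.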
  uint64_t WeightSum = 0;
  SmallVector<uint32_t, 2> Weights;
  SmallVector<unsigned, 2> UnreachableIdxs;
  SmallVector<unsigned, 2> ReachableIdxs;
  Weights.reserve(TI->getNumSuccessors());
  for (unsigned I = 1, E = WeightsNode->getNumOperands(); I != E; ++I) {
    ConstantInt *Weight =
        mdconst::dyn_extract<ConstantInt>(WeightsNode->getOperand(I));
    if (!Weight)
      return false;
    assert(Weight->getValue().getActiveBits() <= 32 &&
           "Too many bits for uint32_t");
    Weights.push_back(Weight->getZExtValue());
    WeightSum += Weights.back();
    const LoopBlock SrcLoopBB = getLoopBlock(BB);
    const LoopBlock DstLoopBB = getLoopBlock(TI->getSuccessor(I - 1));
    auto EstimatedWeight = getEstimatedEdgeWeight({SrcLoopBB, DstLoopBB});
    if (EstimatedWeight &&
        EstimatedWeight.getValue() <=
            static_cast<uint32_t>(BlockExecWeight::UNREACHABLE))
      UnreachableIdxs.push_back(I - 1);
    else
      ReachableIdxs.push_back(I - 1);
  }
  assert(Weights.size() == TI->getNumSuccessors() && "Checked above");

  // If the sum of weights does not fit in 32 bits, scale every weight down
  // accordingly.
  uint64_t ScalingFactor =
      (WeightSum > UINT32_MAX) ? WeightSum / UINT32_MAX + 1 : 1;

  if (ScalingFactor > 1) {
    WeightSum = 0;
    for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I) {
      Weights[I] /= ScalingFactor;
      WeightSum += Weights[I];
    }
  }
  assert(WeightSum <= UINT32_MAX &&
         "Expected weights to scale down to 32 bits");

  if (WeightSum == 0 || ReachableIdxs.size() == 0) {
    for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I)
      Weights[I] = 1;
    WeightSum = TI->getNumSuccessors();
  }

  // Set the probability.
  SmallVector<BranchProbability, 2> BP;
  for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I)
    BP.push_back({Weights[I], static_cast<uint32_t>(WeightSum)});

  // Examine the metadata against the unreachable heuristic.
  // If the unreachable heuristic is stronger, use it for this edge.
  if (UnreachableIdxs.size() == 0 || ReachableIdxs.size() == 0) {
    setEdgeProbability(BB, BP);
    return true;
  }

  auto UnreachableProb = UR_TAKEN_PROB;
  for (auto I : UnreachableIdxs)
    if (UnreachableProb < BP[I]) {
      BP[I] = UnreachableProb;
    }

  // The sum of all edge probabilities must be 1.0. If we modified the
  // probability of some edges then we must distribute the introduced
  // difference over the reachable blocks.
  //
  // Proportional distribution: the relation between probabilities of the
  // reachable edges is kept unchanged. That is, for any reachable edges i and j:
  //   newBP[i] / newBP[j] == oldBP[i] / oldBP[j] =>
  //   newBP[i] / oldBP[i] == newBP[j] / oldBP[j] == K
  // Where K is independent of i,j.
  //   newBP[i] == oldBP[i] * K
  // We need to find K.
  // Take the sum over all reachables of the left and right parts:
  //   sum_of_reachable(newBP) == K * sum_of_reachable(oldBP)
  // Sum of newBP must be equal to 1.0:
  //   sum_of_reachable(newBP) + sum_of_unreachable(newBP) == 1.0 =>
  //   sum_of_reachable(newBP) = 1.0 - sum_of_unreachable(newBP)
  // Where sum_of_unreachable(newBP) is what has been just changed.
  // Finally:
  //   K == sum_of_reachable(newBP) / sum_of_reachable(oldBP) =>
  //   K == (1.0 - sum_of_unreachable(newBP)) / sum_of_reachable(oldBP)
  BranchProbability NewUnreachableSum = BranchProbability::getZero();
  for (auto I : UnreachableIdxs)
    NewUnreachableSum += BP[I];

  BranchProbability NewReachableSum =
      BranchProbability::getOne() - NewUnreachableSum;

  BranchProbability OldReachableSum = BranchProbability::getZero();
  for (auto I : ReachableIdxs)
    OldReachableSum += BP[I];

  if (OldReachableSum != NewReachableSum) { // Anything to distribute?
    if (OldReachableSum.isZero()) {
      // If all oldBP[i] are zeroes then the proportional distribution results
      // in all zero probabilities and the error stays big. In this case we
      // evenly spread NewReachableSum over the reachable edges.
      BranchProbability PerEdge = NewReachableSum / ReachableIdxs.size();
      for (auto I : ReachableIdxs)
        BP[I] = PerEdge;
    } else {
      for (auto I : ReachableIdxs) {
        // We use uint64_t to avoid double rounding error of the following
        // calculation: BP[i] = BP[i] * NewReachableSum / OldReachableSum
        // The formula is taken from the private constructor
        // BranchProbability(uint32_t Numerator, uint32_t Denominator)
        uint64_t Mul = static_cast<uint64_t>(NewReachableSum.getNumerator()) *
                       BP[I].getNumerator();
        uint32_t Div = static_cast<uint32_t>(
            divideNearest(Mul, OldReachableSum.getNumerator()));
        BP[I] = BranchProbability::getRaw(Div);
      }
    }
  }

  setEdgeProbability(BB, BP);

  return true;
}

// Calculate Edge Weights using "Pointer Heuristics". Predict that a comparison
// between two pointers or between a pointer and NULL will fail.
bool BranchProbabilityInfo::calcPointerHeuristics(const BasicBlock *BB) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
  if (!CI || !CI->isEquality())
    return false;

  Value *LHS = CI->getOperand(0);

  if (!LHS->getType()->isPointerTy())
    return false;

  assert(CI->getOperand(1)->getType()->isPointerTy());

  auto Search = PointerTable.find(CI->getPredicate());
  if (Search == PointerTable.end())
    return false;
  setEdgeProbability(BB, Search->second);
  return true;
}

// Compute the unlikely successors to the block BB in the loop L, specifically
// those that are unlikely because this is a loop, and add them to the
// UnlikelyBlocks set.
static void
computeUnlikelySuccessors(const BasicBlock *BB, Loop *L,
                          SmallPtrSetImpl<const BasicBlock *> &UnlikelyBlocks) {
  // Sometimes in a loop we have a branch whose condition is made false by
  // taking it. This is typically something like
  //   int n = 0;
  //   while (...) {
  //     if (++n >= MAX) {
  //       n = 0;
  //     }
  //   }
  // In this sort of situation taking the branch means that at the very least it
  // won't be taken again in the next iteration of the loop, so we should
  // consider it less likely than a typical branch.
  //
  // We detect this by looking back through the graph of PHI nodes that sets the
  // value that the condition depends on, and seeing if we can reach a successor
  // block which can be determined to make the condition false.
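  //
  // E.g., in the sketch above (an assumed trace, not taken from any particular
  // input), the incoming value 0 from the "n = 0" successor is folded through
  // the collapsed chain (the increment) and the compare, producing "1 >= MAX",
  // which is false for any MAX > 1, so that successor is added to
  // UnlikelyBlocks.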
  //
  // FIXME: We currently consider unlikely blocks to be half as likely as other
  // blocks, but if we consider the example above the likelihood is actually
  // 1/MAX. We could therefore be more precise in how unlikely we consider
  // blocks to be, but it would require more careful examination of the form
  // of the comparison expression.
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return;

  // Check if the branch is based on an instruction compared with a constant.
  CmpInst *CI = dyn_cast<CmpInst>(BI->getCondition());
  if (!CI || !isa<Instruction>(CI->getOperand(0)) ||
      !isa<Constant>(CI->getOperand(1)))
    return;

  // Either the instruction must be a PHI, or a chain of operations involving
  // constants that ends in a PHI which we can then collapse into a single value
  // if the PHI value is known.
  Instruction *CmpLHS = dyn_cast<Instruction>(CI->getOperand(0));
  PHINode *CmpPHI = dyn_cast<PHINode>(CmpLHS);
  Constant *CmpConst = dyn_cast<Constant>(CI->getOperand(1));
  // Collect the instructions until we hit a PHI.
  SmallVector<BinaryOperator *, 1> InstChain;
  while (!CmpPHI && CmpLHS && isa<BinaryOperator>(CmpLHS) &&
         isa<Constant>(CmpLHS->getOperand(1))) {
    // Stop if the chain extends outside of the loop.
    if (!L->contains(CmpLHS))
      return;
    InstChain.push_back(cast<BinaryOperator>(CmpLHS));
    CmpLHS = dyn_cast<Instruction>(CmpLHS->getOperand(0));
    if (CmpLHS)
      CmpPHI = dyn_cast<PHINode>(CmpLHS);
  }
  if (!CmpPHI || !L->contains(CmpPHI))
    return;

  // Trace the phi node to find all values that come from successors of BB.
  SmallPtrSet<PHINode *, 8> VisitedInsts;
  SmallVector<PHINode *, 8> WorkList;
  WorkList.push_back(CmpPHI);
  VisitedInsts.insert(CmpPHI);
  while (!WorkList.empty()) {
    PHINode *P = WorkList.pop_back_val();
    for (BasicBlock *B : P->blocks()) {
      // Skip blocks that aren't part of the loop.
      if (!L->contains(B))
        continue;
      Value *V = P->getIncomingValueForBlock(B);
      // If the source is a PHI add it to the work list if we haven't
      // already visited it.
      if (PHINode *PN = dyn_cast<PHINode>(V)) {
        if (VisitedInsts.insert(PN).second)
          WorkList.push_back(PN);
        continue;
      }
      // If this incoming value is a constant and B is a successor of BB, then
      // we can constant-evaluate the compare to see if it makes the branch be
      // taken or not.
      Constant *CmpLHSConst = dyn_cast<Constant>(V);
      if (!CmpLHSConst || !llvm::is_contained(successors(BB), B))
        continue;
      // First collapse InstChain.
      for (Instruction *I : llvm::reverse(InstChain)) {
        CmpLHSConst = ConstantExpr::get(I->getOpcode(), CmpLHSConst,
                                        cast<Constant>(I->getOperand(1)), true);
        if (!CmpLHSConst)
          break;
      }
      if (!CmpLHSConst)
        continue;
      // Now constant-evaluate the compare.
      Constant *Result = ConstantExpr::getCompare(CI->getPredicate(),
                                                  CmpLHSConst, CmpConst, true);
      // If the result means we don't branch to the block then that block is
      // unlikely.
      if (Result &&
          ((Result->isZeroValue() && B == BI->getSuccessor(0)) ||
           (Result->isOneValue() && B == BI->getSuccessor(1))))
        UnlikelyBlocks.insert(B);
    }
  }
}

Optional<uint32_t>
BranchProbabilityInfo::getEstimatedBlockWeight(const BasicBlock *BB) const {
  auto WeightIt = EstimatedBlockWeight.find(BB);
  if (WeightIt == EstimatedBlockWeight.end())
    return None;
  return WeightIt->second;
}

Optional<uint32_t>
BranchProbabilityInfo::getEstimatedLoopWeight(const LoopData &L) const {
  auto WeightIt = EstimatedLoopWeight.find(L);
  if (WeightIt == EstimatedLoopWeight.end())
    return None;
  return WeightIt->second;
}

Optional<uint32_t>
BranchProbabilityInfo::getEstimatedEdgeWeight(const LoopEdge &Edge) const {
  // For edges entering a loop, take the weight of the loop rather than that of
  // an individual block in the loop.
  return isLoopEnteringEdge(Edge)
             ? getEstimatedLoopWeight(Edge.second.getLoopData())
             : getEstimatedBlockWeight(Edge.second.getBlock());
}

template <class IterT>
Optional<uint32_t> BranchProbabilityInfo::getMaxEstimatedEdgeWeight(
    const LoopBlock &SrcLoopBB, iterator_range<IterT> Successors) const {
  SmallVector<uint32_t, 4> Weights;
  Optional<uint32_t> MaxWeight;
  for (const BasicBlock *DstBB : Successors) {
    const LoopBlock DstLoopBB = getLoopBlock(DstBB);
    auto Weight = getEstimatedEdgeWeight({SrcLoopBB, DstLoopBB});

    if (!Weight)
      return None;

    if (!MaxWeight || MaxWeight.getValue() < Weight.getValue())
      MaxWeight = Weight;
  }

  return MaxWeight;
}

// Updates \p LoopBB's weight and returns true. If \p LoopBB already has an
// associated weight, it is left unchanged and false is returned.
//
// Please note that by the algorithm the weight is not expected to change once
// set, thus the 'false' status is used to track visited blocks.
bool BranchProbabilityInfo::updateEstimatedBlockWeight(
    LoopBlock &LoopBB, uint32_t BBWeight,
    SmallVectorImpl<BasicBlock *> &BlockWorkList,
    SmallVectorImpl<LoopBlock> &LoopWorkList) {
  BasicBlock *BB = LoopBB.getBlock();

  // In general, a weight is assigned to a block when it has a final value and
  // can't/shouldn't be changed. However, there are cases when a block
  // inherently has several (possibly "contradicting") weights. For example, an
  // "unwind" block may also contain a "cold" call. In that case the first set
  // weight is favored and all subsequent weights are ignored.
  if (!EstimatedBlockWeight.insert({BB, BBWeight}).second)
    return false;

  for (BasicBlock *PredBlock : predecessors(BB)) {
    LoopBlock PredLoop = getLoopBlock(PredBlock);
    // Add the affected block/loop to a working list.
    if (isLoopExitingEdge({PredLoop, LoopBB})) {
      if (!EstimatedLoopWeight.count(PredLoop.getLoopData()))
        LoopWorkList.push_back(PredLoop);
    } else if (!EstimatedBlockWeight.count(PredBlock))
      BlockWorkList.push_back(PredBlock);
  }
  return true;
}

// Starting from \p BB, traverse through dominator blocks and assign \p BBWeight
// to all such blocks that are post dominated by \p BB. In other words, to all
// blocks that are executed if and only if \p BB is executed. Importantly, we
// skip loops here for two reasons. First, weights of blocks in a loop should
// be scaled by the trip count (yet possibly unknown).
// Second, there is no value in doing so because it would not give any
// additional information about the distribution of probabilities inside the
// loop. The exception is loop 'enter' and 'exit' edges, which are handled in a
// special way in calcEstimatedHeuristics.
//
// In addition, \p WorkList is populated with basic blocks if at least one
// successor has an updated estimated weight.
void BranchProbabilityInfo::propagateEstimatedBlockWeight(
    const LoopBlock &LoopBB, DominatorTree *DT, PostDominatorTree *PDT,
    uint32_t BBWeight, SmallVectorImpl<BasicBlock *> &BlockWorkList,
    SmallVectorImpl<LoopBlock> &LoopWorkList) {
  const BasicBlock *BB = LoopBB.getBlock();
  const auto *DTStartNode = DT->getNode(BB);
  const auto *PDTStartNode = PDT->getNode(BB);

  // TODO: Consider propagating weight down the domination line as well.
  for (const auto *DTNode = DTStartNode; DTNode != nullptr;
       DTNode = DTNode->getIDom()) {
    auto *DomBB = DTNode->getBlock();
    // Consider blocks which lie on one 'line'.
    if (!PDT->dominates(PDTStartNode, PDT->getNode(DomBB)))
      // If BB doesn't post dominate DomBB it will not post dominate dominators
      // of DomBB as well.
      break;

    LoopBlock DomLoopBB = getLoopBlock(DomBB);
    const LoopEdge Edge{DomLoopBB, LoopBB};
    // Don't propagate weight to blocks belonging to different loops.
    if (!isLoopEnteringExitingEdge(Edge)) {
      if (!updateEstimatedBlockWeight(DomLoopBB, BBWeight, BlockWorkList,
                                      LoopWorkList))
        // If DomBB has a weight set then all its predecessors are already
        // processed (since we propagate weight up to the top of IR each time).
        break;
    } else if (isLoopExitingEdge(Edge)) {
      LoopWorkList.push_back(DomLoopBB);
    }
  }
}

Optional<uint32_t> BranchProbabilityInfo::getInitialEstimatedBlockWeight(
    const BasicBlock *BB) {
  // Returns true if \p BB has a call marked with the "NoReturn" attribute.
  auto hasNoReturn = [&](const BasicBlock *BB) {
    for (const auto &I : reverse(*BB))
      if (const CallInst *CI = dyn_cast<CallInst>(&I))
        if (CI->hasFnAttr(Attribute::NoReturn))
          return true;

    return false;
  };

  // Important note regarding the order of checks. They are ordered by weight
  // from lowest to highest. Doing so allows us to avoid "unstable" results
  // when several heuristics can be applied simultaneously.
  if (isa<UnreachableInst>(BB->getTerminator()) ||
      // If this block is terminated by a call to
      // @llvm.experimental.deoptimize then treat it like an unreachable
      // since it is expected to practically never execute.
      // TODO: Should we actually treat this as a never-returning call?
      BB->getTerminatingDeoptimizeCall())
    return hasNoReturn(BB)
               ? static_cast<uint32_t>(BlockExecWeight::NORETURN)
               : static_cast<uint32_t>(BlockExecWeight::UNREACHABLE);

  // Check if the block is the 'unwind' handler of some invoke instruction.
  for (const auto *Pred : predecessors(BB))
    if (Pred)
      if (const auto *II = dyn_cast<InvokeInst>(Pred->getTerminator()))
        if (II->getUnwindDest() == BB)
          return static_cast<uint32_t>(BlockExecWeight::UNWIND);

  // Check if the block contains a 'cold' call.
  for (const auto &I : *BB)
    if (const CallInst *CI = dyn_cast<CallInst>(&I))
      if (CI->hasFnAttr(Attribute::Cold))
        return static_cast<uint32_t>(BlockExecWeight::COLD);

  return None;
}

// Does an RPO traversal over all blocks in \p F and assigns weights to
// 'unreachable', 'noreturn', 'cold', 'unwind' blocks. In addition it does its
// best to propagate the weight up/down the IR.
void BranchProbabilityInfo::computeEestimateBlockWeight(
    const Function &F, DominatorTree *DT, PostDominatorTree *PDT) {
  SmallVector<BasicBlock *, 8> BlockWorkList;
  SmallVector<LoopBlock, 8> LoopWorkList;

  // By doing RPO we make sure that all predecessors already have weights
  // calculated before visiting their successors.
  ReversePostOrderTraversal<const Function *> RPOT(&F);
  for (const auto *BB : RPOT)
    if (auto BBWeight = getInitialEstimatedBlockWeight(BB))
      // If we were able to find an estimated weight for the block, set it for
      // this block and propagate it up the IR.
      propagateEstimatedBlockWeight(getLoopBlock(BB), DT, PDT,
                                    BBWeight.getValue(), BlockWorkList,
                                    LoopWorkList);

  // BlockWorkList/LoopWorkList contains blocks/loops with at least one
  // successor/exit having an estimated weight. Try to propagate weight to such
  // blocks/loops from successors/exits.
  // Process loops and blocks. Order is not important.
  do {
    while (!LoopWorkList.empty()) {
      const LoopBlock LoopBB = LoopWorkList.pop_back_val();

      if (EstimatedLoopWeight.count(LoopBB.getLoopData()))
        continue;

      SmallVector<BasicBlock *, 4> Exits;
      getLoopExitBlocks(LoopBB, Exits);
      auto LoopWeight = getMaxEstimatedEdgeWeight(
          LoopBB, make_range(Exits.begin(), Exits.end()));

      if (LoopWeight) {
        // If we never exit the loop then we can enter it once at maximum.
        if (LoopWeight <= static_cast<uint32_t>(BlockExecWeight::UNREACHABLE))
          LoopWeight = static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO);

        EstimatedLoopWeight.insert(
            {LoopBB.getLoopData(), LoopWeight.getValue()});
        // Add all blocks entering the loop into the working list.
        getLoopEnterBlocks(LoopBB, BlockWorkList);
      }
    }

    while (!BlockWorkList.empty()) {
      // We can reach here only if BlockWorkList is not empty.
      const BasicBlock *BB = BlockWorkList.pop_back_val();
      if (EstimatedBlockWeight.count(BB))
        continue;

      // We take the maximum over all weights of successors. In other words we
      // take the weight of the "hot" path. In theory we can probably find a
      // better function which gives higher accuracy results (compared to
      // "maximum") but I can't think of any right now. And I doubt it will
      // make any difference in practice.
      const LoopBlock LoopBB = getLoopBlock(BB);
      auto MaxWeight = getMaxEstimatedEdgeWeight(LoopBB, successors(BB));

      if (MaxWeight)
        propagateEstimatedBlockWeight(LoopBB, DT, PDT, MaxWeight.getValue(),
                                      BlockWorkList, LoopWorkList);
    }
  } while (!BlockWorkList.empty() || !LoopWorkList.empty());
}

// Calculate edge probabilities based on the blocks' estimated weights.
// Note that the gathered weights were not scaled for loops. Thus edges
// entering and exiting loops require special processing.
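//
// A sketch of the arithmetic (weights assumed for illustration, not taken from
// any particular input): with TC = LBH_TAKEN_WEIGHT / LBH_NONTAKEN_WEIGHT = 31,
// a loop-exiting successor with no dedicated estimate gets DEFAULT (0xfffff)
// scaled down to max(LOWEST_NON_ZERO, 0xfffff / 31) = 0x8421, while the
// successor staying in the loop keeps DEFAULT. The resulting probabilities,
// 0.96875 and 0.03125, match the "Loop Branch Heuristics" example near the top
// of this file.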
bool BranchProbabilityInfo::calcEstimatedHeuristics(const BasicBlock *BB) {
  assert(BB->getTerminator()->getNumSuccessors() > 1 &&
         "expected more than one successor!");

  const LoopBlock LoopBB = getLoopBlock(BB);

  SmallPtrSet<const BasicBlock *, 8> UnlikelyBlocks;
  uint32_t TC = LBH_TAKEN_WEIGHT / LBH_NONTAKEN_WEIGHT;
  if (LoopBB.getLoop())
    computeUnlikelySuccessors(BB, LoopBB.getLoop(), UnlikelyBlocks);

  // Changed to 'true' if at least one successor has an estimated weight.
  bool FoundEstimatedWeight = false;
  SmallVector<uint32_t, 4> SuccWeights;
  uint64_t TotalWeight = 0;
  // Go over all successors of BB and put their weights into SuccWeights.
  for (const BasicBlock *SuccBB : successors(BB)) {
    Optional<uint32_t> Weight;
    const LoopBlock SuccLoopBB = getLoopBlock(SuccBB);
    const LoopEdge Edge{LoopBB, SuccLoopBB};

    Weight = getEstimatedEdgeWeight(Edge);

    if (isLoopExitingEdge(Edge) &&
        // Avoid adjustment of ZERO weight since it should remain unchanged.
        Weight != static_cast<uint32_t>(BlockExecWeight::ZERO)) {
      // Scale down the loop exiting weight by the trip count.
      Weight = std::max(
          static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO),
          Weight.getValueOr(static_cast<uint32_t>(BlockExecWeight::DEFAULT)) /
              TC);
    }
    bool IsUnlikelyEdge = LoopBB.getLoop() && UnlikelyBlocks.contains(SuccBB);
    if (IsUnlikelyEdge &&
        // Avoid adjustment of ZERO weight since it should remain unchanged.
        Weight != static_cast<uint32_t>(BlockExecWeight::ZERO)) {
      // 'Unlikely' blocks have half the weight.
      Weight = std::max(
          static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO),
          Weight.getValueOr(static_cast<uint32_t>(BlockExecWeight::DEFAULT)) /
              2);
    }

    if (Weight)
      FoundEstimatedWeight = true;

    auto WeightVal =
        Weight.getValueOr(static_cast<uint32_t>(BlockExecWeight::DEFAULT));
    TotalWeight += WeightVal;
    SuccWeights.push_back(WeightVal);
  }

  // If none of the blocks have an estimated weight, bail out.
  // If TotalWeight is 0 then the weight of each successor is 0 as well and all
  // are equally likely. Bail out early to avoid division by zero.
  if (!FoundEstimatedWeight || TotalWeight == 0)
    return false;

  assert(SuccWeights.size() == succ_size(BB) && "Missed successor?");
  const unsigned SuccCount = SuccWeights.size();

  // If the sum of weights does not fit in 32 bits, scale every weight down
  // accordingly.
  if (TotalWeight > UINT32_MAX) {
    uint64_t ScalingFactor = TotalWeight / UINT32_MAX + 1;
    TotalWeight = 0;
    for (unsigned Idx = 0; Idx < SuccCount; ++Idx) {
      SuccWeights[Idx] /= ScalingFactor;
      if (SuccWeights[Idx] == static_cast<uint32_t>(BlockExecWeight::ZERO))
        SuccWeights[Idx] =
            static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO);
      TotalWeight += SuccWeights[Idx];
    }
    assert(TotalWeight <= UINT32_MAX && "Total weight overflows");
  }

  // Finally set probabilities to edges according to estimated block weights.
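  // For illustration (successor weights assumed, not taken from any particular
  // input): SuccWeights = {0xfffff, 0xffff}, i.e. DEFAULT and COLD, gives
  // TotalWeight = 0x10fffe and edge probabilities of roughly 0.94 and 0.06.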
  SmallVector<BranchProbability, 4> EdgeProbabilities(
      SuccCount, BranchProbability::getUnknown());

  for (unsigned Idx = 0; Idx < SuccCount; ++Idx) {
    EdgeProbabilities[Idx] =
        BranchProbability(SuccWeights[Idx], (uint32_t)TotalWeight);
  }
  setEdgeProbability(BB, EdgeProbabilities);
  return true;
}

bool BranchProbabilityInfo::calcZeroHeuristics(const BasicBlock *BB,
                                               const TargetLibraryInfo *TLI) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
  if (!CI)
    return false;

  auto GetConstantInt = [](Value *V) {
    if (auto *I = dyn_cast<BitCastInst>(V))
      return dyn_cast<ConstantInt>(I->getOperand(0));
    return dyn_cast<ConstantInt>(V);
  };

  Value *RHS = CI->getOperand(1);
  ConstantInt *CV = GetConstantInt(RHS);
  if (!CV)
    return false;

  // If the LHS is the result of AND'ing a value with a single bit bitmask,
  // we don't have information about probabilities.
  if (Instruction *LHS = dyn_cast<Instruction>(CI->getOperand(0)))
    if (LHS->getOpcode() == Instruction::And)
      if (ConstantInt *AndRHS = GetConstantInt(LHS->getOperand(1)))
        if (AndRHS->getValue().isPowerOf2())
          return false;

  // Check if the LHS is the return value of a library function.
  LibFunc Func = NumLibFuncs;
  if (TLI)
    if (CallInst *Call = dyn_cast<CallInst>(CI->getOperand(0)))
      if (Function *CalledFn = Call->getCalledFunction())
        TLI->getLibFunc(*CalledFn, Func);

  ProbabilityTable::const_iterator Search;
  if (Func == LibFunc_strcasecmp ||
      Func == LibFunc_strcmp ||
      Func == LibFunc_strncasecmp ||
      Func == LibFunc_strncmp ||
      Func == LibFunc_memcmp ||
      Func == LibFunc_bcmp) {
    Search = ICmpWithLibCallTable.find(CI->getPredicate());
    if (Search == ICmpWithLibCallTable.end())
      return false;
  } else if (CV->isZero()) {
    Search = ICmpWithZeroTable.find(CI->getPredicate());
    if (Search == ICmpWithZeroTable.end())
      return false;
  } else if (CV->isOne()) {
    Search = ICmpWithOneTable.find(CI->getPredicate());
    if (Search == ICmpWithOneTable.end())
      return false;
  } else if (CV->isMinusOne()) {
    Search = ICmpWithMinusOneTable.find(CI->getPredicate());
    if (Search == ICmpWithMinusOneTable.end())
      return false;
  } else {
    return false;
  }

  setEdgeProbability(BB, Search->second);
  return true;
}

bool BranchProbabilityInfo::calcFloatingPointHeuristics(const BasicBlock *BB) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  FCmpInst *FCmp = dyn_cast<FCmpInst>(Cond);
  if (!FCmp)
    return false;

  ProbabilityList ProbList;
  if (FCmp->isEquality()) {
    ProbList = !FCmp->isTrueWhenEqual() ?
        // f1 == f2 -> Unlikely
        ProbabilityList({FPTakenProb, FPUntakenProb}) :
        // f1 != f2 -> Likely
        ProbabilityList({FPUntakenProb, FPTakenProb});
  } else {
    auto Search = FCmpTable.find(FCmp->getPredicate());
    if (Search == FCmpTable.end())
      return false;
    ProbList = Search->second;
  }

  setEdgeProbability(BB, ProbList);
  return true;
}

void BranchProbabilityInfo::releaseMemory() {
  Probs.clear();
  Handles.clear();
}

bool BranchProbabilityInfo::invalidate(Function &, const PreservedAnalyses &PA,
                                       FunctionAnalysisManager::Invalidator &) {
  // Check whether the analysis, all analyses on functions, or the function's
  // CFG have been preserved.
  auto PAC = PA.getChecker<BranchProbabilityAnalysis>();
  return !(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Function>>() ||
           PAC.preservedSet<CFGAnalyses>());
}

void BranchProbabilityInfo::print(raw_ostream &OS) const {
  OS << "---- Branch Probabilities ----\n";
  // We print the probabilities from the last function the analysis ran over,
  // or the function it is currently running over.
  assert(LastF && "Cannot print prior to running over a function");
  for (const auto &BI : *LastF) {
    for (const BasicBlock *Succ : successors(&BI))
      printEdgeProbability(OS << " ", &BI, Succ);
  }
}

bool BranchProbabilityInfo::isEdgeHot(const BasicBlock *Src,
                                      const BasicBlock *Dst) const {
  // Hot probability is at least 4/5 = 80%
  // FIXME: Compare against a static "hot" BranchProbability.
  return getEdgeProbability(Src, Dst) > BranchProbability(4, 5);
}

/// Get the raw edge probability for the edge. If we can't find it, return a
/// default probability 1/N where N is the number of successors. Here an edge
/// is specified using the source block and an index into its successors.
BranchProbability
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
                                          unsigned IndexInSuccessors) const {
  auto I = Probs.find(std::make_pair(Src, IndexInSuccessors));
  assert((Probs.end() == Probs.find(std::make_pair(Src, 0))) ==
             (Probs.end() == I) &&
         "Probability for I-th successor must always be defined along with the "
         "probability for the first successor");

  if (I != Probs.end())
    return I->second;

  return {1, static_cast<uint32_t>(succ_size(Src))};
}

BranchProbability
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
                                          const_succ_iterator Dst) const {
  return getEdgeProbability(Src, Dst.getSuccessorIndex());
}

/// Get the raw edge probability calculated for the block pair. This returns
/// the sum of all raw edge probabilities from Src to Dst.
BranchProbability
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
                                          const BasicBlock *Dst) const {
  if (!Probs.count(std::make_pair(Src, 0)))
    return BranchProbability(llvm::count(successors(Src), Dst), succ_size(Src));

  auto Prob = BranchProbability::getZero();
  for (const_succ_iterator I = succ_begin(Src), E = succ_end(Src); I != E; ++I)
    if (*I == Dst)
      Prob += Probs.find(std::make_pair(Src, I.getSuccessorIndex()))->second;

  return Prob;
}

/// Set the edge probability for all edges at once.
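/// For example (an illustrative call, not taken from this file), a caller with
/// a two-successor terminator might pass {BranchProbability(3, 4),
/// BranchProbability(1, 4)} to make the first successor three times as likely;
/// the probabilities are expected to sum to 1 up to rounding error, which the
/// asserts below verify.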
void BranchProbabilityInfo::setEdgeProbability(
    const BasicBlock *Src, const SmallVectorImpl<BranchProbability> &Probs) {
  assert(Src->getTerminator()->getNumSuccessors() == Probs.size());
  eraseBlock(Src); // Erase stale data if any.
  if (Probs.size() == 0)
    return; // Nothing to set.

  Handles.insert(BasicBlockCallbackVH(Src, this));
  uint64_t TotalNumerator = 0;
  for (unsigned SuccIdx = 0; SuccIdx < Probs.size(); ++SuccIdx) {
    this->Probs[std::make_pair(Src, SuccIdx)] = Probs[SuccIdx];
    LLVM_DEBUG(dbgs() << "set edge " << Src->getName() << " -> " << SuccIdx
                      << " successor probability to " << Probs[SuccIdx]
                      << "\n");
    TotalNumerator += Probs[SuccIdx].getNumerator();
  }

  // Because of rounding errors the total probability cannot be checked to be
  // exactly 1.0, i.e. TotalNumerator == BranchProbability::getDenominator().
  // Instead, every single probability in Probs must be as accurate as possible.
  // This results in an error of 1/denominator at most, thus the total absolute
  // error should be within Probs.size() / BranchProbability::getDenominator().
  assert(TotalNumerator <= BranchProbability::getDenominator() + Probs.size());
  assert(TotalNumerator >= BranchProbability::getDenominator() - Probs.size());
  (void)TotalNumerator;
}

void BranchProbabilityInfo::copyEdgeProbabilities(BasicBlock *Src,
                                                  BasicBlock *Dst) {
  eraseBlock(Dst); // Erase stale data if any.
  unsigned NumSuccessors = Src->getTerminator()->getNumSuccessors();
  assert(NumSuccessors == Dst->getTerminator()->getNumSuccessors());
  if (NumSuccessors == 0)
    return; // Nothing to set.
  if (this->Probs.find(std::make_pair(Src, 0)) == this->Probs.end())
    return; // No probability is set for edges from Src. Keep the same for Dst.

  Handles.insert(BasicBlockCallbackVH(Dst, this));
  for (unsigned SuccIdx = 0; SuccIdx < NumSuccessors; ++SuccIdx) {
    auto Prob = this->Probs[std::make_pair(Src, SuccIdx)];
    this->Probs[std::make_pair(Dst, SuccIdx)] = Prob;
    LLVM_DEBUG(dbgs() << "set edge " << Dst->getName() << " -> " << SuccIdx
                      << " successor probability to " << Prob << "\n");
  }
}

raw_ostream &
BranchProbabilityInfo::printEdgeProbability(raw_ostream &OS,
                                            const BasicBlock *Src,
                                            const BasicBlock *Dst) const {
  const BranchProbability Prob = getEdgeProbability(Src, Dst);
  OS << "edge " << Src->getName() << " -> " << Dst->getName()
     << " probability is " << Prob
     << (isEdgeHot(Src, Dst) ? " [HOT edge]\n" : "\n");

  return OS;
}

void BranchProbabilityInfo::eraseBlock(const BasicBlock *BB) {
  LLVM_DEBUG(dbgs() << "eraseBlock " << BB->getName() << "\n");

  // Note that we cannot use successors of BB because the terminator of BB may
  // have changed when eraseBlock is called as a BasicBlockCallbackVH callback.
  // Instead we remove prob data for the block by iterating successors by their
  // indices from 0 till the last which exists. There could not be prob data for
  // a pair (BB, N) if there is no data for (BB, N-1) because the data is always
  // set for all successors from 0 to M at once by the method
  // setEdgeProbability().
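  // For example (indices chosen for illustration), if probabilities were set
  // for successors 0..2 of BB, the loop below erases (BB, 0), (BB, 1) and
  // (BB, 2), then stops as soon as (BB, 3) is not found.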
  Handles.erase(BasicBlockCallbackVH(BB, this));
  for (unsigned I = 0;; ++I) {
    auto MapI = Probs.find(std::make_pair(BB, I));
    if (MapI == Probs.end()) {
      assert(Probs.count(std::make_pair(BB, I + 1)) == 0 &&
             "Must be no more successors");
      return;
    }
    Probs.erase(MapI);
  }
}

void BranchProbabilityInfo::calculate(const Function &F, const LoopInfo &LoopI,
                                      const TargetLibraryInfo *TLI,
                                      DominatorTree *DT,
                                      PostDominatorTree *PDT) {
  LLVM_DEBUG(dbgs() << "---- Branch Probability Info : " << F.getName()
                    << " ----\n\n");
  LastF = &F; // Store the last function we ran on for printing.
  LI = &LoopI;

  SccI = std::make_unique<SccInfo>(F);

  assert(EstimatedBlockWeight.empty());
  assert(EstimatedLoopWeight.empty());

  std::unique_ptr<DominatorTree> DTPtr;
  std::unique_ptr<PostDominatorTree> PDTPtr;

  if (!DT) {
    DTPtr = std::make_unique<DominatorTree>(const_cast<Function &>(F));
    DT = DTPtr.get();
  }

  if (!PDT) {
    PDTPtr = std::make_unique<PostDominatorTree>(const_cast<Function &>(F));
    PDT = PDTPtr.get();
  }

  computeEestimateBlockWeight(F, DT, PDT);

  // Walk the basic blocks in post-order so that we can build up state about
  // the successors of a block iteratively.
  for (auto BB : post_order(&F.getEntryBlock())) {
    LLVM_DEBUG(dbgs() << "Computing probabilities for " << BB->getName()
                      << "\n");
    // If the block has fewer than two successors, there is no point in
    // setting a probability.
    if (BB->getTerminator()->getNumSuccessors() < 2)
      continue;
    if (calcMetadataWeights(BB))
      continue;
    if (calcEstimatedHeuristics(BB))
      continue;
    if (calcPointerHeuristics(BB))
      continue;
    if (calcZeroHeuristics(BB, TLI))
      continue;
    if (calcFloatingPointHeuristics(BB))
      continue;
  }

  EstimatedLoopWeight.clear();
  EstimatedBlockWeight.clear();
  SccI.reset();

  if (PrintBranchProb &&
      (PrintBranchProbFuncName.empty() ||
       F.getName().equals(PrintBranchProbFuncName))) {
    print(dbgs());
  }
}

void BranchProbabilityInfoWrapperPass::getAnalysisUsage(
    AnalysisUsage &AU) const {
  // We require DT so it's available when LI is available. The LI updating code
  // asserts that DT is also present so if we don't make sure that we have DT
  // here, that assert will trigger.
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.addRequired<LoopInfoWrapperPass>();
  AU.addRequired<TargetLibraryInfoWrapperPass>();
  AU.addRequired<PostDominatorTreeWrapperPass>();
  AU.setPreservesAll();
}

bool BranchProbabilityInfoWrapperPass::runOnFunction(Function &F) {
  const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
  const TargetLibraryInfo &TLI =
      getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
  DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  PostDominatorTree &PDT =
      getAnalysis<PostDominatorTreeWrapperPass>().getPostDomTree();
  BPI.calculate(F, LI, &TLI, &DT, &PDT);
  return false;
}

void BranchProbabilityInfoWrapperPass::releaseMemory() { BPI.releaseMemory(); }

void BranchProbabilityInfoWrapperPass::print(raw_ostream &OS,
                                             const Module *) const {
  BPI.print(OS);
}

AnalysisKey BranchProbabilityAnalysis::Key;
BranchProbabilityInfo
BranchProbabilityAnalysis::run(Function &F, FunctionAnalysisManager &AM) {
  BranchProbabilityInfo BPI;
  BPI.calculate(F, AM.getResult<LoopAnalysis>(F),
                &AM.getResult<TargetLibraryAnalysis>(F),
                &AM.getResult<DominatorTreeAnalysis>(F),
                &AM.getResult<PostDominatorTreeAnalysis>(F));
  return BPI;
}

PreservedAnalyses
BranchProbabilityPrinterPass::run(Function &F, FunctionAnalysisManager &AM) {
  OS << "Printing analysis results of BPI for function "
     << "'" << F.getName() << "':"
     << "\n";
  AM.getResult<BranchProbabilityAnalysis>(F).print(OS);
  return PreservedAnalyses::all();
}