//===- BranchProbabilityInfo.cpp - Branch Probability Analysis ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Loops should be simplified before this analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SCCIterator.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/PostDominators.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/BranchProbability.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "branch-prob"

static cl::opt<bool> PrintBranchProb(
    "print-bpi", cl::init(false), cl::Hidden,
    cl::desc("Print the branch probability info."));

cl::opt<std::string> PrintBranchProbFuncName(
    "print-bpi-func-name", cl::Hidden,
    cl::desc("The option to specify the name of the function "
             "whose branch probability info is printed."));

INITIALIZE_PASS_BEGIN(BranchProbabilityInfoWrapperPass, "branch-prob",
                      "Branch Probability Analysis", false, true)
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(PostDominatorTreeWrapperPass)
INITIALIZE_PASS_END(BranchProbabilityInfoWrapperPass, "branch-prob",
                    "Branch Probability Analysis", false, true)

BranchProbabilityInfoWrapperPass::BranchProbabilityInfoWrapperPass()
    : FunctionPass(ID) {
  initializeBranchProbabilityInfoWrapperPassPass(
      *PassRegistry::getPassRegistry());
}

char BranchProbabilityInfoWrapperPass::ID = 0;

// Weights are for internal use only. They are used by heuristics to help to
// estimate edges' probability. Example:
//
// Using "Loop Branch Heuristics" we predict weights of edges for the
// block BB2.
//         ...
//          |
//          V
//         BB1<-+
//          |   |
//          |   | (Weight = 124)
//          V   |
//         BB2--+
//          |
//          | (Weight = 4)
//          V
//         BB3
//
// Probability of the edge BB2->BB1 = 124 / (124 + 4) = 0.96875
// Probability of the edge BB2->BB3 =   4 / (124 + 4) = 0.03125
static const uint32_t LBH_TAKEN_WEIGHT = 124;
static const uint32_t LBH_NONTAKEN_WEIGHT = 4;

/// Unreachable-terminating branch taken probability.
///
/// This is the probability for a branch being taken to a block that terminates
/// (eventually) in unreachable. These are predicted as unlikely as possible.
/// All reachable probabilities proportionally share the remaining part.
static const BranchProbability UR_TAKEN_PROB = BranchProbability::getRaw(1);

static const uint32_t PH_TAKEN_WEIGHT = 20;
static const uint32_t PH_NONTAKEN_WEIGHT = 12;

static const uint32_t ZH_TAKEN_WEIGHT = 20;
static const uint32_t ZH_NONTAKEN_WEIGHT = 12;

static const uint32_t FPH_TAKEN_WEIGHT = 20;
static const uint32_t FPH_NONTAKEN_WEIGHT = 12;

/// This is the weight for an ordered floating point comparison.
static const uint32_t FPH_ORD_WEIGHT = 1024 * 1024 - 1;
/// This is the weight for an unordered floating point comparison, meaning at
/// least one of the operands is NaN. Usually it is used to test for an
/// exceptional case, so the result is unlikely.
static const uint32_t FPH_UNO_WEIGHT = 1;

/// Set of dedicated "absolute" execution weights for a block. These weights are
/// meaningful relative to each other and their derivatives only.
enum class BlockExecWeight : std::uint32_t {
  /// Special weight used for cases with exact zero probability.
  ZERO = 0x0,
  /// Minimal possible non zero weight.
  LOWEST_NON_ZERO = 0x1,
  /// Weight to an 'unreachable' block.
  UNREACHABLE = ZERO,
  /// Weight to a block containing non returning call.
  NORETURN = LOWEST_NON_ZERO,
  /// Weight to 'unwind' block of an invoke instruction.
  UNWIND = LOWEST_NON_ZERO,
  /// Weight to a 'cold' block. Cold blocks are the ones containing calls marked
  /// with attribute 'cold'.
  COLD = 0xffff,
  /// Default weight is used in cases when there is no dedicated execution
  /// weight set. It is not propagated through the domination line either.
  DEFAULT = 0xfffff
};
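
// For illustration: these weights only matter relative to one another. If one
// successor of a branch is estimated COLD (0xffff) while the other falls back
// to DEFAULT (0xfffff), the resulting edge probabilities are roughly
// 0xffff / (0xffff + 0xfffff) ~= 6% for the cold edge and ~94% for the other,
// assuming no other heuristic overrides these weights.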

BranchProbabilityInfo::SccInfo::SccInfo(const Function &F) {
  // Record SCC numbers of blocks in the CFG to identify irreducible loops.
  // FIXME: We could only calculate this if the CFG is known to be irreducible
  // (perhaps cache this info in LoopInfo if we can easily calculate it there?).
  int SccNum = 0;
  for (scc_iterator<const Function *> It = scc_begin(&F); !It.isAtEnd();
       ++It, ++SccNum) {
    // Ignore single-block SCCs since they either aren't loops or LoopInfo will
    // catch them.
    const std::vector<const BasicBlock *> &Scc = *It;
    if (Scc.size() == 1)
      continue;

    LLVM_DEBUG(dbgs() << "BPI: SCC " << SccNum << ":");
    for (const auto *BB : Scc) {
      LLVM_DEBUG(dbgs() << " " << BB->getName());
      SccNums[BB] = SccNum;
      calculateSccBlockType(BB, SccNum);
    }
    LLVM_DEBUG(dbgs() << "\n");
  }
}

int BranchProbabilityInfo::SccInfo::getSCCNum(const BasicBlock *BB) const {
  auto SccIt = SccNums.find(BB);
  if (SccIt == SccNums.end())
    return -1;
  return SccIt->second;
}

void BranchProbabilityInfo::SccInfo::getSccEnterBlocks(
    int SccNum, SmallVectorImpl<BasicBlock *> &Enters) const {

  for (auto MapIt : SccBlocks[SccNum]) {
    const auto *BB = MapIt.first;
    if (isSCCHeader(BB, SccNum))
      for (const auto *Pred : predecessors(BB))
        if (getSCCNum(Pred) != SccNum)
          Enters.push_back(const_cast<BasicBlock *>(BB));
  }
}

void BranchProbabilityInfo::SccInfo::getSccExitBlocks(
    int SccNum, SmallVectorImpl<BasicBlock *> &Exits) const {
  for (auto MapIt : SccBlocks[SccNum]) {
    const auto *BB = MapIt.first;
    if (isSCCExitingBlock(BB, SccNum))
      for (const auto *Succ : successors(BB))
        if (getSCCNum(Succ) != SccNum)
          Exits.push_back(const_cast<BasicBlock *>(BB));
  }
}

uint32_t BranchProbabilityInfo::SccInfo::getSccBlockType(const BasicBlock *BB,
                                                         int SccNum) const {
  assert(getSCCNum(BB) == SccNum);

  assert(SccBlocks.size() > static_cast<unsigned>(SccNum) && "Unknown SCC");
  const auto &SccBlockTypes = SccBlocks[SccNum];

  auto It = SccBlockTypes.find(BB);
  if (It != SccBlockTypes.end()) {
    return It->second;
  }
  return Inner;
}

void BranchProbabilityInfo::SccInfo::calculateSccBlockType(const BasicBlock *BB,
                                                           int SccNum) {
  assert(getSCCNum(BB) == SccNum);
  uint32_t BlockType = Inner;

  if (llvm::any_of(predecessors(BB), [&](const BasicBlock *Pred) {
        // Consider any block that is an entry point to the SCC as
        // a header.
        return getSCCNum(Pred) != SccNum;
      }))
    BlockType |= Header;

  if (llvm::any_of(successors(BB), [&](const BasicBlock *Succ) {
        return getSCCNum(Succ) != SccNum;
      }))
    BlockType |= Exiting;

  // Lazily compute the set of header/exiting blocks for a given SCC and cache
  // the results in SccBlocks.
  if (SccBlocks.size() <= static_cast<unsigned>(SccNum))
    SccBlocks.resize(SccNum + 1);
  auto &SccBlockTypes = SccBlocks[SccNum];

  if (BlockType != Inner) {
    bool IsInserted;
    std::tie(std::ignore, IsInserted) =
        SccBlockTypes.insert(std::make_pair(BB, BlockType));
    assert(IsInserted && "Duplicated block in SCC");
  }
}

BranchProbabilityInfo::LoopBlock::LoopBlock(const BasicBlock *BB,
                                            const LoopInfo &LI,
                                            const SccInfo &SccI)
    : BB(BB) {
  LD.first = LI.getLoopFor(BB);
  if (!LD.first) {
    LD.second = SccI.getSCCNum(BB);
  }
}
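
// For illustration only (hypothetical CFG, not derived from the code above):
// in an irreducible region with edges
//
//   A -> B, A -> C, B -> C, C -> B, B -> D
//
// blocks {B, C} form a non-trivial SCC. Both B and C are 'Header' blocks
// because each has a predecessor (A) outside the SCC, and B is additionally
// an 'Exiting' block because of the edge B -> D.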

bool BranchProbabilityInfo::isLoopEnteringEdge(const LoopEdge &Edge) const {
  const auto &SrcBlock = Edge.first;
  const auto &DstBlock = Edge.second;
  return (DstBlock.getLoop() &&
          !DstBlock.getLoop()->contains(SrcBlock.getLoop())) ||
         // Assume that SCCs can't be nested.
         (DstBlock.getSccNum() != -1 &&
          SrcBlock.getSccNum() != DstBlock.getSccNum());
}

bool BranchProbabilityInfo::isLoopExitingEdge(const LoopEdge &Edge) const {
  return isLoopEnteringEdge({Edge.second, Edge.first});
}

bool BranchProbabilityInfo::isLoopEnteringExitingEdge(
    const LoopEdge &Edge) const {
  return isLoopEnteringEdge(Edge) || isLoopExitingEdge(Edge);
}

bool BranchProbabilityInfo::isLoopBackEdge(const LoopEdge &Edge) const {
  const auto &SrcBlock = Edge.first;
  const auto &DstBlock = Edge.second;
  return SrcBlock.belongsToSameLoop(DstBlock) &&
         ((DstBlock.getLoop() &&
           DstBlock.getLoop()->getHeader() == DstBlock.getBlock()) ||
          (DstBlock.getSccNum() != -1 &&
           SccI->isSCCHeader(DstBlock.getBlock(), DstBlock.getSccNum())));
}

void BranchProbabilityInfo::getLoopEnterBlocks(
    const LoopBlock &LB, SmallVectorImpl<BasicBlock *> &Enters) const {
  if (LB.getLoop()) {
    auto *Header = LB.getLoop()->getHeader();
    Enters.append(pred_begin(Header), pred_end(Header));
  } else {
    assert(LB.getSccNum() != -1 && "LB doesn't belong to any loop?");
    SccI->getSccEnterBlocks(LB.getSccNum(), Enters);
  }
}

void BranchProbabilityInfo::getLoopExitBlocks(
    const LoopBlock &LB, SmallVectorImpl<BasicBlock *> &Exits) const {
  if (LB.getLoop()) {
    LB.getLoop()->getExitBlocks(Exits);
  } else {
    assert(LB.getSccNum() != -1 && "LB doesn't belong to any loop?");
    SccI->getSccExitBlocks(LB.getSccNum(), Exits);
  }
}
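
// For illustration only (hypothetical nested loops, not derived from the code
// above): with an outer loop OL (header H1) containing an inner loop IL
// (header H2), and a preheader P outside both:
//   P -> H1              : loop-entering edge (into OL)
//   H1 -> H2             : loop-entering edge (into IL)
//   IL-latch -> H2       : loop back edge (destination is its loop's header)
//   IL-block -> OL-block : loop-exiting edge (leaves IL)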

// Propagate existing explicit probabilities from either profile data or
// 'expect' intrinsic processing. Examine metadata against the unreachable
// heuristic. The probability of an edge coming to an unreachable block is
// set to the minimum of the metadata and the unreachable heuristic.
bool BranchProbabilityInfo::calcMetadataWeights(const BasicBlock *BB) {
  const Instruction *TI = BB->getTerminator();
  assert(TI->getNumSuccessors() > 1 && "expected more than one successor!");
  if (!(isa<BranchInst>(TI) || isa<SwitchInst>(TI) || isa<IndirectBrInst>(TI) ||
        isa<InvokeInst>(TI)))
    return false;

  MDNode *WeightsNode = TI->getMetadata(LLVMContext::MD_prof);
  if (!WeightsNode)
    return false;

  // Check that the number of successors is manageable.
  assert(TI->getNumSuccessors() < UINT32_MAX && "Too many successors");

  // Ensure there are weights for all of the successors. Note that the first
  // operand to the metadata node is a name, not a weight.
  if (WeightsNode->getNumOperands() != TI->getNumSuccessors() + 1)
    return false;

  // Build up the final weights that will be used in a temporary buffer.
  // Compute the sum of all weights to later decide whether they need to
  // be scaled to fit in 32 bits.
  uint64_t WeightSum = 0;
  SmallVector<uint32_t, 2> Weights;
  SmallVector<unsigned, 2> UnreachableIdxs;
  SmallVector<unsigned, 2> ReachableIdxs;
  Weights.reserve(TI->getNumSuccessors());
  for (unsigned I = 1, E = WeightsNode->getNumOperands(); I != E; ++I) {
    ConstantInt *Weight =
        mdconst::dyn_extract<ConstantInt>(WeightsNode->getOperand(I));
    if (!Weight)
      return false;
    assert(Weight->getValue().getActiveBits() <= 32 &&
           "Too many bits for uint32_t");
    Weights.push_back(Weight->getZExtValue());
    WeightSum += Weights.back();
    const LoopBlock SrcLoopBB = getLoopBlock(BB);
    const LoopBlock DstLoopBB = getLoopBlock(TI->getSuccessor(I - 1));
    auto EstimatedWeight = getEstimatedEdgeWeight({SrcLoopBB, DstLoopBB});
    if (EstimatedWeight &&
        EstimatedWeight.getValue() <=
            static_cast<uint32_t>(BlockExecWeight::UNREACHABLE))
      UnreachableIdxs.push_back(I - 1);
    else
      ReachableIdxs.push_back(I - 1);
  }
  assert(Weights.size() == TI->getNumSuccessors() && "Checked above");

  // If the sum of weights does not fit in 32 bits, scale every weight down
  // accordingly.
  uint64_t ScalingFactor =
      (WeightSum > UINT32_MAX) ? WeightSum / UINT32_MAX + 1 : 1;

  if (ScalingFactor > 1) {
    WeightSum = 0;
    for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I) {
      Weights[I] /= ScalingFactor;
      WeightSum += Weights[I];
    }
  }
  assert(WeightSum <= UINT32_MAX &&
         "Expected weights to scale down to 32 bits");

  if (WeightSum == 0 || ReachableIdxs.size() == 0) {
    for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I)
      Weights[I] = 1;
    WeightSum = TI->getNumSuccessors();
  }

  // Set the probability.
  SmallVector<BranchProbability, 2> BP;
  for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I)
    BP.push_back({ Weights[I], static_cast<uint32_t>(WeightSum) });

  // Examine the metadata against the unreachable heuristic.
  // If the unreachable heuristic is stronger, use it for this edge.
  if (UnreachableIdxs.size() == 0 || ReachableIdxs.size() == 0) {
    setEdgeProbability(BB, BP);
    return true;
  }

  auto UnreachableProb = UR_TAKEN_PROB;
  for (auto I : UnreachableIdxs)
    if (UnreachableProb < BP[I]) {
      BP[I] = UnreachableProb;
    }

  // Sum of all edge probabilities must be 1.0. If we modified the probability
  // of some edges then we must distribute the introduced difference over the
  // reachable blocks.
  //
  // Proportional distribution: the relation between probabilities of the
  // reachable edges is kept unchanged. That is for any reachable edges i and j:
  //   newBP[i] / newBP[j] == oldBP[i] / oldBP[j] =>
  //   newBP[i] / oldBP[i] == newBP[j] / oldBP[j] == K
  // Where K is independent of i,j.
  //   newBP[i] == oldBP[i] * K
  // We need to find K.
  // Make sum of all reachables of the left and right parts:
  //   sum_of_reachable(newBP) == K * sum_of_reachable(oldBP)
  // Sum of newBP must be equal to 1.0:
  //   sum_of_reachable(newBP) + sum_of_unreachable(newBP) == 1.0 =>
  //   sum_of_reachable(newBP) = 1.0 - sum_of_unreachable(newBP)
  // Where sum_of_unreachable(newBP) is what has been just changed.
  // Finally:
  //   K == sum_of_reachable(newBP) / sum_of_reachable(oldBP) =>
  //   K == (1.0 - sum_of_unreachable(newBP)) / sum_of_reachable(oldBP)
  BranchProbability NewUnreachableSum = BranchProbability::getZero();
  for (auto I : UnreachableIdxs)
    NewUnreachableSum += BP[I];

  BranchProbability NewReachableSum =
      BranchProbability::getOne() - NewUnreachableSum;

  BranchProbability OldReachableSum = BranchProbability::getZero();
  for (auto I : ReachableIdxs)
    OldReachableSum += BP[I];

  if (OldReachableSum != NewReachableSum) { // Anything to distribute?
    if (OldReachableSum.isZero()) {
      // If all oldBP[i] are zeroes then the proportional distribution results
      // in all zero probabilities and the error stays big. In this case we
      // evenly spread NewReachableSum over the reachable edges.
      BranchProbability PerEdge = NewReachableSum / ReachableIdxs.size();
      for (auto I : ReachableIdxs)
        BP[I] = PerEdge;
    } else {
      for (auto I : ReachableIdxs) {
        // We use uint64_t to avoid double rounding error of the following
        // calculation: BP[i] = BP[i] * NewReachableSum / OldReachableSum
        // The formula is taken from the private constructor
        // BranchProbability(uint32_t Numerator, uint32_t Denominator)
        uint64_t Mul = static_cast<uint64_t>(NewReachableSum.getNumerator()) *
                       BP[I].getNumerator();
        uint32_t Div = static_cast<uint32_t>(
            divideNearest(Mul, OldReachableSum.getNumerator()));
        BP[I] = BranchProbability::getRaw(Div);
      }
    }
  }

  setEdgeProbability(BB, BP);

  return true;
}
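
// Worked example (illustrative, not tied to any particular test): suppose the
// terminator has branch weights !{1, 1}, i.e. BP == {1/2, 1/2}, and successor
// 0 leads to an unreachable-terminated block. BP[0] is clamped down to
// UR_TAKEN_PROB (the smallest representable non-zero probability), so
// sum_of_unreachable(newBP) is essentially 0 and K == (1.0 - ~0) / 0.5 == ~2.
// The reachable edge therefore ends up with probability ~1.0, while the
// ratios between reachable edges (here just one) are preserved.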

// Calculate Edge Weights using "Pointer Heuristics". Predict that a comparison
// between two pointers, or between a pointer and NULL, will fail.
bool BranchProbabilityInfo::calcPointerHeuristics(const BasicBlock *BB) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
  if (!CI || !CI->isEquality())
    return false;

  Value *LHS = CI->getOperand(0);

  if (!LHS->getType()->isPointerTy())
    return false;

  assert(CI->getOperand(1)->getType()->isPointerTy());

  BranchProbability TakenProb(PH_TAKEN_WEIGHT,
                              PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);
  BranchProbability UntakenProb(PH_NONTAKEN_WEIGHT,
                                PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);

  // p != 0   -> isProb = true
  // p == 0   -> isProb = false
  // p != q   -> isProb = true
  // p == q   -> isProb = false;
  bool isProb = CI->getPredicate() == ICmpInst::ICMP_NE;
  if (!isProb)
    std::swap(TakenProb, UntakenProb);

  setEdgeProbability(
      BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb}));
  return true;
}
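
// For illustration: with PH_TAKEN_WEIGHT = 20 and PH_NONTAKEN_WEIGHT = 12, a
// branch on `p != nullptr` gets P(taken) = 20/32 = 0.625 and
// P(not taken) = 12/32 = 0.375; for `p == nullptr` the two are swapped.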

// Compute the unlikely successors to the block BB in the loop L, specifically
// those that are unlikely because this is a loop, and add them to the
// UnlikelyBlocks set.
static void
computeUnlikelySuccessors(const BasicBlock *BB, Loop *L,
                          SmallPtrSetImpl<const BasicBlock*> &UnlikelyBlocks) {
  // Sometimes in a loop we have a branch whose condition is made false by
  // taking it. This is typically something like
  //   int n = 0;
  //   while (...) {
  //     if (++n >= MAX) {
  //       n = 0;
  //     }
  //   }
  // In this sort of situation taking the branch means that at the very least it
  // won't be taken again in the next iteration of the loop, so we should
  // consider it less likely than a typical branch.
  //
  // We detect this by looking back through the graph of PHI nodes that sets the
  // value that the condition depends on, and seeing if we can reach a successor
  // block which can be determined to make the condition false.
  //
  // FIXME: We currently consider unlikely blocks to be half as likely as other
  // blocks, but if we consider the example above the likelihood is actually
  // 1/MAX. We could therefore be more precise in how unlikely we consider
  // blocks to be, but it would require more careful examination of the form
  // of the comparison expression.
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return;

  // Check if the branch is based on an instruction compared with a constant.
  CmpInst *CI = dyn_cast<CmpInst>(BI->getCondition());
  if (!CI || !isa<Instruction>(CI->getOperand(0)) ||
      !isa<Constant>(CI->getOperand(1)))
    return;

  // Either the instruction must be a PHI, or a chain of operations involving
  // constants that ends in a PHI which we can then collapse into a single value
  // if the PHI value is known.
  Instruction *CmpLHS = dyn_cast<Instruction>(CI->getOperand(0));
  PHINode *CmpPHI = dyn_cast<PHINode>(CmpLHS);
  Constant *CmpConst = dyn_cast<Constant>(CI->getOperand(1));
  // Collect the instructions until we hit a PHI.
  SmallVector<BinaryOperator *, 1> InstChain;
  while (!CmpPHI && CmpLHS && isa<BinaryOperator>(CmpLHS) &&
         isa<Constant>(CmpLHS->getOperand(1))) {
    // Stop if the chain extends outside of the loop.
    if (!L->contains(CmpLHS))
      return;
    InstChain.push_back(cast<BinaryOperator>(CmpLHS));
    CmpLHS = dyn_cast<Instruction>(CmpLHS->getOperand(0));
    if (CmpLHS)
      CmpPHI = dyn_cast<PHINode>(CmpLHS);
  }
  if (!CmpPHI || !L->contains(CmpPHI))
    return;

  // Trace the phi node to find all values that come from successors of BB.
  SmallPtrSet<PHINode*, 8> VisitedInsts;
  SmallVector<PHINode*, 8> WorkList;
  WorkList.push_back(CmpPHI);
  VisitedInsts.insert(CmpPHI);
  while (!WorkList.empty()) {
    PHINode *P = WorkList.back();
    WorkList.pop_back();
    for (BasicBlock *B : P->blocks()) {
      // Skip blocks that aren't part of the loop.
      if (!L->contains(B))
        continue;
      Value *V = P->getIncomingValueForBlock(B);
      // If the source is a PHI add it to the work list if we haven't
      // already visited it.
      if (PHINode *PN = dyn_cast<PHINode>(V)) {
        if (VisitedInsts.insert(PN).second)
          WorkList.push_back(PN);
        continue;
      }
      // If this incoming value is a constant and B is a successor of BB, then
      // we can constant-evaluate the compare to see if it makes the branch be
      // taken or not.
      Constant *CmpLHSConst = dyn_cast<Constant>(V);
      if (!CmpLHSConst || !llvm::is_contained(successors(BB), B))
        continue;
      // First collapse InstChain.
      for (Instruction *I : llvm::reverse(InstChain)) {
        CmpLHSConst = ConstantExpr::get(I->getOpcode(), CmpLHSConst,
                                        cast<Constant>(I->getOperand(1)), true);
        if (!CmpLHSConst)
          break;
      }
      if (!CmpLHSConst)
        continue;
      // Now constant-evaluate the compare.
      Constant *Result = ConstantExpr::getCompare(CI->getPredicate(),
                                                  CmpLHSConst, CmpConst, true);
      // If the result means we don't branch to the block then that block is
      // unlikely.
      if (Result &&
          ((Result->isZeroValue() && B == BI->getSuccessor(0)) ||
           (Result->isOneValue() && B == BI->getSuccessor(1))))
        UnlikelyBlocks.insert(B);
    }
  }
}
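
// Worked note on the example above (illustrative): for `if (++n >= MAX)` with
// MAX > 1, the PHI for `n` has the constant incoming value 0 from the `n = 0`
// block, InstChain holds the increment, and folding gives `1 >= MAX` == false.
// Since the `n = 0` block is the true successor of the branch, it is added to
// UnlikelyBlocks.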

Optional<uint32_t>
BranchProbabilityInfo::getEstimatedBlockWeight(const BasicBlock *BB) const {
  auto WeightIt = EstimatedBlockWeight.find(BB);
  if (WeightIt == EstimatedBlockWeight.end())
    return None;
  return WeightIt->second;
}

Optional<uint32_t>
BranchProbabilityInfo::getEstimatedLoopWeight(const LoopData &L) const {
  auto WeightIt = EstimatedLoopWeight.find(L);
  if (WeightIt == EstimatedLoopWeight.end())
    return None;
  return WeightIt->second;
}

Optional<uint32_t>
BranchProbabilityInfo::getEstimatedEdgeWeight(const LoopEdge &Edge) const {
  // For edges entering a loop take the weight of the loop rather than that of
  // an individual block in the loop.
  return isLoopEnteringEdge(Edge)
             ? getEstimatedLoopWeight(Edge.second.getLoopData())
             : getEstimatedBlockWeight(Edge.second.getBlock());
}

template <class IterT>
Optional<uint32_t> BranchProbabilityInfo::getMaxEstimatedEdgeWeight(
    const LoopBlock &SrcLoopBB, iterator_range<IterT> Successors) const {
  SmallVector<uint32_t, 4> Weights;
  Optional<uint32_t> MaxWeight;
  for (const BasicBlock *DstBB : Successors) {
    const LoopBlock DstLoopBB = getLoopBlock(DstBB);
    auto Weight = getEstimatedEdgeWeight({SrcLoopBB, DstLoopBB});

    if (!Weight)
      return None;

    if (!MaxWeight || MaxWeight.getValue() < Weight.getValue())
      MaxWeight = Weight;
  }

  return MaxWeight;
}

// Updates \p LoopBB's weight and returns true. If \p LoopBB already has an
// associated weight it is left unchanged and false is returned.
//
// Please note that by the algorithm the weight is not expected to change once
// set, thus the 'false' status is used to track visited blocks.
bool BranchProbabilityInfo::updateEstimatedBlockWeight(
    LoopBlock &LoopBB, uint32_t BBWeight,
    SmallVectorImpl<BasicBlock *> &BlockWorkList,
    SmallVectorImpl<LoopBlock> &LoopWorkList) {
  BasicBlock *BB = LoopBB.getBlock();

  // In general, a weight is assigned to a block when it has a final value and
  // can't/shouldn't be changed. However, there are cases when a block
  // inherently has several (possibly "contradicting") weights. For example, an
  // "unwind" block may also contain a "cold" call. In that case the first
  // weight set is favored and all subsequent weights are ignored.
  if (!EstimatedBlockWeight.insert({BB, BBWeight}).second)
    return false;

  for (BasicBlock *PredBlock : predecessors(BB)) {
    LoopBlock PredLoop = getLoopBlock(PredBlock);
    // Add the affected block/loop to the working list.
    if (isLoopExitingEdge({PredLoop, LoopBB})) {
      if (!EstimatedLoopWeight.count(PredLoop.getLoopData()))
        LoopWorkList.push_back(PredLoop);
    } else if (!EstimatedBlockWeight.count(PredBlock))
      BlockWorkList.push_back(PredBlock);
  }
  return true;
}
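
// For illustration: if a block's two successors have estimated weights
// COLD (0xffff) and, say, 0x50000, getMaxEstimatedEdgeWeight() returns 0x50000
// (the "hot" path). If any successor has no estimated weight at all, it
// returns None and no weight is propagated to the block.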

// Starting from \p BB, traverse through dominator blocks and assign \p BBWeight
// to all blocks that are post dominated by \p BB. In other words, to all blocks
// that are executed if and only if \p BB is executed. Importantly, we skip
// loops here for two reasons. First, weights of blocks in a loop should be
// scaled by the trip count (yet possibly unknown). Second, there is no value in
// doing that because it doesn't give any additional information regarding the
// distribution of probabilities inside the loop. The exception is loop 'enter'
// and 'exit' edges that are handled in a special way at calcEstimatedHeuristics.
//
// In addition, \p BlockWorkList is populated with basic blocks if at least one
// successor has updated estimated weight.
void BranchProbabilityInfo::propagateEstimatedBlockWeight(
    const LoopBlock &LoopBB, DominatorTree *DT, PostDominatorTree *PDT,
    uint32_t BBWeight, SmallVectorImpl<BasicBlock *> &BlockWorkList,
    SmallVectorImpl<LoopBlock> &LoopWorkList) {
  const BasicBlock *BB = LoopBB.getBlock();
  const auto *DTStartNode = DT->getNode(BB);
  const auto *PDTStartNode = PDT->getNode(BB);

  // TODO: Consider propagating weight down the domination line as well.
  for (const auto *DTNode = DTStartNode; DTNode != nullptr;
       DTNode = DTNode->getIDom()) {
    auto *DomBB = DTNode->getBlock();
    // Consider blocks which lie on one 'line'.
    if (!PDT->dominates(PDTStartNode, PDT->getNode(DomBB)))
      // If BB doesn't post dominate DomBB it will not post dominate dominators
      // of DomBB as well.
      break;

    LoopBlock DomLoopBB = getLoopBlock(DomBB);
    const LoopEdge Edge{DomLoopBB, LoopBB};
    // Don't propagate weight to blocks belonging to different loops.
    if (!isLoopEnteringExitingEdge(Edge)) {
      if (!updateEstimatedBlockWeight(DomLoopBB, BBWeight, BlockWorkList,
                                      LoopWorkList))
        // If DomBB has a weight set then all its predecessors are already
        // processed (since we propagate weight up to the top of IR each time).
        break;
    } else if (isLoopExitingEdge(Edge)) {
      LoopWorkList.push_back(DomLoopBB);
    }
  }
}
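
// For illustration (hypothetical CFG):
//
//        A
//       / \
//      B   C
//      |
//      U (terminated by 'unreachable')
//
// Starting from U with weight UNREACHABLE, the walk over U's dominators
// assigns the same weight to B (U post-dominates B), but stops at A because
// A is not post-dominated by U (control may flow to C instead).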

Optional<uint32_t> BranchProbabilityInfo::getInitialEstimatedBlockWeight(
    const BasicBlock *BB) {
  // Returns true if \p BB has a call marked with the "NoReturn" attribute.
  auto hasNoReturn = [&](const BasicBlock *BB) {
    for (const auto &I : reverse(*BB))
      if (const CallInst *CI = dyn_cast<CallInst>(&I))
        if (CI->hasFnAttr(Attribute::NoReturn))
          return true;

    return false;
  };

  // Important note regarding the order of checks. They are ordered by weight
  // from lowest to highest. Doing that allows us to avoid "unstable" results
  // when several heuristics can be applied simultaneously.
  if (isa<UnreachableInst>(BB->getTerminator()) ||
      // If this block is terminated by a call to
      // @llvm.experimental.deoptimize then treat it like an unreachable
      // since it is expected to practically never execute.
      // TODO: Should we actually treat it as a never returning call?
      BB->getTerminatingDeoptimizeCall())
    return hasNoReturn(BB)
               ? static_cast<uint32_t>(BlockExecWeight::NORETURN)
               : static_cast<uint32_t>(BlockExecWeight::UNREACHABLE);

  // Check if the block is an 'unwind' handler of some invoke instruction.
  for (const auto *Pred : predecessors(BB))
    if (Pred)
      if (const auto *II = dyn_cast<InvokeInst>(Pred->getTerminator()))
        if (II->getUnwindDest() == BB)
          return static_cast<uint32_t>(BlockExecWeight::UNWIND);

  // Check if the block contains a 'cold' call.
  for (const auto &I : *BB)
    if (const CallInst *CI = dyn_cast<CallInst>(&I))
      if (CI->hasFnAttr(Attribute::Cold))
        return static_cast<uint32_t>(BlockExecWeight::COLD);

  return None;
}

// Does RPO traversal over all blocks in \p F and assigns weights to
// 'unreachable', 'noreturn', 'cold', 'unwind' blocks. In addition it does its
// best to propagate the weight up/down the IR.
void BranchProbabilityInfo::computeEestimateBlockWeight(
    const Function &F, DominatorTree *DT, PostDominatorTree *PDT) {
  SmallVector<BasicBlock *, 8> BlockWorkList;
  SmallVector<LoopBlock, 8> LoopWorkList;

  // By doing RPO we make sure that all predecessors already have weights
  // calculated before visiting their successors.
  ReversePostOrderTraversal<const Function *> RPOT(&F);
  for (const auto *BB : RPOT)
    if (auto BBWeight = getInitialEstimatedBlockWeight(BB))
      // If we were able to find an estimated weight for the block, set it to
      // this block and propagate it up the IR.
      propagateEstimatedBlockWeight(getLoopBlock(BB), DT, PDT,
                                    BBWeight.getValue(), BlockWorkList,
                                    LoopWorkList);

  // BlockWorkList/LoopWorkList contains blocks/loops with at least one
  // successor/exit having estimated weight. Try to propagate weight to such
  // blocks/loops from successors/exits.
  // Process loops and blocks. Order is not important.
  do {
    while (!LoopWorkList.empty()) {
      const LoopBlock LoopBB = LoopWorkList.pop_back_val();

      if (EstimatedLoopWeight.count(LoopBB.getLoopData()))
        continue;

      SmallVector<BasicBlock *, 4> Exits;
      getLoopExitBlocks(LoopBB, Exits);
      auto LoopWeight = getMaxEstimatedEdgeWeight(
          LoopBB, make_range(Exits.begin(), Exits.end()));

      if (LoopWeight) {
        // If we never exit the loop then we can enter it once at maximum.
        if (LoopWeight <= static_cast<uint32_t>(BlockExecWeight::UNREACHABLE))
          LoopWeight = static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO);

        EstimatedLoopWeight.insert(
            {LoopBB.getLoopData(), LoopWeight.getValue()});
        // Add all blocks entering the loop into the working list.
        getLoopEnterBlocks(LoopBB, BlockWorkList);
      }
    }

    while (!BlockWorkList.empty()) {
      // We can reach here only if BlockWorkList is not empty.
      const BasicBlock *BB = BlockWorkList.pop_back_val();
      if (EstimatedBlockWeight.count(BB))
        continue;

      // We take the maximum over all weights of successors. In other words we
      // take the weight of the "hot" path. In theory we can probably find a
      // better function which gives higher accuracy results (compared to
      // "maximum") but I can't think of any right now. And I doubt it will
      // make any difference in practice.
      const LoopBlock LoopBB = getLoopBlock(BB);
      auto MaxWeight = getMaxEstimatedEdgeWeight(LoopBB, successors(BB));

      if (MaxWeight)
        propagateEstimatedBlockWeight(LoopBB, DT, PDT, MaxWeight.getValue(),
                                      BlockWorkList, LoopWorkList);
    }
  } while (!BlockWorkList.empty() || !LoopWorkList.empty());
}
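
// For illustration: if a loop's only exits lead to blocks whose estimated
// weights are UNREACHABLE (0) and COLD (0xffff), the loop as a whole gets
// weight 0xffff. If every exit had weight UNREACHABLE, the loop weight would
// be bumped to LOWEST_NON_ZERO because the loop can still be entered once.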

// Calculate edge probabilities based on the block's estimated weight.
// Note that gathered weights were not scaled for loops. Thus edges entering
// and exiting loops require special processing.
bool BranchProbabilityInfo::calcEstimatedHeuristics(const BasicBlock *BB) {
  assert(BB->getTerminator()->getNumSuccessors() > 1 &&
         "expected more than one successor!");

  const LoopBlock LoopBB = getLoopBlock(BB);

  SmallPtrSet<const BasicBlock *, 8> UnlikelyBlocks;
  uint32_t TC = LBH_TAKEN_WEIGHT / LBH_NONTAKEN_WEIGHT;
  if (LoopBB.getLoop())
    computeUnlikelySuccessors(BB, LoopBB.getLoop(), UnlikelyBlocks);

  // Changed to 'true' if at least one successor has estimated weight.
  bool FoundEstimatedWeight = false;
  SmallVector<uint32_t, 4> SuccWeights;
  uint64_t TotalWeight = 0;
  // Go over all successors of BB and put their weights into SuccWeights.
  for (const_succ_iterator I = succ_begin(BB), E = succ_end(BB); I != E; ++I) {
    const BasicBlock *SuccBB = *I;
    Optional<uint32_t> Weight;
    const LoopBlock SuccLoopBB = getLoopBlock(SuccBB);
    const LoopEdge Edge{LoopBB, SuccLoopBB};

    Weight = getEstimatedEdgeWeight(Edge);

    if (isLoopExitingEdge(Edge) &&
        // Avoid adjustment of ZERO weight since it should remain unchanged.
        Weight != static_cast<uint32_t>(BlockExecWeight::ZERO)) {
      // Scale down loop exiting weight by trip count.
      Weight = std::max(
          static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO),
          Weight.getValueOr(static_cast<uint32_t>(BlockExecWeight::DEFAULT)) /
              TC);
    }
    bool IsUnlikelyEdge = LoopBB.getLoop() && UnlikelyBlocks.contains(SuccBB);
    if (IsUnlikelyEdge &&
        // Avoid adjustment of ZERO weight since it should remain unchanged.
        Weight != static_cast<uint32_t>(BlockExecWeight::ZERO)) {
      // 'Unlikely' blocks get half the weight.
      Weight = std::max(
          static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO),
          Weight.getValueOr(static_cast<uint32_t>(BlockExecWeight::DEFAULT)) /
              2);
    }

    if (Weight)
      FoundEstimatedWeight = true;

    auto WeightVal =
        Weight.getValueOr(static_cast<uint32_t>(BlockExecWeight::DEFAULT));
    TotalWeight += WeightVal;
    SuccWeights.push_back(WeightVal);
  }

  // If none of the blocks have estimated weight, bail out.
  // If TotalWeight is 0 that means the weight of each successor is 0 as well
  // and they are equally likely. Bail out early to not deal with division by
  // zero.
  if (!FoundEstimatedWeight || TotalWeight == 0)
    return false;

  assert(SuccWeights.size() == succ_size(BB) && "Missed successor?");
  const unsigned SuccCount = SuccWeights.size();

  // If the sum of weights does not fit in 32 bits, scale every weight down
  // accordingly.
  if (TotalWeight > UINT32_MAX) {
    uint64_t ScalingFactor = TotalWeight / UINT32_MAX + 1;
    TotalWeight = 0;
    for (unsigned Idx = 0; Idx < SuccCount; ++Idx) {
      SuccWeights[Idx] /= ScalingFactor;
      if (SuccWeights[Idx] == static_cast<uint32_t>(BlockExecWeight::ZERO))
        SuccWeights[Idx] =
            static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO);
      TotalWeight += SuccWeights[Idx];
    }
    assert(TotalWeight <= UINT32_MAX && "Total weight overflows");
  }

  // Finally set probabilities to edges according to estimated block weights.
  SmallVector<BranchProbability, 4> EdgeProbabilities(
      SuccCount, BranchProbability::getUnknown());

  for (unsigned Idx = 0; Idx < SuccCount; ++Idx) {
    EdgeProbabilities[Idx] =
        BranchProbability(SuccWeights[Idx], (uint32_t)TotalWeight);
  }
  setEdgeProbability(BB, EdgeProbabilities);
  return true;
}
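
// Worked example (illustrative): consider a loop latch with two successors,
// the loop header (back edge) and an exit block, where neither successor has
// a dedicated estimated weight. The back edge keeps DEFAULT (0xfffff) while
// the exiting edge is scaled down by TC = 124 / 4 = 31 to 0xfffff / 31. The
// resulting probabilities are 31/32 for staying in the loop and 1/32 for
// exiting, which matches the 124:4 ratio of the loop branch heuristic weights
// at the top of this file.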

bool BranchProbabilityInfo::calcZeroHeuristics(const BasicBlock *BB,
                                               const TargetLibraryInfo *TLI) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
  if (!CI)
    return false;

  auto GetConstantInt = [](Value *V) {
    if (auto *I = dyn_cast<BitCastInst>(V))
      return dyn_cast<ConstantInt>(I->getOperand(0));
    return dyn_cast<ConstantInt>(V);
  };

  Value *RHS = CI->getOperand(1);
  ConstantInt *CV = GetConstantInt(RHS);
  if (!CV)
    return false;

  // If the LHS is the result of AND'ing a value with a single bit bitmask,
  // we don't have information about probabilities.
  if (Instruction *LHS = dyn_cast<Instruction>(CI->getOperand(0)))
    if (LHS->getOpcode() == Instruction::And)
      if (ConstantInt *AndRHS = GetConstantInt(LHS->getOperand(1)))
        if (AndRHS->getValue().isPowerOf2())
          return false;

  // Check if the LHS is the return value of a library function.
  LibFunc Func = NumLibFuncs;
  if (TLI)
    if (CallInst *Call = dyn_cast<CallInst>(CI->getOperand(0)))
      if (Function *CalledFn = Call->getCalledFunction())
        TLI->getLibFunc(*CalledFn, Func);

  bool isProb;
  if (Func == LibFunc_strcasecmp ||
      Func == LibFunc_strcmp ||
      Func == LibFunc_strncasecmp ||
      Func == LibFunc_strncmp ||
      Func == LibFunc_memcmp ||
      Func == LibFunc_bcmp) {
    // strcmp and similar functions return zero, negative, or positive, if the
    // first string is equal, less, or greater than the second. We consider it
    // likely that the strings are not equal, so a comparison with zero is
    // probably false, and a comparison with any other number is also probably
    // false given that what exactly is returned for nonzero values is not
    // specified. We know nothing about any kind of comparison other than
    // equality.
    switch (CI->getPredicate()) {
    case CmpInst::ICMP_EQ:
      isProb = false;
      break;
    case CmpInst::ICMP_NE:
      isProb = true;
      break;
    default:
      return false;
    }
  } else if (CV->isZero()) {
    switch (CI->getPredicate()) {
    case CmpInst::ICMP_EQ:
      // X == 0 -> Unlikely
      isProb = false;
      break;
    case CmpInst::ICMP_NE:
      // X != 0 -> Likely
      isProb = true;
      break;
    case CmpInst::ICMP_SLT:
      // X < 0 -> Unlikely
      isProb = false;
      break;
    case CmpInst::ICMP_SGT:
      // X > 0 -> Likely
      isProb = true;
      break;
    default:
      return false;
    }
  } else if (CV->isOne() && CI->getPredicate() == CmpInst::ICMP_SLT) {
    // InstCombine canonicalizes X <= 0 into X < 1.
    // X <= 0 -> Unlikely
    isProb = false;
  } else if (CV->isMinusOne()) {
    switch (CI->getPredicate()) {
    case CmpInst::ICMP_EQ:
      // X == -1 -> Unlikely
      isProb = false;
      break;
    case CmpInst::ICMP_NE:
      // X != -1 -> Likely
      isProb = true;
      break;
    case CmpInst::ICMP_SGT:
      // InstCombine canonicalizes X >= 0 into X > -1.
      // X >= 0 -> Likely
      isProb = true;
      break;
    default:
      return false;
    }
  } else {
    return false;
  }

  BranchProbability TakenProb(ZH_TAKEN_WEIGHT,
                              ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT);
  BranchProbability UntakenProb(ZH_NONTAKEN_WEIGHT,
                                ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT);
  if (!isProb)
    std::swap(TakenProb, UntakenProb);

  setEdgeProbability(
      BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb}));
  return true;
}
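
// For illustration: with ZH_TAKEN_WEIGHT = 20 and ZH_NONTAKEN_WEIGHT = 12, a
// branch on `x == 0` gets P(taken) = 12/32 = 0.375 (the "equal" path is
// considered unlikely) and P(not taken) = 20/32 = 0.625; `x != 0` swaps them.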

bool BranchProbabilityInfo::calcFloatingPointHeuristics(const BasicBlock *BB) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  FCmpInst *FCmp = dyn_cast<FCmpInst>(Cond);
  if (!FCmp)
    return false;

  uint32_t TakenWeight = FPH_TAKEN_WEIGHT;
  uint32_t NontakenWeight = FPH_NONTAKEN_WEIGHT;
  bool isProb;
  if (FCmp->isEquality()) {
    // f1 == f2 -> Unlikely
    // f1 != f2 -> Likely
    isProb = !FCmp->isTrueWhenEqual();
  } else if (FCmp->getPredicate() == FCmpInst::FCMP_ORD) {
    // !isnan -> Likely
    isProb = true;
    TakenWeight = FPH_ORD_WEIGHT;
    NontakenWeight = FPH_UNO_WEIGHT;
  } else if (FCmp->getPredicate() == FCmpInst::FCMP_UNO) {
    // isnan -> Unlikely
    isProb = false;
    TakenWeight = FPH_ORD_WEIGHT;
    NontakenWeight = FPH_UNO_WEIGHT;
  } else {
    return false;
  }

  BranchProbability TakenProb(TakenWeight, TakenWeight + NontakenWeight);
  BranchProbability UntakenProb(NontakenWeight, TakenWeight + NontakenWeight);
  if (!isProb)
    std::swap(TakenProb, UntakenProb);

  setEdgeProbability(
      BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb}));
  return true;
}
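
// For illustration: an ordered comparison used as a NaN check (FCMP_ORD) gets
// P(ordered) = FPH_ORD_WEIGHT / (FPH_ORD_WEIGHT + FPH_UNO_WEIGHT)
//            = (1024 * 1024 - 1) / (1024 * 1024),
// i.e. the NaN path is considered to have probability ~1/1048576.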

void BranchProbabilityInfo::releaseMemory() {
  Probs.clear();
  Handles.clear();
}

bool BranchProbabilityInfo::invalidate(Function &, const PreservedAnalyses &PA,
                                       FunctionAnalysisManager::Invalidator &) {
  // Check whether the analysis, all analyses on functions, or the function's
  // CFG have been preserved.
  auto PAC = PA.getChecker<BranchProbabilityAnalysis>();
  return !(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Function>>() ||
           PAC.preservedSet<CFGAnalyses>());
}

void BranchProbabilityInfo::print(raw_ostream &OS) const {
  OS << "---- Branch Probabilities ----\n";
  // We print the probabilities from the last function the analysis ran over,
  // or the function it is currently running over.
  assert(LastF && "Cannot print prior to running over a function");
  for (const auto &BI : *LastF) {
    for (const_succ_iterator SI = succ_begin(&BI), SE = succ_end(&BI); SI != SE;
         ++SI) {
      printEdgeProbability(OS << "  ", &BI, *SI);
    }
  }
}

bool BranchProbabilityInfo::
isEdgeHot(const BasicBlock *Src, const BasicBlock *Dst) const {
  // Hot probability is at least 4/5 = 80%
  // FIXME: Compare against a static "hot" BranchProbability.
  return getEdgeProbability(Src, Dst) > BranchProbability(4, 5);
}

const BasicBlock *
BranchProbabilityInfo::getHotSucc(const BasicBlock *BB) const {
  auto MaxProb = BranchProbability::getZero();
  const BasicBlock *MaxSucc = nullptr;

  for (const auto *Succ : successors(BB)) {
    auto Prob = getEdgeProbability(BB, Succ);
    if (Prob > MaxProb) {
      MaxProb = Prob;
      MaxSucc = Succ;
    }
  }

  // Hot probability is at least 4/5 = 80%
  if (MaxProb > BranchProbability(4, 5))
    return MaxSucc;

  return nullptr;
}

/// Get the raw edge probability for the edge. If it can't be found, return a
/// default probability 1/N where N is the number of successors. Here an edge
/// is specified by a source block and an index into its successors.
BranchProbability
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
                                          unsigned IndexInSuccessors) const {
  auto I = Probs.find(std::make_pair(Src, IndexInSuccessors));
  assert((Probs.end() == Probs.find(std::make_pair(Src, 0))) ==
             (Probs.end() == I) &&
         "Probability for I-th successor must always be defined along with the "
         "probability for the first successor");

  if (I != Probs.end())
    return I->second;

  return {1, static_cast<uint32_t>(succ_size(Src))};
}

BranchProbability
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
                                          const_succ_iterator Dst) const {
  return getEdgeProbability(Src, Dst.getSuccessorIndex());
}

/// Get the raw edge probability calculated for the block pair. This returns the
/// sum of all raw edge probabilities from Src to Dst.
BranchProbability
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
                                          const BasicBlock *Dst) const {
  if (!Probs.count(std::make_pair(Src, 0)))
    return BranchProbability(llvm::count(successors(Src), Dst), succ_size(Src));

  auto Prob = BranchProbability::getZero();
  for (const_succ_iterator I = succ_begin(Src), E = succ_end(Src); I != E; ++I)
    if (*I == Dst)
      Prob += Probs.find(std::make_pair(Src, I.getSuccessorIndex()))->second;

  return Prob;
}
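
// For illustration: for a `switch` where two case labels jump to the same
// block D with per-successor probabilities 0.25 and 0.125, the block-pair
// query getEdgeProbability(Src, D) returns their sum, 0.375.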

/// Set the edge probability for all edges at once.
void BranchProbabilityInfo::setEdgeProbability(
    const BasicBlock *Src, const SmallVectorImpl<BranchProbability> &Probs) {
  assert(Src->getTerminator()->getNumSuccessors() == Probs.size());
  eraseBlock(Src); // Erase stale data if any.
  if (Probs.size() == 0)
    return; // Nothing to set.

  Handles.insert(BasicBlockCallbackVH(Src, this));
  uint64_t TotalNumerator = 0;
  for (unsigned SuccIdx = 0; SuccIdx < Probs.size(); ++SuccIdx) {
    this->Probs[std::make_pair(Src, SuccIdx)] = Probs[SuccIdx];
    LLVM_DEBUG(dbgs() << "set edge " << Src->getName() << " -> " << SuccIdx
                      << " successor probability to " << Probs[SuccIdx]
                      << "\n");
    TotalNumerator += Probs[SuccIdx].getNumerator();
  }

  // Because of rounding errors the total probability cannot be checked to be
  // exactly 1.0 (i.e., TotalNumerator == BranchProbability::getDenominator()).
  // Instead, every single probability in Probs must be as accurate as possible.
  // This results in an error of 1/denominator at most, thus the total absolute
  // error should be within Probs.size / BranchProbability::getDenominator.
  assert(TotalNumerator <= BranchProbability::getDenominator() + Probs.size());
  assert(TotalNumerator >= BranchProbability::getDenominator() - Probs.size());
}

void BranchProbabilityInfo::copyEdgeProbabilities(BasicBlock *Src,
                                                  BasicBlock *Dst) {
  eraseBlock(Dst); // Erase stale data if any.
  unsigned NumSuccessors = Src->getTerminator()->getNumSuccessors();
  assert(NumSuccessors == Dst->getTerminator()->getNumSuccessors());
  if (NumSuccessors == 0)
    return; // Nothing to set.
  if (this->Probs.find(std::make_pair(Src, 0)) == this->Probs.end())
    return; // No probability is set for edges from Src. Keep the same for Dst.

  Handles.insert(BasicBlockCallbackVH(Dst, this));
  for (unsigned SuccIdx = 0; SuccIdx < NumSuccessors; ++SuccIdx) {
    auto Prob = this->Probs[std::make_pair(Src, SuccIdx)];
    this->Probs[std::make_pair(Dst, SuccIdx)] = Prob;
    LLVM_DEBUG(dbgs() << "set edge " << Dst->getName() << " -> " << SuccIdx
                      << " successor probability to " << Prob << "\n");
  }
}

raw_ostream &
BranchProbabilityInfo::printEdgeProbability(raw_ostream &OS,
                                            const BasicBlock *Src,
                                            const BasicBlock *Dst) const {
  const BranchProbability Prob = getEdgeProbability(Src, Dst);
  OS << "edge " << Src->getName() << " -> " << Dst->getName()
     << " probability is " << Prob
     << (isEdgeHot(Src, Dst) ? " [HOT edge]\n" : "\n");

  return OS;
}

void BranchProbabilityInfo::eraseBlock(const BasicBlock *BB) {
  LLVM_DEBUG(dbgs() << "eraseBlock " << BB->getName() << "\n");

  // Note that we cannot use successors of BB because the terminator of BB may
  // have changed when eraseBlock is called as a BasicBlockCallbackVH callback.
  // Instead we remove prob data for the block by iterating successors by their
  // indices from 0 till the last which exists. There cannot be prob data for
  // a pair (BB, N) if there is no data for (BB, N-1) because the data is always
  // set for all successors from 0 to M at once by the method
  // setEdgeProbability().
  Handles.erase(BasicBlockCallbackVH(BB, this));
  for (unsigned I = 0;; ++I) {
    auto MapI = Probs.find(std::make_pair(BB, I));
    if (MapI == Probs.end()) {
      assert(Probs.count(std::make_pair(BB, I + 1)) == 0 &&
             "Must be no more successors");
      return;
    }
    Probs.erase(MapI);
  }
}

void BranchProbabilityInfo::calculate(const Function &F, const LoopInfo &LoopI,
                                      const TargetLibraryInfo *TLI,
                                      DominatorTree *DT,
                                      PostDominatorTree *PDT) {
  LLVM_DEBUG(dbgs() << "---- Branch Probability Info : " << F.getName()
                    << " ----\n\n");
  LastF = &F; // Store the last function we ran on for printing.
  LI = &LoopI;

  SccI = std::make_unique<SccInfo>(F);

  assert(EstimatedBlockWeight.empty());
  assert(EstimatedLoopWeight.empty());

  std::unique_ptr<DominatorTree> DTPtr;
  std::unique_ptr<PostDominatorTree> PDTPtr;

  if (!DT) {
    DTPtr = std::make_unique<DominatorTree>(const_cast<Function &>(F));
    DT = DTPtr.get();
  }

  if (!PDT) {
    PDTPtr = std::make_unique<PostDominatorTree>(const_cast<Function &>(F));
    PDT = PDTPtr.get();
  }

  computeEestimateBlockWeight(F, DT, PDT);

  // Walk the basic blocks in post-order so that we can build up state about
  // the successors of a block iteratively.
  for (auto BB : post_order(&F.getEntryBlock())) {
    LLVM_DEBUG(dbgs() << "Computing probabilities for " << BB->getName()
                      << "\n");
    // If there are fewer than two successors, there is no sense in setting
    // probabilities.
    if (BB->getTerminator()->getNumSuccessors() < 2)
      continue;
    if (calcMetadataWeights(BB))
      continue;
    if (calcEstimatedHeuristics(BB))
      continue;
    if (calcPointerHeuristics(BB))
      continue;
    if (calcZeroHeuristics(BB, TLI))
      continue;
    if (calcFloatingPointHeuristics(BB))
      continue;
  }

  EstimatedLoopWeight.clear();
  EstimatedBlockWeight.clear();
  SccI.reset();

  if (PrintBranchProb &&
      (PrintBranchProbFuncName.empty() ||
       F.getName().equals(PrintBranchProbFuncName))) {
    print(dbgs());
  }
}

void BranchProbabilityInfoWrapperPass::getAnalysisUsage(
    AnalysisUsage &AU) const {
  // We require DT so it's available when LI is available. The LI updating code
  // asserts that DT is also present so if we don't make sure that we have DT
  // here, that assert will trigger.
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.addRequired<LoopInfoWrapperPass>();
  AU.addRequired<TargetLibraryInfoWrapperPass>();
  AU.addRequired<PostDominatorTreeWrapperPass>();
  AU.setPreservesAll();
}

bool BranchProbabilityInfoWrapperPass::runOnFunction(Function &F) {
  const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
  const TargetLibraryInfo &TLI =
      getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
  DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  PostDominatorTree &PDT =
      getAnalysis<PostDominatorTreeWrapperPass>().getPostDomTree();
  BPI.calculate(F, LI, &TLI, &DT, &PDT);
  return false;
}

void BranchProbabilityInfoWrapperPass::releaseMemory() { BPI.releaseMemory(); }

void BranchProbabilityInfoWrapperPass::print(raw_ostream &OS,
                                             const Module *) const {
  BPI.print(OS);
}

AnalysisKey BranchProbabilityAnalysis::Key;
BranchProbabilityInfo
BranchProbabilityAnalysis::run(Function &F, FunctionAnalysisManager &AM) {
  BranchProbabilityInfo BPI;
  BPI.calculate(F, AM.getResult<LoopAnalysis>(F),
                &AM.getResult<TargetLibraryAnalysis>(F),
                &AM.getResult<DominatorTreeAnalysis>(F),
                &AM.getResult<PostDominatorTreeAnalysis>(F));
  return BPI;
}

PreservedAnalyses
BranchProbabilityPrinterPass::run(Function &F, FunctionAnalysisManager &AM) {
  OS << "Printing analysis results of BPI for function "
     << "'" << F.getName() << "':"
     << "\n";
  AM.getResult<BranchProbabilityAnalysis>(F).print(OS);
  return PreservedAnalyses::all();
}
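
// Usage note (illustrative, assuming the printer pass above is registered
// under the name "print<branch-prob>" in the pass registry): the computed
// probabilities can be dumped with
//   opt -passes='print<branch-prob>' -disable-output input.ll
// or, with any pass pipeline, via the -print-bpi / -print-bpi-func-name
// options defined at the top of this file.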