//===- BranchProbabilityInfo.cpp - Branch Probability Analysis ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Loops should be simplified before this analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SCCIterator.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/PostDominators.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/BranchProbability.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "branch-prob"

static cl::opt<bool> PrintBranchProb(
    "print-bpi", cl::init(false), cl::Hidden,
    cl::desc("Print the branch probability info."));

cl::opt<std::string> PrintBranchProbFuncName(
    "print-bpi-func-name", cl::Hidden,
    cl::desc("The option to specify the name of the function "
             "whose branch probability info is printed."));

INITIALIZE_PASS_BEGIN(BranchProbabilityInfoWrapperPass, "branch-prob",
                      "Branch Probability Analysis", false, true)
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(PostDominatorTreeWrapperPass)
INITIALIZE_PASS_END(BranchProbabilityInfoWrapperPass, "branch-prob",
                    "Branch Probability Analysis", false, true)

BranchProbabilityInfoWrapperPass::BranchProbabilityInfoWrapperPass()
    : FunctionPass(ID) {
  initializeBranchProbabilityInfoWrapperPassPass(
      *PassRegistry::getPassRegistry());
}

char BranchProbabilityInfoWrapperPass::ID = 0;

// Weights are for internal use only. They are used by heuristics to help to
// estimate edges' probability. Example:
//
// Using "Loop Branch Heuristics" we predict weights of edges for the
// block BB2.
//          ...
//           |
//           V
//          BB1<-+
//           |   |
//           |   | (Weight = 124)
//           V   |
//          BB2--+
//           |
//           | (Weight = 4)
//           V
//          BB3
//
// Probability of the edge BB2->BB1 = 124 / (124 + 4) = 0.96875
// Probability of the edge BB2->BB3 = 4 / (124 + 4) = 0.03125
static const uint32_t LBH_TAKEN_WEIGHT = 124;
static const uint32_t LBH_NONTAKEN_WEIGHT = 4;

/// Unreachable-terminating branch taken probability.
///
/// This is the probability for a branch being taken to a block that terminates
/// (eventually) in unreachable. These are predicted as unlikely as possible.
/// All reachable probability will proportionally share the remaining part.
static const BranchProbability UR_TAKEN_PROB = BranchProbability::getRaw(1);

/// Heuristics and lookup tables for non-loop branches:
/// Pointer Heuristics (PH)
static const uint32_t PH_TAKEN_WEIGHT = 20;
static const uint32_t PH_NONTAKEN_WEIGHT = 12;
static const BranchProbability
    PtrTakenProb(PH_TAKEN_WEIGHT, PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);
static const BranchProbability
    PtrUntakenProb(PH_NONTAKEN_WEIGHT, PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);

using ProbabilityList = SmallVector<BranchProbability>;
using ProbabilityTable = std::map<CmpInst::Predicate, ProbabilityList>;

/// Pointer comparisons:
static const ProbabilityTable PointerTable{
    {ICmpInst::ICMP_NE, {PtrTakenProb, PtrUntakenProb}}, /// p != q -> Likely
    {ICmpInst::ICMP_EQ, {PtrUntakenProb, PtrTakenProb}}, /// p == q -> Unlikely
};

/// Zero Heuristics (ZH)
static const uint32_t ZH_TAKEN_WEIGHT = 20;
static const uint32_t ZH_NONTAKEN_WEIGHT = 12;
static const BranchProbability
    ZeroTakenProb(ZH_TAKEN_WEIGHT, ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT);
static const BranchProbability
    ZeroUntakenProb(ZH_NONTAKEN_WEIGHT, ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT);

/// Integer compares with 0:
static const ProbabilityTable ICmpWithZeroTable{
    {CmpInst::ICMP_EQ, {ZeroUntakenProb, ZeroTakenProb}},  /// X == 0 -> Unlikely
    {CmpInst::ICMP_NE, {ZeroTakenProb, ZeroUntakenProb}},  /// X != 0 -> Likely
    {CmpInst::ICMP_SLT, {ZeroUntakenProb, ZeroTakenProb}}, /// X < 0  -> Unlikely
    {CmpInst::ICMP_SGT, {ZeroTakenProb, ZeroUntakenProb}}, /// X > 0  -> Likely
};

/// Integer compares with -1:
static const ProbabilityTable ICmpWithMinusOneTable{
    {CmpInst::ICMP_EQ, {ZeroUntakenProb, ZeroTakenProb}}, /// X == -1 -> Unlikely
    {CmpInst::ICMP_NE, {ZeroTakenProb, ZeroUntakenProb}}, /// X != -1 -> Likely
    // InstCombine canonicalizes X >= 0 into X > -1
    {CmpInst::ICMP_SGT, {ZeroTakenProb, ZeroUntakenProb}}, /// X >= 0 -> Likely
};

/// Integer compares with 1:
static const ProbabilityTable ICmpWithOneTable{
    // InstCombine canonicalizes X <= 0 into X < 1
    {CmpInst::ICMP_SLT, {ZeroUntakenProb, ZeroTakenProb}}, /// X <= 0 -> Unlikely
};

/// strcmp and similar functions return zero, negative, or positive, if the
/// first string is equal, less, or greater than the second. We consider it
/// likely that the strings are not equal, so a comparison with zero is
/// probably false; a comparison with any other number is also probably false,
/// given that what exactly is returned for nonzero values is not specified.
/// We know nothing about any kind of comparison other than equality.
static const ProbabilityTable ICmpWithLibCallTable{
    {CmpInst::ICMP_EQ, {ZeroUntakenProb, ZeroTakenProb}},
    {CmpInst::ICMP_NE, {ZeroTakenProb, ZeroUntakenProb}},
};

// Floating-Point Heuristics (FPH)
static const uint32_t FPH_TAKEN_WEIGHT = 20;
static const uint32_t FPH_NONTAKEN_WEIGHT = 12;

/// This is the probability for an ordered floating point comparison.
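/// Together with FPH_UNO_WEIGHT below this yields
/// FPOrdTakenProb = (2^20 - 1) / 2^20, i.e. the ordered (non-NaN) outcome is
/// treated as near-certain.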
static const uint32_t FPH_ORD_WEIGHT = 1024 * 1024 - 1;
/// This is the probability for an unordered floating point comparison, which
/// means that at least one of the operands is NaN. Usually it is used to test
/// for an exceptional case, so the result is unlikely.
static const uint32_t FPH_UNO_WEIGHT = 1;

static const BranchProbability FPOrdTakenProb(FPH_ORD_WEIGHT,
                                              FPH_ORD_WEIGHT + FPH_UNO_WEIGHT);
static const BranchProbability
    FPOrdUntakenProb(FPH_UNO_WEIGHT, FPH_ORD_WEIGHT + FPH_UNO_WEIGHT);
static const BranchProbability
    FPTakenProb(FPH_TAKEN_WEIGHT, FPH_TAKEN_WEIGHT + FPH_NONTAKEN_WEIGHT);
static const BranchProbability
    FPUntakenProb(FPH_NONTAKEN_WEIGHT, FPH_TAKEN_WEIGHT + FPH_NONTAKEN_WEIGHT);

/// Floating-Point compares:
static const ProbabilityTable FCmpTable{
    {FCmpInst::FCMP_ORD, {FPOrdTakenProb, FPOrdUntakenProb}}, /// !isnan -> Likely
    {FCmpInst::FCMP_UNO, {FPOrdUntakenProb, FPOrdTakenProb}}, /// isnan -> Unlikely
};

/// Set of dedicated "absolute" execution weights for a block. These weights are
/// meaningful relative to each other and their derivatives only.
enum class BlockExecWeight : std::uint32_t {
  /// Special weight used for cases with exact zero probability.
  ZERO = 0x0,
  /// Minimal possible non-zero weight.
  LOWEST_NON_ZERO = 0x1,
  /// Weight to an 'unreachable' block.
  UNREACHABLE = ZERO,
  /// Weight to a block containing a non-returning call.
  NORETURN = LOWEST_NON_ZERO,
  /// Weight to the 'unwind' block of an invoke instruction.
  UNWIND = LOWEST_NON_ZERO,
  /// Weight to a 'cold' block. Cold blocks are the ones containing calls marked
  /// with attribute 'cold'.
  COLD = 0xffff,
  /// Default weight is used in cases when there is no dedicated execution
  /// weight set. It is not propagated through the domination line either.
  DEFAULT = 0xfffff
};

BranchProbabilityInfo::SccInfo::SccInfo(const Function &F) {
  // Record SCC numbers of blocks in the CFG to identify irreducible loops.
  // FIXME: We could only calculate this if the CFG is known to be irreducible
  // (perhaps cache this info in LoopInfo if we can easily calculate it there?).
  int SccNum = 0;
  for (scc_iterator<const Function *> It = scc_begin(&F); !It.isAtEnd();
       ++It, ++SccNum) {
    // Ignore single-block SCCs since they either aren't loops or LoopInfo will
    // catch them.
    const std::vector<const BasicBlock *> &Scc = *It;
    if (Scc.size() == 1)
      continue;

    LLVM_DEBUG(dbgs() << "BPI: SCC " << SccNum << ":");
    for (const auto *BB : Scc) {
      LLVM_DEBUG(dbgs() << " " << BB->getName());
      SccNums[BB] = SccNum;
      calculateSccBlockType(BB, SccNum);
    }
    LLVM_DEBUG(dbgs() << "\n");
  }
}

int BranchProbabilityInfo::SccInfo::getSCCNum(const BasicBlock *BB) const {
  auto SccIt = SccNums.find(BB);
  if (SccIt == SccNums.end())
    return -1;
  return SccIt->second;
}

void BranchProbabilityInfo::SccInfo::getSccEnterBlocks(
    int SccNum, SmallVectorImpl<BasicBlock *> &Enters) const {

  for (auto MapIt : SccBlocks[SccNum]) {
    const auto *BB = MapIt.first;
    if (isSCCHeader(BB, SccNum))
      for (const auto *Pred : predecessors(BB))
        if (getSCCNum(Pred) != SccNum)
          Enters.push_back(const_cast<BasicBlock *>(Pred));
  }
}

void BranchProbabilityInfo::SccInfo::getSccExitBlocks(
    int SccNum, SmallVectorImpl<BasicBlock *> &Exits) const {
  for (auto MapIt : SccBlocks[SccNum]) {
    const auto *BB = MapIt.first;
    if (isSCCExitingBlock(BB, SccNum))
      for (const auto *Succ : successors(BB))
        if (getSCCNum(Succ) != SccNum)
          Exits.push_back(const_cast<BasicBlock *>(Succ));
  }
}

uint32_t BranchProbabilityInfo::SccInfo::getSccBlockType(const BasicBlock *BB,
                                                         int SccNum) const {
  assert(getSCCNum(BB) == SccNum);

  assert(SccBlocks.size() > static_cast<unsigned>(SccNum) && "Unknown SCC");
  const auto &SccBlockTypes = SccBlocks[SccNum];

  auto It = SccBlockTypes.find(BB);
  if (It != SccBlockTypes.end()) {
    return It->second;
  }
  return Inner;
}

void BranchProbabilityInfo::SccInfo::calculateSccBlockType(const BasicBlock *BB,
                                                           int SccNum) {
  assert(getSCCNum(BB) == SccNum);
  uint32_t BlockType = Inner;

  if (llvm::any_of(predecessors(BB), [&](const BasicBlock *Pred) {
        // Consider any block that is an entry point to the SCC as
        // a header.
        return getSCCNum(Pred) != SccNum;
      }))
    BlockType |= Header;

  if (llvm::any_of(successors(BB), [&](const BasicBlock *Succ) {
        return getSCCNum(Succ) != SccNum;
      }))
    BlockType |= Exiting;

  // Lazily compute the set of headers for a given SCC and cache the results
  // in the SccHeaderMap.
  if (SccBlocks.size() <= static_cast<unsigned>(SccNum))
    SccBlocks.resize(SccNum + 1);
  auto &SccBlockTypes = SccBlocks[SccNum];

  if (BlockType != Inner) {
    bool IsInserted;
    std::tie(std::ignore, IsInserted) =
        SccBlockTypes.insert(std::make_pair(BB, BlockType));
    assert(IsInserted && "Duplicated block in SCC");
  }
}

BranchProbabilityInfo::LoopBlock::LoopBlock(const BasicBlock *BB,
                                            const LoopInfo &LI,
                                            const SccInfo &SccI)
    : BB(BB) {
  LD.first = LI.getLoopFor(BB);
  if (!LD.first) {
    LD.second = SccI.getSCCNum(BB);
  }
}

bool BranchProbabilityInfo::isLoopEnteringEdge(const LoopEdge &Edge) const {
  const auto &SrcBlock = Edge.first;
  const auto &DstBlock = Edge.second;
  return (DstBlock.getLoop() &&
          !DstBlock.getLoop()->contains(SrcBlock.getLoop())) ||
         // Assume that SCCs can't be nested.
         (DstBlock.getSccNum() != -1 &&
          SrcBlock.getSccNum() != DstBlock.getSccNum());
}

bool BranchProbabilityInfo::isLoopExitingEdge(const LoopEdge &Edge) const {
  return isLoopEnteringEdge({Edge.second, Edge.first});
}

bool BranchProbabilityInfo::isLoopEnteringExitingEdge(
    const LoopEdge &Edge) const {
  return isLoopEnteringEdge(Edge) || isLoopExitingEdge(Edge);
}

bool BranchProbabilityInfo::isLoopBackEdge(const LoopEdge &Edge) const {
  const auto &SrcBlock = Edge.first;
  const auto &DstBlock = Edge.second;
  return SrcBlock.belongsToSameLoop(DstBlock) &&
         ((DstBlock.getLoop() &&
           DstBlock.getLoop()->getHeader() == DstBlock.getBlock()) ||
          (DstBlock.getSccNum() != -1 &&
           SccI->isSCCHeader(DstBlock.getBlock(), DstBlock.getSccNum())));
}

void BranchProbabilityInfo::getLoopEnterBlocks(
    const LoopBlock &LB, SmallVectorImpl<BasicBlock *> &Enters) const {
  if (LB.getLoop()) {
    auto *Header = LB.getLoop()->getHeader();
    Enters.append(pred_begin(Header), pred_end(Header));
  } else {
    assert(LB.getSccNum() != -1 && "LB doesn't belong to any loop?");
    SccI->getSccEnterBlocks(LB.getSccNum(), Enters);
  }
}

void BranchProbabilityInfo::getLoopExitBlocks(
    const LoopBlock &LB, SmallVectorImpl<BasicBlock *> &Exits) const {
  if (LB.getLoop()) {
    LB.getLoop()->getExitBlocks(Exits);
  } else {
    assert(LB.getSccNum() != -1 && "LB doesn't belong to any loop?");
    SccI->getSccExitBlocks(LB.getSccNum(), Exits);
  }
}

// Propagate existing explicit probabilities from either profile data or
// 'expect' intrinsic processing. Examine metadata against the unreachable
// heuristic. The probability of an edge coming to an unreachable block is
// set to the minimum of the metadata value and the unreachable heuristic.
bool BranchProbabilityInfo::calcMetadataWeights(const BasicBlock *BB) {
  const Instruction *TI = BB->getTerminator();
  assert(TI->getNumSuccessors() > 1 && "expected more than one successor!");
  if (!(isa<BranchInst>(TI) || isa<SwitchInst>(TI) || isa<IndirectBrInst>(TI) ||
        isa<InvokeInst>(TI)))
    return false;

  MDNode *WeightsNode = TI->getMetadata(LLVMContext::MD_prof);
  if (!WeightsNode)
    return false;

  // Check that the number of successors is manageable.
  assert(TI->getNumSuccessors() < UINT32_MAX && "Too many successors");

  // Ensure there are weights for all of the successors. Note that the first
  // operand to the metadata node is a name, not a weight.
  if (WeightsNode->getNumOperands() != TI->getNumSuccessors() + 1)
    return false;

  // Build up the final weights that will be used in a temporary buffer.
  // Compute the sum of all weights to later decide whether they need to
  // be scaled to fit in 32 bits.
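  //
  // For reference, the incoming !prof metadata typically has the form
  //   !{!"branch_weights", i32 2000, i32 1}
  // i.e. a leading kind string followed by one weight per successor.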
  uint64_t WeightSum = 0;
  SmallVector<uint32_t, 2> Weights;
  SmallVector<unsigned, 2> UnreachableIdxs;
  SmallVector<unsigned, 2> ReachableIdxs;
  Weights.reserve(TI->getNumSuccessors());
  for (unsigned I = 1, E = WeightsNode->getNumOperands(); I != E; ++I) {
    ConstantInt *Weight =
        mdconst::dyn_extract<ConstantInt>(WeightsNode->getOperand(I));
    if (!Weight)
      return false;
    assert(Weight->getValue().getActiveBits() <= 32 &&
           "Too many bits for uint32_t");
    Weights.push_back(Weight->getZExtValue());
    WeightSum += Weights.back();
    const LoopBlock SrcLoopBB = getLoopBlock(BB);
    const LoopBlock DstLoopBB = getLoopBlock(TI->getSuccessor(I - 1));
    auto EstimatedWeight = getEstimatedEdgeWeight({SrcLoopBB, DstLoopBB});
    if (EstimatedWeight &&
        EstimatedWeight.getValue() <=
            static_cast<uint32_t>(BlockExecWeight::UNREACHABLE))
      UnreachableIdxs.push_back(I - 1);
    else
      ReachableIdxs.push_back(I - 1);
  }
  assert(Weights.size() == TI->getNumSuccessors() && "Checked above");

  // If the sum of weights does not fit in 32 bits, scale every weight down
  // accordingly.
  uint64_t ScalingFactor =
      (WeightSum > UINT32_MAX) ? WeightSum / UINT32_MAX + 1 : 1;

  if (ScalingFactor > 1) {
    WeightSum = 0;
    for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I) {
      Weights[I] /= ScalingFactor;
      WeightSum += Weights[I];
    }
  }
  assert(WeightSum <= UINT32_MAX &&
         "Expected weights to scale down to 32 bits");

  if (WeightSum == 0 || ReachableIdxs.size() == 0) {
    for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I)
      Weights[I] = 1;
    WeightSum = TI->getNumSuccessors();
  }

  // Set the probability.
  SmallVector<BranchProbability, 2> BP;
  for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I)
    BP.push_back({Weights[I], static_cast<uint32_t>(WeightSum)});

  // Examine the metadata against the unreachable heuristic.
  // If the unreachable heuristic is stronger, then we use it for this edge.
  if (UnreachableIdxs.size() == 0 || ReachableIdxs.size() == 0) {
    setEdgeProbability(BB, BP);
    return true;
  }

  auto UnreachableProb = UR_TAKEN_PROB;
  for (auto I : UnreachableIdxs)
    if (UnreachableProb < BP[I]) {
      BP[I] = UnreachableProb;
    }

  // Sum of all edge probabilities must be 1.0. If we modified the probability
  // of some edges then we must distribute the introduced difference over the
  // reachable blocks.
  //
  // Proportional distribution: the relation between probabilities of the
  // reachable edges is kept unchanged. That is for any reachable edges i and j:
  //   newBP[i] / newBP[j] == oldBP[i] / oldBP[j] =>
  //   newBP[i] / oldBP[i] == newBP[j] / oldBP[j] == K
  // Where K is independent of i,j.
  //   newBP[i] == oldBP[i] * K
  // We need to find K.
  // Take the sum over all reachable edges on both sides:
  //   sum_of_reachable(newBP) == K * sum_of_reachable(oldBP)
  // Sum of newBP must be equal to 1.0:
  //   sum_of_reachable(newBP) + sum_of_unreachable(newBP) == 1.0 =>
  //   sum_of_reachable(newBP) = 1.0 - sum_of_unreachable(newBP)
  // Where sum_of_unreachable(newBP) is what has been just changed.
  // Finally:
  //   K == sum_of_reachable(newBP) / sum_of_reachable(oldBP) =>
  //   K == (1.0 - sum_of_unreachable(newBP)) / sum_of_reachable(oldBP)
  BranchProbability NewUnreachableSum = BranchProbability::getZero();
  for (auto I : UnreachableIdxs)
    NewUnreachableSum += BP[I];

  BranchProbability NewReachableSum =
      BranchProbability::getOne() - NewUnreachableSum;

  BranchProbability OldReachableSum = BranchProbability::getZero();
  for (auto I : ReachableIdxs)
    OldReachableSum += BP[I];

  if (OldReachableSum != NewReachableSum) { // Anything to distribute?
    if (OldReachableSum.isZero()) {
      // If all oldBP[i] are zeroes then the proportional distribution results
      // in all zero probabilities and the error stays big. In this case we
      // evenly spread NewReachableSum over the reachable edges.
      BranchProbability PerEdge = NewReachableSum / ReachableIdxs.size();
      for (auto I : ReachableIdxs)
        BP[I] = PerEdge;
    } else {
      for (auto I : ReachableIdxs) {
        // We use uint64_t to avoid double rounding error of the following
        // calculation: BP[i] = BP[i] * NewReachableSum / OldReachableSum
        // The formula is taken from the private constructor
        // BranchProbability(uint32_t Numerator, uint32_t Denominator)
        uint64_t Mul = static_cast<uint64_t>(NewReachableSum.getNumerator()) *
                       BP[I].getNumerator();
        uint32_t Div = static_cast<uint32_t>(
            divideNearest(Mul, OldReachableSum.getNumerator()));
        BP[I] = BranchProbability::getRaw(Div);
      }
    }
  }

  setEdgeProbability(BB, BP);

  return true;
}

// Calculate Edge Weights using "Pointer Heuristics". Predict that a comparison
// between two pointers, or between a pointer and NULL, will fail.
bool BranchProbabilityInfo::calcPointerHeuristics(const BasicBlock *BB) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
  if (!CI || !CI->isEquality())
    return false;

  Value *LHS = CI->getOperand(0);

  if (!LHS->getType()->isPointerTy())
    return false;

  assert(CI->getOperand(1)->getType()->isPointerTy());

  auto Search = PointerTable.find(CI->getPredicate());
  if (Search == PointerTable.end())
    return false;
  setEdgeProbability(BB, Search->second);
  return true;
}

// Compute the unlikely successors to the block BB in the loop L, specifically
// those that are unlikely because this is a loop, and add them to the
// UnlikelyBlocks set.
static void
computeUnlikelySuccessors(const BasicBlock *BB, Loop *L,
                          SmallPtrSetImpl<const BasicBlock *> &UnlikelyBlocks) {
  // Sometimes in a loop we have a branch whose condition is made false by
  // taking it. This is typically something like
  //   int n = 0;
  //   while (...) {
  //     if (++n >= MAX) {
  //       n = 0;
  //     }
  //   }
  // In this sort of situation taking the branch means that at the very least it
  // won't be taken again in the next iteration of the loop, so we should
  // consider it less likely than a typical branch.
  //
  // We detect this by looking back through the graph of PHI nodes that set the
  // value that the condition depends on, and seeing if we can reach a successor
  // block which can be determined to make the condition false.
  //
  // FIXME: We currently consider unlikely blocks to be half as likely as other
  // blocks, but if we consider the example above the likelihood is actually
  // 1/MAX. We could therefore be more precise in how unlikely we consider
  // blocks to be, but it would require more careful examination of the form
  // of the comparison expression.
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return;

  // Check if the branch is based on an instruction compared with a constant
  CmpInst *CI = dyn_cast<CmpInst>(BI->getCondition());
  if (!CI || !isa<Instruction>(CI->getOperand(0)) ||
      !isa<Constant>(CI->getOperand(1)))
    return;

  // Either the instruction must be a PHI, or a chain of operations involving
  // constants that ends in a PHI which we can then collapse into a single value
  // if the PHI value is known.
  Instruction *CmpLHS = dyn_cast<Instruction>(CI->getOperand(0));
  PHINode *CmpPHI = dyn_cast<PHINode>(CmpLHS);
  Constant *CmpConst = dyn_cast<Constant>(CI->getOperand(1));
  // Collect the instructions until we hit a PHI
  SmallVector<BinaryOperator *, 1> InstChain;
  while (!CmpPHI && CmpLHS && isa<BinaryOperator>(CmpLHS) &&
         isa<Constant>(CmpLHS->getOperand(1))) {
    // Stop if the chain extends outside of the loop
    if (!L->contains(CmpLHS))
      return;
    InstChain.push_back(cast<BinaryOperator>(CmpLHS));
    CmpLHS = dyn_cast<Instruction>(CmpLHS->getOperand(0));
    if (CmpLHS)
      CmpPHI = dyn_cast<PHINode>(CmpLHS);
  }
  if (!CmpPHI || !L->contains(CmpPHI))
    return;

  // Trace the phi node to find all values that come from successors of BB
  SmallPtrSet<PHINode *, 8> VisitedInsts;
  SmallVector<PHINode *, 8> WorkList;
  WorkList.push_back(CmpPHI);
  VisitedInsts.insert(CmpPHI);
  while (!WorkList.empty()) {
    PHINode *P = WorkList.pop_back_val();
    for (BasicBlock *B : P->blocks()) {
      // Skip blocks that aren't part of the loop
      if (!L->contains(B))
        continue;
      Value *V = P->getIncomingValueForBlock(B);
      // If the source is a PHI add it to the work list if we haven't
      // already visited it.
      if (PHINode *PN = dyn_cast<PHINode>(V)) {
        if (VisitedInsts.insert(PN).second)
          WorkList.push_back(PN);
        continue;
      }
      // If this incoming value is a constant and B is a successor of BB, then
      // we can constant-evaluate the compare to see if it makes the branch be
      // taken or not.
      Constant *CmpLHSConst = dyn_cast<Constant>(V);
      if (!CmpLHSConst || !llvm::is_contained(successors(BB), B))
        continue;
      // First collapse InstChain
      for (Instruction *I : llvm::reverse(InstChain)) {
        CmpLHSConst = ConstantExpr::get(I->getOpcode(), CmpLHSConst,
                                        cast<Constant>(I->getOperand(1)), true);
        if (!CmpLHSConst)
          break;
      }
      if (!CmpLHSConst)
        continue;
      // Now constant-evaluate the compare
      Constant *Result = ConstantExpr::getCompare(CI->getPredicate(),
                                                  CmpLHSConst, CmpConst, true);
      // If the result means we don't branch to the block then that block is
      // unlikely.
      if (Result &&
          ((Result->isZeroValue() && B == BI->getSuccessor(0)) ||
           (Result->isOneValue() && B == BI->getSuccessor(1))))
        UnlikelyBlocks.insert(B);
    }
  }
}

Optional<uint32_t>
BranchProbabilityInfo::getEstimatedBlockWeight(const BasicBlock *BB) const {
  auto WeightIt = EstimatedBlockWeight.find(BB);
  if (WeightIt == EstimatedBlockWeight.end())
    return None;
  return WeightIt->second;
}

Optional<uint32_t>
BranchProbabilityInfo::getEstimatedLoopWeight(const LoopData &L) const {
  auto WeightIt = EstimatedLoopWeight.find(L);
  if (WeightIt == EstimatedLoopWeight.end())
    return None;
  return WeightIt->second;
}

Optional<uint32_t>
BranchProbabilityInfo::getEstimatedEdgeWeight(const LoopEdge &Edge) const {
  // For edges entering a loop take the weight of the loop rather than that of
  // an individual block in the loop.
  return isLoopEnteringEdge(Edge)
             ? getEstimatedLoopWeight(Edge.second.getLoopData())
             : getEstimatedBlockWeight(Edge.second.getBlock());
}

template <class IterT>
Optional<uint32_t> BranchProbabilityInfo::getMaxEstimatedEdgeWeight(
    const LoopBlock &SrcLoopBB, iterator_range<IterT> Successors) const {
  SmallVector<uint32_t, 4> Weights;
  Optional<uint32_t> MaxWeight;
  for (const BasicBlock *DstBB : Successors) {
    const LoopBlock DstLoopBB = getLoopBlock(DstBB);
    auto Weight = getEstimatedEdgeWeight({SrcLoopBB, DstLoopBB});

    if (!Weight)
      return None;

    if (!MaxWeight || MaxWeight.getValue() < Weight.getValue())
      MaxWeight = Weight;
  }

  return MaxWeight;
}

// Updates \p LoopBB's weight and returns true. If \p LoopBB already has an
// associated weight it is unchanged and false is returned.
//
// Note that, by the algorithm, the weight is not expected to change once set;
// thus the 'false' status is used to track visited blocks.
bool BranchProbabilityInfo::updateEstimatedBlockWeight(
    LoopBlock &LoopBB, uint32_t BBWeight,
    SmallVectorImpl<BasicBlock *> &BlockWorkList,
    SmallVectorImpl<LoopBlock> &LoopWorkList) {
  BasicBlock *BB = LoopBB.getBlock();

  // In general, weight is assigned to a block when it has final value and
  // can't/shouldn't be changed. However, there are cases when a block
  // inherently has several (possibly "contradicting") weights. For example,
  // an "unwind" block may also contain a "cold" call. In that case the first
  // set weight is favored and all subsequent weights are ignored.
  if (!EstimatedBlockWeight.insert({BB, BBWeight}).second)
    return false;

  for (BasicBlock *PredBlock : predecessors(BB)) {
    LoopBlock PredLoop = getLoopBlock(PredBlock);
    // Add affected block/loop to a working list.
    if (isLoopExitingEdge({PredLoop, LoopBB})) {
      if (!EstimatedLoopWeight.count(PredLoop.getLoopData()))
        LoopWorkList.push_back(PredLoop);
    } else if (!EstimatedBlockWeight.count(PredBlock))
      BlockWorkList.push_back(PredBlock);
  }
  return true;
}

// Starting from \p BB traverse through dominator blocks and assign \p BBWeight
// to all such blocks that are post dominated by \p BB. In other words, to all
// blocks that are executed if and only if \p BB is executed. Importantly, we
// skip loops here for two reasons. First, weights of blocks in a loop should
// be scaled by the trip count (yet possibly unknown).
// Second, there is no value in doing that because it doesn't give any
// additional information regarding the distribution of probabilities inside
// the loop. The exception is loop 'enter' and 'exit' edges, which are handled
// in a special way at calcEstimatedHeuristics.
//
// In addition, \p WorkList is populated with basic blocks if at least one
// successor has an updated estimated weight.
void BranchProbabilityInfo::propagateEstimatedBlockWeight(
    const LoopBlock &LoopBB, DominatorTree *DT, PostDominatorTree *PDT,
    uint32_t BBWeight, SmallVectorImpl<BasicBlock *> &BlockWorkList,
    SmallVectorImpl<LoopBlock> &LoopWorkList) {
  const BasicBlock *BB = LoopBB.getBlock();
  const auto *DTStartNode = DT->getNode(BB);
  const auto *PDTStartNode = PDT->getNode(BB);

  // TODO: Consider propagating weight down the domination line as well.
  for (const auto *DTNode = DTStartNode; DTNode != nullptr;
       DTNode = DTNode->getIDom()) {
    auto *DomBB = DTNode->getBlock();
    // Consider blocks which lie on one 'line'.
    if (!PDT->dominates(PDTStartNode, PDT->getNode(DomBB)))
      // If BB doesn't post dominate DomBB it will not post dominate dominators
      // of DomBB as well.
      break;

    LoopBlock DomLoopBB = getLoopBlock(DomBB);
    const LoopEdge Edge{DomLoopBB, LoopBB};
    // Don't propagate weight to blocks belonging to different loops.
    if (!isLoopEnteringExitingEdge(Edge)) {
      if (!updateEstimatedBlockWeight(DomLoopBB, BBWeight, BlockWorkList,
                                      LoopWorkList))
        // If DomBB has a weight set then all its predecessors are already
        // processed (since we propagate weight up to the top of IR each time).
        break;
    } else if (isLoopExitingEdge(Edge)) {
      LoopWorkList.push_back(DomLoopBB);
    }
  }
}

Optional<uint32_t> BranchProbabilityInfo::getInitialEstimatedBlockWeight(
    const BasicBlock *BB) {
  // Returns true if \p BB has a call marked with the "NoReturn" attribute.
  auto hasNoReturn = [&](const BasicBlock *BB) {
    for (const auto &I : reverse(*BB))
      if (const CallInst *CI = dyn_cast<CallInst>(&I))
        if (CI->hasFnAttr(Attribute::NoReturn))
          return true;

    return false;
  };

  // Important note regarding the order of checks. They are ordered by weight
  // from lowest to highest. Doing so avoids "unstable" results when several
  // heuristics could be applied simultaneously.
  if (isa<UnreachableInst>(BB->getTerminator()) ||
      // If this block is terminated by a call to
      // @llvm.experimental.deoptimize then treat it like an unreachable
      // since it is expected to practically never execute.
      // TODO: Should we actually treat it as a never-returning call?
      BB->getTerminatingDeoptimizeCall())
    return hasNoReturn(BB)
               ? static_cast<uint32_t>(BlockExecWeight::NORETURN)
               : static_cast<uint32_t>(BlockExecWeight::UNREACHABLE);

  // Check if the block is the 'unwind' handler of some invoke instruction.
  for (const auto *Pred : predecessors(BB))
    if (Pred)
      if (const auto *II = dyn_cast<InvokeInst>(Pred->getTerminator()))
        if (II->getUnwindDest() == BB)
          return static_cast<uint32_t>(BlockExecWeight::UNWIND);

  // Check if the block contains a 'cold' call.
  for (const auto &I : *BB)
    if (const CallInst *CI = dyn_cast<CallInst>(&I))
      if (CI->hasFnAttr(Attribute::Cold))
        return static_cast<uint32_t>(BlockExecWeight::COLD);

  return None;
}

// Does an RPO traversal over all blocks in \p F and assigns weights to
// 'unreachable', 'noreturn', 'cold' and 'unwind' blocks. In addition it does
// its best to propagate the weight up/down the IR.
void BranchProbabilityInfo::computeEestimateBlockWeight(
    const Function &F, DominatorTree *DT, PostDominatorTree *PDT) {
  SmallVector<BasicBlock *, 8> BlockWorkList;
  SmallVector<LoopBlock, 8> LoopWorkList;

  // By doing RPO we make sure that all predecessors already have weights
  // calculated before visiting their successors.
  ReversePostOrderTraversal<const Function *> RPOT(&F);
  for (const auto *BB : RPOT)
    if (auto BBWeight = getInitialEstimatedBlockWeight(BB))
      // If we were able to find an estimated weight for the block, set it on
      // the block and propagate it up the IR.
      propagateEstimatedBlockWeight(getLoopBlock(BB), DT, PDT,
                                    BBWeight.getValue(), BlockWorkList,
                                    LoopWorkList);

  // BlockWorkList/LoopWorkList contain blocks/loops with at least one
  // successor/exit having an estimated weight. Try to propagate weight to such
  // blocks/loops from successors/exits.
  // Process loops and blocks. Order is not important.
  do {
    while (!LoopWorkList.empty()) {
      const LoopBlock LoopBB = LoopWorkList.pop_back_val();

      if (EstimatedLoopWeight.count(LoopBB.getLoopData()))
        continue;

      SmallVector<BasicBlock *, 4> Exits;
      getLoopExitBlocks(LoopBB, Exits);
      auto LoopWeight = getMaxEstimatedEdgeWeight(
          LoopBB, make_range(Exits.begin(), Exits.end()));

      if (LoopWeight) {
        // If we never exit the loop then we can enter it once at maximum.
        if (LoopWeight <= static_cast<uint32_t>(BlockExecWeight::UNREACHABLE))
          LoopWeight = static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO);

        EstimatedLoopWeight.insert(
            {LoopBB.getLoopData(), LoopWeight.getValue()});
        // Add all blocks entering the loop into the working list.
        getLoopEnterBlocks(LoopBB, BlockWorkList);
      }
    }

    while (!BlockWorkList.empty()) {
      // We can reach here only if BlockWorkList is not empty.
      const BasicBlock *BB = BlockWorkList.pop_back_val();
      if (EstimatedBlockWeight.count(BB))
        continue;

      // We take the maximum over all successor weights; in other words, we
      // take the weight of the "hot" path. In theory a combining function that
      // gives higher accuracy than "maximum" may exist, but it is unlikely to
      // make any difference in practice.
      const LoopBlock LoopBB = getLoopBlock(BB);
      auto MaxWeight = getMaxEstimatedEdgeWeight(LoopBB, successors(BB));

      if (MaxWeight)
        propagateEstimatedBlockWeight(LoopBB, DT, PDT, MaxWeight.getValue(),
                                      BlockWorkList, LoopWorkList);
    }
  } while (!BlockWorkList.empty() || !LoopWorkList.empty());
}

// Calculate edge probabilities based on the block's estimated weight.
// Note that the gathered weights were not scaled for loops. Thus edges
// entering and exiting loops require special processing.
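//
// For example, with the default weights above, the assumed trip count is
// TC = LBH_TAKEN_WEIGHT / LBH_NONTAKEN_WEIGHT = 124 / 4 = 31, so a
// loop-exiting edge whose successor has no dedicated weight is scaled down
// from DEFAULT to roughly DEFAULT / 31 before probabilities are formed.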
bool BranchProbabilityInfo::calcEstimatedHeuristics(const BasicBlock *BB) {
  assert(BB->getTerminator()->getNumSuccessors() > 1 &&
         "expected more than one successor!");

  const LoopBlock LoopBB = getLoopBlock(BB);

  SmallPtrSet<const BasicBlock *, 8> UnlikelyBlocks;
  uint32_t TC = LBH_TAKEN_WEIGHT / LBH_NONTAKEN_WEIGHT;
  if (LoopBB.getLoop())
    computeUnlikelySuccessors(BB, LoopBB.getLoop(), UnlikelyBlocks);

  // Changed to 'true' if at least one successor has an estimated weight.
  bool FoundEstimatedWeight = false;
  SmallVector<uint32_t, 4> SuccWeights;
  uint64_t TotalWeight = 0;
  // Go over all successors of BB and put their weights into SuccWeights.
  for (const BasicBlock *SuccBB : successors(BB)) {
    Optional<uint32_t> Weight;
    const LoopBlock SuccLoopBB = getLoopBlock(SuccBB);
    const LoopEdge Edge{LoopBB, SuccLoopBB};

    Weight = getEstimatedEdgeWeight(Edge);

    if (isLoopExitingEdge(Edge) &&
        // Avoid adjustment of ZERO weight since it should remain unchanged.
        Weight != static_cast<uint32_t>(BlockExecWeight::ZERO)) {
      // Scale down the loop exiting weight by the trip count.
      Weight = std::max(
          static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO),
          Weight.getValueOr(static_cast<uint32_t>(BlockExecWeight::DEFAULT)) /
              TC);
    }
    bool IsUnlikelyEdge = LoopBB.getLoop() && UnlikelyBlocks.contains(SuccBB);
    if (IsUnlikelyEdge &&
        // Avoid adjustment of ZERO weight since it should remain unchanged.
        Weight != static_cast<uint32_t>(BlockExecWeight::ZERO)) {
      // 'Unlikely' blocks get half the weight.
      Weight = std::max(
          static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO),
          Weight.getValueOr(static_cast<uint32_t>(BlockExecWeight::DEFAULT)) /
              2);
    }

    if (Weight)
      FoundEstimatedWeight = true;

    auto WeightVal =
        Weight.getValueOr(static_cast<uint32_t>(BlockExecWeight::DEFAULT));
    TotalWeight += WeightVal;
    SuccWeights.push_back(WeightVal);
  }

  // If none of the blocks have an estimated weight, bail out.
  // If TotalWeight is 0 then the weight of each successor is 0 as well and all
  // of them are equally likely. Bail out early to avoid division by zero.
  if (!FoundEstimatedWeight || TotalWeight == 0)
    return false;

  assert(SuccWeights.size() == succ_size(BB) && "Missed successor?");
  const unsigned SuccCount = SuccWeights.size();

  // If the sum of weights does not fit in 32 bits, scale every weight down
  // accordingly.
  if (TotalWeight > UINT32_MAX) {
    uint64_t ScalingFactor = TotalWeight / UINT32_MAX + 1;
    TotalWeight = 0;
    for (unsigned Idx = 0; Idx < SuccCount; ++Idx) {
      SuccWeights[Idx] /= ScalingFactor;
      if (SuccWeights[Idx] == static_cast<uint32_t>(BlockExecWeight::ZERO))
        SuccWeights[Idx] =
            static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO);
      TotalWeight += SuccWeights[Idx];
    }
    assert(TotalWeight <= UINT32_MAX && "Total weight overflows");
  }

  // Finally set probabilities on edges according to estimated block weights.
  SmallVector<BranchProbability, 4> EdgeProbabilities(
      SuccCount, BranchProbability::getUnknown());

  for (unsigned Idx = 0; Idx < SuccCount; ++Idx) {
    EdgeProbabilities[Idx] =
        BranchProbability(SuccWeights[Idx], (uint32_t)TotalWeight);
  }
  setEdgeProbability(BB, EdgeProbabilities);
  return true;
}

bool BranchProbabilityInfo::calcZeroHeuristics(const BasicBlock *BB,
                                               const TargetLibraryInfo *TLI) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
  if (!CI)
    return false;

  auto GetConstantInt = [](Value *V) {
    if (auto *I = dyn_cast<BitCastInst>(V))
      return dyn_cast<ConstantInt>(I->getOperand(0));
    return dyn_cast<ConstantInt>(V);
  };

  Value *RHS = CI->getOperand(1);
  ConstantInt *CV = GetConstantInt(RHS);
  if (!CV)
    return false;

  // If the LHS is the result of AND'ing a value with a single bit bitmask,
  // we don't have information about probabilities.
  if (Instruction *LHS = dyn_cast<Instruction>(CI->getOperand(0)))
    if (LHS->getOpcode() == Instruction::And)
      if (ConstantInt *AndRHS = GetConstantInt(LHS->getOperand(1)))
        if (AndRHS->getValue().isPowerOf2())
          return false;

  // Check if the LHS is the return value of a library function
  LibFunc Func = NumLibFuncs;
  if (TLI)
    if (CallInst *Call = dyn_cast<CallInst>(CI->getOperand(0)))
      if (Function *CalledFn = Call->getCalledFunction())
        TLI->getLibFunc(*CalledFn, Func);

  ProbabilityTable::const_iterator Search;
  if (Func == LibFunc_strcasecmp ||
      Func == LibFunc_strcmp ||
      Func == LibFunc_strncasecmp ||
      Func == LibFunc_strncmp ||
      Func == LibFunc_memcmp ||
      Func == LibFunc_bcmp) {
    Search = ICmpWithLibCallTable.find(CI->getPredicate());
    if (Search == ICmpWithLibCallTable.end())
      return false;
  } else if (CV->isZero()) {
    Search = ICmpWithZeroTable.find(CI->getPredicate());
    if (Search == ICmpWithZeroTable.end())
      return false;
  } else if (CV->isOne()) {
    Search = ICmpWithOneTable.find(CI->getPredicate());
    if (Search == ICmpWithOneTable.end())
      return false;
  } else if (CV->isMinusOne()) {
    Search = ICmpWithMinusOneTable.find(CI->getPredicate());
    if (Search == ICmpWithMinusOneTable.end())
      return false;
  } else {
    return false;
  }

  setEdgeProbability(BB, Search->second);
  return true;
}

bool BranchProbabilityInfo::calcFloatingPointHeuristics(const BasicBlock *BB) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  FCmpInst *FCmp = dyn_cast<FCmpInst>(Cond);
  if (!FCmp)
    return false;

  ProbabilityList ProbList;
  if (FCmp->isEquality()) {
    ProbList = !FCmp->isTrueWhenEqual() ?
        // f1 == f2 -> Unlikely
        ProbabilityList({FPTakenProb, FPUntakenProb}) :
        // f1 != f2 -> Likely
        ProbabilityList({FPUntakenProb, FPTakenProb});
  } else {
    auto Search = FCmpTable.find(FCmp->getPredicate());
    if (Search == FCmpTable.end())
      return false;
    ProbList = Search->second;
  }

  setEdgeProbability(BB, ProbList);
  return true;
}

void BranchProbabilityInfo::releaseMemory() {
  Probs.clear();
  Handles.clear();
}

bool BranchProbabilityInfo::invalidate(Function &, const PreservedAnalyses &PA,
                                       FunctionAnalysisManager::Invalidator &) {
  // Check whether the analysis, all analyses on functions, or the function's
  // CFG have been preserved.
  auto PAC = PA.getChecker<BranchProbabilityAnalysis>();
  return !(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Function>>() ||
           PAC.preservedSet<CFGAnalyses>());
}

void BranchProbabilityInfo::print(raw_ostream &OS) const {
  OS << "---- Branch Probabilities ----\n";
  // We print the probabilities from the last function the analysis ran over,
  // or the function it is currently running over.
  assert(LastF && "Cannot print prior to running over a function");
  for (const auto &BI : *LastF) {
    for (const BasicBlock *Succ : successors(&BI))
      printEdgeProbability(OS << "  ", &BI, Succ);
  }
}

bool BranchProbabilityInfo::
isEdgeHot(const BasicBlock *Src, const BasicBlock *Dst) const {
  // Hot probability is at least 4/5 = 80%
  // FIXME: Compare against a static "hot" BranchProbability.
  return getEdgeProbability(Src, Dst) > BranchProbability(4, 5);
}

/// Get the raw edge probability for the edge. If we can't find it, return a
/// default probability 1/N where N is the number of successors. Here an edge
/// is specified using PredBlock and an index to the successors.
BranchProbability
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
                                          unsigned IndexInSuccessors) const {
  auto I = Probs.find(std::make_pair(Src, IndexInSuccessors));
  assert((Probs.end() == Probs.find(std::make_pair(Src, 0))) ==
             (Probs.end() == I) &&
         "Probability for I-th successor must always be defined along with the "
         "probability for the first successor");

  if (I != Probs.end())
    return I->second;

  return {1, static_cast<uint32_t>(succ_size(Src))};
}

BranchProbability
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
                                          const_succ_iterator Dst) const {
  return getEdgeProbability(Src, Dst.getSuccessorIndex());
}

/// Get the raw edge probability calculated for the block pair. This returns the
/// sum of all raw edge probabilities from Src to Dst.
BranchProbability
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
                                          const BasicBlock *Dst) const {
  if (!Probs.count(std::make_pair(Src, 0)))
    return BranchProbability(llvm::count(successors(Src), Dst), succ_size(Src));

  auto Prob = BranchProbability::getZero();
  for (const_succ_iterator I = succ_begin(Src), E = succ_end(Src); I != E; ++I)
    if (*I == Dst)
      Prob += Probs.find(std::make_pair(Src, I.getSuccessorIndex()))->second;

  return Prob;
}

/// Set the edge probability for all edges at once.
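/// \p Probs must contain exactly one probability per successor of \p Src, in
/// successor order; an empty vector leaves the block with no recorded
/// probabilities.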
void BranchProbabilityInfo::setEdgeProbability(
    const BasicBlock *Src, const SmallVectorImpl<BranchProbability> &Probs) {
  assert(Src->getTerminator()->getNumSuccessors() == Probs.size());
  eraseBlock(Src); // Erase stale data if any.
  if (Probs.size() == 0)
    return; // Nothing to set.

  Handles.insert(BasicBlockCallbackVH(Src, this));
  uint64_t TotalNumerator = 0;
  for (unsigned SuccIdx = 0; SuccIdx < Probs.size(); ++SuccIdx) {
    this->Probs[std::make_pair(Src, SuccIdx)] = Probs[SuccIdx];
    LLVM_DEBUG(dbgs() << "set edge " << Src->getName() << " -> " << SuccIdx
                      << " successor probability to " << Probs[SuccIdx]
                      << "\n");
    TotalNumerator += Probs[SuccIdx].getNumerator();
  }

  // Because of rounding errors the total probability cannot be checked to be
  // 1.0 exactly, that is, TotalNumerator == BranchProbability::getDenominator().
  // Instead, every single probability in Probs must be as accurate as possible.
  // This results in an error of at most 1/denominator per entry, thus the total
  // absolute error should be within Probs.size() divided by
  // BranchProbability::getDenominator().
  assert(TotalNumerator <= BranchProbability::getDenominator() + Probs.size());
  assert(TotalNumerator >= BranchProbability::getDenominator() - Probs.size());
  (void)TotalNumerator;
}

void BranchProbabilityInfo::copyEdgeProbabilities(BasicBlock *Src,
                                                  BasicBlock *Dst) {
  eraseBlock(Dst); // Erase stale data if any.
  unsigned NumSuccessors = Src->getTerminator()->getNumSuccessors();
  assert(NumSuccessors == Dst->getTerminator()->getNumSuccessors());
  if (NumSuccessors == 0)
    return; // Nothing to set.
  if (this->Probs.find(std::make_pair(Src, 0)) == this->Probs.end())
    return; // No probability is set for edges from Src. Keep the same for Dst.

  Handles.insert(BasicBlockCallbackVH(Dst, this));
  for (unsigned SuccIdx = 0; SuccIdx < NumSuccessors; ++SuccIdx) {
    auto Prob = this->Probs[std::make_pair(Src, SuccIdx)];
    this->Probs[std::make_pair(Dst, SuccIdx)] = Prob;
    LLVM_DEBUG(dbgs() << "set edge " << Dst->getName() << " -> " << SuccIdx
                      << " successor probability to " << Prob << "\n");
  }
}

raw_ostream &
BranchProbabilityInfo::printEdgeProbability(raw_ostream &OS,
                                            const BasicBlock *Src,
                                            const BasicBlock *Dst) const {
  const BranchProbability Prob = getEdgeProbability(Src, Dst);
  OS << "edge " << Src->getName() << " -> " << Dst->getName()
     << " probability is " << Prob
     << (isEdgeHot(Src, Dst) ? " [HOT edge]\n" : "\n");

  return OS;
}

void BranchProbabilityInfo::eraseBlock(const BasicBlock *BB) {
  LLVM_DEBUG(dbgs() << "eraseBlock " << BB->getName() << "\n");

  // Note that we cannot use successors of BB because the terminator of BB may
  // have changed when eraseBlock is called as a BasicBlockCallbackVH callback.
  // Instead we remove prob data for the block by iterating successors by their
  // indices from 0 until the last one that exists. There cannot be prob data
  // for a pair (BB, N) if there is no data for (BB, N-1) because the data is
  // always set for all successors from 0 to M at once by the method
  // setEdgeProbability().
  Handles.erase(BasicBlockCallbackVH(BB, this));
  for (unsigned I = 0;; ++I) {
    auto MapI = Probs.find(std::make_pair(BB, I));
    if (MapI == Probs.end()) {
      assert(Probs.count(std::make_pair(BB, I + 1)) == 0 &&
             "Must be no more successors");
      return;
    }
    Probs.erase(MapI);
  }
}

void BranchProbabilityInfo::calculate(const Function &F, const LoopInfo &LoopI,
                                      const TargetLibraryInfo *TLI,
                                      DominatorTree *DT,
                                      PostDominatorTree *PDT) {
  LLVM_DEBUG(dbgs() << "---- Branch Probability Info : " << F.getName()
                    << " ----\n\n");
  LastF = &F; // Store the last function we ran on for printing.
  LI = &LoopI;

  SccI = std::make_unique<SccInfo>(F);

  assert(EstimatedBlockWeight.empty());
  assert(EstimatedLoopWeight.empty());

  std::unique_ptr<DominatorTree> DTPtr;
  std::unique_ptr<PostDominatorTree> PDTPtr;

  if (!DT) {
    DTPtr = std::make_unique<DominatorTree>(const_cast<Function &>(F));
    DT = DTPtr.get();
  }

  if (!PDT) {
    PDTPtr = std::make_unique<PostDominatorTree>(const_cast<Function &>(F));
    PDT = PDTPtr.get();
  }

  computeEestimateBlockWeight(F, DT, PDT);

  // Walk the basic blocks in post-order so that we can build up state about
  // the successors of a block iteratively.
  for (auto BB : post_order(&F.getEntryBlock())) {
    LLVM_DEBUG(dbgs() << "Computing probabilities for " << BB->getName()
                      << "\n");
    // If there are fewer than two successors, there is no probability to set.
    if (BB->getTerminator()->getNumSuccessors() < 2)
      continue;
    if (calcMetadataWeights(BB))
      continue;
    if (calcEstimatedHeuristics(BB))
      continue;
    if (calcPointerHeuristics(BB))
      continue;
    if (calcZeroHeuristics(BB, TLI))
      continue;
    if (calcFloatingPointHeuristics(BB))
      continue;
  }

  EstimatedLoopWeight.clear();
  EstimatedBlockWeight.clear();
  SccI.reset();

  if (PrintBranchProb &&
      (PrintBranchProbFuncName.empty() ||
       F.getName().equals(PrintBranchProbFuncName))) {
    print(dbgs());
  }
}

void BranchProbabilityInfoWrapperPass::getAnalysisUsage(
    AnalysisUsage &AU) const {
  // We require DT so it's available when LI is available. The LI updating code
  // asserts that DT is also present so if we don't make sure that we have DT
  // here, that assert will trigger.
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.addRequired<LoopInfoWrapperPass>();
  AU.addRequired<TargetLibraryInfoWrapperPass>();
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.addRequired<PostDominatorTreeWrapperPass>();
  AU.setPreservesAll();
}

bool BranchProbabilityInfoWrapperPass::runOnFunction(Function &F) {
  const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
  const TargetLibraryInfo &TLI =
      getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
  DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  PostDominatorTree &PDT =
      getAnalysis<PostDominatorTreeWrapperPass>().getPostDomTree();
  BPI.calculate(F, LI, &TLI, &DT, &PDT);
  return false;
}

void BranchProbabilityInfoWrapperPass::releaseMemory() { BPI.releaseMemory(); }

void BranchProbabilityInfoWrapperPass::print(raw_ostream &OS,
                                             const Module *) const {
  BPI.print(OS);
}

AnalysisKey BranchProbabilityAnalysis::Key;
BranchProbabilityInfo
BranchProbabilityAnalysis::run(Function &F, FunctionAnalysisManager &AM) {
  BranchProbabilityInfo BPI;
  BPI.calculate(F, AM.getResult<LoopAnalysis>(F),
                &AM.getResult<TargetLibraryAnalysis>(F),
                &AM.getResult<DominatorTreeAnalysis>(F),
                &AM.getResult<PostDominatorTreeAnalysis>(F));
  return BPI;
}

PreservedAnalyses
BranchProbabilityPrinterPass::run(Function &F, FunctionAnalysisManager &AM) {
  OS << "Printing analysis results of BPI for function "
     << "'" << F.getName() << "':"
     << "\n";
  AM.getResult<BranchProbabilityAnalysis>(F).print(OS);
  return PreservedAnalyses::all();
}