diff --git a/lib/Analysis/BranchProbabilityInfo.cpp b/lib/Analysis/BranchProbabilityInfo.cpp
index 58ccad89d50..4247d9a3528 100644
--- a/lib/Analysis/BranchProbabilityInfo.cpp
+++ b/lib/Analysis/BranchProbabilityInfo.cpp
@@ -85,6 +85,8 @@ char BranchProbabilityInfoWrapperPass::ID = 0;
 // Probability of the edge BB2->BB3 = 4 / (124 + 4) = 0.03125
 static const uint32_t LBH_TAKEN_WEIGHT = 124;
 static const uint32_t LBH_NONTAKEN_WEIGHT = 4;
+// Unlikely edges within a loop are half as likely as other edges
+static const uint32_t LBH_UNLIKELY_WEIGHT = 62;
 
 /// \brief Unreachable-terminating branch taken probability.
 ///
@@ -457,6 +459,114 @@ static bool isSCCHeader(const BasicBlock *BB, int SccNum,
   return HeaderMapIt->second;
 }
 
+// Compute the unlikely successors to the block BB in the loop L, specifically
+// those that are unlikely because this is a loop, and add them to the
+// UnlikelyBlocks set.
+static void
+computeUnlikelySuccessors(const BasicBlock *BB, Loop *L,
+                          SmallPtrSetImpl<const BasicBlock*> &UnlikelyBlocks) {
+  // Sometimes in a loop we have a branch whose condition is made false by
+  // taking it. This is typically something like
+  //  int n = 0;
+  //  while (...) {
+  //    if (++n >= MAX) {
+  //      n = 0;
+  //    }
+  //  }
+  // In this sort of situation taking the branch means that at the very least it
+  // won't be taken again in the next iteration of the loop, so we should
+  // consider it less likely than a typical branch.
+  //
+  // We detect this by looking back through the graph of PHI nodes that sets the
+  // value that the condition depends on, and seeing if we can reach a successor
+  // block which can be determined to make the condition false.
+  //
+  // FIXME: We currently consider unlikely blocks to be half as likely as other
+  // blocks, but if we consider the example above the likelihood is actually
+  // 1/MAX. We could therefore be more precise in how unlikely we consider
+  // blocks to be, but it would require more careful examination of the form
+  // of the comparison expression.
+  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
+  if (!BI || !BI->isConditional())
+    return;
+
+  // Check if the branch is based on an instruction compared with a constant
+  CmpInst *CI = dyn_cast<CmpInst>(BI->getCondition());
+  if (!CI || !isa<Instruction>(CI->getOperand(0)) ||
+      !isa<Constant>(CI->getOperand(1)))
+    return;
+
+  // Either the instruction must be a PHI, or a chain of operations involving
+  // constants that ends in a PHI which we can then collapse into a single value
+  // if the PHI value is known.
+  Instruction *CmpLHS = dyn_cast<Instruction>(CI->getOperand(0));
+  PHINode *CmpPHI = dyn_cast<PHINode>(CmpLHS);
+  Constant *CmpConst = dyn_cast<Constant>(CI->getOperand(1));
+  // Collect the instructions until we hit a PHI
+  std::list<Instruction *> InstChain;
+  while (!CmpPHI && CmpLHS && isa<BinaryOperator>(CmpLHS) &&
+         isa<Constant>(CmpLHS->getOperand(1))) {
+    // Stop if the chain extends outside of the loop
+    if (!L->contains(CmpLHS))
+      return;
+    InstChain.push_front(dyn_cast<Instruction>(CmpLHS));
+    CmpLHS = dyn_cast<Instruction>(CmpLHS->getOperand(0));
+    if (CmpLHS)
+      CmpPHI = dyn_cast<PHINode>(CmpLHS);
+  }
+  if (!CmpPHI || !L->contains(CmpPHI))
+    return;
+
+  // Trace the phi node to find all values that come from successors of BB
+  SmallPtrSet<PHINode*, 8> VisitedInsts;
+  SmallVector<PHINode*, 8> WorkList;
+  WorkList.push_back(CmpPHI);
+  VisitedInsts.insert(CmpPHI);
+  while (!WorkList.empty()) {
+    PHINode *P = WorkList.back();
+    WorkList.pop_back();
+    for (BasicBlock *B : P->blocks()) {
+      // Skip blocks that aren't part of the loop
+      if (!L->contains(B))
+        continue;
+      Value *V = P->getIncomingValueForBlock(B);
+      // If the source is a PHI add it to the work list if we haven't
+      // already visited it.
+      if (PHINode *PN = dyn_cast<PHINode>(V)) {
+        if (VisitedInsts.insert(PN).second)
+          WorkList.push_back(PN);
+        continue;
+      }
+      // If this incoming value is a constant and B is a successor of BB, then
+      // we can constant-evaluate the compare to see if it makes the branch be
+      // taken or not.
+      Constant *CmpLHSConst = dyn_cast<Constant>(V);
+      if (!CmpLHSConst ||
+          std::find(succ_begin(BB), succ_end(BB), B) == succ_end(BB))
+        continue;
+      // First collapse InstChain
+      for (Instruction *I : InstChain) {
+        CmpLHSConst = ConstantExpr::get(I->getOpcode(), CmpLHSConst,
+                                        dyn_cast<Constant>(I->getOperand(1)),
+                                        true);
+        if (!CmpLHSConst)
+          break;
+      }
+      if (!CmpLHSConst)
+        continue;
+      // Now constant-evaluate the compare
+      Constant *Result = ConstantExpr::getCompare(CI->getPredicate(),
+                                                  CmpLHSConst, CmpConst, true);
+      // If the result means we don't branch to the block then that block is
+      // unlikely.
+      if (Result &&
+          ((Result->isZeroValue() && B == BI->getSuccessor(0)) ||
+           (Result->isOneValue() && B == BI->getSuccessor(1))))
+        UnlikelyBlocks.insert(B);
+    }
+  }
+}
+
 // Calculate Edge Weights using "Loop Branch Heuristics". Predict backedges
 // as taken, exiting edges as not-taken.
 bool BranchProbabilityInfo::calcLoopBranchHeuristics(const BasicBlock *BB,
@@ -470,15 +580,22 @@ bool BranchProbabilityInfo::calcLoopBranchHeuristics(const BasicBlock *BB,
       return false;
   }
 
+  SmallPtrSet<const BasicBlock*, 4> UnlikelyBlocks;
+  if (L)
+    computeUnlikelySuccessors(BB, L, UnlikelyBlocks);
+
   SmallVector<unsigned, 8> BackEdges;
   SmallVector<unsigned, 8> ExitingEdges;
   SmallVector<unsigned, 8> InEdges; // Edges from header to the loop.
+  SmallVector<unsigned, 8> UnlikelyEdges;
 
   for (succ_const_iterator I = succ_begin(BB), E = succ_end(BB); I != E; ++I) {
     // Use LoopInfo if we have it, otherwise fall-back to SCC info to catch
     // irreducible loops.
     if (L) {
-      if (!L->contains(*I))
+      if (UnlikelyBlocks.count(*I) != 0)
+        UnlikelyEdges.push_back(I.getSuccessorIndex());
+      else if (!L->contains(*I))
         ExitingEdges.push_back(I.getSuccessorIndex());
       else if (L->getHeader() == *I)
         BackEdges.push_back(I.getSuccessorIndex());
@@ -494,42 +611,46 @@ bool BranchProbabilityInfo::calcLoopBranchHeuristics(const BasicBlock *BB,
     }
   }
 
-  if (BackEdges.empty() && ExitingEdges.empty())
+  if (BackEdges.empty() && ExitingEdges.empty() && UnlikelyEdges.empty())
     return false;
 
   // Collect the sum of probabilities of back-edges/in-edges/exiting-edges, and
   // normalize them so that they sum up to one.
-  BranchProbability Probs[] = {BranchProbability::getZero(),
-                               BranchProbability::getZero(),
-                               BranchProbability::getZero()};
   unsigned Denom = (BackEdges.empty() ? 0 : LBH_TAKEN_WEIGHT) +
                    (InEdges.empty() ? 0 : LBH_TAKEN_WEIGHT) +
+                   (UnlikelyEdges.empty() ? 0 : LBH_UNLIKELY_WEIGHT) +
                    (ExitingEdges.empty() ? 0 : LBH_NONTAKEN_WEIGHT);
-  if (!BackEdges.empty())
-    Probs[0] = BranchProbability(LBH_TAKEN_WEIGHT, Denom);
-  if (!InEdges.empty())
-    Probs[1] = BranchProbability(LBH_TAKEN_WEIGHT, Denom);
-  if (!ExitingEdges.empty())
-    Probs[2] = BranchProbability(LBH_NONTAKEN_WEIGHT, Denom);
 
   if (uint32_t numBackEdges = BackEdges.size()) {
-    auto Prob = Probs[0] / numBackEdges;
+    BranchProbability TakenProb = BranchProbability(LBH_TAKEN_WEIGHT, Denom);
+    auto Prob = TakenProb / numBackEdges;
     for (unsigned SuccIdx : BackEdges)
       setEdgeProbability(BB, SuccIdx, Prob);
   }
 
   if (uint32_t numInEdges = InEdges.size()) {
-    auto Prob = Probs[1] / numInEdges;
+    BranchProbability TakenProb = BranchProbability(LBH_TAKEN_WEIGHT, Denom);
+    auto Prob = TakenProb / numInEdges;
     for (unsigned SuccIdx : InEdges)
       setEdgeProbability(BB, SuccIdx, Prob);
   }
 
   if (uint32_t numExitingEdges = ExitingEdges.size()) {
-    auto Prob = Probs[2] / numExitingEdges;
+    BranchProbability NotTakenProb = BranchProbability(LBH_NONTAKEN_WEIGHT,
+                                                       Denom);
+    auto Prob = NotTakenProb / numExitingEdges;
     for (unsigned SuccIdx : ExitingEdges)
       setEdgeProbability(BB, SuccIdx, Prob);
   }
 
+  if (uint32_t numUnlikelyEdges = UnlikelyEdges.size()) {
+    BranchProbability UnlikelyProb = BranchProbability(LBH_UNLIKELY_WEIGHT,
+                                                       Denom);
+    auto Prob = UnlikelyProb / numUnlikelyEdges;
+    for (unsigned SuccIdx : UnlikelyEdges)
+      setEdgeProbability(BB, SuccIdx, Prob);
+  }
+
   return true;
 }
 
diff --git a/test/Analysis/BranchProbabilityInfo/loop.ll b/test/Analysis/BranchProbabilityInfo/loop.ll
index 3a1b01c000f..e8bf19061d6 100644
--- a/test/Analysis/BranchProbabilityInfo/loop.ll
+++ b/test/Analysis/BranchProbabilityInfo/loop.ll
@@ -401,3 +401,91 @@ for.inc:
 end:
   ret void
 }
+
+; Check that the for.body -> if.then edge is considered unlikely due to making
+; the if-condition false for the next iteration of the loop.
+define i32 @test10(i32 %n, i32* %p) {
+entry:
+  br label %for.cond
+; CHECK: edge entry -> for.cond probability is 0x80000000 / 0x80000000 = 100.00% [HOT edge]
+
+for.cond:
+  %count.0 = phi i32 [ 0, %entry ], [ %count.1, %for.inc ]
+  %sum.0 = phi i32 [ 0, %entry ], [ %sum.1, %for.inc ]
+  %i.0 = phi i32 [ 0, %entry ], [ %inc3, %for.inc ]
+  %cmp = icmp slt i32 %i.0, %n
+  br i1 %cmp, label %for.body, label %for.cond.cleanup
+; CHECK: edge for.cond -> for.body probability is 0x7c000000 / 0x80000000 = 96.88% [HOT edge]
+; CHECK: edge for.cond -> for.cond.cleanup probability is 0x04000000 / 0x80000000 = 3.12%
+
+for.cond.cleanup:
+  ret i32 %sum.0
+
+for.body:
+  %arrayidx = getelementptr inbounds i32, i32* %p, i32 %i.0
+  %0 = load i32, i32* %arrayidx, align 4
+  %add = add nsw i32 %sum.0, %0
+  %inc = add nsw i32 %count.0, 1
+  %cmp1 = icmp sgt i32 %count.0, 6
+  br i1 %cmp1, label %if.then, label %for.inc
+; CHECK: edge for.body -> if.then probability is 0x2aaaaaab / 0x80000000 = 33.33%
+; CHECK: edge for.body -> for.inc probability is 0x55555555 / 0x80000000 = 66.67%
+
+if.then:
+  store i32 %add, i32* %arrayidx, align 4
+  br label %for.inc
+; CHECK: edge if.then -> for.inc probability is 0x80000000 / 0x80000000 = 100.00% [HOT edge]
+
+for.inc:
+  %count.1 = phi i32 [ 0, %if.then ], [ %inc, %for.body ]
+  %sum.1 = phi i32 [ 0, %if.then ], [ %add, %for.body ]
+  %inc3 = add nsw i32 %i.0, 1
+  br label %for.cond
+; CHECK: edge for.inc -> for.cond probability is 0x80000000 / 0x80000000 = 100.00% [HOT edge]
+}
+
+; Each successor to for.body makes itself not be taken in the next iteration, so
+; both should be equally likely
+define i32 @test11(i32 %n, i32* %p) {
+entry:
+  br label %for.cond
+; CHECK: edge entry -> for.cond probability is 0x80000000 / 0x80000000 = 100.00% [HOT edge]
+
+for.cond:
+  %flip.0 = phi i32 [ 0, %entry ], [ %flip.1, %for.inc ]
+  %sum.0 = phi i32 [ 0, %entry ], [ %sum.1, %for.inc ]
+  %i.0 = phi i32 [ 0, %entry ], [ %inc, %for.inc ]
+  %cmp = icmp slt i32 %i.0, %n
+  br i1 %cmp, label %for.body, label %for.cond.cleanup
+; CHECK: edge for.cond -> for.body probability is 0x7c000000 / 0x80000000 = 96.88% [HOT edge]
+; CHECK: edge for.cond -> for.cond.cleanup probability is 0x04000000 / 0x80000000 = 3.12%
+
+for.cond.cleanup:
+  ret i32 %sum.0
+
+for.body:
+  %tobool = icmp eq i32 %flip.0, 0
+  %arrayidx1 = getelementptr inbounds i32, i32* %p, i32 %i.0
+  %0 = load i32, i32* %arrayidx1, align 4
+  br i1 %tobool, label %if.else, label %if.then
+; CHECK: edge for.body -> if.else probability is 0x40000000 / 0x80000000 = 50.00%
+; CHECK: edge for.body -> if.then probability is 0x40000000 / 0x80000000 = 50.00%
+
+if.then:
+  %add = add nsw i32 %0, %sum.0
+  store i32 %add, i32* %arrayidx1, align 4
+  br label %for.inc
+; CHECK: edge if.then -> for.inc probability is 0x80000000 / 0x80000000 = 100.00% [HOT edge]
+
+if.else:
+  %add2 = add nsw i32 %sum.0, %0
+  br label %for.inc
+; CHECK: edge if.else -> for.inc probability is 0x80000000 / 0x80000000 = 100.00% [HOT edge]
+
+for.inc:
+  %flip.1 = phi i32 [ 0, %if.then ], [ 1, %if.else ]
+  %sum.1 = phi i32 [ %sum.0, %if.then ], [ %add2, %if.else ]
+  %inc = add nsw i32 %i.0, 1
+  br label %for.cond
+; CHECK: edge for.inc -> for.cond probability is 0x80000000 / 0x80000000 = 100.00% [HOT edge]
+}