2017-07-21 23:37:46 +02:00
|
|
|
//===- BranchProbabilityInfo.cpp - Branch Probability Analysis ------------===//
|
2011-06-04 03:16:30 +02:00
|
|
|
//
|
2019-01-19 09:50:56 +01:00
|
|
|
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
|
|
|
// See https://llvm.org/LICENSE.txt for license information.
|
|
|
|
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
2011-06-04 03:16:30 +02:00
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
//
|
|
|
|
// Loops should be simplified before this analysis.
|
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
|
2012-12-03 17:50:05 +01:00
|
|
|
#include "llvm/Analysis/BranchProbabilityInfo.h"
|
|
|
|
#include "llvm/ADT/PostOrderIterator.h"
|
2017-11-01 16:16:50 +01:00
|
|
|
#include "llvm/ADT/SCCIterator.h"
|
2017-07-21 23:37:46 +02:00
|
|
|
#include "llvm/ADT/STLExtras.h"
|
|
|
|
#include "llvm/ADT/SmallVector.h"
|
2012-12-03 17:50:05 +01:00
|
|
|
#include "llvm/Analysis/LoopInfo.h"
|
2019-12-02 19:15:22 +01:00
|
|
|
#include "llvm/Analysis/PostDominators.h"
|
2017-06-08 11:44:40 +02:00
|
|
|
#include "llvm/Analysis/TargetLibraryInfo.h"
|
2017-07-21 23:37:46 +02:00
|
|
|
#include "llvm/IR/Attributes.h"
|
|
|
|
#include "llvm/IR/BasicBlock.h"
|
2014-03-04 12:45:46 +01:00
|
|
|
#include "llvm/IR/CFG.h"
|
2013-01-02 12:36:10 +01:00
|
|
|
#include "llvm/IR/Constants.h"
|
2018-05-17 11:05:40 +02:00
|
|
|
#include "llvm/IR/Dominators.h"
|
2013-01-02 12:36:10 +01:00
|
|
|
#include "llvm/IR/Function.h"
|
2017-07-21 23:37:46 +02:00
|
|
|
#include "llvm/IR/InstrTypes.h"
|
|
|
|
#include "llvm/IR/Instruction.h"
|
2013-01-02 12:36:10 +01:00
|
|
|
#include "llvm/IR/Instructions.h"
|
|
|
|
#include "llvm/IR/LLVMContext.h"
|
|
|
|
#include "llvm/IR/Metadata.h"
|
2017-07-21 23:37:46 +02:00
|
|
|
#include "llvm/IR/PassManager.h"
|
|
|
|
#include "llvm/IR/Type.h"
|
|
|
|
#include "llvm/IR/Value.h"
|
Sink all InitializePasses.h includes
This file lists every pass in LLVM, and is included by Pass.h, which is
very popular. Every time we add, remove, or rename a pass in LLVM, it
caused lots of recompilation.
I found this fact by looking at this table, which is sorted by the
number of times a file was changed over the last 100,000 git commits
multiplied by the number of object files that depend on it in the
current checkout:
recompiles touches affected_files header
342380 95 3604 llvm/include/llvm/ADT/STLExtras.h
314730 234 1345 llvm/include/llvm/InitializePasses.h
307036 118 2602 llvm/include/llvm/ADT/APInt.h
213049 59 3611 llvm/include/llvm/Support/MathExtras.h
170422 47 3626 llvm/include/llvm/Support/Compiler.h
162225 45 3605 llvm/include/llvm/ADT/Optional.h
158319 63 2513 llvm/include/llvm/ADT/Triple.h
140322 39 3598 llvm/include/llvm/ADT/StringRef.h
137647 59 2333 llvm/include/llvm/Support/Error.h
131619 73 1803 llvm/include/llvm/Support/FileSystem.h
Before this change, touching InitializePasses.h would cause 1345 files
to recompile. After this change, touching it only causes 550 compiles in
an incremental rebuild.
Reviewers: bkramer, asbirlea, bollu, jdoerfert
Differential Revision: https://reviews.llvm.org/D70211
2019-11-13 22:15:01 +01:00
|
|
|
#include "llvm/InitializePasses.h"
|
2017-07-21 23:37:46 +02:00
|
|
|
#include "llvm/Pass.h"
|
|
|
|
#include "llvm/Support/BranchProbability.h"
|
|
|
|
#include "llvm/Support/Casting.h"
|
2019-11-15 00:15:48 +01:00
|
|
|
#include "llvm/Support/CommandLine.h"
|
2011-06-11 03:05:22 +02:00
|
|
|
#include "llvm/Support/Debug.h"
|
2015-03-23 19:07:13 +01:00
|
|
|
#include "llvm/Support/raw_ostream.h"
|
2017-07-21 23:37:46 +02:00
|
|
|
#include <cassert>
|
|
|
|
#include <cstdint>
|
|
|
|
#include <iterator>
|
|
|
|
#include <utility>
|
2011-06-04 03:16:30 +02:00
|
|
|
|
|
|
|
using namespace llvm;
|
|
|
|
|
2014-04-22 04:48:03 +02:00
|
|
|
#define DEBUG_TYPE "branch-prob"
|
|
|
|
|
2017-08-26 02:31:00 +02:00
|
|
|
// When set, dump the computed branch probability info after analysis.
static cl::opt<bool> PrintBranchProb(
    "print-bpi", cl::init(false), cl::Hidden,
    cl::desc("Print the branch probability info."));

// When non-empty, restrict -print-bpi output to the named function only.
cl::opt<std::string> PrintBranchProbFuncName(
    "print-bpi-func-name", cl::Hidden,
    cl::desc("The option to specify the name of the function "
             "whose branch probability info is printed."));
|
|
|
|
|
2015-07-16 00:48:29 +02:00
|
|
|
// Legacy pass-manager registration for the wrapper pass, declaring the
// analyses it depends on (loop info, target library info, post-dominators).
INITIALIZE_PASS_BEGIN(BranchProbabilityInfoWrapperPass, "branch-prob",
                      "Branch Probability Analysis", false, true)
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(PostDominatorTreeWrapperPass)
INITIALIZE_PASS_END(BranchProbabilityInfoWrapperPass, "branch-prob",
                    "Branch Probability Analysis", false, true)
|
|
|
|
|
Sink all InitializePasses.h includes
This file lists every pass in LLVM, and is included by Pass.h, which is
very popular. Every time we add, remove, or rename a pass in LLVM, it
caused lots of recompilation.
I found this fact by looking at this table, which is sorted by the
number of times a file was changed over the last 100,000 git commits
multiplied by the number of object files that depend on it in the
current checkout:
recompiles touches affected_files header
342380 95 3604 llvm/include/llvm/ADT/STLExtras.h
314730 234 1345 llvm/include/llvm/InitializePasses.h
307036 118 2602 llvm/include/llvm/ADT/APInt.h
213049 59 3611 llvm/include/llvm/Support/MathExtras.h
170422 47 3626 llvm/include/llvm/Support/Compiler.h
162225 45 3605 llvm/include/llvm/ADT/Optional.h
158319 63 2513 llvm/include/llvm/ADT/Triple.h
140322 39 3598 llvm/include/llvm/ADT/StringRef.h
137647 59 2333 llvm/include/llvm/Support/Error.h
131619 73 1803 llvm/include/llvm/Support/FileSystem.h
Before this change, touching InitializePasses.h would cause 1345 files
to recompile. After this change, touching it only causes 550 compiles in
an incremental rebuild.
Reviewers: bkramer, asbirlea, bollu, jdoerfert
Differential Revision: https://reviews.llvm.org/D70211
2019-11-13 22:15:01 +01:00
|
|
|
// Construct the legacy wrapper pass and make sure it is registered with the
// global pass registry before first use.
BranchProbabilityInfoWrapperPass::BranchProbabilityInfoWrapperPass()
    : FunctionPass(ID) {
  PassRegistry &Registry = *PassRegistry::getPassRegistry();
  initializeBranchProbabilityInfoWrapperPassPass(Registry);
}
|
|
|
|
|
2015-07-16 00:48:29 +02:00
|
|
|
// Unique pass identifier used by the legacy pass framework (its address, not
// its value, identifies the pass).
char BranchProbabilityInfoWrapperPass::ID = 0;
|
2011-06-04 03:16:30 +02:00
|
|
|
|
2011-10-24 03:40:45 +02:00
|
|
|
// Weights are for internal use only. They are used by heuristics to help to
// estimate edges' probability. Example:
//
// Using "Loop Branch Heuristics" we predict weights of edges for the
// block BB2.
//         ...
//          |
//          V
//         BB1<-+
//          |   |
//          |   | (Weight = 124)
//          V   |
//         BB2--+
//          |
//          | (Weight = 4)
//          V
//         BB3
//
// Probability of the edge BB2->BB1 = 124 / (124 + 4) = 0.96875
// Probability of the edge BB2->BB3 = 4 / (124 + 4) = 0.03125
static const uint32_t LBH_TAKEN_WEIGHT = 124;
static const uint32_t LBH_NONTAKEN_WEIGHT = 4;

// Unlikely edges within a loop are half as likely as other edges
static const uint32_t LBH_UNLIKELY_WEIGHT = 62;

/// Unreachable-terminating branch taken probability.
///
/// This is the probability for a branch being taken to a block that terminates
/// (eventually) in unreachable. These are predicted as unlikely as possible.
/// All reachable probability will proportionally share the remaining part.
static const BranchProbability UR_TAKEN_PROB = BranchProbability::getRaw(1);

/// Weight for a branch taken going into a cold block.
///
/// This is the weight for a branch taken toward a block marked
/// cold. A block is marked cold if it's postdominated by a
/// block containing a call to a cold function. Cold functions
/// are those marked with attribute 'cold'.
static const uint32_t CC_TAKEN_WEIGHT = 4;

/// Weight for a branch not-taken into a cold block.
///
/// This is the weight for a branch not taken toward a block marked
/// cold.
static const uint32_t CC_NONTAKEN_WEIGHT = 64;

// PH taken/non-taken weights — presumably used by the pointer-comparison
// heuristic; the consumer is outside this view, so confirm against its caller.
static const uint32_t PH_TAKEN_WEIGHT = 20;
static const uint32_t PH_NONTAKEN_WEIGHT = 12;

// ZH taken/non-taken weights — presumably used by the compare-with-zero
// heuristic; consumer not visible here.
static const uint32_t ZH_TAKEN_WEIGHT = 20;
static const uint32_t ZH_NONTAKEN_WEIGHT = 12;

// FPH taken/non-taken weights — presumably used by the floating-point
// comparison heuristic; consumer not visible here.
static const uint32_t FPH_TAKEN_WEIGHT = 20;
static const uint32_t FPH_NONTAKEN_WEIGHT = 12;

/// This is the probability for an ordered floating point comparison.
static const uint32_t FPH_ORD_WEIGHT = 1024 * 1024 - 1;

/// This is the probability for an unordered floating point comparison, it means
/// one or two of the operands are NaN. Usually it is used to test for an
/// exceptional case, so the result is unlikely.
static const uint32_t FPH_UNO_WEIGHT = 1;

/// Invoke-terminating normal branch taken weight
///
/// This is the weight for branching to the normal destination of an invoke
/// instruction. We expect this to happen most of the time. Set the weight to an
/// absurdly high value so that nested loops subsume it.
static const uint32_t IH_TAKEN_WEIGHT = 1024 * 1024 - 1;

/// Invoke-terminating normal branch not-taken weight.
///
/// This is the weight for branching to the unwind destination of an invoke
/// instruction. This is essentially never taken.
static const uint32_t IH_NONTAKEN_WEIGHT = 1;
|
|
|
|
|
2020-07-24 13:57:10 +02:00
|
|
|
// Number the non-trivial SCCs of F's CFG and record, for every block that is
// part of one, which SCC it belongs to. Used to identify irreducible loops.
// FIXME: We could only calculate this if the CFG is known to be irreducible
// (perhaps cache this info in LoopInfo if we can easily calculate it there?).
BranchProbabilityInfo::SccInfo::SccInfo(const Function &F) {
  int SccNum = 0;
  for (scc_iterator<const Function *> SccIt = scc_begin(&F); !SccIt.isAtEnd();
       ++SccIt, ++SccNum) {
    const std::vector<const BasicBlock *> &SccMembers = *SccIt;
    // Ignore single-block SCCs since they either aren't loops or LoopInfo
    // will catch them.
    if (SccMembers.size() == 1)
      continue;

    LLVM_DEBUG(dbgs() << "BPI: SCC " << SccNum << ":");
    for (const auto *BB : SccMembers) {
      LLVM_DEBUG(dbgs() << " " << BB->getName());
      SccNums[BB] = SccNum;
      calculateSccBlockType(BB, SccNum);
    }
    LLVM_DEBUG(dbgs() << "\n");
  }
}
|
|
|
|
|
|
|
|
int BranchProbabilityInfo::SccInfo::getSCCNum(const BasicBlock *BB) const {
|
|
|
|
auto SccIt = SccNums.find(BB);
|
|
|
|
if (SccIt == SccNums.end())
|
|
|
|
return -1;
|
|
|
|
return SccIt->second;
|
|
|
|
}
|
|
|
|
|
|
|
|
void BranchProbabilityInfo::SccInfo::getSccEnterBlocks(
|
|
|
|
int SccNum, SmallVectorImpl<BasicBlock *> &Enters) const {
|
|
|
|
|
|
|
|
for (auto MapIt : SccBlocks[SccNum]) {
|
|
|
|
const auto *BB = MapIt.first;
|
|
|
|
if (isSCCHeader(BB, SccNum))
|
|
|
|
for (const auto *Pred : predecessors(BB))
|
|
|
|
if (getSCCNum(Pred) != SccNum)
|
|
|
|
Enters.push_back(const_cast<BasicBlock *>(BB));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void BranchProbabilityInfo::SccInfo::getSccExitBlocks(
|
|
|
|
int SccNum, SmallVectorImpl<BasicBlock *> &Exits) const {
|
|
|
|
for (auto MapIt : SccBlocks[SccNum]) {
|
|
|
|
const auto *BB = MapIt.first;
|
|
|
|
if (isSCCExitingBlock(BB, SccNum))
|
|
|
|
for (const auto *Succ : successors(BB))
|
|
|
|
if (getSCCNum(Succ) != SccNum)
|
|
|
|
Exits.push_back(const_cast<BasicBlock *>(BB));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
uint32_t BranchProbabilityInfo::SccInfo::getSccBlockType(const BasicBlock *BB,
|
|
|
|
int SccNum) const {
|
|
|
|
assert(getSCCNum(BB) == SccNum);
|
|
|
|
|
|
|
|
assert(SccBlocks.size() > static_cast<unsigned>(SccNum) && "Unknown SCC");
|
|
|
|
const auto &SccBlockTypes = SccBlocks[SccNum];
|
|
|
|
|
|
|
|
auto It = SccBlockTypes.find(BB);
|
|
|
|
if (It != SccBlockTypes.end()) {
|
|
|
|
return It->second;
|
|
|
|
}
|
|
|
|
return Inner;
|
|
|
|
}
|
|
|
|
|
|
|
|
void BranchProbabilityInfo::SccInfo::calculateSccBlockType(const BasicBlock *BB,
|
|
|
|
int SccNum) {
|
|
|
|
assert(getSCCNum(BB) == SccNum);
|
|
|
|
uint32_t BlockType = Inner;
|
|
|
|
|
2020-11-16 04:26:38 +01:00
|
|
|
if (llvm::any_of(predecessors(BB), [&](const BasicBlock *Pred) {
|
2020-07-24 13:57:10 +02:00
|
|
|
// Consider any block that is an entry point to the SCC as
|
|
|
|
// a header.
|
|
|
|
return getSCCNum(Pred) != SccNum;
|
|
|
|
}))
|
|
|
|
BlockType |= Header;
|
|
|
|
|
2020-11-16 04:26:38 +01:00
|
|
|
if (llvm::any_of(successors(BB), [&](const BasicBlock *Succ) {
|
|
|
|
return getSCCNum(Succ) != SccNum;
|
|
|
|
}))
|
2020-07-24 13:57:10 +02:00
|
|
|
BlockType |= Exiting;
|
|
|
|
|
|
|
|
// Lazily compute the set of headers for a given SCC and cache the results
|
|
|
|
// in the SccHeaderMap.
|
|
|
|
if (SccBlocks.size() <= static_cast<unsigned>(SccNum))
|
|
|
|
SccBlocks.resize(SccNum + 1);
|
|
|
|
auto &SccBlockTypes = SccBlocks[SccNum];
|
|
|
|
|
|
|
|
if (BlockType != Inner) {
|
|
|
|
bool IsInserted;
|
|
|
|
std::tie(std::ignore, IsInserted) =
|
|
|
|
SccBlockTypes.insert(std::make_pair(BB, BlockType));
|
|
|
|
assert(IsInserted && "Duplicated block in SCC");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-07-29 14:19:00 +02:00
|
|
|
// Bind a basic block to its innermost enclosing "loop": a natural loop from
// LoopInfo when one exists, otherwise the number of the irreducible SCC the
// block belongs to (if any).
BranchProbabilityInfo::LoopBlock::LoopBlock(const BasicBlock *BB,
                                            const LoopInfo &LI,
                                            const SccInfo &SccI)
    : BB(BB) {
  LD.first = LI.getLoopFor(BB);
  // Fall back to SCC membership only when no natural loop contains BB.
  if (LD.first == nullptr)
    LD.second = SccI.getSCCNum(BB);
}
|
|
|
|
|
|
|
|
bool BranchProbabilityInfo::isLoopEnteringEdge(const LoopEdge &Edge) const {
|
|
|
|
const auto &SrcBlock = Edge.first;
|
|
|
|
const auto &DstBlock = Edge.second;
|
|
|
|
return (DstBlock.getLoop() &&
|
|
|
|
!DstBlock.getLoop()->contains(SrcBlock.getLoop())) ||
|
|
|
|
// Assume that SCCs can't be nested.
|
|
|
|
(DstBlock.getSccNum() != -1 &&
|
|
|
|
SrcBlock.getSccNum() != DstBlock.getSccNum());
|
|
|
|
}
|
|
|
|
|
|
|
|
bool BranchProbabilityInfo::isLoopExitingEdge(const LoopEdge &Edge) const {
|
|
|
|
return isLoopEnteringEdge({Edge.second, Edge.first});
|
|
|
|
}
|
|
|
|
|
|
|
|
bool BranchProbabilityInfo::isLoopEnteringExitingEdge(
|
|
|
|
const LoopEdge &Edge) const {
|
|
|
|
return isLoopEnteringEdge(Edge) || isLoopExitingEdge(Edge);
|
|
|
|
}
|
|
|
|
|
|
|
|
bool BranchProbabilityInfo::isLoopBackEdge(const LoopEdge &Edge) const {
|
|
|
|
const auto &SrcBlock = Edge.first;
|
|
|
|
const auto &DstBlock = Edge.second;
|
|
|
|
return SrcBlock.belongsToSameLoop(DstBlock) &&
|
|
|
|
((DstBlock.getLoop() &&
|
|
|
|
DstBlock.getLoop()->getHeader() == DstBlock.getBlock()) ||
|
|
|
|
(DstBlock.getSccNum() != -1 &&
|
|
|
|
SccI->isSCCHeader(DstBlock.getBlock(), DstBlock.getSccNum())));
|
|
|
|
}
|
|
|
|
|
|
|
|
void BranchProbabilityInfo::getLoopEnterBlocks(
|
|
|
|
const LoopBlock &LB, SmallVectorImpl<BasicBlock *> &Enters) const {
|
|
|
|
if (LB.getLoop()) {
|
|
|
|
auto *Header = LB.getLoop()->getHeader();
|
|
|
|
Enters.append(pred_begin(Header), pred_end(Header));
|
|
|
|
} else {
|
|
|
|
assert(LB.getSccNum() != -1 && "LB doesn't belong to any loop?");
|
|
|
|
SccI->getSccEnterBlocks(LB.getSccNum(), Enters);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void BranchProbabilityInfo::getLoopExitBlocks(
|
|
|
|
const LoopBlock &LB, SmallVectorImpl<BasicBlock *> &Exits) const {
|
|
|
|
if (LB.getLoop()) {
|
|
|
|
LB.getLoop()->getExitBlocks(Exits);
|
|
|
|
} else {
|
|
|
|
assert(LB.getSccNum() != -1 && "LB doesn't belong to any loop?");
|
|
|
|
SccI->getSccExitBlocks(LB.getSccNum(), Exits);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-12-02 19:15:22 +01:00
|
|
|
static void UpdatePDTWorklist(const BasicBlock *BB, PostDominatorTree *PDT,
|
|
|
|
SmallVectorImpl<const BasicBlock *> &WorkList,
|
|
|
|
SmallPtrSetImpl<const BasicBlock *> &TargetSet) {
|
|
|
|
SmallVector<BasicBlock *, 8> Descendants;
|
|
|
|
SmallPtrSet<const BasicBlock *, 16> NewItems;
|
|
|
|
|
|
|
|
PDT->getDescendants(const_cast<BasicBlock *>(BB), Descendants);
|
|
|
|
for (auto *BB : Descendants)
|
|
|
|
if (TargetSet.insert(BB).second)
|
|
|
|
for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI)
|
|
|
|
if (!TargetSet.count(*PI))
|
|
|
|
NewItems.insert(*PI);
|
|
|
|
WorkList.insert(WorkList.end(), NewItems.begin(), NewItems.end());
|
|
|
|
}
|
2011-10-24 03:40:45 +02:00
|
|
|
|
2019-12-02 19:15:22 +01:00
|
|
|
/// Compute a set of basic blocks that are post-dominated by unreachables.
|
|
|
|
void BranchProbabilityInfo::computePostDominatedByUnreachable(
|
|
|
|
const Function &F, PostDominatorTree *PDT) {
|
|
|
|
SmallVector<const BasicBlock *, 8> WorkList;
|
|
|
|
for (auto &BB : F) {
|
|
|
|
const Instruction *TI = BB.getTerminator();
|
|
|
|
if (TI->getNumSuccessors() == 0) {
|
|
|
|
if (isa<UnreachableInst>(TI) ||
|
|
|
|
// If this block is terminated by a call to
|
|
|
|
// @llvm.experimental.deoptimize then treat it like an unreachable
|
|
|
|
// since the @llvm.experimental.deoptimize call is expected to
|
|
|
|
// practically never execute.
|
|
|
|
BB.getTerminatingDeoptimizeCall())
|
|
|
|
UpdatePDTWorklist(&BB, PDT, WorkList, PostDominatedByUnreachable);
|
|
|
|
}
|
[BPI] Improve unreachable/ColdCall heurstics to handle loops.
Summary:
While updatePostDominatedByUnreachable attemps to find basic blocks that are post-domianted by unreachable blocks, it currently cannot handle loops precisely, because it doesn't use the actual post dominator tree analysis but relies on heuristics of visiting basic blocks in post-order. More precisely, when the entire loop is post-dominated by the unreachable block, current algorithm fails to detect the entire loop as post-dominated by the unreachable because when the algorithm reaches to the loop latch it fails to tell all its successors (including the loop header) will "eventually" be post-domianted by the unreachable block, because the algorithm hasn't visited the loop header yet. This makes BPI for the loop latch to assume that loop backedges are taken with 100% of probability. And because of this, block frequency info sometimes marks virtually dead loops (which are post dominated by unreachable blocks) super hot, because 100% backedge-taken probability makes the loop iteration count the max value. updatePostDominatedByColdCall has the exact same problem as well.
To address this problem, this patch makes PostDominatedByUnreachable/PostDominatedByColdCall to be computed with the actual post-dominator tree.
Reviewers: skatkov, chandlerc, manmanren
Reviewed By: skatkov
Subscribers: manmanren, vsk, apilipenko, Carrot, qcolombet, hiraditya, llvm-commits
Tags: #llvm
Differential Revision: https://reviews.llvm.org/D70104
2019-11-27 19:18:01 +01:00
|
|
|
}
|
2019-11-27 20:17:10 +01:00
|
|
|
|
2019-12-02 19:15:22 +01:00
|
|
|
while (!WorkList.empty()) {
|
|
|
|
const BasicBlock *BB = WorkList.pop_back_val();
|
|
|
|
if (PostDominatedByUnreachable.count(BB))
|
|
|
|
continue;
|
|
|
|
// If the terminator is an InvokeInst, check only the normal destination
|
|
|
|
// block as the unwind edge of InvokeInst is also very unlikely taken.
|
|
|
|
if (auto *II = dyn_cast<InvokeInst>(BB->getTerminator())) {
|
|
|
|
if (PostDominatedByUnreachable.count(II->getNormalDest()))
|
|
|
|
UpdatePDTWorklist(BB, PDT, WorkList, PostDominatedByUnreachable);
|
|
|
|
}
|
|
|
|
// If all the successors are unreachable, BB is unreachable as well.
|
|
|
|
else if (!successors(BB).empty() &&
|
|
|
|
llvm::all_of(successors(BB), [this](const BasicBlock *Succ) {
|
|
|
|
return PostDominatedByUnreachable.count(Succ);
|
|
|
|
}))
|
|
|
|
UpdatePDTWorklist(BB, PDT, WorkList, PostDominatedByUnreachable);
|
|
|
|
}
|
2017-04-12 07:42:14 +02:00
|
|
|
}
|
|
|
|
|
2019-12-02 19:15:22 +01:00
|
|
|
/// compute a set of basic blocks that are post-dominated by ColdCalls.
|
|
|
|
void BranchProbabilityInfo::computePostDominatedByColdCall(
|
|
|
|
const Function &F, PostDominatorTree *PDT) {
|
|
|
|
SmallVector<const BasicBlock *, 8> WorkList;
|
|
|
|
for (auto &BB : F)
|
|
|
|
for (auto &I : BB)
|
|
|
|
if (const CallInst *CI = dyn_cast<CallInst>(&I))
|
|
|
|
if (CI->hasFnAttr(Attribute::Cold))
|
|
|
|
UpdatePDTWorklist(&BB, PDT, WorkList, PostDominatedByColdCall);
|
2017-04-12 07:42:14 +02:00
|
|
|
|
2019-12-02 19:15:22 +01:00
|
|
|
while (!WorkList.empty()) {
|
|
|
|
const BasicBlock *BB = WorkList.pop_back_val();
|
2017-04-12 07:42:14 +02:00
|
|
|
|
2019-12-02 19:15:22 +01:00
|
|
|
// If the terminator is an InvokeInst, check only the normal destination
|
|
|
|
// block as the unwind edge of InvokeInst is also very unlikely taken.
|
|
|
|
if (auto *II = dyn_cast<InvokeInst>(BB->getTerminator())) {
|
|
|
|
if (PostDominatedByColdCall.count(II->getNormalDest()))
|
|
|
|
UpdatePDTWorklist(BB, PDT, WorkList, PostDominatedByColdCall);
|
2017-04-12 07:42:14 +02:00
|
|
|
}
|
2019-12-02 19:15:22 +01:00
|
|
|
// If all of successor are post dominated then BB is also done.
|
|
|
|
else if (!successors(BB).empty() &&
|
|
|
|
llvm::all_of(successors(BB), [this](const BasicBlock *Succ) {
|
|
|
|
return PostDominatedByColdCall.count(Succ);
|
|
|
|
}))
|
|
|
|
UpdatePDTWorklist(BB, PDT, WorkList, PostDominatedByColdCall);
|
|
|
|
}
|
2017-04-12 07:42:14 +02:00
|
|
|
}
|
|
|
|
|
2018-05-01 17:54:18 +02:00
|
|
|
/// Calculate edge weights for successors lead to unreachable.
|
2017-04-12 07:42:14 +02:00
|
|
|
///
|
|
|
|
/// Predict that a successor which leads necessarily to an
|
|
|
|
/// unreachable-terminated block as extremely unlikely.
|
|
|
|
bool BranchProbabilityInfo::calcUnreachableHeuristics(const BasicBlock *BB) {
|
2018-10-15 12:04:59 +02:00
|
|
|
const Instruction *TI = BB->getTerminator();
|
2018-06-08 15:03:21 +02:00
|
|
|
(void) TI;
|
2017-04-17 08:39:47 +02:00
|
|
|
assert(TI->getNumSuccessors() > 1 && "expected more than one successor!");
|
2018-06-08 15:03:21 +02:00
|
|
|
assert(!isa<InvokeInst>(TI) &&
|
|
|
|
"Invokes should have already been handled by calcInvokeHeuristics");
|
2017-04-12 07:42:14 +02:00
|
|
|
|
2012-08-24 20:14:27 +02:00
|
|
|
SmallVector<unsigned, 4> UnreachableEdges;
|
|
|
|
SmallVector<unsigned, 4> ReachableEdges;
|
2011-06-04 03:16:30 +02:00
|
|
|
|
2020-03-10 19:33:02 +01:00
|
|
|
for (const_succ_iterator I = succ_begin(BB), E = succ_end(BB); I != E; ++I)
|
2011-10-24 14:01:08 +02:00
|
|
|
if (PostDominatedByUnreachable.count(*I))
|
2012-08-24 20:14:27 +02:00
|
|
|
UnreachableEdges.push_back(I.getSuccessorIndex());
|
2011-10-24 14:01:08 +02:00
|
|
|
else
|
2012-08-24 20:14:27 +02:00
|
|
|
ReachableEdges.push_back(I.getSuccessorIndex());
|
2011-10-24 03:40:45 +02:00
|
|
|
|
2017-04-17 08:39:47 +02:00
|
|
|
// Skip probabilities if all were reachable.
|
|
|
|
if (UnreachableEdges.empty())
|
2017-04-12 07:42:14 +02:00
|
|
|
return false;
|
2015-12-21 23:00:51 +01:00
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
SmallVector<BranchProbability, 4> EdgeProbabilities(
|
|
|
|
BB->getTerminator()->getNumSuccessors(), BranchProbability::getUnknown());
|
2015-12-22 19:56:14 +01:00
|
|
|
if (ReachableEdges.empty()) {
|
|
|
|
BranchProbability Prob(1, UnreachableEdges.size());
|
|
|
|
for (unsigned SuccIdx : UnreachableEdges)
|
2020-05-21 06:49:11 +02:00
|
|
|
EdgeProbabilities[SuccIdx] = Prob;
|
|
|
|
setEdgeProbability(BB, EdgeProbabilities);
|
2011-10-24 14:01:08 +02:00
|
|
|
return true;
|
2015-12-22 19:56:14 +01:00
|
|
|
}
|
|
|
|
|
2017-05-18 08:11:56 +02:00
|
|
|
auto UnreachableProb = UR_TAKEN_PROB;
|
|
|
|
auto ReachableProb =
|
|
|
|
(BranchProbability::getOne() - UR_TAKEN_PROB * UnreachableEdges.size()) /
|
|
|
|
ReachableEdges.size();
|
2015-12-22 19:56:14 +01:00
|
|
|
|
|
|
|
for (unsigned SuccIdx : UnreachableEdges)
|
2020-05-21 06:49:11 +02:00
|
|
|
EdgeProbabilities[SuccIdx] = UnreachableProb;
|
2015-12-22 19:56:14 +01:00
|
|
|
for (unsigned SuccIdx : ReachableEdges)
|
2020-05-21 06:49:11 +02:00
|
|
|
EdgeProbabilities[SuccIdx] = ReachableProb;
|
2011-10-24 14:01:08 +02:00
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
setEdgeProbability(BB, EdgeProbabilities);
|
2011-10-24 14:01:08 +02:00
|
|
|
return true;
|
|
|
|
}
|
2011-06-04 03:16:30 +02:00
|
|
|
|
2011-10-19 12:30:30 +02:00
|
|
|
// Propagate existing explicit probabilities from either profile data or
|
2017-04-17 06:33:04 +02:00
|
|
|
// 'expect' intrinsic processing. Examine metadata against unreachable
|
|
|
|
// heuristic. The probability of the edge coming to unreachable block is
|
|
|
|
// set to min of metadata and unreachable heuristic.
|
2016-04-07 23:59:28 +02:00
|
|
|
bool BranchProbabilityInfo::calcMetadataWeights(const BasicBlock *BB) {
|
2018-10-15 12:04:59 +02:00
|
|
|
const Instruction *TI = BB->getTerminator();
|
2017-04-17 08:39:47 +02:00
|
|
|
assert(TI->getNumSuccessors() > 1 && "expected more than one successor!");
|
2020-06-04 10:34:14 +02:00
|
|
|
if (!(isa<BranchInst>(TI) || isa<SwitchInst>(TI) || isa<IndirectBrInst>(TI) ||
|
|
|
|
isa<InvokeInst>(TI)))
|
2011-10-19 12:30:30 +02:00
|
|
|
return false;
|
|
|
|
|
2014-11-11 22:30:22 +01:00
|
|
|
MDNode *WeightsNode = TI->getMetadata(LLVMContext::MD_prof);
|
2011-10-19 12:32:19 +02:00
|
|
|
if (!WeightsNode)
|
2011-10-19 12:30:30 +02:00
|
|
|
return false;
|
|
|
|
|
2015-05-07 19:22:06 +02:00
|
|
|
// Check that the number of successors is manageable.
|
|
|
|
assert(TI->getNumSuccessors() < UINT32_MAX && "Too many successors");
|
|
|
|
|
2011-10-19 12:32:19 +02:00
|
|
|
// Ensure there are weights for all of the successors. Note that the first
|
|
|
|
// operand to the metadata node is a name, not a weight.
|
|
|
|
if (WeightsNode->getNumOperands() != TI->getNumSuccessors() + 1)
|
2011-10-19 12:30:30 +02:00
|
|
|
return false;
|
|
|
|
|
2015-05-07 19:22:06 +02:00
|
|
|
// Build up the final weights that will be used in a temporary buffer.
|
2020-10-31 08:15:46 +01:00
|
|
|
// Compute the sum of all weights to later decide whether they need to
|
|
|
|
// be scaled to fit in 32 bits.
|
|
|
|
uint64_t WeightSum = 0;
|
|
|
|
SmallVector<uint32_t, 2> Weights;
|
2017-04-17 06:33:04 +02:00
|
|
|
SmallVector<unsigned, 2> UnreachableIdxs;
|
|
|
|
SmallVector<unsigned, 2> ReachableIdxs;
|
2011-10-19 12:32:19 +02:00
|
|
|
Weights.reserve(TI->getNumSuccessors());
|
2020-06-02 05:55:27 +02:00
|
|
|
for (unsigned I = 1, E = WeightsNode->getNumOperands(); I != E; ++I) {
|
IR: Split Metadata from Value
Split `Metadata` away from the `Value` class hierarchy, as part of
PR21532. Assembly and bitcode changes are in the wings, but this is the
bulk of the change for the IR C++ API.
I have a follow-up patch prepared for `clang`. If this breaks other
sub-projects, I apologize in advance :(. Help me compile it on Darwin
I'll try to fix it. FWIW, the errors should be easy to fix, so it may
be simpler to just fix it yourself.
This breaks the build for all metadata-related code that's out-of-tree.
Rest assured the transition is mechanical and the compiler should catch
almost all of the problems.
Here's a quick guide for updating your code:
- `Metadata` is the root of a class hierarchy with three main classes:
`MDNode`, `MDString`, and `ValueAsMetadata`. It is distinct from
the `Value` class hierarchy. It is typeless -- i.e., instances do
*not* have a `Type`.
- `MDNode`'s operands are all `Metadata *` (instead of `Value *`).
- `TrackingVH<MDNode>` and `WeakVH` referring to metadata can be
replaced with `TrackingMDNodeRef` and `TrackingMDRef`, respectively.
If you're referring solely to resolved `MDNode`s -- post graph
construction -- just use `MDNode*`.
- `MDNode` (and the rest of `Metadata`) have only limited support for
`replaceAllUsesWith()`.
As long as an `MDNode` is pointing at a forward declaration -- the
result of `MDNode::getTemporary()` -- it maintains a side map of its
uses and can RAUW itself. Once the forward declarations are fully
resolved RAUW support is dropped on the ground. This means that
uniquing collisions on changing operands cause nodes to become
"distinct". (This already happened fairly commonly, whenever an
operand went to null.)
If you're constructing complex (non self-reference) `MDNode` cycles,
you need to call `MDNode::resolveCycles()` on each node (or on a
top-level node that somehow references all of the nodes). Also,
don't do that. Metadata cycles (and the RAUW machinery needed to
construct them) are expensive.
- An `MDNode` can only refer to a `Constant` through a bridge called
`ConstantAsMetadata` (one of the subclasses of `ValueAsMetadata`).
As a side effect, accessing an operand of an `MDNode` that is known
to be, e.g., `ConstantInt`, takes three steps: first, cast from
`Metadata` to `ConstantAsMetadata`; second, extract the `Constant`;
third, cast down to `ConstantInt`.
The eventual goal is to introduce `MDInt`/`MDFloat`/etc. and have
metadata schema owners transition away from using `Constant`s when
the type isn't important (and they don't care about referring to
`GlobalValue`s).
In the meantime, I've added transitional API to the `mdconst`
namespace that matches semantics with the old code, in order to
avoid adding the error-prone three-step equivalent to every call
site. If your old code was:
MDNode *N = foo();
bar(isa <ConstantInt>(N->getOperand(0)));
baz(cast <ConstantInt>(N->getOperand(1)));
bak(cast_or_null <ConstantInt>(N->getOperand(2)));
bat(dyn_cast <ConstantInt>(N->getOperand(3)));
bay(dyn_cast_or_null<ConstantInt>(N->getOperand(4)));
you can trivially match its semantics with:
MDNode *N = foo();
bar(mdconst::hasa <ConstantInt>(N->getOperand(0)));
baz(mdconst::extract <ConstantInt>(N->getOperand(1)));
bak(mdconst::extract_or_null <ConstantInt>(N->getOperand(2)));
bat(mdconst::dyn_extract <ConstantInt>(N->getOperand(3)));
bay(mdconst::dyn_extract_or_null<ConstantInt>(N->getOperand(4)));
and when you transition your metadata schema to `MDInt`:
MDNode *N = foo();
bar(isa <MDInt>(N->getOperand(0)));
baz(cast <MDInt>(N->getOperand(1)));
bak(cast_or_null <MDInt>(N->getOperand(2)));
bat(dyn_cast <MDInt>(N->getOperand(3)));
bay(dyn_cast_or_null<MDInt>(N->getOperand(4)));
- A `CallInst` -- specifically, intrinsic instructions -- can refer to
metadata through a bridge called `MetadataAsValue`. This is a
subclass of `Value` where `getType()->isMetadataTy()`.
`MetadataAsValue` is the *only* class that can legally refer to a
`LocalAsMetadata`, which is a bridged form of non-`Constant` values
like `Argument` and `Instruction`. It can also refer to any other
`Metadata` subclass.
(I'll break all your testcases in a follow-up commit, when I propagate
this change to assembly.)
llvm-svn: 223802
2014-12-09 19:38:53 +01:00
|
|
|
ConstantInt *Weight =
|
2020-06-02 05:55:27 +02:00
|
|
|
mdconst::dyn_extract<ConstantInt>(WeightsNode->getOperand(I));
|
2011-10-19 12:32:19 +02:00
|
|
|
if (!Weight)
|
|
|
|
return false;
|
2020-10-31 08:15:46 +01:00
|
|
|
assert(Weight->getValue().getActiveBits() <= 32 &&
|
|
|
|
"Too many bits for uint32_t");
|
|
|
|
Weights.push_back(Weight->getZExtValue());
|
|
|
|
WeightSum += Weights.back();
|
2020-06-02 05:55:27 +02:00
|
|
|
if (PostDominatedByUnreachable.count(TI->getSuccessor(I - 1)))
|
|
|
|
UnreachableIdxs.push_back(I - 1);
|
2017-04-17 06:33:04 +02:00
|
|
|
else
|
2020-06-02 05:55:27 +02:00
|
|
|
ReachableIdxs.push_back(I - 1);
|
2011-10-19 12:32:19 +02:00
|
|
|
}
|
|
|
|
assert(Weights.size() == TI->getNumSuccessors() && "Checked above");
|
2015-05-07 19:22:06 +02:00
|
|
|
|
2020-10-31 08:15:46 +01:00
|
|
|
// If the sum of weights does not fit in 32 bits, scale every weight down
|
|
|
|
// accordingly.
|
|
|
|
uint64_t ScalingFactor =
|
|
|
|
(WeightSum > UINT32_MAX) ? WeightSum / UINT32_MAX + 1 : 1;
|
|
|
|
|
|
|
|
if (ScalingFactor > 1) {
|
|
|
|
WeightSum = 0;
|
|
|
|
for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I) {
|
|
|
|
Weights[I] /= ScalingFactor;
|
|
|
|
WeightSum += Weights[I];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
assert(WeightSum <= UINT32_MAX &&
|
|
|
|
"Expected weights to scale down to 32 bits");
|
2015-12-23 00:45:55 +01:00
|
|
|
|
2017-04-17 06:33:04 +02:00
|
|
|
if (WeightSum == 0 || ReachableIdxs.size() == 0) {
|
2020-06-02 05:55:27 +02:00
|
|
|
for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I)
|
|
|
|
Weights[I] = 1;
|
2017-04-17 06:33:04 +02:00
|
|
|
WeightSum = TI->getNumSuccessors();
|
2015-12-23 00:45:55 +01:00
|
|
|
}
|
2015-12-22 19:56:14 +01:00
|
|
|
|
2017-04-17 06:33:04 +02:00
|
|
|
// Set the probability.
|
|
|
|
SmallVector<BranchProbability, 2> BP;
|
2020-06-02 05:55:27 +02:00
|
|
|
for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I)
|
2020-10-31 08:15:46 +01:00
|
|
|
BP.push_back({ Weights[I], static_cast<uint32_t>(WeightSum) });
|
2017-04-17 06:33:04 +02:00
|
|
|
|
|
|
|
// Examine the metadata against unreachable heuristic.
|
|
|
|
// If the unreachable heuristic is more strong then we use it for this edge.
|
2020-06-02 06:28:12 +02:00
|
|
|
if (UnreachableIdxs.size() == 0 || ReachableIdxs.size() == 0) {
|
|
|
|
setEdgeProbability(BB, BP);
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
auto UnreachableProb = UR_TAKEN_PROB;
|
|
|
|
for (auto I : UnreachableIdxs)
|
|
|
|
if (UnreachableProb < BP[I]) {
|
|
|
|
BP[I] = UnreachableProb;
|
|
|
|
}
|
2017-04-17 06:33:04 +02:00
|
|
|
|
2020-06-02 06:28:12 +02:00
|
|
|
// Sum of all edge probabilities must be 1.0. If we modified the probability
|
|
|
|
// of some edges then we must distribute the introduced difference over the
|
|
|
|
// reachable blocks.
|
|
|
|
//
|
|
|
|
// Proportional distribution: the relation between probabilities of the
|
|
|
|
// reachable edges is kept unchanged. That is for any reachable edges i and j:
|
|
|
|
// newBP[i] / newBP[j] == oldBP[i] / oldBP[j] =>
|
|
|
|
// newBP[i] / oldBP[i] == newBP[j] / oldBP[j] == K
|
|
|
|
// Where K is independent of i,j.
|
|
|
|
// newBP[i] == oldBP[i] * K
|
|
|
|
// We need to find K.
|
|
|
|
// Make sum of all reachables of the left and right parts:
|
|
|
|
// sum_of_reachable(newBP) == K * sum_of_reachable(oldBP)
|
|
|
|
// Sum of newBP must be equal to 1.0:
|
|
|
|
// sum_of_reachable(newBP) + sum_of_unreachable(newBP) == 1.0 =>
|
|
|
|
// sum_of_reachable(newBP) = 1.0 - sum_of_unreachable(newBP)
|
|
|
|
// Where sum_of_unreachable(newBP) is what has been just changed.
|
|
|
|
// Finally:
|
|
|
|
// K == sum_of_reachable(newBP) / sum_of_reachable(oldBP) =>
|
|
|
|
// K == (1.0 - sum_of_unreachable(newBP)) / sum_of_reachable(oldBP)
|
|
|
|
BranchProbability NewUnreachableSum = BranchProbability::getZero();
|
|
|
|
for (auto I : UnreachableIdxs)
|
|
|
|
NewUnreachableSum += BP[I];
|
|
|
|
|
|
|
|
BranchProbability NewReachableSum =
|
|
|
|
BranchProbability::getOne() - NewUnreachableSum;
|
|
|
|
|
|
|
|
BranchProbability OldReachableSum = BranchProbability::getZero();
|
|
|
|
for (auto I : ReachableIdxs)
|
|
|
|
OldReachableSum += BP[I];
|
|
|
|
|
|
|
|
if (OldReachableSum != NewReachableSum) { // Anything to distribute?
|
|
|
|
if (OldReachableSum.isZero()) {
|
|
|
|
// If all oldBP[i] are zeroes then the proportional distribution results
|
|
|
|
// in all zero probabilities and the error stays big. In this case we
|
|
|
|
// evenly spread NewReachableSum over the reachable edges.
|
|
|
|
BranchProbability PerEdge = NewReachableSum / ReachableIdxs.size();
|
2020-06-02 05:55:27 +02:00
|
|
|
for (auto I : ReachableIdxs)
|
2020-06-02 06:28:12 +02:00
|
|
|
BP[I] = PerEdge;
|
|
|
|
} else {
|
|
|
|
for (auto I : ReachableIdxs) {
|
|
|
|
// We use uint64_t to avoid double rounding error of the following
|
|
|
|
// calculation: BP[i] = BP[i] * NewReachableSum / OldReachableSum
|
|
|
|
// The formula is taken from the private constructor
|
|
|
|
// BranchProbability(uint32_t Numerator, uint32_t Denominator)
|
|
|
|
uint64_t Mul = static_cast<uint64_t>(NewReachableSum.getNumerator()) *
|
|
|
|
BP[I].getNumerator();
|
|
|
|
uint32_t Div = static_cast<uint32_t>(
|
|
|
|
divideNearest(Mul, OldReachableSum.getNumerator()));
|
|
|
|
BP[I] = BranchProbability::getRaw(Div);
|
|
|
|
}
|
|
|
|
}
|
2017-04-17 06:33:04 +02:00
|
|
|
}
|
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
setEdgeProbability(BB, BP);
|
2017-04-17 06:33:04 +02:00
|
|
|
|
2011-10-19 12:30:30 +02:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2018-05-01 17:54:18 +02:00
|
|
|
/// Calculate edge weights for edges leading to cold blocks.
|
2013-05-24 14:26:52 +02:00
|
|
|
///
|
|
|
|
/// A cold block is one post-dominated by a block with a call to a
|
|
|
|
/// cold function. Those edges are unlikely to be taken, so we give
|
|
|
|
/// them relatively low weight.
|
|
|
|
///
|
|
|
|
/// Return true if we could compute the weights for cold edges.
|
|
|
|
/// Return false, otherwise.
|
2016-04-07 23:59:28 +02:00
|
|
|
bool BranchProbabilityInfo::calcColdCallHeuristics(const BasicBlock *BB) {
|
2018-10-15 12:04:59 +02:00
|
|
|
const Instruction *TI = BB->getTerminator();
|
2018-06-08 15:03:21 +02:00
|
|
|
(void) TI;
|
2017-04-17 08:39:47 +02:00
|
|
|
assert(TI->getNumSuccessors() > 1 && "expected more than one successor!");
|
2018-06-08 15:03:21 +02:00
|
|
|
assert(!isa<InvokeInst>(TI) &&
|
|
|
|
"Invokes should have already been handled by calcInvokeHeuristics");
|
2013-05-24 14:26:52 +02:00
|
|
|
|
|
|
|
// Determine which successors are post-dominated by a cold block.
|
|
|
|
SmallVector<unsigned, 4> ColdEdges;
|
|
|
|
SmallVector<unsigned, 4> NormalEdges;
|
2020-03-10 19:33:02 +01:00
|
|
|
for (const_succ_iterator I = succ_begin(BB), E = succ_end(BB); I != E; ++I)
|
2013-05-24 14:26:52 +02:00
|
|
|
if (PostDominatedByColdCall.count(*I))
|
|
|
|
ColdEdges.push_back(I.getSuccessorIndex());
|
|
|
|
else
|
|
|
|
NormalEdges.push_back(I.getSuccessorIndex());
|
|
|
|
|
2017-04-17 08:39:47 +02:00
|
|
|
// Skip probabilities if no cold edges.
|
|
|
|
if (ColdEdges.empty())
|
2013-05-24 14:26:52 +02:00
|
|
|
return false;
|
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
SmallVector<BranchProbability, 4> EdgeProbabilities(
|
|
|
|
BB->getTerminator()->getNumSuccessors(), BranchProbability::getUnknown());
|
2015-12-22 19:56:14 +01:00
|
|
|
if (NormalEdges.empty()) {
|
|
|
|
BranchProbability Prob(1, ColdEdges.size());
|
|
|
|
for (unsigned SuccIdx : ColdEdges)
|
2020-05-21 06:49:11 +02:00
|
|
|
EdgeProbabilities[SuccIdx] = Prob;
|
|
|
|
setEdgeProbability(BB, EdgeProbabilities);
|
2013-05-24 14:26:52 +02:00
|
|
|
return true;
|
2015-12-22 19:56:14 +01:00
|
|
|
}
|
|
|
|
|
2016-12-17 02:02:08 +01:00
|
|
|
auto ColdProb = BranchProbability::getBranchProbability(
|
|
|
|
CC_TAKEN_WEIGHT,
|
|
|
|
(CC_TAKEN_WEIGHT + CC_NONTAKEN_WEIGHT) * uint64_t(ColdEdges.size()));
|
|
|
|
auto NormalProb = BranchProbability::getBranchProbability(
|
|
|
|
CC_NONTAKEN_WEIGHT,
|
|
|
|
(CC_TAKEN_WEIGHT + CC_NONTAKEN_WEIGHT) * uint64_t(NormalEdges.size()));
|
2015-12-22 19:56:14 +01:00
|
|
|
|
|
|
|
for (unsigned SuccIdx : ColdEdges)
|
2020-05-21 06:49:11 +02:00
|
|
|
EdgeProbabilities[SuccIdx] = ColdProb;
|
2015-12-22 19:56:14 +01:00
|
|
|
for (unsigned SuccIdx : NormalEdges)
|
2020-05-21 06:49:11 +02:00
|
|
|
EdgeProbabilities[SuccIdx] = NormalProb;
|
2013-05-24 14:26:52 +02:00
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
setEdgeProbability(BB, EdgeProbabilities);
|
2013-05-24 14:26:52 +02:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2018-03-02 19:57:02 +01:00
|
|
|
// Calculate Edge Weights using "Pointer Heuristics". Predict a comparison
|
2011-06-04 03:16:30 +02:00
|
|
|
// between two pointer or pointer and NULL will fail.
|
2016-04-07 23:59:28 +02:00
|
|
|
bool BranchProbabilityInfo::calcPointerHeuristics(const BasicBlock *BB) {
|
|
|
|
const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
|
2011-06-04 03:16:30 +02:00
|
|
|
if (!BI || !BI->isConditional())
|
2011-07-28 23:45:07 +02:00
|
|
|
return false;
|
2011-06-04 03:16:30 +02:00
|
|
|
|
|
|
|
Value *Cond = BI->getCondition();
|
|
|
|
ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
|
2011-07-15 22:51:06 +02:00
|
|
|
if (!CI || !CI->isEquality())
|
2011-07-28 23:45:07 +02:00
|
|
|
return false;
|
2011-06-04 03:16:30 +02:00
|
|
|
|
|
|
|
Value *LHS = CI->getOperand(0);
|
|
|
|
|
|
|
|
if (!LHS->getType()->isPointerTy())
|
2011-07-28 23:45:07 +02:00
|
|
|
return false;
|
2011-06-04 03:16:30 +02:00
|
|
|
|
2011-06-04 04:07:10 +02:00
|
|
|
assert(CI->getOperand(1)->getType()->isPointerTy());
|
2011-06-04 03:16:30 +02:00
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
BranchProbability TakenProb(PH_TAKEN_WEIGHT,
|
|
|
|
PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);
|
|
|
|
BranchProbability UntakenProb(PH_NONTAKEN_WEIGHT,
|
|
|
|
PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);
|
|
|
|
|
2011-06-04 03:16:30 +02:00
|
|
|
// p != 0 -> isProb = true
|
|
|
|
// p == 0 -> isProb = false
|
|
|
|
// p != q -> isProb = true
|
|
|
|
// p == q -> isProb = false;
|
2011-07-15 22:51:06 +02:00
|
|
|
bool isProb = CI->getPredicate() == ICmpInst::ICMP_NE;
|
2011-06-04 03:16:30 +02:00
|
|
|
if (!isProb)
|
2020-05-21 06:49:11 +02:00
|
|
|
std::swap(TakenProb, UntakenProb);
|
2011-06-04 03:16:30 +02:00
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
setEdgeProbability(
|
|
|
|
BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb}));
|
2011-07-28 23:45:07 +02:00
|
|
|
return true;
|
2011-06-04 03:16:30 +02:00
|
|
|
}
|
|
|
|
|
2018-02-23 18:17:31 +01:00
|
|
|
// Compute the unlikely successors to the block BB in the loop L, specifically
// those that are unlikely because this is a loop, and add them to the
// UnlikelyBlocks set.
static void
computeUnlikelySuccessors(const BasicBlock *BB, Loop *L,
                          SmallPtrSetImpl<const BasicBlock*> &UnlikelyBlocks) {
  // Sometimes in a loop we have a branch whose condition is made false by
  // taking it. This is typically something like
  //   int n = 0;
  //   while (...) {
  //     if (++n >= MAX) {
  //       n = 0;
  //     }
  //   }
  // In this sort of situation taking the branch means that at the very least it
  // won't be taken again in the next iteration of the loop, so we should
  // consider it less likely than a typical branch.
  //
  // We detect this by looking back through the graph of PHI nodes that sets the
  // value that the condition depends on, and seeing if we can reach a successor
  // block which can be determined to make the condition false.
  //
  // FIXME: We currently consider unlikely blocks to be half as likely as other
  // blocks, but if we consider the example above the likelihood is actually
  // 1/MAX. We could therefore be more precise in how unlikely we consider
  // blocks to be, but it would require more careful examination of the form
  // of the comparison expression.
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return;

  // Check if the branch is based on an instruction compared with a constant.
  CmpInst *CI = dyn_cast<CmpInst>(BI->getCondition());
  if (!CI || !isa<Instruction>(CI->getOperand(0)) ||
      !isa<Constant>(CI->getOperand(1)))
    return;

  // Either the instruction must be a PHI, or a chain of operations involving
  // constants that ends in a PHI which we can then collapse into a single value
  // if the PHI value is known.
  Instruction *CmpLHS = dyn_cast<Instruction>(CI->getOperand(0));
  PHINode *CmpPHI = dyn_cast<PHINode>(CmpLHS);
  Constant *CmpConst = dyn_cast<Constant>(CI->getOperand(1));
  // Collect the instructions until we hit a PHI. Only binary operators whose
  // second operand is a constant are followed, so the chain can later be
  // collapsed by constant folding.
  SmallVector<BinaryOperator *, 1> InstChain;
  while (!CmpPHI && CmpLHS && isa<BinaryOperator>(CmpLHS) &&
         isa<Constant>(CmpLHS->getOperand(1))) {
    // Stop if the chain extends outside of the loop.
    if (!L->contains(CmpLHS))
      return;
    InstChain.push_back(cast<BinaryOperator>(CmpLHS));
    CmpLHS = dyn_cast<Instruction>(CmpLHS->getOperand(0));
    if (CmpLHS)
      CmpPHI = dyn_cast<PHINode>(CmpLHS);
  }
  // Give up unless the chain bottoms out in a PHI inside the loop.
  if (!CmpPHI || !L->contains(CmpPHI))
    return;

  // Trace the phi node to find all values that come from successors of BB.
  // Nested PHIs are followed via a worklist; VisitedInsts prevents revisiting
  // a PHI (and thus infinite loops on cyclic PHI graphs).
  SmallPtrSet<PHINode*, 8> VisitedInsts;
  SmallVector<PHINode*, 8> WorkList;
  WorkList.push_back(CmpPHI);
  VisitedInsts.insert(CmpPHI);
  while (!WorkList.empty()) {
    PHINode *P = WorkList.back();
    WorkList.pop_back();
    for (BasicBlock *B : P->blocks()) {
      // Skip blocks that aren't part of the loop.
      if (!L->contains(B))
        continue;
      Value *V = P->getIncomingValueForBlock(B);
      // If the source is a PHI add it to the work list if we haven't
      // already visited it.
      if (PHINode *PN = dyn_cast<PHINode>(V)) {
        if (VisitedInsts.insert(PN).second)
          WorkList.push_back(PN);
        continue;
      }
      // If this incoming value is a constant and B is a successor of BB, then
      // we can constant-evaluate the compare to see if it makes the branch be
      // taken or not.
      Constant *CmpLHSConst = dyn_cast<Constant>(V);
      if (!CmpLHSConst || !llvm::is_contained(successors(BB), B))
        continue;
      // First collapse InstChain, applying the recorded binary operators (in
      // original program order, hence the reverse walk) to the constant.
      for (Instruction *I : llvm::reverse(InstChain)) {
        // NOTE(review): the trailing 'true' asks for a folded result, so this
        // may yield null when the expression doesn't reduce — confirm against
        // the ConstantExpr::get contract.
        CmpLHSConst = ConstantExpr::get(I->getOpcode(), CmpLHSConst,
                                        cast<Constant>(I->getOperand(1)), true);
        if (!CmpLHSConst)
          break;
      }
      if (!CmpLHSConst)
        continue;
      // Now constant-evaluate the compare.
      Constant *Result = ConstantExpr::getCompare(CI->getPredicate(),
                                                  CmpLHSConst, CmpConst, true);
      // If the result means we don't branch to the block then that block is
      // unlikely. Successor 0 is taken when the condition is true, successor 1
      // when it is false.
      if (Result &&
          ((Result->isZeroValue() && B == BI->getSuccessor(0)) ||
           (Result->isOneValue() && B == BI->getSuccessor(1))))
        UnlikelyBlocks.insert(B);
    }
  }
}
|
|
|
|
|
2011-06-04 03:16:30 +02:00
|
|
|
// Calculate Edge Weights using "Loop Branch Heuristics". Predict backedges
// as taken, exiting edges as not-taken.
bool BranchProbabilityInfo::calcLoopBranchHeuristics(const BasicBlock *BB,
                                                     const LoopInfo &LI) {
  // Wrap BB with its loop context. NOTE(review): SccI appears to let blocks in
  // irreducible SCCs be treated like loop members — confirm LoopBlock's
  // semantics.
  LoopBlock LB(BB, LI, *SccI.get());
  if (!LB.belongsToLoop())
    return false;

  // Successor blocks whose selection makes the branch condition false on the
  // next iteration (see computeUnlikelySuccessors). Only meaningful when BB is
  // in a real natural loop, not just an SCC.
  SmallPtrSet<const BasicBlock*, 8> UnlikelyBlocks;
  if (LB.getLoop())
    computeUnlikelySuccessors(BB, LB.getLoop(), UnlikelyBlocks);

  // Classify every successor edge into exactly one of four buckets, recorded
  // by successor index.
  SmallVector<unsigned, 8> BackEdges;
  SmallVector<unsigned, 8> ExitingEdges;
  SmallVector<unsigned, 8> InEdges; // Edges from header to the loop.
  SmallVector<unsigned, 8> UnlikelyEdges;

  for (const_succ_iterator I = succ_begin(BB), E = succ_end(BB); I != E; ++I) {
    LoopBlock SuccLB(*I, LI, *SccI.get());
    LoopEdge Edge(LB, SuccLB);
    bool IsUnlikelyEdge = LB.getLoop() && UnlikelyBlocks.contains(*I);

    // "Unlikely" takes priority over the structural classification.
    if (IsUnlikelyEdge)
      UnlikelyEdges.push_back(I.getSuccessorIndex());
    else if (isLoopExitingEdge(Edge))
      ExitingEdges.push_back(I.getSuccessorIndex());
    else if (isLoopBackEdge(Edge))
      BackEdges.push_back(I.getSuccessorIndex());
    else {
      InEdges.push_back(I.getSuccessorIndex());
    }
  }

  // If every edge is an in-edge there is nothing this heuristic can say.
  if (BackEdges.empty() && ExitingEdges.empty() && UnlikelyEdges.empty())
    return false;

  // Collect the sum of probabilities of back-edges/in-edges/exiting-edges, and
  // normalize them so that they sum up to one.
  unsigned Denom = (BackEdges.empty() ? 0 : LBH_TAKEN_WEIGHT) +
                   (InEdges.empty() ? 0 : LBH_TAKEN_WEIGHT) +
                   (UnlikelyEdges.empty() ? 0 : LBH_UNLIKELY_WEIGHT) +
                   (ExitingEdges.empty() ? 0 : LBH_NONTAKEN_WEIGHT);

  SmallVector<BranchProbability, 4> EdgeProbabilities(
      BB->getTerminator()->getNumSuccessors(), BranchProbability::getUnknown());
  // Each bucket's share of the normalized weight is split evenly among the
  // edges within that bucket.
  if (uint32_t numBackEdges = BackEdges.size()) {
    BranchProbability TakenProb = BranchProbability(LBH_TAKEN_WEIGHT, Denom);
    auto Prob = TakenProb / numBackEdges;
    for (unsigned SuccIdx : BackEdges)
      EdgeProbabilities[SuccIdx] = Prob;
  }

  if (uint32_t numInEdges = InEdges.size()) {
    BranchProbability TakenProb = BranchProbability(LBH_TAKEN_WEIGHT, Denom);
    auto Prob = TakenProb / numInEdges;
    for (unsigned SuccIdx : InEdges)
      EdgeProbabilities[SuccIdx] = Prob;
  }

  if (uint32_t numExitingEdges = ExitingEdges.size()) {
    BranchProbability NotTakenProb = BranchProbability(LBH_NONTAKEN_WEIGHT,
                                                       Denom);
    auto Prob = NotTakenProb / numExitingEdges;
    for (unsigned SuccIdx : ExitingEdges)
      EdgeProbabilities[SuccIdx] = Prob;
  }

  if (uint32_t numUnlikelyEdges = UnlikelyEdges.size()) {
    BranchProbability UnlikelyProb = BranchProbability(LBH_UNLIKELY_WEIGHT,
                                                       Denom);
    auto Prob = UnlikelyProb / numUnlikelyEdges;
    for (unsigned SuccIdx : UnlikelyEdges)
      EdgeProbabilities[SuccIdx] = Prob;
  }

  setEdgeProbability(BB, EdgeProbabilities);
  return true;
}
|
|
|
|
|
2020-08-17 20:42:57 +02:00
|
|
|
// Predict the outcome of integer compares against the special constants 0, 1
// (InstCombine's canonical form of "<= 0"), and -1, plus compares of the
// return value of known string/memory comparison library functions.
bool BranchProbabilityInfo::calcZeroHeuristics(const BasicBlock *BB,
                                               const TargetLibraryInfo *TLI) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
  if (!CI)
    return false;

  // Fetch an integer constant, looking through at most one bitcast.
  auto GetConstantInt = [](Value *V) {
    if (auto *I = dyn_cast<BitCastInst>(V))
      return dyn_cast<ConstantInt>(I->getOperand(0));
    return dyn_cast<ConstantInt>(V);
  };

  Value *RHS = CI->getOperand(1);
  ConstantInt *CV = GetConstantInt(RHS);
  if (!CV)
    return false;

  // If the LHS is the result of AND'ing a value with a single bit bitmask,
  // we don't have information about probabilities.
  if (Instruction *LHS = dyn_cast<Instruction>(CI->getOperand(0)))
    if (LHS->getOpcode() == Instruction::And)
      if (ConstantInt *AndRHS = GetConstantInt(LHS->getOperand(1)))
        if (AndRHS->getValue().isPowerOf2())
          return false;

  // Check if the LHS is the return value of a library function. Func stays
  // NumLibFuncs (i.e. "not a known library call") when it is not.
  LibFunc Func = NumLibFuncs;
  if (TLI)
    if (CallInst *Call = dyn_cast<CallInst>(CI->getOperand(0)))
      if (Function *CalledFn = Call->getCalledFunction())
        TLI->getLibFunc(*CalledFn, Func);

  bool isProb;
  if (Func == LibFunc_strcasecmp ||
      Func == LibFunc_strcmp ||
      Func == LibFunc_strncasecmp ||
      Func == LibFunc_strncmp ||
      Func == LibFunc_memcmp ||
      Func == LibFunc_bcmp) {
    // strcmp and similar functions return zero, negative, or positive, if the
    // first string is equal, less, or greater than the second. We consider it
    // likely that the strings are not equal, so a comparison with zero is
    // probably false, but also a comparison with any other number is also
    // probably false given that what exactly is returned for nonzero values is
    // not specified. Any kind of comparison other than equality we know
    // nothing about.
    switch (CI->getPredicate()) {
    case CmpInst::ICMP_EQ:
      isProb = false;
      break;
    case CmpInst::ICMP_NE:
      isProb = true;
      break;
    default:
      return false;
    }
  } else if (CV->isZero()) {
    switch (CI->getPredicate()) {
    case CmpInst::ICMP_EQ:
      // X == 0 -> Unlikely
      isProb = false;
      break;
    case CmpInst::ICMP_NE:
      // X != 0 -> Likely
      isProb = true;
      break;
    case CmpInst::ICMP_SLT:
      // X < 0 -> Unlikely
      isProb = false;
      break;
    case CmpInst::ICMP_SGT:
      // X > 0 -> Likely
      isProb = true;
      break;
    default:
      return false;
    }
  } else if (CV->isOne() && CI->getPredicate() == CmpInst::ICMP_SLT) {
    // InstCombine canonicalizes X <= 0 into X < 1.
    // X <= 0 -> Unlikely
    isProb = false;
  } else if (CV->isMinusOne()) {
    switch (CI->getPredicate()) {
    case CmpInst::ICMP_EQ:
      // X == -1 -> Unlikely
      isProb = false;
      break;
    case CmpInst::ICMP_NE:
      // X != -1 -> Likely
      isProb = true;
      break;
    case CmpInst::ICMP_SGT:
      // InstCombine canonicalizes X >= 0 into X > -1.
      // X >= 0 -> Likely
      isProb = true;
      break;
    default:
      return false;
    }
  } else {
    return false;
  }

  BranchProbability TakenProb(ZH_TAKEN_WEIGHT,
                              ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT);
  BranchProbability UntakenProb(ZH_NONTAKEN_WEIGHT,
                                ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT);
  // isProb == true means the branch is predicted taken.
  if (!isProb)
    std::swap(TakenProb, UntakenProb);

  setEdgeProbability(
      BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb}));
  return true;
}
|
|
|
|
|
2016-04-07 23:59:28 +02:00
|
|
|
bool BranchProbabilityInfo::calcFloatingPointHeuristics(const BasicBlock *BB) {
|
|
|
|
const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
|
2011-10-21 22:12:47 +02:00
|
|
|
if (!BI || !BI->isConditional())
|
|
|
|
return false;
|
|
|
|
|
|
|
|
Value *Cond = BI->getCondition();
|
|
|
|
FCmpInst *FCmp = dyn_cast<FCmpInst>(Cond);
|
2011-10-21 23:13:47 +02:00
|
|
|
if (!FCmp)
|
2011-10-21 22:12:47 +02:00
|
|
|
return false;
|
|
|
|
|
2019-09-10 19:25:11 +02:00
|
|
|
uint32_t TakenWeight = FPH_TAKEN_WEIGHT;
|
|
|
|
uint32_t NontakenWeight = FPH_NONTAKEN_WEIGHT;
|
2011-10-21 23:13:47 +02:00
|
|
|
bool isProb;
|
|
|
|
if (FCmp->isEquality()) {
|
|
|
|
// f1 == f2 -> Unlikely
|
|
|
|
// f1 != f2 -> Likely
|
|
|
|
isProb = !FCmp->isTrueWhenEqual();
|
|
|
|
} else if (FCmp->getPredicate() == FCmpInst::FCMP_ORD) {
|
|
|
|
// !isnan -> Likely
|
|
|
|
isProb = true;
|
2019-09-10 19:25:11 +02:00
|
|
|
TakenWeight = FPH_ORD_WEIGHT;
|
|
|
|
NontakenWeight = FPH_UNO_WEIGHT;
|
2011-10-21 23:13:47 +02:00
|
|
|
} else if (FCmp->getPredicate() == FCmpInst::FCMP_UNO) {
|
|
|
|
// isnan -> Unlikely
|
|
|
|
isProb = false;
|
2019-09-10 19:25:11 +02:00
|
|
|
TakenWeight = FPH_ORD_WEIGHT;
|
|
|
|
NontakenWeight = FPH_UNO_WEIGHT;
|
2011-10-21 23:13:47 +02:00
|
|
|
} else {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
BranchProbability TakenProb(TakenWeight, TakenWeight + NontakenWeight);
|
|
|
|
BranchProbability UntakenProb(NontakenWeight, TakenWeight + NontakenWeight);
|
2011-10-21 23:13:47 +02:00
|
|
|
if (!isProb)
|
2020-05-21 06:49:11 +02:00
|
|
|
std::swap(TakenProb, UntakenProb);
|
2011-10-21 22:12:47 +02:00
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
setEdgeProbability(
|
|
|
|
BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb}));
|
2011-10-21 22:12:47 +02:00
|
|
|
return true;
|
|
|
|
}
|
2011-07-31 05:27:24 +02:00
|
|
|
|
2016-04-07 23:59:28 +02:00
|
|
|
bool BranchProbabilityInfo::calcInvokeHeuristics(const BasicBlock *BB) {
|
|
|
|
const InvokeInst *II = dyn_cast<InvokeInst>(BB->getTerminator());
|
2012-08-15 14:22:35 +02:00
|
|
|
if (!II)
|
|
|
|
return false;
|
|
|
|
|
2015-12-22 19:56:14 +01:00
|
|
|
BranchProbability TakenProb(IH_TAKEN_WEIGHT,
|
|
|
|
IH_TAKEN_WEIGHT + IH_NONTAKEN_WEIGHT);
|
2020-05-21 06:49:11 +02:00
|
|
|
setEdgeProbability(
|
|
|
|
BB, SmallVector<BranchProbability, 2>({TakenProb, TakenProb.getCompl()}));
|
2012-08-15 14:22:35 +02:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2015-05-28 21:43:06 +02:00
|
|
|
// Drop all cached edge probabilities and their associated handles.
void BranchProbabilityInfo::releaseMemory() {
  Probs.clear();
  Handles.clear();
}
|
|
|
|
|
2020-01-15 23:02:33 +01:00
|
|
|
bool BranchProbabilityInfo::invalidate(Function &, const PreservedAnalyses &PA,
                                       FunctionAnalysisManager::Invalidator &) {
  // Check whether the analysis, all analyses on functions, or the function's
  // CFG have been preserved; invalidate only when none of them were.
  auto PAC = PA.getChecker<BranchProbabilityAnalysis>();
  bool StillValid = PAC.preserved() ||
                    PAC.preservedSet<AllAnalysesOn<Function>>() ||
                    PAC.preservedSet<CFGAnalyses>();
  return !StillValid;
}
|
|
|
|
|
2015-07-16 00:48:29 +02:00
|
|
|
void BranchProbabilityInfo::print(raw_ostream &OS) const {
|
2011-10-23 23:21:50 +02:00
|
|
|
OS << "---- Branch Probabilities ----\n";
|
|
|
|
// We print the probabilities from the last function the analysis ran over,
|
|
|
|
// or the function it is currently running over.
|
|
|
|
assert(LastF && "Cannot print prior to running over a function");
|
Analysis: Remove implicit ilist iterator conversions
Remove implicit ilist iterator conversions from LLVMAnalysis.
I came across something really scary in `llvm::isKnownNotFullPoison()`
which relied on `Instruction::getNextNode()` being completely broken
(not surprising, but scary nevertheless). This function is documented
(and coded to) return `nullptr` when it gets to the sentinel, but with
an `ilist_half_node` as a sentinel, the sentinel check looks into some
other memory and we don't recognize we've hit the end.
Rooting out these scary cases is the reason I'm removing the implicit
conversions before doing anything else with `ilist`; I'm not at all
surprised that clients rely on badness.
I found another scary case -- this time, not relying on badness, just
bad (but I guess getting lucky so far) -- in
`ObjectSizeOffsetEvaluator::compute_()`. Here, we save out the
insertion point, do some things, and then restore it. Previously, we
let the iterator auto-convert to `Instruction*`, and then set it back
using the `Instruction*` version:
Instruction *PrevInsertPoint = Builder.GetInsertPoint();
/* Logic that may change insert point */
if (PrevInsertPoint)
Builder.SetInsertPoint(PrevInsertPoint);
The check for `PrevInsertPoint` doesn't protect correctly against bad
accesses. If the insertion point has been set to the end of a basic
block (i.e., `SetInsertPoint(SomeBB)`), then `GetInsertPoint()` returns
an iterator pointing at the list sentinel. The version of
`SetInsertPoint()` that's getting called will then call
`PrevInsertPoint->getParent()`, which explodes horribly. The only
reason this hasn't blown up is that it's fairly unlikely the builder is
adding to the end of the block; usually, we're adding instructions
somewhere before the terminator.
llvm-svn: 249925
2015-10-10 02:53:03 +02:00
|
|
|
for (const auto &BI : *LastF) {
|
2020-03-10 19:33:02 +01:00
|
|
|
for (const_succ_iterator SI = succ_begin(&BI), SE = succ_end(&BI); SI != SE;
|
Analysis: Remove implicit ilist iterator conversions
Remove implicit ilist iterator conversions from LLVMAnalysis.
I came across something really scary in `llvm::isKnownNotFullPoison()`
which relied on `Instruction::getNextNode()` being completely broken
(not surprising, but scary nevertheless). This function is documented
(and coded to) return `nullptr` when it gets to the sentinel, but with
an `ilist_half_node` as a sentinel, the sentinel check looks into some
other memory and we don't recognize we've hit the end.
Rooting out these scary cases is the reason I'm removing the implicit
conversions before doing anything else with `ilist`; I'm not at all
surprised that clients rely on badness.
I found another scary case -- this time, not relying on badness, just
bad (but I guess getting lucky so far) -- in
`ObjectSizeOffsetEvaluator::compute_()`. Here, we save out the
insertion point, do some things, and then restore it. Previously, we
let the iterator auto-convert to `Instruction*`, and then set it back
using the `Instruction*` version:
Instruction *PrevInsertPoint = Builder.GetInsertPoint();
/* Logic that may change insert point */
if (PrevInsertPoint)
Builder.SetInsertPoint(PrevInsertPoint);
The check for `PrevInsertPoint` doesn't protect correctly against bad
accesses. If the insertion point has been set to the end of a basic
block (i.e., `SetInsertPoint(SomeBB)`), then `GetInsertPoint()` returns
an iterator pointing at the list sentinel. The version of
`SetInsertPoint()` that's getting called will then call
`PrevInsertPoint->getParent()`, which explodes horribly. The only
reason this hasn't blown up is that it's fairly unlikely the builder is
adding to the end of the block; usually, we're adding instructions
somewhere before the terminator.
llvm-svn: 249925
2015-10-10 02:53:03 +02:00
|
|
|
++SI) {
|
|
|
|
printEdgeProbability(OS << " ", &BI, *SI);
|
2014-07-21 19:06:51 +02:00
|
|
|
}
|
|
|
|
}
|
2011-10-23 23:21:50 +02:00
|
|
|
}
|
|
|
|
|
2011-07-29 21:30:00 +02:00
|
|
|
bool BranchProbabilityInfo::
|
|
|
|
isEdgeHot(const BasicBlock *Src, const BasicBlock *Dst) const {
|
2011-06-11 03:05:22 +02:00
|
|
|
// Hot probability is at least 4/5 = 80%
|
2011-10-23 13:19:14 +02:00
|
|
|
// FIXME: Compare against a static "hot" BranchProbability.
|
|
|
|
return getEdgeProbability(Src, Dst) > BranchProbability(4, 5);
|
2011-06-04 03:16:30 +02:00
|
|
|
}
|
|
|
|
|
2016-04-07 23:59:28 +02:00
|
|
|
/// Return the successor of \p BB that is reached with hot (> 80%) probability,
/// or nullptr when no successor qualifies.
const BasicBlock *
BranchProbabilityInfo::getHotSucc(const BasicBlock *BB) const {
  const BasicBlock *BestSucc = nullptr;
  auto BestProb = BranchProbability::getZero();

  // Find the successor carrying the largest edge probability.
  for (const auto *Candidate : successors(BB)) {
    auto CandidateProb = getEdgeProbability(BB, Candidate);
    if (CandidateProb > BestProb) {
      BestProb = CandidateProb;
      BestSucc = Candidate;
    }
  }

  // Hot probability is at least 4/5 = 80%.
  return BestProb > BranchProbability(4, 5) ? BestSucc : nullptr;
}
|
|
|
|
|
2015-12-22 19:56:14 +01:00
|
|
|
/// Get the raw edge probability for the edge. If can't find it, return a
|
|
|
|
/// default probability 1/N where N is the number of successors. Here an edge is
|
|
|
|
/// specified using PredBlock and an
|
|
|
|
/// index to the successors.
|
|
|
|
BranchProbability
|
|
|
|
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
|
|
|
|
unsigned IndexInSuccessors) const {
|
2020-11-11 04:17:13 +01:00
|
|
|
auto I = Probs.find(std::make_pair(Src, IndexInSuccessors));
|
|
|
|
assert((Probs.end() == Probs.find(std::make_pair(Src, 0))) ==
|
|
|
|
(Probs.end() == I) &&
|
|
|
|
"Probability for I-th successor must always be defined along with the "
|
|
|
|
"probability for the first successor");
|
|
|
|
|
|
|
|
if (I != Probs.end())
|
|
|
|
return I->second;
|
|
|
|
|
|
|
|
return {1, static_cast<uint32_t>(succ_size(Src))};
|
2011-06-04 03:16:30 +02:00
|
|
|
}
|
|
|
|
|
2015-12-22 19:56:14 +01:00
|
|
|
/// Get the raw edge probability for the edge identified by a successor
/// iterator; simply forwards to the index-based overload.
BranchProbability
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
                                          const_succ_iterator Dst) const {
  const unsigned SuccIdx = Dst.getSuccessorIndex();
  return getEdgeProbability(Src, SuccIdx);
}
|
|
|
|
|
2015-12-22 19:56:14 +01:00
|
|
|
/// Get the raw edge probability calculated for the block pair. This returns the
|
|
|
|
/// sum of all raw edge probabilities from Src to Dst.
|
|
|
|
BranchProbability
|
|
|
|
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
|
|
|
|
const BasicBlock *Dst) const {
|
2020-11-11 04:17:13 +01:00
|
|
|
if (!Probs.count(std::make_pair(Src, 0)))
|
2020-11-07 07:47:22 +01:00
|
|
|
return BranchProbability(llvm::count(successors(Src), Dst), succ_size(Src));
|
|
|
|
|
2015-12-22 19:56:14 +01:00
|
|
|
auto Prob = BranchProbability::getZero();
|
2020-03-10 19:33:02 +01:00
|
|
|
for (const_succ_iterator I = succ_begin(Src), E = succ_end(Src); I != E; ++I)
|
2020-11-07 07:47:22 +01:00
|
|
|
if (*I == Dst)
|
2020-11-11 04:17:13 +01:00
|
|
|
Prob += Probs.find(std::make_pair(Src, I.getSuccessorIndex()))->second;
|
2020-11-07 07:47:22 +01:00
|
|
|
|
|
|
|
return Prob;
|
2012-08-24 20:14:27 +02:00
|
|
|
}
|
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
/// Set the edge probability for all edges at once.
|
|
|
|
void BranchProbabilityInfo::setEdgeProbability(
|
|
|
|
const BasicBlock *Src, const SmallVectorImpl<BranchProbability> &Probs) {
|
|
|
|
assert(Src->getTerminator()->getNumSuccessors() == Probs.size());
|
2020-11-06 06:16:19 +01:00
|
|
|
eraseBlock(Src); // Erase stale data if any.
|
2020-05-21 06:49:11 +02:00
|
|
|
if (Probs.size() == 0)
|
|
|
|
return; // Nothing to set.
|
|
|
|
|
2020-11-06 07:11:08 +01:00
|
|
|
Handles.insert(BasicBlockCallbackVH(Src, this));
|
2020-05-21 06:49:11 +02:00
|
|
|
uint64_t TotalNumerator = 0;
|
|
|
|
for (unsigned SuccIdx = 0; SuccIdx < Probs.size(); ++SuccIdx) {
|
2020-11-11 04:17:13 +01:00
|
|
|
this->Probs[std::make_pair(Src, SuccIdx)] = Probs[SuccIdx];
|
2020-11-06 06:16:19 +01:00
|
|
|
LLVM_DEBUG(dbgs() << "set edge " << Src->getName() << " -> " << SuccIdx
|
|
|
|
<< " successor probability to " << Probs[SuccIdx]
|
|
|
|
<< "\n");
|
2020-05-21 06:49:11 +02:00
|
|
|
TotalNumerator += Probs[SuccIdx].getNumerator();
|
|
|
|
}
|
|
|
|
|
|
|
|
// Because of rounding errors the total probability cannot be checked to be
|
|
|
|
// 1.0 exactly. That is TotalNumerator == BranchProbability::getDenominator.
|
|
|
|
// Instead, every single probability in Probs must be as accurate as possible.
|
|
|
|
// This results in error 1/denominator at most, thus the total absolute error
|
|
|
|
// should be within Probs.size / BranchProbability::getDenominator.
|
|
|
|
assert(TotalNumerator <= BranchProbability::getDenominator() + Probs.size());
|
|
|
|
assert(TotalNumerator >= BranchProbability::getDenominator() - Probs.size());
|
|
|
|
}
|
|
|
|
|
2020-11-06 08:46:34 +01:00
|
|
|
/// Copy the recorded outgoing-edge probabilities of \p Src onto \p Dst. Both
/// terminators must have the same number of successors.
void BranchProbabilityInfo::copyEdgeProbabilities(BasicBlock *Src,
                                                  BasicBlock *Dst) {
  eraseBlock(Dst); // Erase stale data if any.
  const unsigned NumSuccs = Src->getTerminator()->getNumSuccessors();
  assert(NumSuccs == Dst->getTerminator()->getNumSuccessors());
  if (NumSuccs == 0)
    return; // Nothing to set.
  if (!this->Probs.count(std::make_pair(Src, 0)))
    return; // No probability is set for edges from Src. Keep the same for Dst.

  // Register a callback so stale entries are dropped if Dst is deleted.
  Handles.insert(BasicBlockCallbackVH(Dst, this));
  for (unsigned Idx = 0; Idx != NumSuccs; ++Idx) {
    auto EdgeProb = this->Probs[std::make_pair(Src, Idx)];
    this->Probs[std::make_pair(Dst, Idx)] = EdgeProb;
    LLVM_DEBUG(dbgs() << "set edge " << Dst->getName() << " -> " << Idx
                      << " successor probability to " << EdgeProb << "\n");
  }
}
|
|
|
|
|
2011-06-11 03:05:22 +02:00
|
|
|
/// Print the probability of the edge from \p Src to \p Dst to \p OS, tagging
/// hot edges, and return the stream for chaining.
raw_ostream &
BranchProbabilityInfo::printEdgeProbability(raw_ostream &OS,
                                            const BasicBlock *Src,
                                            const BasicBlock *Dst) const {
  const BranchProbability Prob = getEdgeProbability(Src, Dst);
  const char *Suffix = isEdgeHot(Src, Dst) ? " [HOT edge]\n" : "\n";
  OS << "edge " << Src->getName() << " -> " << Dst->getName()
     << " probability is " << Prob << Suffix;
  return OS;
}
|
2015-07-16 00:48:29 +02:00
|
|
|
|
2016-07-15 16:31:16 +02:00
|
|
|
void BranchProbabilityInfo::eraseBlock(const BasicBlock *BB) {
|
2020-11-16 07:29:30 +01:00
|
|
|
LLVM_DEBUG(dbgs() << "eraseBlock " << BB->getName() << "\n");
|
|
|
|
|
2020-11-11 04:17:13 +01:00
|
|
|
// Note that we cannot use successors of BB because the terminator of BB may
|
|
|
|
// have changed when eraseBlock is called as a BasicBlockCallbackVH callback.
|
|
|
|
// Instead we remove prob data for the block by iterating successors by their
|
|
|
|
// indices from 0 till the last which exists. There could not be prob data for
|
|
|
|
// a pair (BB, N) if there is no data for (BB, N-1) because the data is always
|
|
|
|
// set for all successors from 0 to M at once by the method
|
|
|
|
// setEdgeProbability().
|
2020-11-06 07:11:08 +01:00
|
|
|
Handles.erase(BasicBlockCallbackVH(BB, this));
|
2020-11-11 04:17:13 +01:00
|
|
|
for (unsigned I = 0;; ++I) {
|
|
|
|
auto MapI = Probs.find(std::make_pair(BB, I));
|
|
|
|
if (MapI == Probs.end()) {
|
|
|
|
assert(Probs.count(std::make_pair(BB, I + 1)) == 0 &&
|
|
|
|
"Must be no more successors");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
Probs.erase(MapI);
|
|
|
|
}
|
2016-07-15 16:31:16 +02:00
|
|
|
}
|
|
|
|
|
2017-06-08 11:44:40 +02:00
|
|
|
/// Compute branch probabilities for every multi-successor block in \p F.
/// Heuristics are tried in a fixed priority order; the first one that applies
/// to a block records probabilities for all of its edges, so ORDER MATTERS.
/// \p TLI may be null; \p PDT may be null, in which case a local
/// PostDominatorTree is built for the duration of the call.
void BranchProbabilityInfo::calculate(const Function &F, const LoopInfo &LI,
                                      const TargetLibraryInfo *TLI,
                                      PostDominatorTree *PDT) {
  LLVM_DEBUG(dbgs() << "---- Branch Probability Info : " << F.getName()
                    << " ----\n\n");
  LastF = &F; // Store the last function we ran on for printing.
  // Scratch state must have been cleared by the previous run.
  assert(PostDominatedByUnreachable.empty());
  assert(PostDominatedByColdCall.empty());

  SccI = std::make_unique<SccInfo>(F);

  // Owns the post-dominator tree only when the caller did not supply one.
  std::unique_ptr<PostDominatorTree> PDTPtr;

  if (!PDT) {
    PDTPtr = std::make_unique<PostDominatorTree>(const_cast<Function &>(F));
    PDT = PDTPtr.get();
  }

  computePostDominatedByUnreachable(F, PDT);
  computePostDominatedByColdCall(F, PDT);

  // Walk the basic blocks in post-order so that we can build up state about
  // the successors of a block iteratively.
  for (auto BB : post_order(&F.getEntryBlock())) {
    LLVM_DEBUG(dbgs() << "Computing probabilities for " << BB->getName()
                      << "\n");
    // Blocks with fewer than two successors have no real branch to weigh;
    // skip them instead of recording a trivial probability.
    if (BB->getTerminator()->getNumSuccessors() < 2)
      continue;
    // Explicit !prof metadata wins over every heuristic.
    if (calcMetadataWeights(BB))
      continue;
    if (calcInvokeHeuristics(BB))
      continue;
    if (calcUnreachableHeuristics(BB))
      continue;
    if (calcColdCallHeuristics(BB))
      continue;
    if (calcLoopBranchHeuristics(BB, LI))
      continue;
    if (calcPointerHeuristics(BB))
      continue;
    if (calcZeroHeuristics(BB, TLI))
      continue;
    if (calcFloatingPointHeuristics(BB))
      continue;
  }

  // Drop per-run scratch state so the next calculate() starts clean.
  PostDominatedByUnreachable.clear();
  PostDominatedByColdCall.clear();
  SccI.reset();

  // Debug aid: dump the computed probabilities when requested via the
  // -print-bpi options, optionally filtered to a single function name.
  if (PrintBranchProb &&
      (PrintBranchProbFuncName.empty() ||
       F.getName().equals(PrintBranchProbFuncName))) {
    print(dbgs());
  }
}
|
|
|
|
|
|
|
|
/// Declare the analyses this legacy pass consumes; it modifies nothing.
void BranchProbabilityInfoWrapperPass::getAnalysisUsage(
    AnalysisUsage &AU) const {
  // DT must be requested alongside LI: the LoopInfo updating code asserts
  // that the dominator tree is present, so omitting it here would fire that
  // assert.
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.addRequired<LoopInfoWrapperPass>();
  AU.addRequired<PostDominatorTreeWrapperPass>();
  AU.addRequired<TargetLibraryInfoWrapperPass>();
  // Purely analytical pass: nothing is invalidated.
  AU.setPreservesAll();
}
|
|
|
|
|
|
|
|
/// Legacy pass entry point: gather the required analyses and recompute BPI.
bool BranchProbabilityInfoWrapperPass::runOnFunction(Function &F) {
  PostDominatorTree &PostDomTree =
      getAnalysis<PostDominatorTreeWrapperPass>().getPostDomTree();
  const TargetLibraryInfo &LibInfo =
      getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
  const LoopInfo &Loops = getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
  BPI.calculate(F, Loops, &LibInfo, &PostDomTree);
  // Analysis pass: the IR is never modified.
  return false;
}
|
|
|
|
|
|
|
|
// Drop all cached probability data between pass-manager runs.
void BranchProbabilityInfoWrapperPass::releaseMemory() { BPI.releaseMemory(); }
|
|
|
|
|
|
|
|
// Legacy-pass print hook: delegates to BranchProbabilityInfo::print. The
// Module argument is unused.
void BranchProbabilityInfoWrapperPass::print(raw_ostream &OS,
                                             const Module *) const {
  BPI.print(OS);
}
|
2016-05-05 04:59:57 +02:00
|
|
|
|
2016-11-23 18:53:26 +01:00
|
|
|
// Unique key identifying this analysis to the new pass manager.
AnalysisKey BranchProbabilityAnalysis::Key;
|
2016-05-05 04:59:57 +02:00
|
|
|
/// New-pass-manager entry point: build a fresh BranchProbabilityInfo for \p F.
BranchProbabilityInfo
BranchProbabilityAnalysis::run(Function &F, FunctionAnalysisManager &AM) {
  // Pull in the analyses the computation depends on.
  auto &LI = AM.getResult<LoopAnalysis>(F);
  auto &TLI = AM.getResult<TargetLibraryAnalysis>(F);
  auto &PDT = AM.getResult<PostDominatorTreeAnalysis>(F);

  BranchProbabilityInfo Result;
  Result.calculate(F, LI, &TLI, &PDT);
  return Result;
}
|
|
|
|
|
|
|
|
/// Printer pass: dump the BPI analysis results for \p F to the pass's stream.
PreservedAnalyses
BranchProbabilityPrinterPass::run(Function &F, FunctionAnalysisManager &AM) {
  OS << "Printing analysis results of BPI for function "
     << "'" << F.getName() << "':"
     << "\n";
  auto &BPI = AM.getResult<BranchProbabilityAnalysis>(F);
  BPI.print(OS);
  // Printing is read-only; every analysis remains valid.
  return PreservedAnalyses::all();
}
|