2017-07-21 23:37:46 +02:00
|
|
|
//===- BranchProbabilityInfo.cpp - Branch Probability Analysis ------------===//
|
2011-06-04 03:16:30 +02:00
|
|
|
//
|
2019-01-19 09:50:56 +01:00
|
|
|
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
|
|
|
// See https://llvm.org/LICENSE.txt for license information.
|
|
|
|
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
2011-06-04 03:16:30 +02:00
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
//
|
|
|
|
// Loops should be simplified before this analysis.
|
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
|
2012-12-03 17:50:05 +01:00
|
|
|
#include "llvm/Analysis/BranchProbabilityInfo.h"
|
|
|
|
#include "llvm/ADT/PostOrderIterator.h"
|
2017-11-01 16:16:50 +01:00
|
|
|
#include "llvm/ADT/SCCIterator.h"
|
2017-07-21 23:37:46 +02:00
|
|
|
#include "llvm/ADT/STLExtras.h"
|
|
|
|
#include "llvm/ADT/SmallVector.h"
|
2012-12-03 17:50:05 +01:00
|
|
|
#include "llvm/Analysis/LoopInfo.h"
|
2019-12-02 19:15:22 +01:00
|
|
|
#include "llvm/Analysis/PostDominators.h"
|
2017-06-08 11:44:40 +02:00
|
|
|
#include "llvm/Analysis/TargetLibraryInfo.h"
|
2017-07-21 23:37:46 +02:00
|
|
|
#include "llvm/IR/Attributes.h"
|
|
|
|
#include "llvm/IR/BasicBlock.h"
|
2014-03-04 12:45:46 +01:00
|
|
|
#include "llvm/IR/CFG.h"
|
2013-01-02 12:36:10 +01:00
|
|
|
#include "llvm/IR/Constants.h"
|
2018-05-17 11:05:40 +02:00
|
|
|
#include "llvm/IR/Dominators.h"
|
2013-01-02 12:36:10 +01:00
|
|
|
#include "llvm/IR/Function.h"
|
2017-07-21 23:37:46 +02:00
|
|
|
#include "llvm/IR/InstrTypes.h"
|
|
|
|
#include "llvm/IR/Instruction.h"
|
2013-01-02 12:36:10 +01:00
|
|
|
#include "llvm/IR/Instructions.h"
|
|
|
|
#include "llvm/IR/LLVMContext.h"
|
|
|
|
#include "llvm/IR/Metadata.h"
|
2017-07-21 23:37:46 +02:00
|
|
|
#include "llvm/IR/PassManager.h"
|
|
|
|
#include "llvm/IR/Type.h"
|
|
|
|
#include "llvm/IR/Value.h"
|
Sink all InitializePasses.h includes
This file lists every pass in LLVM, and is included by Pass.h, which is
very popular. Every time we add, remove, or rename a pass in LLVM, it
caused lots of recompilation.
I found this fact by looking at this table, which is sorted by the
number of times a file was changed over the last 100,000 git commits
multiplied by the number of object files that depend on it in the
current checkout:
recompiles touches affected_files header
342380 95 3604 llvm/include/llvm/ADT/STLExtras.h
314730 234 1345 llvm/include/llvm/InitializePasses.h
307036 118 2602 llvm/include/llvm/ADT/APInt.h
213049 59 3611 llvm/include/llvm/Support/MathExtras.h
170422 47 3626 llvm/include/llvm/Support/Compiler.h
162225 45 3605 llvm/include/llvm/ADT/Optional.h
158319 63 2513 llvm/include/llvm/ADT/Triple.h
140322 39 3598 llvm/include/llvm/ADT/StringRef.h
137647 59 2333 llvm/include/llvm/Support/Error.h
131619 73 1803 llvm/include/llvm/Support/FileSystem.h
Before this change, touching InitializePasses.h would cause 1345 files
to recompile. After this change, touching it only causes 550 compiles in
an incremental rebuild.
Reviewers: bkramer, asbirlea, bollu, jdoerfert
Differential Revision: https://reviews.llvm.org/D70211
2019-11-13 22:15:01 +01:00
|
|
|
#include "llvm/InitializePasses.h"
|
2017-07-21 23:37:46 +02:00
|
|
|
#include "llvm/Pass.h"
|
|
|
|
#include "llvm/Support/BranchProbability.h"
|
|
|
|
#include "llvm/Support/Casting.h"
|
2019-11-15 00:15:48 +01:00
|
|
|
#include "llvm/Support/CommandLine.h"
|
2011-06-11 03:05:22 +02:00
|
|
|
#include "llvm/Support/Debug.h"
|
2015-03-23 19:07:13 +01:00
|
|
|
#include "llvm/Support/raw_ostream.h"
|
2017-07-21 23:37:46 +02:00
|
|
|
#include <cassert>
|
|
|
|
#include <cstdint>
|
|
|
|
#include <iterator>
|
|
|
|
#include <utility>
|
2011-06-04 03:16:30 +02:00
|
|
|
|
|
|
|
using namespace llvm;
|
|
|
|
|
2014-04-22 04:48:03 +02:00
|
|
|
#define DEBUG_TYPE "branch-prob"
|
|
|
|
|
2017-08-26 02:31:00 +02:00
|
|
|
static cl::opt<bool> PrintBranchProb(
|
|
|
|
"print-bpi", cl::init(false), cl::Hidden,
|
|
|
|
cl::desc("Print the branch probability info."));
|
|
|
|
|
|
|
|
cl::opt<std::string> PrintBranchProbFuncName(
|
|
|
|
"print-bpi-func-name", cl::Hidden,
|
|
|
|
cl::desc("The option to specify the name of the function "
|
|
|
|
"whose branch probability info is printed."));
|
|
|
|
|
2015-07-16 00:48:29 +02:00
|
|
|
INITIALIZE_PASS_BEGIN(BranchProbabilityInfoWrapperPass, "branch-prob",
|
2011-06-04 03:16:30 +02:00
|
|
|
"Branch Probability Analysis", false, true)
|
2015-01-17 15:16:18 +01:00
|
|
|
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
|
2017-06-08 11:44:40 +02:00
|
|
|
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
|
2020-06-18 11:20:55 +02:00
|
|
|
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
|
2020-04-28 11:31:20 +02:00
|
|
|
INITIALIZE_PASS_DEPENDENCY(PostDominatorTreeWrapperPass)
|
2015-07-16 00:48:29 +02:00
|
|
|
INITIALIZE_PASS_END(BranchProbabilityInfoWrapperPass, "branch-prob",
|
2011-06-04 03:16:30 +02:00
|
|
|
"Branch Probability Analysis", false, true)
|
|
|
|
|
Sink all InitializePasses.h includes
This file lists every pass in LLVM, and is included by Pass.h, which is
very popular. Every time we add, remove, or rename a pass in LLVM, it
caused lots of recompilation.
I found this fact by looking at this table, which is sorted by the
number of times a file was changed over the last 100,000 git commits
multiplied by the number of object files that depend on it in the
current checkout:
recompiles touches affected_files header
342380 95 3604 llvm/include/llvm/ADT/STLExtras.h
314730 234 1345 llvm/include/llvm/InitializePasses.h
307036 118 2602 llvm/include/llvm/ADT/APInt.h
213049 59 3611 llvm/include/llvm/Support/MathExtras.h
170422 47 3626 llvm/include/llvm/Support/Compiler.h
162225 45 3605 llvm/include/llvm/ADT/Optional.h
158319 63 2513 llvm/include/llvm/ADT/Triple.h
140322 39 3598 llvm/include/llvm/ADT/StringRef.h
137647 59 2333 llvm/include/llvm/Support/Error.h
131619 73 1803 llvm/include/llvm/Support/FileSystem.h
Before this change, touching InitializePasses.h would cause 1345 files
to recompile. After this change, touching it only causes 550 compiles in
an incremental rebuild.
Reviewers: bkramer, asbirlea, bollu, jdoerfert
Differential Revision: https://reviews.llvm.org/D70211
2019-11-13 22:15:01 +01:00
|
|
|
BranchProbabilityInfoWrapperPass::BranchProbabilityInfoWrapperPass()
    : FunctionPass(ID) {
  // Ensure this pass (and, transitively, its declared dependencies) is
  // registered with the global PassRegistry before it is used.
  initializeBranchProbabilityInfoWrapperPassPass(
      *PassRegistry::getPassRegistry());
}
|
|
|
|
|
2015-07-16 00:48:29 +02:00
|
|
|
char BranchProbabilityInfoWrapperPass::ID = 0;
|
2011-06-04 03:16:30 +02:00
|
|
|
|
2011-10-24 03:40:45 +02:00
|
|
|
// Weights are for internal use only. They are used by heuristics to help to
|
|
|
|
// estimate edges' probability. Example:
|
|
|
|
//
|
|
|
|
// Using "Loop Branch Heuristics" we predict weights of edges for the
|
|
|
|
// block BB2.
|
|
|
|
// ...
|
|
|
|
// |
|
|
|
|
// V
|
|
|
|
// BB1<-+
|
|
|
|
// | |
|
|
|
|
// | | (Weight = 124)
|
|
|
|
// V |
|
|
|
|
// BB2--+
|
|
|
|
// |
|
|
|
|
// | (Weight = 4)
|
|
|
|
// V
|
|
|
|
// BB3
|
|
|
|
//
|
|
|
|
// Probability of the edge BB2->BB1 = 124 / (124 + 4) = 0.96875
|
|
|
|
// Probability of the edge BB2->BB3 = 4 / (124 + 4) = 0.03125
|
|
|
|
static const uint32_t LBH_TAKEN_WEIGHT = 124;
|
|
|
|
static const uint32_t LBH_NONTAKEN_WEIGHT = 4;
|
2011-06-04 03:16:30 +02:00
|
|
|
|
2018-05-01 17:54:18 +02:00
|
|
|
/// Unreachable-terminating branch taken probability.
|
2011-10-24 14:01:08 +02:00
|
|
|
///
|
2017-05-18 08:11:56 +02:00
|
|
|
/// This is the probability for a branch being taken to a block that terminates
|
2011-10-24 14:01:08 +02:00
|
|
|
/// (eventually) in unreachable. These are predicted as unlikely as possible.
|
2020-06-02 06:28:12 +02:00
|
|
|
/// All reachable probability will proportionally share the remaining part.
|
2017-05-18 08:11:56 +02:00
|
|
|
static const BranchProbability UR_TAKEN_PROB = BranchProbability::getRaw(1);
|
2017-04-17 06:33:04 +02:00
|
|
|
|
2011-10-24 03:40:45 +02:00
|
|
|
static const uint32_t PH_TAKEN_WEIGHT = 20;
|
|
|
|
static const uint32_t PH_NONTAKEN_WEIGHT = 12;
|
2011-06-04 03:16:30 +02:00
|
|
|
|
2020-08-17 20:42:57 +02:00
|
|
|
static const uint32_t ZH_TAKEN_WEIGHT = 20;
|
|
|
|
static const uint32_t ZH_NONTAKEN_WEIGHT = 12;
|
2011-10-24 03:40:45 +02:00
|
|
|
|
|
|
|
static const uint32_t FPH_TAKEN_WEIGHT = 20;
|
|
|
|
static const uint32_t FPH_NONTAKEN_WEIGHT = 12;
|
|
|
|
|
2019-09-10 19:25:11 +02:00
|
|
|
/// This is the probability for an ordered floating point comparison.
|
|
|
|
static const uint32_t FPH_ORD_WEIGHT = 1024 * 1024 - 1;
|
|
|
|
/// This is the probability for an unordered floating point comparison, it means
|
|
|
|
/// one or two of the operands are NaN. Usually it is used to test for an
|
|
|
|
/// exceptional case, so the result is unlikely.
|
|
|
|
static const uint32_t FPH_UNO_WEIGHT = 1;
|
|
|
|
|
2020-06-18 11:20:55 +02:00
|
|
|
/// Set of dedicated "absolute" execution weights for a block. These weights are
/// meaningful relative to each other and their derivatives only.
enum class BlockExecWeight : std::uint32_t {
  /// Special weight used for cases with exact zero probability.
  ZERO = 0x0,
  /// Minimal possible non zero weight.
  LOWEST_NON_ZERO = 0x1,
  /// Weight to an 'unreachable' block.
  UNREACHABLE = ZERO,
  /// Weight to a block containing non returning call.
  NORETURN = LOWEST_NON_ZERO,
  /// Weight to 'unwind' block of an invoke instruction.
  UNWIND = LOWEST_NON_ZERO,
  /// Weight to a 'cold' block. Cold blocks are the ones containing calls marked
  /// with attribute 'cold'.
  COLD = 0xffff,
  /// Default weight is used in cases when there is no dedicated execution
  /// weight set. It is not propagated through the domination line either.
  /// NOTE: DEFAULT (0xfffff) is 16x COLD (0xffff), so a cold block is an
  /// order of magnitude less likely than an ordinary one.
  DEFAULT = 0xfffff
};
|
2012-08-15 14:22:35 +02:00
|
|
|
|
2020-07-24 13:57:10 +02:00
|
|
|
BranchProbabilityInfo::SccInfo::SccInfo(const Function &F) {
  // Assign an SCC number to every block that belongs to a non-trivial SCC of
  // the CFG; such SCCs correspond to (possibly irreducible) loops.
  // FIXME: We could only calculate this if the CFG is known to be irreducible
  // (perhaps cache this info in LoopInfo if we can easily calculate it there?).
  int SccNum = 0;
  for (scc_iterator<const Function *> SccIt = scc_begin(&F); !SccIt.isAtEnd();
       ++SccIt, ++SccNum) {
    const std::vector<const BasicBlock *> &Blocks = *SccIt;
    // A single-block SCC either is not a loop at all or is a natural loop
    // that LoopInfo already describes, so skip it.
    if (Blocks.size() == 1)
      continue;

    LLVM_DEBUG(dbgs() << "BPI: SCC " << SccNum << ":");
    for (const auto *Block : Blocks) {
      LLVM_DEBUG(dbgs() << " " << Block->getName());
      SccNums[Block] = SccNum;
      calculateSccBlockType(Block, SccNum);
    }
    LLVM_DEBUG(dbgs() << "\n");
  }
}
|
|
|
|
|
|
|
|
int BranchProbabilityInfo::SccInfo::getSCCNum(const BasicBlock *BB) const {
|
|
|
|
auto SccIt = SccNums.find(BB);
|
|
|
|
if (SccIt == SccNums.end())
|
|
|
|
return -1;
|
|
|
|
return SccIt->second;
|
|
|
|
}
|
|
|
|
|
|
|
|
void BranchProbabilityInfo::SccInfo::getSccEnterBlocks(
|
|
|
|
int SccNum, SmallVectorImpl<BasicBlock *> &Enters) const {
|
|
|
|
|
|
|
|
for (auto MapIt : SccBlocks[SccNum]) {
|
|
|
|
const auto *BB = MapIt.first;
|
|
|
|
if (isSCCHeader(BB, SccNum))
|
|
|
|
for (const auto *Pred : predecessors(BB))
|
|
|
|
if (getSCCNum(Pred) != SccNum)
|
|
|
|
Enters.push_back(const_cast<BasicBlock *>(BB));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void BranchProbabilityInfo::SccInfo::getSccExitBlocks(
|
|
|
|
int SccNum, SmallVectorImpl<BasicBlock *> &Exits) const {
|
|
|
|
for (auto MapIt : SccBlocks[SccNum]) {
|
|
|
|
const auto *BB = MapIt.first;
|
|
|
|
if (isSCCExitingBlock(BB, SccNum))
|
|
|
|
for (const auto *Succ : successors(BB))
|
|
|
|
if (getSCCNum(Succ) != SccNum)
|
|
|
|
Exits.push_back(const_cast<BasicBlock *>(BB));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
uint32_t BranchProbabilityInfo::SccInfo::getSccBlockType(const BasicBlock *BB,
                                                         int SccNum) const {
  assert(getSCCNum(BB) == SccNum);
  assert(SccBlocks.size() > static_cast<unsigned>(SccNum) && "Unknown SCC");

  // Only blocks with a non-default type were recorded by
  // calculateSccBlockType(); anything absent from the map is Inner.
  const auto &SccBlockTypes = SccBlocks[SccNum];
  const auto TypeIt = SccBlockTypes.find(BB);
  if (TypeIt == SccBlockTypes.end())
    return Inner;
  return TypeIt->second;
}
|
|
|
|
|
|
|
|
void BranchProbabilityInfo::SccInfo::calculateSccBlockType(const BasicBlock *BB,
|
|
|
|
int SccNum) {
|
|
|
|
assert(getSCCNum(BB) == SccNum);
|
|
|
|
uint32_t BlockType = Inner;
|
|
|
|
|
2020-11-16 04:26:38 +01:00
|
|
|
if (llvm::any_of(predecessors(BB), [&](const BasicBlock *Pred) {
|
2020-07-24 13:57:10 +02:00
|
|
|
// Consider any block that is an entry point to the SCC as
|
|
|
|
// a header.
|
|
|
|
return getSCCNum(Pred) != SccNum;
|
|
|
|
}))
|
|
|
|
BlockType |= Header;
|
|
|
|
|
2020-11-16 04:26:38 +01:00
|
|
|
if (llvm::any_of(successors(BB), [&](const BasicBlock *Succ) {
|
|
|
|
return getSCCNum(Succ) != SccNum;
|
|
|
|
}))
|
2020-07-24 13:57:10 +02:00
|
|
|
BlockType |= Exiting;
|
|
|
|
|
|
|
|
// Lazily compute the set of headers for a given SCC and cache the results
|
|
|
|
// in the SccHeaderMap.
|
|
|
|
if (SccBlocks.size() <= static_cast<unsigned>(SccNum))
|
|
|
|
SccBlocks.resize(SccNum + 1);
|
|
|
|
auto &SccBlockTypes = SccBlocks[SccNum];
|
|
|
|
|
|
|
|
if (BlockType != Inner) {
|
|
|
|
bool IsInserted;
|
|
|
|
std::tie(std::ignore, IsInserted) =
|
|
|
|
SccBlockTypes.insert(std::make_pair(BB, BlockType));
|
|
|
|
assert(IsInserted && "Duplicated block in SCC");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-07-29 14:19:00 +02:00
|
|
|
BranchProbabilityInfo::LoopBlock::LoopBlock(const BasicBlock *BB,
                                            const LoopInfo &LI,
                                            const SccInfo &SccI)
    : BB(BB) {
  // Prefer the natural-loop descriptor; only when the block is not inside a
  // natural loop fall back to its (possibly irreducible) SCC number.
  LD.first = LI.getLoopFor(BB);
  if (LD.first == nullptr)
    LD.second = SccI.getSCCNum(BB);
}
|
|
|
|
|
|
|
|
bool BranchProbabilityInfo::isLoopEnteringEdge(const LoopEdge &Edge) const {
|
|
|
|
const auto &SrcBlock = Edge.first;
|
|
|
|
const auto &DstBlock = Edge.second;
|
|
|
|
return (DstBlock.getLoop() &&
|
|
|
|
!DstBlock.getLoop()->contains(SrcBlock.getLoop())) ||
|
|
|
|
// Assume that SCCs can't be nested.
|
|
|
|
(DstBlock.getSccNum() != -1 &&
|
|
|
|
SrcBlock.getSccNum() != DstBlock.getSccNum());
|
|
|
|
}
|
|
|
|
|
|
|
|
bool BranchProbabilityInfo::isLoopExitingEdge(const LoopEdge &Edge) const {
|
|
|
|
return isLoopEnteringEdge({Edge.second, Edge.first});
|
|
|
|
}
|
|
|
|
|
|
|
|
bool BranchProbabilityInfo::isLoopEnteringExitingEdge(
|
|
|
|
const LoopEdge &Edge) const {
|
|
|
|
return isLoopEnteringEdge(Edge) || isLoopExitingEdge(Edge);
|
|
|
|
}
|
|
|
|
|
|
|
|
bool BranchProbabilityInfo::isLoopBackEdge(const LoopEdge &Edge) const {
|
|
|
|
const auto &SrcBlock = Edge.first;
|
|
|
|
const auto &DstBlock = Edge.second;
|
|
|
|
return SrcBlock.belongsToSameLoop(DstBlock) &&
|
|
|
|
((DstBlock.getLoop() &&
|
|
|
|
DstBlock.getLoop()->getHeader() == DstBlock.getBlock()) ||
|
|
|
|
(DstBlock.getSccNum() != -1 &&
|
|
|
|
SccI->isSCCHeader(DstBlock.getBlock(), DstBlock.getSccNum())));
|
|
|
|
}
|
|
|
|
|
|
|
|
void BranchProbabilityInfo::getLoopEnterBlocks(
|
|
|
|
const LoopBlock &LB, SmallVectorImpl<BasicBlock *> &Enters) const {
|
|
|
|
if (LB.getLoop()) {
|
|
|
|
auto *Header = LB.getLoop()->getHeader();
|
|
|
|
Enters.append(pred_begin(Header), pred_end(Header));
|
|
|
|
} else {
|
|
|
|
assert(LB.getSccNum() != -1 && "LB doesn't belong to any loop?");
|
|
|
|
SccI->getSccEnterBlocks(LB.getSccNum(), Enters);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void BranchProbabilityInfo::getLoopExitBlocks(
|
|
|
|
const LoopBlock &LB, SmallVectorImpl<BasicBlock *> &Exits) const {
|
|
|
|
if (LB.getLoop()) {
|
|
|
|
LB.getLoop()->getExitBlocks(Exits);
|
|
|
|
} else {
|
|
|
|
assert(LB.getSccNum() != -1 && "LB doesn't belong to any loop?");
|
|
|
|
SccI->getSccExitBlocks(LB.getSccNum(), Exits);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2011-10-19 12:30:30 +02:00
|
|
|
// Propagate existing explicit probabilities from either profile data or
|
2017-04-17 06:33:04 +02:00
|
|
|
// 'expect' intrinsic processing. Examine metadata against unreachable
|
|
|
|
// heuristic. The probability of the edge coming to unreachable block is
|
|
|
|
// set to min of metadata and unreachable heuristic.
|
2016-04-07 23:59:28 +02:00
|
|
|
bool BranchProbabilityInfo::calcMetadataWeights(const BasicBlock *BB) {
  const Instruction *TI = BB->getTerminator();
  assert(TI->getNumSuccessors() > 1 && "expected more than one successor!");
  // Only these terminator kinds can carry meaningful !prof branch_weights.
  if (!(isa<BranchInst>(TI) || isa<SwitchInst>(TI) || isa<IndirectBrInst>(TI) ||
        isa<InvokeInst>(TI)))
    return false;

  MDNode *WeightsNode = TI->getMetadata(LLVMContext::MD_prof);
  if (!WeightsNode)
    return false;

  // Check that the number of successors is manageable.
  assert(TI->getNumSuccessors() < UINT32_MAX && "Too many successors");

  // Ensure there are weights for all of the successors. Note that the first
  // operand to the metadata node is a name, not a weight.
  if (WeightsNode->getNumOperands() != TI->getNumSuccessors() + 1)
    return false;

  // Build up the final weights that will be used in a temporary buffer.
  // Compute the sum of all weights to later decide whether they need to
  // be scaled to fit in 32 bits.
  uint64_t WeightSum = 0;
  SmallVector<uint32_t, 2> Weights;
  // Successor indices whose target is (transitively) unreachable vs. the rest.
  SmallVector<unsigned, 2> UnreachableIdxs;
  SmallVector<unsigned, 2> ReachableIdxs;
  Weights.reserve(TI->getNumSuccessors());
  for (unsigned I = 1, E = WeightsNode->getNumOperands(); I != E; ++I) {
    ConstantInt *Weight =
        mdconst::dyn_extract<ConstantInt>(WeightsNode->getOperand(I));
    // Bail out on malformed metadata (non-integer weight operand).
    if (!Weight)
      return false;
    assert(Weight->getValue().getActiveBits() <= 32 &&
           "Too many bits for uint32_t");
    Weights.push_back(Weight->getZExtValue());
    WeightSum += Weights.back();
    // Classify the edge: an estimated weight at or below UNREACHABLE means
    // the successor eventually terminates in unreachable.
    const LoopBlock SrcLoopBB = getLoopBlock(BB);
    const LoopBlock DstLoopBB = getLoopBlock(TI->getSuccessor(I - 1));
    auto EstimatedWeight = getEstimatedEdgeWeight({SrcLoopBB, DstLoopBB});
    if (EstimatedWeight &&
        EstimatedWeight.getValue() <=
            static_cast<uint32_t>(BlockExecWeight::UNREACHABLE))
      UnreachableIdxs.push_back(I - 1);
    else
      ReachableIdxs.push_back(I - 1);
  }
  assert(Weights.size() == TI->getNumSuccessors() && "Checked above");

  // If the sum of weights does not fit in 32 bits, scale every weight down
  // accordingly.
  uint64_t ScalingFactor =
      (WeightSum > UINT32_MAX) ? WeightSum / UINT32_MAX + 1 : 1;

  if (ScalingFactor > 1) {
    WeightSum = 0;
    for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I) {
      Weights[I] /= ScalingFactor;
      WeightSum += Weights[I];
    }
  }
  assert(WeightSum <= UINT32_MAX &&
         "Expected weights to scale down to 32 bits");

  // Degenerate cases (all-zero weights, or every successor unreachable):
  // fall back to a uniform distribution.
  if (WeightSum == 0 || ReachableIdxs.size() == 0) {
    for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I)
      Weights[I] = 1;
    WeightSum = TI->getNumSuccessors();
  }

  // Set the probability.
  SmallVector<BranchProbability, 2> BP;
  for (unsigned I = 0, E = TI->getNumSuccessors(); I != E; ++I)
    BP.push_back({ Weights[I], static_cast<uint32_t>(WeightSum) });

  // Examine the metadata against the unreachable heuristic.
  // If the unreachable heuristic is stronger then we use it for this edge.
  if (UnreachableIdxs.size() == 0 || ReachableIdxs.size() == 0) {
    setEdgeProbability(BB, BP);
    return true;
  }

  // Cap each unreachable edge at the minimal representable probability.
  auto UnreachableProb = UR_TAKEN_PROB;
  for (auto I : UnreachableIdxs)
    if (UnreachableProb < BP[I]) {
      BP[I] = UnreachableProb;
    }

  // Sum of all edge probabilities must be 1.0. If we modified the probability
  // of some edges then we must distribute the introduced difference over the
  // reachable blocks.
  //
  // Proportional distribution: the relation between probabilities of the
  // reachable edges is kept unchanged. That is for any reachable edges i and j:
  //   newBP[i] / newBP[j] == oldBP[i] / oldBP[j] =>
  //   newBP[i] / oldBP[i] == newBP[j] / oldBP[j] == K
  // Where K is independent of i,j.
  //   newBP[i] == oldBP[i] * K
  // We need to find K.
  // Make sum of all reachables of the left and right parts:
  //   sum_of_reachable(newBP) == K * sum_of_reachable(oldBP)
  // Sum of newBP must be equal to 1.0:
  //   sum_of_reachable(newBP) + sum_of_unreachable(newBP) == 1.0 =>
  //   sum_of_reachable(newBP) = 1.0 - sum_of_unreachable(newBP)
  // Where sum_of_unreachable(newBP) is what has been just changed.
  // Finally:
  //   K == sum_of_reachable(newBP) / sum_of_reachable(oldBP) =>
  //   K == (1.0 - sum_of_unreachable(newBP)) / sum_of_reachable(oldBP)
  BranchProbability NewUnreachableSum = BranchProbability::getZero();
  for (auto I : UnreachableIdxs)
    NewUnreachableSum += BP[I];

  BranchProbability NewReachableSum =
      BranchProbability::getOne() - NewUnreachableSum;

  BranchProbability OldReachableSum = BranchProbability::getZero();
  for (auto I : ReachableIdxs)
    OldReachableSum += BP[I];

  if (OldReachableSum != NewReachableSum) { // Anything to distribute?
    if (OldReachableSum.isZero()) {
      // If all oldBP[i] are zeroes then the proportional distribution results
      // in all zero probabilities and the error stays big. In this case we
      // evenly spread NewReachableSum over the reachable edges.
      BranchProbability PerEdge = NewReachableSum / ReachableIdxs.size();
      for (auto I : ReachableIdxs)
        BP[I] = PerEdge;
    } else {
      for (auto I : ReachableIdxs) {
        // We use uint64_t to avoid double rounding error of the following
        // calculation: BP[i] = BP[i] * NewReachableSum / OldReachableSum
        // The formula is taken from the private constructor
        // BranchProbability(uint32_t Numerator, uint32_t Denominator)
        uint64_t Mul = static_cast<uint64_t>(NewReachableSum.getNumerator()) *
                       BP[I].getNumerator();
        uint32_t Div = static_cast<uint32_t>(
            divideNearest(Mul, OldReachableSum.getNumerator()));
        BP[I] = BranchProbability::getRaw(Div);
      }
    }
  }

  setEdgeProbability(BB, BP);

  return true;
}
|
|
|
|
|
2018-03-02 19:57:02 +01:00
|
|
|
// Calculate Edge Weights using "Pointer Heuristics". Predict a comparison
|
2011-06-04 03:16:30 +02:00
|
|
|
// between two pointer or pointer and NULL will fail.
|
2016-04-07 23:59:28 +02:00
|
|
|
bool BranchProbabilityInfo::calcPointerHeuristics(const BasicBlock *BB) {
|
|
|
|
const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
|
2011-06-04 03:16:30 +02:00
|
|
|
if (!BI || !BI->isConditional())
|
2011-07-28 23:45:07 +02:00
|
|
|
return false;
|
2011-06-04 03:16:30 +02:00
|
|
|
|
|
|
|
Value *Cond = BI->getCondition();
|
|
|
|
ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
|
2011-07-15 22:51:06 +02:00
|
|
|
if (!CI || !CI->isEquality())
|
2011-07-28 23:45:07 +02:00
|
|
|
return false;
|
2011-06-04 03:16:30 +02:00
|
|
|
|
|
|
|
Value *LHS = CI->getOperand(0);
|
|
|
|
|
|
|
|
if (!LHS->getType()->isPointerTy())
|
2011-07-28 23:45:07 +02:00
|
|
|
return false;
|
2011-06-04 03:16:30 +02:00
|
|
|
|
2011-06-04 04:07:10 +02:00
|
|
|
assert(CI->getOperand(1)->getType()->isPointerTy());
|
2011-06-04 03:16:30 +02:00
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
BranchProbability TakenProb(PH_TAKEN_WEIGHT,
|
|
|
|
PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);
|
|
|
|
BranchProbability UntakenProb(PH_NONTAKEN_WEIGHT,
|
|
|
|
PH_TAKEN_WEIGHT + PH_NONTAKEN_WEIGHT);
|
|
|
|
|
2011-06-04 03:16:30 +02:00
|
|
|
// p != 0 -> isProb = true
|
|
|
|
// p == 0 -> isProb = false
|
|
|
|
// p != q -> isProb = true
|
|
|
|
// p == q -> isProb = false;
|
2011-07-15 22:51:06 +02:00
|
|
|
bool isProb = CI->getPredicate() == ICmpInst::ICMP_NE;
|
2011-06-04 03:16:30 +02:00
|
|
|
if (!isProb)
|
2020-05-21 06:49:11 +02:00
|
|
|
std::swap(TakenProb, UntakenProb);
|
2011-06-04 03:16:30 +02:00
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
setEdgeProbability(
|
|
|
|
BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb}));
|
2011-07-28 23:45:07 +02:00
|
|
|
return true;
|
2011-06-04 03:16:30 +02:00
|
|
|
}
|
|
|
|
|
2018-02-23 18:17:31 +01:00
|
|
|
// Compute the unlikely successors to the block BB in the loop L, specifically
// those that are unlikely because this is a loop, and add them to the
// UnlikelyBlocks set.
static void
computeUnlikelySuccessors(const BasicBlock *BB, Loop *L,
                          SmallPtrSetImpl<const BasicBlock*> &UnlikelyBlocks) {
  // Sometimes in a loop we have a branch whose condition is made false by
  // taking it. This is typically something like
  //   int n = 0;
  //   while (...) {
  //     if (++n >= MAX) {
  //       n = 0;
  //     }
  //   }
  // In this sort of situation taking the branch means that at the very least it
  // won't be taken again in the next iteration of the loop, so we should
  // consider it less likely than a typical branch.
  //
  // We detect this by looking back through the graph of PHI nodes that sets the
  // value that the condition depends on, and seeing if we can reach a successor
  // block which can be determined to make the condition false.
  //
  // FIXME: We currently consider unlikely blocks to be half as likely as other
  // blocks, but if we consider the example above the likelihood is actually
  // 1/MAX. We could therefore be more precise in how unlikely we consider
  // blocks to be, but it would require more careful examination of the form
  // of the comparison expression.
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return;

  // Check if the branch is based on an instruction compared with a constant.
  CmpInst *CI = dyn_cast<CmpInst>(BI->getCondition());
  if (!CI || !isa<Instruction>(CI->getOperand(0)) ||
      !isa<Constant>(CI->getOperand(1)))
    return;

  // Either the instruction must be a PHI, or a chain of operations involving
  // constants that ends in a PHI which we can then collapse into a single value
  // if the PHI value is known.
  Instruction *CmpLHS = dyn_cast<Instruction>(CI->getOperand(0));
  PHINode *CmpPHI = dyn_cast<PHINode>(CmpLHS);
  Constant *CmpConst = dyn_cast<Constant>(CI->getOperand(1));
  // Collect the instructions until we hit a PHI. Each collected operation must
  // have a constant second operand so the whole chain can later be folded.
  SmallVector<BinaryOperator *, 1> InstChain;
  while (!CmpPHI && CmpLHS && isa<BinaryOperator>(CmpLHS) &&
         isa<Constant>(CmpLHS->getOperand(1))) {
    // Stop if the chain extends outside of the loop.
    if (!L->contains(CmpLHS))
      return;
    InstChain.push_back(cast<BinaryOperator>(CmpLHS));
    CmpLHS = dyn_cast<Instruction>(CmpLHS->getOperand(0));
    if (CmpLHS)
      CmpPHI = dyn_cast<PHINode>(CmpLHS);
  }
  if (!CmpPHI || !L->contains(CmpPHI))
    return;

  // Trace the phi node to find all values that come from successors of BB.
  SmallPtrSet<PHINode*, 8> VisitedInsts;
  SmallVector<PHINode*, 8> WorkList;
  WorkList.push_back(CmpPHI);
  VisitedInsts.insert(CmpPHI);
  while (!WorkList.empty()) {
    PHINode *P = WorkList.pop_back_val();
    for (BasicBlock *B : P->blocks()) {
      // Skip blocks that aren't part of the loop.
      if (!L->contains(B))
        continue;
      Value *V = P->getIncomingValueForBlock(B);
      // If the source is a PHI add it to the work list if we haven't
      // already visited it.
      if (PHINode *PN = dyn_cast<PHINode>(V)) {
        if (VisitedInsts.insert(PN).second)
          WorkList.push_back(PN);
        continue;
      }
      // If this incoming value is a constant and B is a successor of BB, then
      // we can constant-evaluate the compare to see if it makes the branch be
      // taken or not.
      Constant *CmpLHSConst = dyn_cast<Constant>(V);
      if (!CmpLHSConst || !llvm::is_contained(successors(BB), B))
        continue;
      // First collapse InstChain: apply the collected operations (in original
      // program order, hence the reverse iteration) to the incoming constant.
      for (Instruction *I : llvm::reverse(InstChain)) {
        CmpLHSConst = ConstantExpr::get(I->getOpcode(), CmpLHSConst,
                                        cast<Constant>(I->getOperand(1)), true);
        if (!CmpLHSConst)
          break;
      }
      if (!CmpLHSConst)
        continue;
      // Now constant-evaluate the compare.
      Constant *Result = ConstantExpr::getCompare(CI->getPredicate(),
                                                  CmpLHSConst, CmpConst, true);
      // If the result means we don't branch to the block then that block is
      // unlikely.
      if (Result &&
          ((Result->isZeroValue() && B == BI->getSuccessor(0)) ||
           (Result->isOneValue() && B == BI->getSuccessor(1))))
        UnlikelyBlocks.insert(B);
    }
  }
}
|
|
|
|
|
2020-06-18 11:20:55 +02:00
|
|
|
/// Look up the estimated execution weight previously recorded for \p BB.
/// Returns None when no weight has been computed for the block.
Optional<uint32_t>
BranchProbabilityInfo::getEstimatedBlockWeight(const BasicBlock *BB) const {
  const auto It = EstimatedBlockWeight.find(BB);
  if (It != EstimatedBlockWeight.end())
    return It->second;
  return None;
}
|
|
|
|
|
|
|
|
/// Look up the estimated weight previously recorded for loop \p L.
/// Returns None when the loop's weight is not (yet) known.
Optional<uint32_t>
BranchProbabilityInfo::getEstimatedLoopWeight(const LoopData &L) const {
  const auto It = EstimatedLoopWeight.find(L);
  if (It != EstimatedLoopWeight.end())
    return It->second;
  return None;
}
|
|
|
|
|
|
|
|
/// Return the estimated weight for \p Edge's destination.
Optional<uint32_t>
BranchProbabilityInfo::getEstimatedEdgeWeight(const LoopEdge &Edge) const {
  // Edges entering a loop are weighted by the loop as a whole rather than by
  // the individual destination block inside it.
  if (isLoopEnteringEdge(Edge))
    return getEstimatedLoopWeight(Edge.second.getLoopData());
  return getEstimatedBlockWeight(Edge.second.getBlock());
}
|
|
|
|
|
|
|
|
template <class IterT>
|
|
|
|
Optional<uint32_t> BranchProbabilityInfo::getMaxEstimatedEdgeWeight(
|
|
|
|
const LoopBlock &SrcLoopBB, iterator_range<IterT> Successors) const {
|
|
|
|
SmallVector<uint32_t, 4> Weights;
|
|
|
|
Optional<uint32_t> MaxWeight;
|
|
|
|
for (const BasicBlock *DstBB : Successors) {
|
|
|
|
const LoopBlock DstLoopBB = getLoopBlock(DstBB);
|
|
|
|
auto Weight = getEstimatedEdgeWeight({SrcLoopBB, DstLoopBB});
|
|
|
|
|
|
|
|
if (!Weight)
|
|
|
|
return None;
|
|
|
|
|
|
|
|
if (!MaxWeight || MaxWeight.getValue() < Weight.getValue())
|
|
|
|
MaxWeight = Weight;
|
|
|
|
}
|
|
|
|
|
|
|
|
return MaxWeight;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Updates \p LoopBB's weight and returns true. If \p LoopBB has already
// an associated weight it is unchanged and false is returned.
//
// Please note by the algorithm the weight is not expected to change once set
// thus 'false' status is used to track visited blocks.
bool BranchProbabilityInfo::updateEstimatedBlockWeight(
    LoopBlock &LoopBB, uint32_t BBWeight,
    SmallVectorImpl<BasicBlock *> &BlockWorkList,
    SmallVectorImpl<LoopBlock> &LoopWorkList) {
  BasicBlock *BB = LoopBB.getBlock();

  // In general, weight is assigned to a block when it has final value and
  // can't/shouldn't be changed. However, there are cases when a block
  // inherently has several (possibly "contradicting") weights. For example,
  // "unwind" block may also contain "cold" call. In that case the first
  // set weight is favored and all consequent weights are ignored.
  if (!EstimatedBlockWeight.insert({BB, BBWeight}).second)
    return false;

  // BB just received a weight, so each predecessor may now be computable.
  for (BasicBlock *PredBlock : predecessors(BB)) {
    LoopBlock PredLoop = getLoopBlock(PredBlock);
    // Add affected block/loop to a working list.
    if (isLoopExitingEdge({PredLoop, LoopBB})) {
      if (!EstimatedLoopWeight.count(PredLoop.getLoopData()))
        LoopWorkList.push_back(PredLoop);
    } else if (!EstimatedBlockWeight.count(PredBlock))
      BlockWorkList.push_back(PredBlock);
  }
  return true;
}
|
2018-02-23 18:17:31 +01:00
|
|
|
|
2020-06-18 11:20:55 +02:00
|
|
|
// Starting from \p BB traverse through dominator blocks and assign \p BBWeight
// to all such blocks that are post dominated by \p BB. In other words to all
// blocks that the one is executed if and only if another one is executed.
// Importantly, we skip loops here for two reasons. First weights of blocks in
// a loop should be scaled by trip count (yet possibly unknown). Second there is
// no value in doing that because that doesn't give any additional
// information regarding distribution of probabilities inside the loop.
// Exception is loop 'enter' and 'exit' edges that are handled in a special way
// at calcEstimatedHeuristics.
//
// In addition, \p BlockWorkList/\p LoopWorkList are populated with basic
// blocks/loops if at least one successor has updated estimated weight.
void BranchProbabilityInfo::propagateEstimatedBlockWeight(
    const LoopBlock &LoopBB, DominatorTree *DT, PostDominatorTree *PDT,
    uint32_t BBWeight, SmallVectorImpl<BasicBlock *> &BlockWorkList,
    SmallVectorImpl<LoopBlock> &LoopWorkList) {
  const BasicBlock *BB = LoopBB.getBlock();
  const auto *DTStartNode = DT->getNode(BB);
  const auto *PDTStartNode = PDT->getNode(BB);

  // TODO: Consider propagating weight down the domination line as well.
  // Walk up the dominator tree starting at BB.
  for (const auto *DTNode = DTStartNode; DTNode != nullptr;
       DTNode = DTNode->getIDom()) {
    auto *DomBB = DTNode->getBlock();
    // Consider blocks which lie on one 'line': DomBB dominates BB (by
    // construction of the walk) and BB must post-dominate DomBB.
    if (!PDT->dominates(PDTStartNode, PDT->getNode(DomBB)))
      // If BB doesn't post dominate DomBB it will not post dominate dominators
      // of DomBB as well.
      break;

    LoopBlock DomLoopBB = getLoopBlock(DomBB);
    const LoopEdge Edge{DomLoopBB, LoopBB};
    // Don't propagate weight to blocks belonging to different loops.
    if (!isLoopEnteringExitingEdge(Edge)) {
      if (!updateEstimatedBlockWeight(DomLoopBB, BBWeight, BlockWorkList,
                                      LoopWorkList))
        // If DomBB has weight set then all its predecessors are already
        // processed (since we propagate weight up to the top of IR each time).
        break;
    } else if (isLoopExitingEdge(Edge)) {
      LoopWorkList.push_back(DomLoopBB);
    }
  }
}
|
|
|
|
|
|
|
|
/// Compute the initial (pre-propagation) estimated weight for \p BB, or None
/// when no special-case heuristic applies to the block.
Optional<uint32_t> BranchProbabilityInfo::getInitialEstimatedBlockWeight(
    const BasicBlock *BB) {
  // Returns true if \p BB has call marked with "NoReturn" attribute.
  auto hasNoReturn = [&](const BasicBlock *BB) {
    for (const auto &I : reverse(*BB))
      if (const CallInst *CI = dyn_cast<CallInst>(&I))
        if (CI->hasFnAttr(Attribute::NoReturn))
          return true;

    return false;
  };

  // Important note regarding the order of checks. They are ordered by weight
  // from lowest to highest. Doing that allows to avoid "unstable" results
  // when several conditions heuristics can be applied simultaneously.
  if (isa<UnreachableInst>(BB->getTerminator()) ||
      // If this block is terminated by a call to
      // @llvm.experimental.deoptimize then treat it like an unreachable
      // since it is expected to practically never execute.
      // TODO: Should we actually treat as never returning call?
      BB->getTerminatingDeoptimizeCall())
    return hasNoReturn(BB)
               ? static_cast<uint32_t>(BlockExecWeight::NORETURN)
               : static_cast<uint32_t>(BlockExecWeight::UNREACHABLE);

  // Check if the block is 'unwind' handler of some invoke instruction.
  for (const auto *Pred : predecessors(BB))
    if (Pred)
      if (const auto *II = dyn_cast<InvokeInst>(Pred->getTerminator()))
        if (II->getUnwindDest() == BB)
          return static_cast<uint32_t>(BlockExecWeight::UNWIND);

  // Check if the block contains 'cold' call.
  for (const auto &I : *BB)
    if (const CallInst *CI = dyn_cast<CallInst>(&I))
      if (CI->hasFnAttr(Attribute::Cold))
        return static_cast<uint32_t>(BlockExecWeight::COLD);

  // No heuristic matched; the block has no special initial weight.
  return None;
}
|
2014-04-14 18:56:19 +02:00
|
|
|
|
2020-06-18 11:20:55 +02:00
|
|
|
// Does RPO traversal over all blocks in \p F and assigns weights to
// 'unreachable', 'noreturn', 'cold', 'unwind' blocks. In addition it does its
// best to propagate the weight up/down the IR.
void BranchProbabilityInfo::computeEestimateBlockWeight(
    const Function &F, DominatorTree *DT, PostDominatorTree *PDT) {
  SmallVector<BasicBlock *, 8> BlockWorkList;
  SmallVector<LoopBlock, 8> LoopWorkList;

  // By doing RPO we make sure that all predecessors already have weights
  // calculated before visiting their successors.
  ReversePostOrderTraversal<const Function *> RPOT(&F);
  for (const auto *BB : RPOT)
    if (auto BBWeight = getInitialEstimatedBlockWeight(BB))
      // If we were able to find estimated weight for the block set it to this
      // block and propagate up the IR.
      propagateEstimatedBlockWeight(getLoopBlock(BB), DT, PDT,
                                    BBWeight.getValue(), BlockWorkList,
                                    LoopWorkList);

  // BlockWorkList/LoopWorkList contains blocks/loops with at least one
  // successor/exit having estimated weight. Try to propagate weight to such
  // blocks/loops from successors/exits.
  // Process loops and blocks. Order is not important.
  do {
    while (!LoopWorkList.empty()) {
      const LoopBlock LoopBB = LoopWorkList.pop_back_val();

      if (EstimatedLoopWeight.count(LoopBB.getLoopData()))
        continue;

      // A loop's weight is the maximum over the weights of its exit edges.
      SmallVector<BasicBlock *, 4> Exits;
      getLoopExitBlocks(LoopBB, Exits);
      auto LoopWeight = getMaxEstimatedEdgeWeight(
          LoopBB, make_range(Exits.begin(), Exits.end()));

      if (LoopWeight) {
        // If we never exit the loop then we can enter it once at maximum.
        if (LoopWeight <= static_cast<uint32_t>(BlockExecWeight::UNREACHABLE))
          LoopWeight = static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO);

        EstimatedLoopWeight.insert(
            {LoopBB.getLoopData(), LoopWeight.getValue()});
        // Add all blocks entering the loop into working list.
        getLoopEnterBlocks(LoopBB, BlockWorkList);
      }
    }

    while (!BlockWorkList.empty()) {
      // We can reach here only if BlockWorkList is not empty.
      const BasicBlock *BB = BlockWorkList.pop_back_val();
      if (EstimatedBlockWeight.count(BB))
        continue;

      // We take maximum over all weights of successors. In other words we take
      // weight of "hot" path. In theory we can probably find a better function
      // which gives higher accuracy results (comparing to "maximum") but I
      // can't think of any right now. And I doubt it will make any difference
      // in practice.
      const LoopBlock LoopBB = getLoopBlock(BB);
      auto MaxWeight = getMaxEstimatedEdgeWeight(LoopBB, successors(BB));

      if (MaxWeight)
        propagateEstimatedBlockWeight(LoopBB, DT, PDT, MaxWeight.getValue(),
                                      BlockWorkList, LoopWorkList);
    }
  } while (!BlockWorkList.empty() || !LoopWorkList.empty());
}
|
|
|
|
|
|
|
|
// Calculate edge probabilities based on block's estimated weight.
// Note that gathered weights were not scaled for loops. Thus edges entering
// and exiting loops require special processing.
bool BranchProbabilityInfo::calcEstimatedHeuristics(const BasicBlock *BB) {
  assert(BB->getTerminator()->getNumSuccessors() > 1 &&
         "expected more than one successor!");

  const LoopBlock LoopBB = getLoopBlock(BB);

  SmallPtrSet<const BasicBlock *, 8> UnlikelyBlocks;
  // Assumed trip count used to scale down loop-exiting edges.
  uint32_t TC = LBH_TAKEN_WEIGHT / LBH_NONTAKEN_WEIGHT;
  if (LoopBB.getLoop())
    computeUnlikelySuccessors(BB, LoopBB.getLoop(), UnlikelyBlocks);

  // Changed to 'true' if at least one successor has estimated weight.
  bool FoundEstimatedWeight = false;
  SmallVector<uint32_t, 4> SuccWeights;
  uint64_t TotalWeight = 0;
  // Go over all successors of BB and put their weights into SuccWeights.
  for (const BasicBlock *SuccBB : successors(BB)) {
    Optional<uint32_t> Weight;
    const LoopBlock SuccLoopBB = getLoopBlock(SuccBB);
    const LoopEdge Edge{LoopBB, SuccLoopBB};

    Weight = getEstimatedEdgeWeight(Edge);

    if (isLoopExitingEdge(Edge) &&
        // Avoid adjustment of ZERO weight since it should remain unchanged.
        Weight != static_cast<uint32_t>(BlockExecWeight::ZERO)) {
      // Scale down loop exiting weight by trip count.
      Weight = std::max(
          static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO),
          Weight.getValueOr(static_cast<uint32_t>(BlockExecWeight::DEFAULT)) /
              TC);
    }
    bool IsUnlikelyEdge = LoopBB.getLoop() && UnlikelyBlocks.contains(SuccBB);
    if (IsUnlikelyEdge &&
        // Avoid adjustment of ZERO weight since it should remain unchanged.
        Weight != static_cast<uint32_t>(BlockExecWeight::ZERO)) {
      // 'Unlikely' blocks have twice lower weight.
      Weight = std::max(
          static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO),
          Weight.getValueOr(static_cast<uint32_t>(BlockExecWeight::DEFAULT)) /
              2);
    }

    if (Weight)
      FoundEstimatedWeight = true;

    // Successors without an estimate fall back to DEFAULT weight.
    auto WeightVal =
        Weight.getValueOr(static_cast<uint32_t>(BlockExecWeight::DEFAULT));
    TotalWeight += WeightVal;
    SuccWeights.push_back(WeightVal);
  }

  // If none of the blocks have estimated weight bail out.
  // If TotalWeight is 0 that means weight of each successor is 0 as well and
  // equally likely. Bail out early to not deal with division by zero.
  if (!FoundEstimatedWeight || TotalWeight == 0)
    return false;

  assert(SuccWeights.size() == succ_size(BB) && "Missed successor?");
  const unsigned SuccCount = SuccWeights.size();

  // If the sum of weights does not fit in 32 bits, scale every weight down
  // accordingly.
  if (TotalWeight > UINT32_MAX) {
    uint64_t ScalingFactor = TotalWeight / UINT32_MAX + 1;
    TotalWeight = 0;
    for (unsigned Idx = 0; Idx < SuccCount; ++Idx) {
      SuccWeights[Idx] /= ScalingFactor;
      // Keep scaled-to-zero weights representable as non-zero.
      if (SuccWeights[Idx] == static_cast<uint32_t>(BlockExecWeight::ZERO))
        SuccWeights[Idx] =
            static_cast<uint32_t>(BlockExecWeight::LOWEST_NON_ZERO);
      TotalWeight += SuccWeights[Idx];
    }
    assert(TotalWeight <= UINT32_MAX && "Total weight overflows");
  }

  // Finally set probabilities to edges according to estimated block weights.
  SmallVector<BranchProbability, 4> EdgeProbabilities(
      SuccCount, BranchProbability::getUnknown());

  for (unsigned Idx = 0; Idx < SuccCount; ++Idx) {
    EdgeProbabilities[Idx] =
        BranchProbability(SuccWeights[Idx], (uint32_t)TotalWeight);
  }
  setEdgeProbability(BB, EdgeProbabilities);
  return true;
}
|
|
|
|
|
2020-08-17 20:42:57 +02:00
|
|
|
/// Heuristic for conditional branches on integer comparisons against the
/// special constants 0, 1 and -1 (and against the return value of
/// strcmp-family library functions). Returns true and sets edge probabilities
/// when a pattern matches; returns false to let other heuristics run.
bool BranchProbabilityInfo::calcZeroHeuristics(const BasicBlock *BB,
                                               const TargetLibraryInfo *TLI) {
  const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
  if (!BI || !BI->isConditional())
    return false;

  Value *Cond = BI->getCondition();
  ICmpInst *CI = dyn_cast<ICmpInst>(Cond);
  if (!CI)
    return false;

  // Look through a bitcast when extracting the constant operand.
  auto GetConstantInt = [](Value *V) {
    if (auto *I = dyn_cast<BitCastInst>(V))
      return dyn_cast<ConstantInt>(I->getOperand(0));
    return dyn_cast<ConstantInt>(V);
  };

  Value *RHS = CI->getOperand(1);
  ConstantInt *CV = GetConstantInt(RHS);
  if (!CV)
    return false;

  // If the LHS is the result of AND'ing a value with a single bit bitmask,
  // we don't have information about probabilities.
  if (Instruction *LHS = dyn_cast<Instruction>(CI->getOperand(0)))
    if (LHS->getOpcode() == Instruction::And)
      if (ConstantInt *AndRHS = GetConstantInt(LHS->getOperand(1)))
        if (AndRHS->getValue().isPowerOf2())
          return false;

  // Check if the LHS is the return value of a library function.
  LibFunc Func = NumLibFuncs;
  if (TLI)
    if (CallInst *Call = dyn_cast<CallInst>(CI->getOperand(0)))
      if (Function *CalledFn = Call->getCalledFunction())
        TLI->getLibFunc(*CalledFn, Func);

  bool isProb;
  if (Func == LibFunc_strcasecmp ||
      Func == LibFunc_strcmp ||
      Func == LibFunc_strncasecmp ||
      Func == LibFunc_strncmp ||
      Func == LibFunc_memcmp ||
      Func == LibFunc_bcmp) {
    // strcmp and similar functions return zero, negative, or positive, if the
    // first string is equal, less, or greater than the second. We consider it
    // likely that the strings are not equal, so a comparison with zero is
    // probably false, but also a comparison with any other number is also
    // probably false given that what exactly is returned for nonzero values is
    // not specified. Any kind of comparison other than equality we know
    // nothing about.
    switch (CI->getPredicate()) {
    case CmpInst::ICMP_EQ:
      isProb = false;
      break;
    case CmpInst::ICMP_NE:
      isProb = true;
      break;
    default:
      return false;
    }
  } else if (CV->isZero()) {
    switch (CI->getPredicate()) {
    case CmpInst::ICMP_EQ:
      // X == 0 -> Unlikely
      isProb = false;
      break;
    case CmpInst::ICMP_NE:
      // X != 0 -> Likely
      isProb = true;
      break;
    case CmpInst::ICMP_SLT:
      // X < 0 -> Unlikely
      isProb = false;
      break;
    case CmpInst::ICMP_SGT:
      // X > 0 -> Likely
      isProb = true;
      break;
    default:
      return false;
    }
  } else if (CV->isOne() && CI->getPredicate() == CmpInst::ICMP_SLT) {
    // InstCombine canonicalizes X <= 0 into X < 1.
    // X <= 0 -> Unlikely
    isProb = false;
  } else if (CV->isMinusOne()) {
    switch (CI->getPredicate()) {
    case CmpInst::ICMP_EQ:
      // X == -1 -> Unlikely
      isProb = false;
      break;
    case CmpInst::ICMP_NE:
      // X != -1 -> Likely
      isProb = true;
      break;
    case CmpInst::ICMP_SGT:
      // InstCombine canonicalizes X >= 0 into X > -1.
      // X >= 0 -> Likely
      isProb = true;
      break;
    default:
      return false;
    }
  } else {
    return false;
  }

  BranchProbability TakenProb(ZH_TAKEN_WEIGHT,
                              ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT);
  BranchProbability UntakenProb(ZH_NONTAKEN_WEIGHT,
                                ZH_TAKEN_WEIGHT + ZH_NONTAKEN_WEIGHT);
  // isProb == false means the taken edge is the unlikely one.
  if (!isProb)
    std::swap(TakenProb, UntakenProb);

  setEdgeProbability(
      BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb}));
  return true;
}
|
|
|
|
|
2016-04-07 23:59:28 +02:00
|
|
|
bool BranchProbabilityInfo::calcFloatingPointHeuristics(const BasicBlock *BB) {
|
|
|
|
const BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
|
2011-10-21 22:12:47 +02:00
|
|
|
if (!BI || !BI->isConditional())
|
|
|
|
return false;
|
|
|
|
|
|
|
|
Value *Cond = BI->getCondition();
|
|
|
|
FCmpInst *FCmp = dyn_cast<FCmpInst>(Cond);
|
2011-10-21 23:13:47 +02:00
|
|
|
if (!FCmp)
|
2011-10-21 22:12:47 +02:00
|
|
|
return false;
|
|
|
|
|
2019-09-10 19:25:11 +02:00
|
|
|
uint32_t TakenWeight = FPH_TAKEN_WEIGHT;
|
|
|
|
uint32_t NontakenWeight = FPH_NONTAKEN_WEIGHT;
|
2011-10-21 23:13:47 +02:00
|
|
|
bool isProb;
|
|
|
|
if (FCmp->isEquality()) {
|
|
|
|
// f1 == f2 -> Unlikely
|
|
|
|
// f1 != f2 -> Likely
|
|
|
|
isProb = !FCmp->isTrueWhenEqual();
|
|
|
|
} else if (FCmp->getPredicate() == FCmpInst::FCMP_ORD) {
|
|
|
|
// !isnan -> Likely
|
|
|
|
isProb = true;
|
2019-09-10 19:25:11 +02:00
|
|
|
TakenWeight = FPH_ORD_WEIGHT;
|
|
|
|
NontakenWeight = FPH_UNO_WEIGHT;
|
2011-10-21 23:13:47 +02:00
|
|
|
} else if (FCmp->getPredicate() == FCmpInst::FCMP_UNO) {
|
|
|
|
// isnan -> Unlikely
|
|
|
|
isProb = false;
|
2019-09-10 19:25:11 +02:00
|
|
|
TakenWeight = FPH_ORD_WEIGHT;
|
|
|
|
NontakenWeight = FPH_UNO_WEIGHT;
|
2011-10-21 23:13:47 +02:00
|
|
|
} else {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
BranchProbability TakenProb(TakenWeight, TakenWeight + NontakenWeight);
|
|
|
|
BranchProbability UntakenProb(NontakenWeight, TakenWeight + NontakenWeight);
|
2011-10-21 23:13:47 +02:00
|
|
|
if (!isProb)
|
2020-05-21 06:49:11 +02:00
|
|
|
std::swap(TakenProb, UntakenProb);
|
2011-10-21 22:12:47 +02:00
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
setEdgeProbability(
|
|
|
|
BB, SmallVector<BranchProbability, 2>({TakenProb, UntakenProb}));
|
2011-10-21 22:12:47 +02:00
|
|
|
return true;
|
|
|
|
}
|
2011-07-31 05:27:24 +02:00
|
|
|
|
2015-05-28 21:43:06 +02:00
|
|
|
void BranchProbabilityInfo::releaseMemory() {
|
2015-12-22 19:56:14 +01:00
|
|
|
Probs.clear();
|
2020-04-07 21:21:30 +02:00
|
|
|
Handles.clear();
|
2015-05-28 21:43:06 +02:00
|
|
|
}
|
|
|
|
|
2020-01-15 23:02:33 +01:00
|
|
|
bool BranchProbabilityInfo::invalidate(Function &, const PreservedAnalyses &PA,
                                       FunctionAnalysisManager::Invalidator &) {
  // The analysis stays valid if this analysis, all function analyses, or the
  // function's CFG were preserved; otherwise it must be recomputed.
  auto PAC = PA.getChecker<BranchProbabilityAnalysis>();
  return !PAC.preserved() && !PAC.preservedSet<AllAnalysesOn<Function>>() &&
         !PAC.preservedSet<CFGAnalyses>();
}
|
|
|
|
|
2015-07-16 00:48:29 +02:00
|
|
|
/// Print the probability of every CFG edge of the most recently analyzed
/// function to \p OS.
void BranchProbabilityInfo::print(raw_ostream &OS) const {
  OS << "---- Branch Probabilities ----\n";
  // Probabilities are available only for the last function the analysis ran
  // over, or the function it is currently running over.
  assert(LastF && "Cannot print prior to running over a function");
  for (const BasicBlock &SrcBB : *LastF)
    for (const BasicBlock *DstBB : successors(&SrcBB))
      printEdgeProbability(OS << " ", &SrcBB, DstBB);
}
|
|
|
|
|
2011-07-29 21:30:00 +02:00
|
|
|
bool BranchProbabilityInfo::
|
|
|
|
isEdgeHot(const BasicBlock *Src, const BasicBlock *Dst) const {
|
2011-06-11 03:05:22 +02:00
|
|
|
// Hot probability is at least 4/5 = 80%
|
2011-10-23 13:19:14 +02:00
|
|
|
// FIXME: Compare against a static "hot" BranchProbability.
|
|
|
|
return getEdgeProbability(Src, Dst) > BranchProbability(4, 5);
|
2011-06-04 03:16:30 +02:00
|
|
|
}
|
|
|
|
|
2015-12-22 19:56:14 +01:00
|
|
|
/// Get the raw edge probability for the edge. If can't find it, return a
/// default probability 1/N where N is the number of successors. Here an edge
/// is specified using PredBlock and an index to the successors.
BranchProbability
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
                                          unsigned IndexInSuccessors) const {
  auto I = Probs.find(std::make_pair(Src, IndexInSuccessors));
  // Probabilities are stored all-or-nothing per source block: if the entry
  // for successor 0 exists, entries for every successor index must exist too.
  assert((Probs.end() == Probs.find(std::make_pair(Src, 0))) ==
             (Probs.end() == I) &&
         "Probability for I-th successor must always be defined along with the "
         "probability for the first successor");

  if (I != Probs.end())
    return I->second;

  // No recorded data: assume all successors are equally likely.
  return {1, static_cast<uint32_t>(succ_size(Src))};
}
|
|
|
|
|
2015-12-22 19:56:14 +01:00
|
|
|
/// Get the raw edge probability for the edge identified by the successor
/// iterator \p Dst; forwards to the index-based overload.
BranchProbability
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
                                          const_succ_iterator Dst) const {
  return getEdgeProbability(Src, Dst.getSuccessorIndex());
}
|
|
|
|
|
2015-12-22 19:56:14 +01:00
|
|
|
/// Get the raw edge probability calculated for the block pair. This returns the
|
|
|
|
/// sum of all raw edge probabilities from Src to Dst.
|
|
|
|
BranchProbability
|
|
|
|
BranchProbabilityInfo::getEdgeProbability(const BasicBlock *Src,
|
|
|
|
const BasicBlock *Dst) const {
|
2020-11-11 04:17:13 +01:00
|
|
|
if (!Probs.count(std::make_pair(Src, 0)))
|
2020-11-07 07:47:22 +01:00
|
|
|
return BranchProbability(llvm::count(successors(Src), Dst), succ_size(Src));
|
|
|
|
|
2015-12-22 19:56:14 +01:00
|
|
|
auto Prob = BranchProbability::getZero();
|
2020-03-10 19:33:02 +01:00
|
|
|
for (const_succ_iterator I = succ_begin(Src), E = succ_end(Src); I != E; ++I)
|
2020-11-07 07:47:22 +01:00
|
|
|
if (*I == Dst)
|
2020-11-11 04:17:13 +01:00
|
|
|
Prob += Probs.find(std::make_pair(Src, I.getSuccessorIndex()))->second;
|
2020-11-07 07:47:22 +01:00
|
|
|
|
|
|
|
return Prob;
|
2012-08-24 20:14:27 +02:00
|
|
|
}
|
|
|
|
|
2020-05-21 06:49:11 +02:00
|
|
|
/// Set the edge probability for all edges at once.
void BranchProbabilityInfo::setEdgeProbability(
    const BasicBlock *Src, const SmallVectorImpl<BranchProbability> &Probs) {
  assert(Src->getTerminator()->getNumSuccessors() == Probs.size());
  eraseBlock(Src); // Erase stale data if any.
  if (Probs.size() == 0)
    return; // Nothing to set.

  // Register a callback so entries for Src are dropped if the block dies.
  Handles.insert(BasicBlockCallbackVH(Src, this));
  uint64_t TotalNumerator = 0;
  for (unsigned SuccIdx = 0; SuccIdx < Probs.size(); ++SuccIdx) {
    this->Probs[std::make_pair(Src, SuccIdx)] = Probs[SuccIdx];
    LLVM_DEBUG(dbgs() << "set edge " << Src->getName() << " -> " << SuccIdx
                      << " successor probability to " << Probs[SuccIdx]
                      << "\n");
    TotalNumerator += Probs[SuccIdx].getNumerator();
  }

  // Because of rounding errors the total probability cannot be checked to be
  // 1.0 exactly. That is TotalNumerator == BranchProbability::getDenominator.
  // Instead, every single probability in Probs must be as accurate as possible.
  // This results in error 1/denominator at most, thus the total absolute error
  // should be within Probs.size / BranchProbability::getDenominator.
  assert(TotalNumerator <= BranchProbability::getDenominator() + Probs.size());
  assert(TotalNumerator >= BranchProbability::getDenominator() - Probs.size());
  (void)TotalNumerator;
}
|
|
|
|
|
2020-11-06 08:46:34 +01:00
|
|
|
/// Copy the outgoing-edge probabilities recorded for \p Src onto the
/// corresponding successor edges of \p Dst. Both blocks must have the same
/// number of successors. Any stale data previously recorded for \p Dst is
/// erased first; if no probabilities are recorded for \p Src, \p Dst ends up
/// with none as well.
void BranchProbabilityInfo::copyEdgeProbabilities(BasicBlock *Src,
                                                  BasicBlock *Dst) {
  // A self-copy is a no-op. Without this guard the eraseBlock(Dst) below
  // would destroy the very data we were asked to copy and the early return
  // on the missing (Src, 0) entry would then silently drop it.
  if (Src == Dst)
    return;

  eraseBlock(Dst); // Erase stale data if any.
  unsigned NumSuccessors = Src->getTerminator()->getNumSuccessors();
  assert(NumSuccessors == Dst->getTerminator()->getNumSuccessors());
  if (NumSuccessors == 0)
    return; // Nothing to set.
  if (this->Probs.find(std::make_pair(Src, 0)) == this->Probs.end())
    return; // No probability is set for edges from Src. Keep the same for Dst.

  // Register a callback so the copied data is cleaned up if Dst is deleted.
  Handles.insert(BasicBlockCallbackVH(Dst, this));
  for (unsigned SuccIdx = 0; SuccIdx < NumSuccessors; ++SuccIdx) {
    // Use find() rather than operator[] for the read: the entry is guaranteed
    // to exist (probabilities are always recorded for all successors at
    // once), and a plain lookup cannot accidentally default-insert.
    auto Prob = this->Probs.find(std::make_pair(Src, SuccIdx))->second;
    this->Probs[std::make_pair(Dst, SuccIdx)] = Prob;
    LLVM_DEBUG(dbgs() << "set edge " << Dst->getName() << " -> " << SuccIdx
                      << " successor probability to " << Prob << "\n");
  }
}
|
|
|
|
|
2011-06-11 03:05:22 +02:00
|
|
|
/// Print the aggregate Src->Dst edge probability to \p OS, tagging edges that
/// are considered hot. Returns \p OS to allow chaining.
raw_ostream &
BranchProbabilityInfo::printEdgeProbability(raw_ostream &OS,
                                            const BasicBlock *Src,
                                            const BasicBlock *Dst) const {
  const BranchProbability EdgeProb = getEdgeProbability(Src, Dst);
  OS << "edge " << Src->getName() << " -> " << Dst->getName()
     << " probability is " << EdgeProb;
  if (isEdgeHot(Src, Dst))
    OS << " [HOT edge]\n";
  else
    OS << "\n";

  return OS;
}
|
2015-07-16 00:48:29 +02:00
|
|
|
|
2016-07-15 16:31:16 +02:00
|
|
|
void BranchProbabilityInfo::eraseBlock(const BasicBlock *BB) {
|
2020-11-16 07:29:30 +01:00
|
|
|
LLVM_DEBUG(dbgs() << "eraseBlock " << BB->getName() << "\n");
|
|
|
|
|
2020-11-11 04:17:13 +01:00
|
|
|
// Note that we cannot use successors of BB because the terminator of BB may
|
|
|
|
// have changed when eraseBlock is called as a BasicBlockCallbackVH callback.
|
|
|
|
// Instead we remove prob data for the block by iterating successors by their
|
|
|
|
// indices from 0 till the last which exists. There could not be prob data for
|
|
|
|
// a pair (BB, N) if there is no data for (BB, N-1) because the data is always
|
|
|
|
// set for all successors from 0 to M at once by the method
|
|
|
|
// setEdgeProbability().
|
2020-11-06 07:11:08 +01:00
|
|
|
Handles.erase(BasicBlockCallbackVH(BB, this));
|
2020-11-11 04:17:13 +01:00
|
|
|
for (unsigned I = 0;; ++I) {
|
|
|
|
auto MapI = Probs.find(std::make_pair(BB, I));
|
|
|
|
if (MapI == Probs.end()) {
|
|
|
|
assert(Probs.count(std::make_pair(BB, I + 1)) == 0 &&
|
|
|
|
"Must be no more successors");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
Probs.erase(MapI);
|
|
|
|
}
|
2016-07-15 16:31:16 +02:00
|
|
|
}
|
|
|
|
|
2020-06-18 11:20:55 +02:00
|
|
|
/// Compute branch probabilities for every multi-successor block of \p F.
/// \p DT and \p PDT may be null, in which case local trees are built on
/// demand. Blocks are processed in post-order and, per block, the first
/// heuristic in the chain below that succeeds wins.
void BranchProbabilityInfo::calculate(const Function &F, const LoopInfo &LoopI,
                                      const TargetLibraryInfo *TLI,
                                      DominatorTree *DT,
                                      PostDominatorTree *PDT) {
  LLVM_DEBUG(dbgs() << "---- Branch Probability Info : " << F.getName()
                    << " ----\n\n");
  LastF = &F; // Store the last function we ran on for printing.
  LI = &LoopI;

  // SCC info is scratch state used by the heuristics; released at the end.
  SccI = std::make_unique<SccInfo>(F);

  // Scratch weight maps must be empty between runs; they are cleared below.
  assert(EstimatedBlockWeight.empty());
  assert(EstimatedLoopWeight.empty());

  // Owning storage for trees we may have to build ourselves when the caller
  // did not supply them.
  std::unique_ptr<DominatorTree> DTPtr;
  std::unique_ptr<PostDominatorTree> PDTPtr;

  if (!DT) {
    DTPtr = std::make_unique<DominatorTree>(const_cast<Function &>(F));
    DT = DTPtr.get();
  }

  if (!PDT) {
    PDTPtr = std::make_unique<PostDominatorTree>(const_cast<Function &>(F));
    PDT = PDTPtr.get();
  }

  // Pre-compute per-block weight estimates used by calcEstimatedHeuristics.
  // (Function name typo is historical; it is declared this way.)
  computeEestimateBlockWeight(F, DT, PDT);

  // Walk the basic blocks in post-order so that we can build up state about
  // the successors of a block iteratively.
  for (auto BB : post_order(&F.getEntryBlock())) {
    LLVM_DEBUG(dbgs() << "Computing probabilities for " << BB->getName()
                      << "\n");
    // A block with fewer than two successors has nothing to decide, so there
    // is no probability to set.
    if (BB->getTerminator()->getNumSuccessors() < 2)
      continue;
    // Heuristic chain: explicit metadata wins over everything else.
    if (calcMetadataWeights(BB))
      continue;
    if (calcEstimatedHeuristics(BB))
      continue;
    if (calcPointerHeuristics(BB))
      continue;
    if (calcZeroHeuristics(BB, TLI))
      continue;
    if (calcFloatingPointHeuristics(BB))
      continue;
  }

  // Drop scratch state so the next calculate() starts clean (see asserts
  // above).
  EstimatedLoopWeight.clear();
  EstimatedBlockWeight.clear();
  SccI.reset();

  // Optional debug dump, gated on a flag and an optional function-name
  // filter.
  if (PrintBranchProb &&
      (PrintBranchProbFuncName.empty() ||
       F.getName().equals(PrintBranchProbFuncName))) {
    print(dbgs());
  }
}
|
|
|
|
|
|
|
|
/// Declare the analyses this wrapper pass depends on. The pass itself does
/// not modify the IR, so it preserves everything.
void BranchProbabilityInfoWrapperPass::getAnalysisUsage(
    AnalysisUsage &AU) const {
  // We require DT so it's available when LI is available. The LI updating code
  // asserts that DT is also present so if we don't make sure that we have DT
  // here, that assert will trigger. (A second, redundant addRequired for the
  // same pass was removed; one registration suffices.)
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.addRequired<LoopInfoWrapperPass>();
  AU.addRequired<TargetLibraryInfoWrapperPass>();
  AU.addRequired<PostDominatorTreeWrapperPass>();
  AU.setPreservesAll();
}
|
|
|
|
|
|
|
|
/// Legacy pass-manager entry point: gather the required analyses and forward
/// them to BranchProbabilityInfo::calculate().
bool BranchProbabilityInfoWrapperPass::runOnFunction(Function &F) {
  auto &LI = getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
  auto &TLI = getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
  auto &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  auto &PDT = getAnalysis<PostDominatorTreeWrapperPass>().getPostDomTree();
  BPI.calculate(F, LI, &TLI, &DT, &PDT);
  // Pure analysis: the IR is never modified.
  return false;
}
|
|
|
|
|
|
|
|
// Discard all cached branch-probability data held by the wrapped BPI object.
void BranchProbabilityInfoWrapperPass::releaseMemory() { BPI.releaseMemory(); }
|
|
|
|
|
|
|
|
// Print the probabilities computed for the most recently analyzed function.
// The Module argument required by the interface is unused here.
void BranchProbabilityInfoWrapperPass::print(raw_ostream &OS,
                                             const Module *) const {
  BPI.print(OS);
}
|
2016-05-05 04:59:57 +02:00
|
|
|
|
2016-11-23 18:53:26 +01:00
|
|
|
// Unique key the new pass manager uses to identify this analysis.
AnalysisKey BranchProbabilityAnalysis::Key;
|
2016-05-05 04:59:57 +02:00
|
|
|
/// New pass-manager entry point: build a fresh BranchProbabilityInfo result
/// from the analyses it depends on.
BranchProbabilityInfo
BranchProbabilityAnalysis::run(Function &F, FunctionAnalysisManager &AM) {
  auto &LI = AM.getResult<LoopAnalysis>(F);
  auto &TLI = AM.getResult<TargetLibraryAnalysis>(F);
  auto &DT = AM.getResult<DominatorTreeAnalysis>(F);
  auto &PDT = AM.getResult<PostDominatorTreeAnalysis>(F);
  BranchProbabilityInfo BPI;
  BPI.calculate(F, LI, &TLI, &DT, &PDT);
  return BPI;
}
|
|
|
|
|
|
|
|
/// Printer pass: emit the BPI results for \p F to the stream configured at
/// construction. Preserves all analyses.
PreservedAnalyses
BranchProbabilityPrinterPass::run(Function &F, FunctionAnalysisManager &AM) {
  OS << "Printing analysis results of BPI for function '" << F.getName()
     << "':\n";
  AM.getResult<BranchProbabilityAnalysis>(F).print(OS);
  return PreservedAnalyses::all();
}
|