1
0
mirror of https://github.com/RPCS3/llvm-mirror.git synced 2025-02-01 05:01:59 +01:00

Cache the DataLayout rather than looking it up frequently.

llvm-svn: 300393
This commit is contained in:
Eric Christopher 2017-04-15 06:14:50 +00:00
parent f7f1e6f3b0
commit bdb7a110e3

View File

@@ -87,6 +87,9 @@ class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
/// The called function.
Function &F;
// Cache the DataLayout since we use it a lot.
const DataLayout &DL;
/// The candidate callsite being analyzed. Please do not use this to do
/// analysis in the caller function; we want the inline cost query to be
/// easily cacheable. Instead, use the cover function paramHasAttr.
@@ -217,17 +220,17 @@ public:
ProfileSummaryInfo *PSI, Function &Callee, CallSite CSArg,
const InlineParams &Params)
: TTI(TTI), GetAssumptionCache(GetAssumptionCache), GetBFI(GetBFI),
PSI(PSI), F(Callee), CandidateCS(CSArg), Params(Params), PSI(PSI), F(Callee), DL(F.getParent()->getDataLayout()),
Threshold(Params.DefaultThreshold), Cost(0), IsCallerRecursive(false), CandidateCS(CSArg), Params(Params), Threshold(Params.DefaultThreshold),
IsRecursiveCall(false), ExposesReturnsTwice(false), Cost(0), IsCallerRecursive(false), IsRecursiveCall(false),
HasDynamicAlloca(false), ContainsNoDuplicateCall(false), ExposesReturnsTwice(false), HasDynamicAlloca(false),
HasReturn(false), HasIndirectBr(false), HasFrameEscape(false), ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
AllocatedSize(0), NumInstructions(0), NumVectorInstructions(0), HasFrameEscape(false), AllocatedSize(0), NumInstructions(0),
FiftyPercentVectorBonus(0), TenPercentVectorBonus(0), VectorBonus(0), NumVectorInstructions(0), FiftyPercentVectorBonus(0),
NumConstantArgs(0), NumConstantOffsetPtrArgs(0), NumAllocaArgs(0), TenPercentVectorBonus(0), VectorBonus(0), NumConstantArgs(0),
NumConstantPtrCmps(0), NumConstantPtrDiffs(0), NumConstantOffsetPtrArgs(0), NumAllocaArgs(0), NumConstantPtrCmps(0),
NumInstructionsSimplified(0), SROACostSavings(0), NumConstantPtrDiffs(0), NumInstructionsSimplified(0),
SROACostSavingsLost(0) {} SROACostSavings(0), SROACostSavingsLost(0) {}
bool analyzeCall(CallSite CS);
@@ -304,7 +307,6 @@ void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
const DataLayout &DL = F.getParent()->getDataLayout();
unsigned IntPtrWidth = DL.getPointerSizeInBits();
assert(IntPtrWidth == Offset.getBitWidth());
@@ -354,7 +356,6 @@ bool CallAnalyzer::visitAlloca(AllocaInst &I) {
if (I.isArrayAllocation()) {
Constant *Size = SimplifiedValues.lookup(I.getArraySize());
if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
const DataLayout &DL = F.getParent()->getDataLayout();
Type *Ty = I.getAllocatedType();
AllocatedSize = SaturatingMultiplyAdd(
AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
@@ -364,7 +365,6 @@ bool CallAnalyzer::visitAlloca(AllocaInst &I) {
// Accumulate the allocated size.
if (I.isStaticAlloca()) {
const DataLayout &DL = F.getParent()->getDataLayout();
Type *Ty = I.getAllocatedType();
AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
}
@@ -505,7 +505,6 @@ bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
// Track base/offset pairs when converted to a plain integer provided the
// integer is large enough to represent the pointer.
unsigned IntegerSize = I.getType()->getScalarSizeInBits();
const DataLayout &DL = F.getParent()->getDataLayout();
if (IntegerSize >= DL.getPointerSizeInBits()) {
std::pair<Value *, APInt> BaseAndOffset =
ConstantOffsetPtrs.lookup(I.getOperand(0));
@@ -539,7 +538,6 @@ bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
// modifications provided the integer is not too large.
Value *Op = I.getOperand(0);
unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
const DataLayout &DL = F.getParent()->getDataLayout();
if (IntegerSize <= DL.getPointerSizeInBits()) {
std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
if (BaseAndOffset.first)
@@ -571,7 +569,6 @@ bool CallAnalyzer::visitCastInst(CastInst &I) {
bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
Value *Operand = I.getOperand(0);
if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
const DataLayout &DL = F.getParent()->getDataLayout();
return ConstantFoldInstOperands(&I, COps[0], DL);
}))
return true;
@@ -777,7 +774,6 @@ bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
auto Evaluate = [&](SmallVectorImpl<Constant *> &COps) {
Value *SimpleV = nullptr;
const DataLayout &DL = F.getParent()->getDataLayout();
if (auto FI = dyn_cast<FPMathOperator>(&I))
SimpleV = SimplifyFPBinOp(I.getOpcode(), COps[0], COps[1],
FI->getFastMathFlags(), DL);
@@ -1162,7 +1158,6 @@ ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
if (!V->getType()->isPointerTy())
return nullptr;
const DataLayout &DL = F.getParent()->getDataLayout();
unsigned IntPtrWidth = DL.getPointerSizeInBits();
APInt Offset = APInt::getNullValue(IntPtrWidth);
@@ -1219,7 +1214,6 @@ bool CallAnalyzer::analyzeCall(CallSite CS) {
FiftyPercentVectorBonus = 3 * Threshold / 2;
TenPercentVectorBonus = 3 * Threshold / 4;
const DataLayout &DL = F.getParent()->getDataLayout();
// Track whether the post-inlining function would have more than one basic
// block. A single basic block is often intended for inlining. Balloon the