Mirror of https://github.com/RPCS3/llvm-mirror.git (synced 2024-11-26 12:43:36 +01:00)
[CallSite removal] Migrate ConstantFolding APIs and implementation to `CallBase`.

Users have been updated. You can see how to update any out-of-tree usages: pass `cast<CallBase>(CS.getInstruction())`.

llvm-svn: 353661
parent eaf4cbb2b3
commit 25dd71753a
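For out-of-tree callers that still hold a CallSite, the update described in the commit message is mechanical. A minimal sketch, assuming a hypothetical downstream helper (the helper name, arguments, and includes are illustrative, not part of this commit):

// Hypothetical out-of-tree helper, shown only to illustrate the migration.
#include "llvm/ADT/ArrayRef.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/InstrTypes.h"

using namespace llvm;

static Constant *tryFoldCall(CallSite CS, Function *F,
                             ArrayRef<Constant *> Args,
                             const TargetLibraryInfo *TLI) {
  // Before this commit the folding APIs took the CallSite directly:
  //   if (canConstantFoldCallTo(CS, F))
  //     return ConstantFoldCall(CS, F, Args, TLI);
  // They now take the underlying CallBase instruction instead.
  auto *Call = cast<CallBase>(CS.getInstruction());
  if (canConstantFoldCallTo(Call, F))
    return ConstantFoldCall(Call, F, Args, TLI);
  return nullptr;
}

This is the same `cast<CallBase>(CS.getInstruction())` pattern the in-tree callers below (InlineCost, InstructionSimplify, SCCP, Evaluator) adopt.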
include/llvm/Analysis/ConstantFolding.h

@@ -22,7 +22,7 @@
 namespace llvm {
 class APInt;
 template <typename T> class ArrayRef;
-class CallSite;
+class CallBase;
 class Constant;
 class ConstantExpr;
 class ConstantVector;
@@ -30,7 +30,6 @@ class DataLayout;
 class Function;
 class GlobalValue;
 class Instruction;
-class ImmutableCallSite;
 class TargetLibraryInfo;
 class Type;

@@ -138,11 +137,11 @@ Constant *ConstantFoldLoadThroughGEPIndices(Constant *C,

 /// canConstantFoldCallTo - Return true if its even possible to fold a call to
 /// the specified function.
-bool canConstantFoldCallTo(ImmutableCallSite CS, const Function *F);
+bool canConstantFoldCallTo(const CallBase *Call, const Function *F);

 /// ConstantFoldCall - Attempt to constant fold a call to the specified function
 /// with the specified arguments, returning null if unsuccessful.
-Constant *ConstantFoldCall(ImmutableCallSite CS, Function *F,
+Constant *ConstantFoldCall(const CallBase *Call, Function *F,
                            ArrayRef<Constant *> Operands,
                            const TargetLibraryInfo *TLI = nullptr);

@@ -154,7 +153,7 @@ Constant *ConstantFoldLoadThroughBitcast(Constant *C, Type *DestTy,

 /// Check whether the given call has no side-effects.
 /// Specifically checks for math routimes which sometimes set errno.
-bool isMathLibCallNoop(CallSite CS, const TargetLibraryInfo *TLI);
+bool isMathLibCallNoop(const CallBase *Call, const TargetLibraryInfo *TLI);
 }

 #endif
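A minimal usage sketch of the updated declarations above, for callers that start from a plain Instruction rather than a CallSite (the function name is illustrative, not part of this patch); CallBase covers both CallInst and InvokeInst, so a single dyn_cast replaces the old CallSite construction:

#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/InstrTypes.h"

using namespace llvm;

// Illustrative only: true if I is a call to a math routine that provably has
// no side effects (compare the lib/Transforms/Utils/Local.cpp hunk below).
static bool isRemovableMathCall(Instruction *I, const TargetLibraryInfo *TLI) {
  if (auto *Call = dyn_cast<CallBase>(I))
    return isMathLibCallNoop(Call, TLI);
  return false;
}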
lib/Analysis/ConstantFolding.cpp

@@ -1024,9 +1024,9 @@ Constant *ConstantFoldInstOperandsImpl(const Value *InstOrCE, unsigned Opcode,
   case Instruction::FCmp: llvm_unreachable("Invalid for compares");
   case Instruction::Call:
     if (auto *F = dyn_cast<Function>(Ops.back())) {
-      ImmutableCallSite CS(cast<CallInst>(InstOrCE));
-      if (canConstantFoldCallTo(CS, F))
-        return ConstantFoldCall(CS, F, Ops.slice(0, Ops.size() - 1), TLI);
+      const auto *Call = cast<CallBase>(InstOrCE);
+      if (canConstantFoldCallTo(Call, F))
+        return ConstantFoldCall(Call, F, Ops.slice(0, Ops.size() - 1), TLI);
     }
     return nullptr;
   case Instruction::Select:
@@ -1366,8 +1366,8 @@ llvm::ConstantFoldLoadThroughGEPIndices(Constant *C,
 // Constant Folding for Calls
 //

-bool llvm::canConstantFoldCallTo(ImmutableCallSite CS, const Function *F) {
-  if (CS.isNoBuiltin() || CS.isStrictFP())
+bool llvm::canConstantFoldCallTo(const CallBase *Call, const Function *F) {
+  if (Call->isNoBuiltin() || Call->isStrictFP())
     return false;
   switch (F->getIntrinsicID()) {
   case Intrinsic::fabs:
@@ -1643,7 +1643,7 @@ static bool getConstIntOrUndef(Value *Op, const APInt *&C) {
 Constant *ConstantFoldScalarCall(StringRef Name, unsigned IntrinsicID, Type *Ty,
                                  ArrayRef<Constant *> Operands,
                                  const TargetLibraryInfo *TLI,
-                                 ImmutableCallSite CS) {
+                                 const CallBase *Call) {
   if (Operands.size() == 1) {
     if (IntrinsicID == Intrinsic::is_constant) {
       // We know we have a "Constant" argument. But we want to only
@@ -1671,9 +1671,10 @@ Constant *ConstantFoldScalarCall(StringRef Name, unsigned IntrinsicID, Type *Ty,
     if (IntrinsicID == Intrinsic::launder_invariant_group ||
         IntrinsicID == Intrinsic::strip_invariant_group) {
       // If instruction is not yet put in a basic block (e.g. when cloning
-      // a function during inlining), CS caller may not be available.
-      // So check CS's BB first before querying CS.getCaller.
-      const Function *Caller = CS.getParent() ? CS.getCaller() : nullptr;
+      // a function during inlining), Call's caller may not be available.
+      // So check Call's BB first before querying Call->getCaller.
+      const Function *Caller =
+          Call->getParent() ? Call->getCaller() : nullptr;
       if (Caller &&
           !NullPointerIsDefined(
               Caller, Operands[0]->getType()->getPointerAddressSpace())) {
@@ -2215,7 +2216,7 @@ Constant *ConstantFoldVectorCall(StringRef Name, unsigned IntrinsicID,
                                  VectorType *VTy, ArrayRef<Constant *> Operands,
                                  const DataLayout &DL,
                                  const TargetLibraryInfo *TLI,
-                                 ImmutableCallSite CS) {
+                                 const CallBase *Call) {
   SmallVector<Constant *, 4> Result(VTy->getNumElements());
   SmallVector<Constant *, 4> Lane(Operands.size());
   Type *Ty = VTy->getElementType();
@@ -2278,7 +2279,8 @@ Constant *ConstantFoldVectorCall(StringRef Name, unsigned IntrinsicID,
     }

     // Use the regular scalar folding to simplify this column.
-    Constant *Folded = ConstantFoldScalarCall(Name, IntrinsicID, Ty, Lane, TLI, CS);
+    Constant *Folded =
+        ConstantFoldScalarCall(Name, IntrinsicID, Ty, Lane, TLI, Call);
     if (!Folded)
       return nullptr;
     Result[I] = Folded;
@@ -2289,11 +2291,10 @@ Constant *ConstantFoldVectorCall(StringRef Name, unsigned IntrinsicID,

 } // end anonymous namespace

-Constant *
-llvm::ConstantFoldCall(ImmutableCallSite CS, Function *F,
-                       ArrayRef<Constant *> Operands,
-                       const TargetLibraryInfo *TLI) {
-  if (CS.isNoBuiltin() || CS.isStrictFP())
+Constant *llvm::ConstantFoldCall(const CallBase *Call, Function *F,
+                                 ArrayRef<Constant *> Operands,
+                                 const TargetLibraryInfo *TLI) {
+  if (Call->isNoBuiltin() || Call->isStrictFP())
     return nullptr;
   if (!F->hasName())
     return nullptr;
@@ -2303,17 +2304,19 @@ llvm::ConstantFoldCall(ImmutableCallSite CS, Function *F,

   if (auto *VTy = dyn_cast<VectorType>(Ty))
     return ConstantFoldVectorCall(Name, F->getIntrinsicID(), VTy, Operands,
-                                  F->getParent()->getDataLayout(), TLI, CS);
+                                  F->getParent()->getDataLayout(), TLI, Call);

-  return ConstantFoldScalarCall(Name, F->getIntrinsicID(), Ty, Operands, TLI, CS);
+  return ConstantFoldScalarCall(Name, F->getIntrinsicID(), Ty, Operands, TLI,
+                                Call);
 }

-bool llvm::isMathLibCallNoop(CallSite CS, const TargetLibraryInfo *TLI) {
+bool llvm::isMathLibCallNoop(const CallBase *Call,
+                             const TargetLibraryInfo *TLI) {
   // FIXME: Refactor this code; this duplicates logic in LibCallsShrinkWrap
   // (and to some extent ConstantFoldScalarCall).
-  if (CS.isNoBuiltin() || CS.isStrictFP())
+  if (Call->isNoBuiltin() || Call->isStrictFP())
     return false;
-  Function *F = CS.getCalledFunction();
+  Function *F = Call->getCalledFunction();
   if (!F)
     return false;

@@ -2321,8 +2324,8 @@ bool llvm::isMathLibCallNoop(CallSite CS, const TargetLibraryInfo *TLI) {
   if (!TLI || !TLI->getLibFunc(*F, Func))
     return false;

-  if (CS.getNumArgOperands() == 1) {
-    if (ConstantFP *OpC = dyn_cast<ConstantFP>(CS.getArgOperand(0))) {
+  if (Call->getNumArgOperands() == 1) {
+    if (ConstantFP *OpC = dyn_cast<ConstantFP>(Call->getArgOperand(0))) {
       const APFloat &Op = OpC->getValueAPF();
       switch (Func) {
       case LibFunc_logl:
@@ -2420,9 +2423,9 @@ bool llvm::isMathLibCallNoop(CallSite CS, const TargetLibraryInfo *TLI) {
     }
   }

-  if (CS.getNumArgOperands() == 2) {
-    ConstantFP *Op0C = dyn_cast<ConstantFP>(CS.getArgOperand(0));
-    ConstantFP *Op1C = dyn_cast<ConstantFP>(CS.getArgOperand(1));
+  if (Call->getNumArgOperands() == 2) {
+    ConstantFP *Op0C = dyn_cast<ConstantFP>(Call->getArgOperand(0));
+    ConstantFP *Op1C = dyn_cast<ConstantFP>(Call->getArgOperand(1));
     if (Op0C && Op1C) {
       const APFloat &Op0 = Op0C->getValueAPF();
       const APFloat &Op1 = Op1C->getValueAPF();
lib/Analysis/InlineCost.cpp

@@ -1177,7 +1177,7 @@ bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
   // because we have to continually rebuild the argument list even when no
   // simplifications can be performed. Until that is fixed with remapping
   // inside of instsimplify, directly constant fold calls here.
-  if (!canConstantFoldCallTo(CS, F))
+  if (!canConstantFoldCallTo(cast<CallBase>(CS.getInstruction()), F))
     return false;

   // Try to re-map the arguments to constants.
@@ -1193,7 +1193,8 @@ bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {

     ConstantArgs.push_back(C);
   }
-  if (Constant *C = ConstantFoldCall(CS, F, ConstantArgs)) {
+  if (Constant *C = ConstantFoldCall(cast<CallBase>(CS.getInstruction()), F,
+                                     ConstantArgs)) {
     SimplifiedValues[CS.getInstruction()] = C;
     return true;
   }
lib/Analysis/InstructionSimplify.cpp

@@ -5166,7 +5166,7 @@ static Value *SimplifyCall(ImmutableCallSite CS, Value *V, IterTy ArgBegin,
   if (Value *Ret = simplifyIntrinsic(F, ArgBegin, ArgEnd, Q))
     return Ret;

-  if (!canConstantFoldCallTo(CS, F))
+  if (!canConstantFoldCallTo(cast<CallBase>(CS.getInstruction()), F))
     return nullptr;

   SmallVector<Constant *, 4> ConstantArgs;
@@ -5178,7 +5178,8 @@ static Value *SimplifyCall(ImmutableCallSite CS, Value *V, IterTy ArgBegin,
     ConstantArgs.push_back(C);
   }

-  return ConstantFoldCall(CS, F, ConstantArgs, Q.TLI);
+  return ConstantFoldCall(cast<CallBase>(CS.getInstruction()), F, ConstantArgs,
+                          Q.TLI);
 }

 Value *llvm::SimplifyCall(ImmutableCallSite CS, Value *V,
lib/Transforms/Scalar/SCCP.cpp

@@ -1243,7 +1243,7 @@ CallOverdefined:
   // Otherwise, if we have a single return value case, and if the function is
   // a declaration, maybe we can constant fold it.
   if (F && F->isDeclaration() && !I->getType()->isStructTy() &&
-      canConstantFoldCallTo(CS, F)) {
+      canConstantFoldCallTo(cast<CallBase>(CS.getInstruction()), F)) {
     SmallVector<Constant*, 8> Operands;
     for (CallSite::arg_iterator AI = CS.arg_begin(), E = CS.arg_end();
          AI != E; ++AI) {
@@ -1264,7 +1264,8 @@ CallOverdefined:

     // If we can constant fold this, mark the result of the call as a
     // constant.
-    if (Constant *C = ConstantFoldCall(CS, F, Operands, TLI)) {
+    if (Constant *C = ConstantFoldCall(cast<CallBase>(CS.getInstruction()), F,
+                                       Operands, TLI)) {
       // call -> undef.
       if (isa<UndefValue>(C))
         return;
lib/Transforms/Utils/Evaluator.cpp

@@ -540,7 +540,8 @@ bool Evaluator::EvaluateBlock(BasicBlock::iterator CurInst,

       if (Callee->isDeclaration()) {
         // If this is a function we can constant fold, do it.
-        if (Constant *C = ConstantFoldCall(CS, Callee, Formals, TLI)) {
+        if (Constant *C = ConstantFoldCall(cast<CallBase>(CS.getInstruction()),
+                                           Callee, Formals, TLI)) {
           InstResult = castCallResultIfNeeded(CS.getCalledValue(), C);
           if (!InstResult)
             return false;
lib/Transforms/Utils/Local.cpp

@@ -415,8 +415,8 @@ bool llvm::wouldInstructionBeTriviallyDead(Instruction *I,
     if (Constant *C = dyn_cast<Constant>(CI->getArgOperand(0)))
       return C->isNullValue() || isa<UndefValue>(C);

-  if (CallSite CS = CallSite(I))
-    if (isMathLibCallNoop(CS, TLI))
+  if (auto *Call = dyn_cast<CallBase>(I))
+    if (isMathLibCallNoop(Call, TLI))
       return true;

   return false;