1
0
mirror of https://github.com/RPCS3/llvm-mirror.git synced 2024-11-26 04:32:44 +01:00

[Evaluator] Look through invariant.group intrinsics

Turning on -fstrict-vtable-pointers in Chrome caused an extra global
initializer. Turns out that a llvm.strip.invariant.group intrinsic was
causing GlobalOpt to fail to step through some simple code.

We can treat *.invariant.group uses as simply their operand.
Value::stripPointerCastsForAliasAnalysis() does exactly this. This
should be safe because the Evaluator does not skip memory accesses due
to invariants or alias analysis.

However, we don't want to leak that we've stripped arbitrary pointer
casts to users of Evaluator, so we bail out if we evaluate a function to
any constant, since we may have looked through *.invariant.group calls
and aliasing pointers cannot be arbitrarily substituted.

Reviewed By: rnk

Differential Revision: https://reviews.llvm.org/D98843
This commit is contained in:
Arthur Eubanks 2021-03-17 21:26:26 -07:00
parent ffdadec0fb
commit fa4e254b0f
3 changed files with 131 additions and 71 deletions

View File

@ -57,10 +57,17 @@ public:
bool EvaluateFunction(Function *F, Constant *&RetVal,
const SmallVectorImpl<Constant*> &ActualArgs);
/// Evaluate all instructions in block BB, returning true if successful, false
/// if we can't evaluate it. NewBB returns the next BB that control flows
/// into, or null upon return.
bool EvaluateBlock(BasicBlock::iterator CurInst, BasicBlock *&NextBB);
const DenseMap<Constant *, Constant *> &getMutatedMemory() const {
return MutatedMemory;
}
const SmallPtrSetImpl<GlobalVariable *> &getInvariants() const {
return Invariants;
}
private:
bool EvaluateBlock(BasicBlock::iterator CurInst, BasicBlock *&NextBB,
bool &StrippedPointerCastsForAliasAnalysis);
Constant *getVal(Value *V) {
if (Constant *CV = dyn_cast<Constant>(V)) return CV;
@ -76,15 +83,6 @@ public:
/// Casts call result to a type of bitcast call expression
Constant *castCallResultIfNeeded(Value *CallExpr, Constant *RV);
const DenseMap<Constant*, Constant*> &getMutatedMemory() const {
return MutatedMemory;
}
const SmallPtrSetImpl<GlobalVariable*> &getInvariants() const {
return Invariants;
}
private:
/// Given call site return callee and list of its formal arguments
Function *getCalleeWithFormalArgs(CallBase &CB,
SmallVectorImpl<Constant *> &Formals);

View File

@ -318,9 +318,10 @@ Constant *Evaluator::castCallResultIfNeeded(Value *CallExpr, Constant *RV) {
/// Evaluate all instructions in block BB, returning true if successful, false
/// if we can't evaluate it. NewBB returns the next BB that control flows into,
/// or null upon return.
bool Evaluator::EvaluateBlock(BasicBlock::iterator CurInst,
BasicBlock *&NextBB) {
/// or null upon return. StrippedPointerCastsForAliasAnalysis is set to true if
/// we looked through pointer casts to evaluate something.
bool Evaluator::EvaluateBlock(BasicBlock::iterator CurInst, BasicBlock *&NextBB,
bool &StrippedPointerCastsForAliasAnalysis) {
// This is the main evaluation loop.
while (true) {
Constant *InstResult = nullptr;
@ -550,56 +551,73 @@ bool Evaluator::EvaluateBlock(BasicBlock::iterator CurInst,
LLVM_DEBUG(dbgs() << "Skipping pseudoprobe intrinsic.\n");
++CurInst;
continue;
}
LLVM_DEBUG(dbgs() << "Unknown intrinsic. Can not evaluate.\n");
return false;
}
// Resolve function pointers.
SmallVector<Constant *, 8> Formals;
Function *Callee = getCalleeWithFormalArgs(CB, Formals);
if (!Callee || Callee->isInterposable()) {
LLVM_DEBUG(dbgs() << "Can not resolve function pointer.\n");
return false; // Cannot resolve.
}
if (Callee->isDeclaration()) {
// If this is a function we can constant fold, do it.
if (Constant *C = ConstantFoldCall(&CB, Callee, Formals, TLI)) {
InstResult = castCallResultIfNeeded(CB.getCalledOperand(), C);
if (!InstResult)
} else {
Value *Stripped = CurInst->stripPointerCastsForAliasAnalysis();
// Only attempt to getVal() if we've actually managed to strip
// anything away, or else we'll call getVal() on the current
// instruction.
if (Stripped != &*CurInst) {
InstResult = getVal(Stripped);
}
if (InstResult) {
LLVM_DEBUG(dbgs()
<< "Stripped pointer casts for alias analysis for "
"intrinsic call.\n");
StrippedPointerCastsForAliasAnalysis = true;
} else {
LLVM_DEBUG(dbgs() << "Unknown intrinsic. Cannot evaluate.\n");
return false;
LLVM_DEBUG(dbgs() << "Constant folded function call. Result: "
<< *InstResult << "\n");
} else {
LLVM_DEBUG(dbgs() << "Can not constant fold function call.\n");
return false;
}
}
} else {
if (Callee->getFunctionType()->isVarArg()) {
LLVM_DEBUG(dbgs() << "Can not constant fold vararg function call.\n");
return false;
}
if (!InstResult) {
// Resolve function pointers.
SmallVector<Constant *, 8> Formals;
Function *Callee = getCalleeWithFormalArgs(CB, Formals);
if (!Callee || Callee->isInterposable()) {
LLVM_DEBUG(dbgs() << "Can not resolve function pointer.\n");
return false; // Cannot resolve.
}
Constant *RetVal = nullptr;
// Execute the call, if successful, use the return value.
ValueStack.emplace_back();
if (!EvaluateFunction(Callee, RetVal, Formals)) {
LLVM_DEBUG(dbgs() << "Failed to evaluate function.\n");
return false;
}
ValueStack.pop_back();
InstResult = castCallResultIfNeeded(CB.getCalledOperand(), RetVal);
if (RetVal && !InstResult)
return false;
if (InstResult) {
LLVM_DEBUG(dbgs() << "Successfully evaluated function. Result: "
<< *InstResult << "\n\n");
if (Callee->isDeclaration()) {
// If this is a function we can constant fold, do it.
if (Constant *C = ConstantFoldCall(&CB, Callee, Formals, TLI)) {
InstResult = castCallResultIfNeeded(CB.getCalledOperand(), C);
if (!InstResult)
return false;
LLVM_DEBUG(dbgs() << "Constant folded function call. Result: "
<< *InstResult << "\n");
} else {
LLVM_DEBUG(dbgs() << "Can not constant fold function call.\n");
return false;
}
} else {
LLVM_DEBUG(dbgs()
<< "Successfully evaluated function. Result: 0\n\n");
if (Callee->getFunctionType()->isVarArg()) {
LLVM_DEBUG(dbgs()
<< "Can not constant fold vararg function call.\n");
return false;
}
Constant *RetVal = nullptr;
// Execute the call, if successful, use the return value.
ValueStack.emplace_back();
if (!EvaluateFunction(Callee, RetVal, Formals)) {
LLVM_DEBUG(dbgs() << "Failed to evaluate function.\n");
return false;
}
ValueStack.pop_back();
InstResult = castCallResultIfNeeded(CB.getCalledOperand(), RetVal);
if (RetVal && !InstResult)
return false;
if (InstResult) {
LLVM_DEBUG(dbgs() << "Successfully evaluated function. Result: "
<< *InstResult << "\n\n");
} else {
LLVM_DEBUG(dbgs()
<< "Successfully evaluated function. Result: 0\n\n");
}
}
}
} else if (CurInst->isTerminator()) {
@ -694,15 +712,27 @@ bool Evaluator::EvaluateFunction(Function *F, Constant *&RetVal,
BasicBlock *NextBB = nullptr; // Initialized to avoid compiler warnings.
LLVM_DEBUG(dbgs() << "Trying to evaluate BB: " << *CurBB << "\n");
if (!EvaluateBlock(CurInst, NextBB))
bool StrippedPointerCastsForAliasAnalysis = false;
if (!EvaluateBlock(CurInst, NextBB, StrippedPointerCastsForAliasAnalysis))
return false;
if (!NextBB) {
// Successfully running until there's no next block means that we found
// the return. Fill it the return value and pop the call stack.
ReturnInst *RI = cast<ReturnInst>(CurBB->getTerminator());
if (RI->getNumOperands())
if (RI->getNumOperands()) {
// The Evaluator can look through pointer casts as long as alias
// analysis holds because it's just a simple interpreter and doesn't
// skip memory accesses due to invariant group metadata, but we can't
// let users of Evaluator use a value that's been gleaned looking
// through stripping pointer casts.
if (StrippedPointerCastsForAliasAnalysis &&
!RI->getReturnValue()->getType()->isVoidTy()) {
return false;
}
RetVal = getVal(RI->getOperand(0));
}
CallStack.pop_back();
return true;
}

View File

@ -1,18 +1,16 @@
; RUN: opt -S -globalopt < %s | FileCheck %s
; This test is a hint at what GlobalOpt could optimize and what it can't
; FIXME: @tmp and @tmp2 can be safely set to 42
; CHECK: @tmp = local_unnamed_addr global i32 0
; CHECK: @tmp2 = local_unnamed_addr global i32 0
; CHECK: @tmp3 = global i32 0
; CHECK: @llvm.global_ctors = appending global [1 x {{.*}}@_GLOBAL__I_c
@llvm.global_ctors = appending global [3 x { i32, void ()*, i8* }] [{ i32, void ()*, i8* } { i32 65535, void ()* @_GLOBAL__I_a, i8* null }, { i32, void ()*, i8* } { i32 65535, void ()* @_GLOBAL__I_b, i8* null }, { i32, void ()*, i8* } { i32 65535, void ()* @_GLOBAL__I_c, i8* null }]
; CHECK: @tmp = local_unnamed_addr global i32 42
; CHECK: @tmp2 = local_unnamed_addr global i32 42
; CHECK: @tmp3 = global i32 42
@tmp = global i32 0
@tmp2 = global i32 0
@tmp3 = global i32 0
@ptrToTmp3 = global i32* null
@llvm.global_ctors = appending global [1 x { i32, void ()*, i8* }] [{ i32, void ()*, i8* } { i32 65535, void ()* @_GLOBAL__I_a, i8* null }]
; Trivially evaluable helper: always returns the constant 42. The global
; constructors below call this so the Evaluator has a concrete value to store.
define i32 @TheAnswerToLifeTheUniverseAndEverything() {
ret i32 42
}
@ -43,7 +41,7 @@ enter:
; We can't step through launder.invariant.group here, because that would change
; this load in @usage_of_globals()
; val = load i32, i32* %ptrVal, !invariant.group !0
; %val = load i32, i32* %ptrVal, !invariant.group !0
; into
; %val = load i32, i32* @tmp3, !invariant.group !0
; and then we could assume that %val and %val2 are the same, which could be
@ -62,6 +60,7 @@ enter:
ret void
}
define void @usage_of_globals() {
entry:
%ptrVal = load i32*, i32** @ptrToTmp3
@ -72,8 +71,41 @@ entry:
ret void;
}
@tmp4 = global i32 0
; Stores 42 into @tmp4 through a pointer obscured by llvm.strip.invariant.group.
; Exercises the new Evaluator behavior: the intrinsic result is treated as its
; operand (@tmp4), so GlobalOpt can still fold this initializer to a constant.
define void @_GLOBAL__I_b() {
enter:
%val = call i32 @TheAnswerToLifeTheUniverseAndEverything()
%p1 = bitcast i32* @tmp4 to i8*
; strip.invariant.group yields a pointer aliasing %p1; the Evaluator now looks
; through it via stripPointerCastsForAliasAnalysis().
%p2 = call i8* @llvm.strip.invariant.group.p0i8(i8* %p1)
%p3 = bitcast i8* %p2 to i32*
store i32 %val, i32* %p3
ret void
}
@tmp5 = global i32 0
@tmp6 = global i32* null
; CHECK: @tmp6 = local_unnamed_addr global i32* null
; Returns its parameter laundered through llvm.launder.invariant.group.
; The returned pointer only aliases %p under AA rules, so the Evaluator must
; not hand it back to callers as a substitutable constant (see @_GLOBAL__I_c).
define i32* @_dont_return_param(i32* %p) {
%p1 = bitcast i32* %p to i8*
%p2 = call i8* @llvm.launder.invariant.group(i8* %p1)
%p3 = bitcast i8* %p2 to i32*
ret i32* %p3
}
; We should bail out if we return any pointers derived via invariant.group intrinsics at any point.
define void @_GLOBAL__I_c() {
enter:
%tmp5 = call i32* @_dont_return_param(i32* @tmp5)
store i32* %tmp5, i32** @tmp6
ret void
}
declare void @changeTmp3ValAndCallBarrierInside()
declare i8* @llvm.launder.invariant.group(i8*)
declare i8* @llvm.strip.invariant.group.p0i8(i8*)
!0 = !{}