Add a subclass of IntrinsicInst for llvm.assume [nfc]
Add the subclass, update a few places that check for the intrinsic to use an idiomatic dyn_cast, and update the public interface of AssumptionCache to use the new class. A follow-up change will do the same for the newer assumption query/bundle mechanisms.
parent abf4528faf
commit 411b9be9f5
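In rough terms, the call-site pattern this commit replaces looks like the sketch below (illustrative only; the helper name and variables are hypothetical and not part of the patch):

// Illustrative sketch, not part of the patch: how a typical call site changes.
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/IR/IntrinsicInst.h"

using namespace llvm;

// Before: callers pattern-matched the intrinsic and handed the cache a plain CallInst:
//   if (match(I, m_Intrinsic<Intrinsic::assume>()))
//     AC.registerAssumption(cast<CallInst>(I));
// After: a single idiomatic dyn_cast to the new subclass; the cache interface is
// now typed so only genuine assumes can be registered.
static void registerIfAssume(AssumptionCache &AC, Instruction *I) {
  if (auto *Assume = dyn_cast<AssumeInst>(I))
    AC.registerAssumption(Assume);
}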
@@ -26,7 +26,7 @@
 namespace llvm {

-class CallInst;
+class AssumeInst;
 class Function;
 class raw_ostream;
 class Value;
@@ -116,15 +116,15 @@ public:
   ///
   /// The call passed in must be an instruction within this function and must
   /// not already be in the cache.
-  void registerAssumption(CallInst *CI);
+  void registerAssumption(AssumeInst *CI);

   /// Remove an \@llvm.assume intrinsic from this function's cache if it has
   /// been added to the cache earlier.
-  void unregisterAssumption(CallInst *CI);
+  void unregisterAssumption(AssumeInst *CI);

   /// Update the cache of values being affected by this assumption (i.e.
   /// the values about which this assumption provides information).
-  void updateAffectedValues(CallInst *CI);
+  void updateAffectedValues(AssumeInst *CI);

   /// Clear the cache of \@llvm.assume intrinsics for a function.
   ///
@@ -1217,6 +1217,18 @@ public:
   }
 };

+/// This represents the llvm.assume intrinsic.
+class AssumeInst : public IntrinsicInst {
+public:
+  static bool classof(const IntrinsicInst *I) {
+    return I->getIntrinsicID() == Intrinsic::assume;
+  }
+  static bool classof(const Value *V) {
+    return isa<IntrinsicInst>(V) && classof(cast<IntrinsicInst>(V));
+  }
+};
+
 } // end namespace llvm

 #endif // LLVM_IR_INTRINSICINST_H
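For orientation, here is a minimal usage sketch (not part of the patch; the function name is hypothetical) of what the classof overloads above provide: isa<>, cast<>, and dyn_cast<> now work on AssumeInst directly.

// Illustrative only: dyn_cast<AssumeInst> replaces the old
// "dyn_cast<IntrinsicInst> + getIntrinsicID() == Intrinsic::assume" check.
#include "llvm/IR/IntrinsicInst.h"

using namespace llvm;

static Value *getAssumedCondition(Instruction *I) {
  if (auto *Assume = dyn_cast<AssumeInst>(I))
    return Assume->getArgOperand(0); // the i1 condition passed to llvm.assume
  return nullptr;
}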
@@ -56,7 +56,7 @@ AssumptionCache::getOrInsertAffectedValues(Value *V) {
 }

 static void
-findAffectedValues(CallInst *CI,
+findAffectedValues(CallBase *CI,
                    SmallVectorImpl<AssumptionCache::ResultElem> &Affected) {
   // Note: This code must be kept in-sync with the code in
   // computeKnownBitsFromAssume in ValueTracking.
@@ -126,7 +126,7 @@ findAffectedValues(CallInst *CI,
   }
 }

-void AssumptionCache::updateAffectedValues(CallInst *CI) {
+void AssumptionCache::updateAffectedValues(AssumeInst *CI) {
   SmallVector<AssumptionCache::ResultElem, 16> Affected;
   findAffectedValues(CI, Affected);

@@ -139,7 +139,7 @@ void AssumptionCache::updateAffectedValues(CallInst *CI) {
   }
 }

-void AssumptionCache::unregisterAssumption(CallInst *CI) {
+void AssumptionCache::unregisterAssumption(AssumeInst *CI) {
   SmallVector<AssumptionCache::ResultElem, 16> Affected;
   findAffectedValues(CI, Affected);

@@ -211,13 +211,10 @@ void AssumptionCache::scanFunction() {

   // Update affected values.
   for (auto &A : AssumeHandles)
-    updateAffectedValues(cast<CallInst>(A));
+    updateAffectedValues(cast<AssumeInst>(A));
 }

-void AssumptionCache::registerAssumption(CallInst *CI) {
-  assert(match(CI, m_Intrinsic<Intrinsic::assume>()) &&
-         "Registered call does not call @llvm.assume");
-
+void AssumptionCache::registerAssumption(AssumeInst *CI) {
   // If we haven't scanned the function yet, just drop this assumption. It will
   // be found when we scan later.
   if (!Scanned)
@@ -1619,7 +1619,7 @@ Instruction *InstCombinerImpl::visitCallInst(CallInst &CI) {
              {RetainedKnowledge{Attribute::NonNull, 0, A}}, Next, &AC, &DT)) {

         Replacement->insertBefore(Next);
-        AC.registerAssumption(Replacement);
+        AC.registerAssumption(cast<AssumeInst>(Replacement));
         return RemoveConditionFromAssume(II);
       }
     }
@@ -1651,7 +1651,7 @@ Instruction *InstCombinerImpl::visitCallInst(CallInst &CI) {
                buildAssumeFromKnowledge(RK, Next, &AC, &DT)) {

           Replacement->insertAfter(II);
-          AC.registerAssumption(Replacement);
+          AC.registerAssumption(cast<AssumeInst>(Replacement));
         }
         return RemoveConditionFromAssume(II);
       }
@@ -1699,7 +1699,7 @@ Instruction *InstCombinerImpl::visitCallInst(CallInst &CI) {

     // Update the cache of affected values for this assumption (we might be
     // here because we just simplified the condition).
-    AC.updateAffectedValues(II);
+    AC.updateAffectedValues(cast<AssumeInst>(II));
     break;
   }
   case Intrinsic::experimental_guard: {
@@ -3983,7 +3983,7 @@ static bool combineInstructionsOverFunction(
       IRBuilderCallbackInserter([&Worklist, &AC](Instruction *I) {
         Worklist.add(I);
         if (match(I, m_Intrinsic<Intrinsic::assume>()))
-          AC.registerAssumption(cast<CallInst>(I));
+          AC.registerAssumption(cast<AssumeInst>(I));
       }));

   // Lower dbg.declare intrinsics otherwise their value may be clobbered
@@ -1436,8 +1436,7 @@ void LoopUnswitch::unswitchNontrivialCondition(
     for (Instruction &I : *NewBlocks[NBI]) {
       RemapInstruction(&I, VMap,
                        RF_NoModuleLevelChanges | RF_IgnoreMissingLocals);
-      if (auto *II = dyn_cast<IntrinsicInst>(&I))
-        if (II->getIntrinsicID() == Intrinsic::assume)
-          AC->registerAssumption(II);
+      if (auto *II = dyn_cast<AssumeInst>(&I))
+        AC->registerAssumption(II);
     }
   }
@@ -1113,8 +1113,7 @@ static BasicBlock *buildClonedLoopBlocks(
   for (Instruction &I : *ClonedBB) {
     RemapInstruction(&I, VMap,
                      RF_NoModuleLevelChanges | RF_IgnoreMissingLocals);
-    if (auto *II = dyn_cast<IntrinsicInst>(&I))
-      if (II->getIntrinsicID() == Intrinsic::assume)
-        AC.registerAssumption(II);
+    if (auto *II = dyn_cast<AssumeInst>(&I))
+      AC.registerAssumption(II);
   }

@@ -297,7 +297,7 @@ void llvm::salvageKnowledge(Instruction *I, AssumptionCache *AC,
   if (IntrinsicInst *Intr = Builder.build()) {
     Intr->insertBefore(I);
     if (AC)
-      AC->registerAssumption(Intr);
+      AC->registerAssumption(cast<AssumeInst>(Intr));
   }
 }

@@ -499,7 +499,7 @@ struct AssumeSimplify {
       return;
     MadeChange = true;
     MergedAssume->insertBefore(InsertPt);
-    AC.registerAssumption(MergedAssume);
+    AC.registerAssumption(cast<AssumeInst>(MergedAssume));
   }

   /// Merge assume when they are in the same BasicBlock and for all instruction
@@ -1593,10 +1593,10 @@ CodeExtractor::extractCodeRegion(const CodeExtractorAnalysisCache &CEAC) {
       Instruction *I = &*It;
       ++It;

-      if (match(I, m_Intrinsic<Intrinsic::assume>())) {
+      if (auto *AI = dyn_cast<AssumeInst>(I)) {
         if (AC)
-          AC->unregisterAssumption(cast<CallInst>(I));
-        I->eraseFromParent();
+          AC->unregisterAssumption(AI);
+        AI->eraseFromParent();
       }
     }
   }
@@ -1296,7 +1296,7 @@ static void AddAlignmentAssumptions(CallBase &CB, InlineFunctionInfo &IFI) {

       CallInst *NewAsmp =
           IRBuilder<>(&CB).CreateAlignmentAssumption(DL, ArgVal, Align);
-      AC->registerAssumption(NewAsmp);
+      AC->registerAssumption(cast<AssumeInst>(NewAsmp));
     }
   }
 }
@@ -2050,8 +2050,7 @@ llvm::InlineResult llvm::InlineFunction(CallBase &CB, InlineFunctionInfo &IFI,
     for (BasicBlock &NewBlock :
          make_range(FirstNewBlock->getIterator(), Caller->end()))
       for (Instruction &I : NewBlock)
-        if (auto *II = dyn_cast<IntrinsicInst>(&I))
-          if (II->getIntrinsicID() == Intrinsic::assume)
-            IFI.GetAssumptionCache(*Caller).registerAssumption(II);
+        if (auto *II = dyn_cast<AssumeInst>(&I))
+          IFI.GetAssumptionCache(*Caller).registerAssumption(II);
   }

@@ -467,8 +467,7 @@ bool LoopRotate::rotateLoop(Loop *L, bool SimplifiedLatch) {
       C->setName(Inst->getName());
       C->insertBefore(LoopEntryBranch);

-      if (auto *II = dyn_cast<IntrinsicInst>(C))
-        if (II->getIntrinsicID() == Intrinsic::assume)
-          AC->registerAssumption(II);
+      if (auto *II = dyn_cast<AssumeInst>(C))
+        AC->registerAssumption(II);
       // MemorySSA cares whether the cloned instruction was inserted or not, and
       // not whether it can be remapped to a simplified value.
@@ -681,13 +681,10 @@ LoopUnrollResult llvm::UnrollLoop(Loop *L, UnrollLoopOptions ULO, LoopInfo *LI,

       // Remap all instructions in the most recent iteration
       remapInstructionsInBlocks(NewBlocks, LastValueMap);
-      for (BasicBlock *NewBlock : NewBlocks) {
-        for (Instruction &I : *NewBlock) {
-          if (auto *II = dyn_cast<IntrinsicInst>(&I))
-            if (II->getIntrinsicID() == Intrinsic::assume)
-              AC->registerAssumption(II);
-        }
-      }
+      for (BasicBlock *NewBlock : NewBlocks)
+        for (Instruction &I : *NewBlock)
+          if (auto *II = dyn_cast<AssumeInst>(&I))
+            AC->registerAssumption(II);

       {
         // Identify what other metadata depends on the cloned version. After
@@ -432,8 +432,7 @@ llvm::UnrollAndJamLoop(Loop *L, unsigned Count, unsigned TripCount,
     remapInstructionsInBlocks(NewBlocks, LastValueMap);
     for (BasicBlock *NewBlock : NewBlocks) {
       for (Instruction &I : *NewBlock) {
-        if (auto *II = dyn_cast<IntrinsicInst>(&I))
-          if (II->getIntrinsicID() == Intrinsic::assume)
-            AC->registerAssumption(II);
+        if (auto *II = dyn_cast<AssumeInst>(&I))
+          AC->registerAssumption(II);
       }
     }
@@ -306,7 +306,7 @@ static void addAssumeNonNull(AssumptionCache *AC, LoadInst *LI) {
   LoadNotNull->insertAfter(LI);
   CallInst *CI = CallInst::Create(AssumeIntrinsic, {LoadNotNull});
   CI->insertAfter(LoadNotNull);
-  AC->registerAssumption(CI);
+  AC->registerAssumption(cast<AssumeInst>(CI));
 }

 static void removeIntrinsicUsers(AllocaInst *AI) {
@@ -2530,7 +2530,7 @@ static bool FoldCondBranchOnPHI(BranchInst *BI, DomTreeUpdater *DTU,

       // Register the new instruction with the assumption cache if necessary.
       if (AC && match(N, m_Intrinsic<Intrinsic::assume>()))
-        AC->registerAssumption(cast<IntrinsicInst>(N));
+        AC->registerAssumption(cast<AssumeInst>(N));
     }
   }

@@ -3022,8 +3022,7 @@ void InnerLoopVectorizer::scalarizeInstruction(Instruction *Instr, VPValue *Def,
   State.set(Def, Cloned, Instance);

   // If we just cloned a new assumption, add it the assumption cache.
-  if (auto *II = dyn_cast<IntrinsicInst>(Cloned))
-    if (II->getIntrinsicID() == Intrinsic::assume)
-      AC->registerAssumption(II);
+  if (auto *II = dyn_cast<AssumeInst>(Cloned))
+    AC->registerAssumption(II);

   // End if-block.
@@ -542,7 +542,7 @@ TEST(AssumeQueryAPI, AssumptionCache) {
   ASSERT_EQ(AR.size(), 1u);
   ASSERT_EQ(AR[0].Assume, &*Second);
   ASSERT_EQ(AR[0].Index, AssumptionCache::ExprResultIdx);
-  AC.unregisterAssumption(cast<CallInst>(&*Second));
+  AC.unregisterAssumption(cast<AssumeInst>(&*Second));
   AR = AC.assumptionsFor(F->getArg(1));
   ASSERT_EQ(AR.size(), 0u);
   AR = AC.assumptionsFor(F->getArg(0));