//===- AssumptionCache.cpp - Cache finding @llvm.assume calls -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains a pass that keeps track of @llvm.assume intrinsics in
// the functions of a module.
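//
// Clients normally query this cache instead of re-scanning the IR for
// @llvm.assume calls themselves; a typical (purely illustrative) use from a
// new-pass-manager pass looks like:
//
//   AssumptionCache &AC = AM.getResult<AssumptionAnalysis>(F);
//   for (auto &VH : AC.assumptions())
//     if (VH) { /* inspect cast<CallInst>(VH) */ }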
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/AssumeBundleQueries.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <utility>

using namespace llvm;
using namespace llvm::PatternMatch;

static cl::opt<bool>
    VerifyAssumptionCache("verify-assumption-cache", cl::Hidden,
                          cl::desc("Enable verification of assumption cache"),
                          cl::init(false));

SmallVector<AssumptionCache::ResultElem, 1> &
AssumptionCache::getOrInsertAffectedValues(Value *V) {
  // Try using find_as first to avoid creating extra value handles just for the
  // purpose of doing the lookup.
  auto AVI = AffectedValues.find_as(V);
  if (AVI != AffectedValues.end())
    return AVI->second;

  auto AVIP = AffectedValues.insert(
      {AffectedValueCallbackVH(V, this), SmallVector<ResultElem, 1>()});
  return AVIP.first->second;
}

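// Collect, for the @llvm.assume call CI, every value whose analysis results
// the assume may refine: the "WasOn" operands of its operand bundles and the
// values feeding its boolean condition (looking through bitcast/ptrtoint/not
// and through the operands of equality compares).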
static void
findAffectedValues(CallInst *CI,
                   SmallVectorImpl<AssumptionCache::ResultElem> &Affected) {
  // Note: This code must be kept in-sync with the code in
  // computeKnownBitsFromAssume in ValueTracking.

  auto AddAffected = [&Affected](Value *V, unsigned Idx =
                                               AssumptionCache::ExprResultIdx) {
    if (isa<Argument>(V)) {
      Affected.push_back({V, Idx});
    } else if (auto *I = dyn_cast<Instruction>(V)) {
      Affected.push_back({I, Idx});

      // Peek through unary operators to find the source of the condition.
      Value *Op;
      if (match(I, m_BitCast(m_Value(Op))) ||
          match(I, m_PtrToInt(m_Value(Op))) || match(I, m_Not(m_Value(Op)))) {
        if (isa<Instruction>(Op) || isa<Argument>(Op))
          Affected.push_back({Op, Idx});
      }
    }
  };

  for (unsigned Idx = 0; Idx != CI->getNumOperandBundles(); Idx++) {
    if (CI->getOperandBundleAt(Idx).Inputs.size() > ABA_WasOn &&
        CI->getOperandBundleAt(Idx).getTagName() != IgnoreBundleTag)
      AddAffected(CI->getOperandBundleAt(Idx).Inputs[ABA_WasOn], Idx);
  }

  Value *Cond = CI->getArgOperand(0), *A, *B;
  AddAffected(Cond);

  CmpInst::Predicate Pred;
  if (match(Cond, m_ICmp(Pred, m_Value(A), m_Value(B)))) {
    AddAffected(A);
    AddAffected(B);

    if (Pred == ICmpInst::ICMP_EQ) {
      // For equality comparisons, we handle the case of bit inversion.
      auto AddAffectedFromEq = [&AddAffected](Value *V) {
        Value *A;
        if (match(V, m_Not(m_Value(A)))) {
          AddAffected(A);
          V = A;
        }

        Value *B;
        ConstantInt *C;
        // (A & B) or (A | B) or (A ^ B).
        if (match(V, m_BitwiseLogic(m_Value(A), m_Value(B)))) {
          AddAffected(A);
          AddAffected(B);
        // (A << C) or (A >>_s C) or (A >>_u C) where C is some constant.
        } else if (match(V, m_Shift(m_Value(A), m_ConstantInt(C)))) {
          AddAffected(A);
        }
      };

      AddAffectedFromEq(A);
      AddAffectedFromEq(B);
    }
  }
}

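// Record CI in the affected-values map for every value found by
// findAffectedValues, skipping {assume, index} pairs that are already present.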
void AssumptionCache::updateAffectedValues(CallInst *CI) {
  SmallVector<AssumptionCache::ResultElem, 16> Affected;
  findAffectedValues(CI, Affected);

  for (auto &AV : Affected) {
    auto &AVV = getOrInsertAffectedValues(AV.Assume);
    if (std::find_if(AVV.begin(), AVV.end(), [&](ResultElem &Elem) {
          return Elem.Assume == CI && Elem.Index == AV.Index;
        }) == AVV.end())
      AVV.push_back({CI, AV.Index});
  }
}

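// Remove a soon-to-be-deleted assume call CI from the cache: null out its
// entries in the affected-value lists (erasing lists left with no live
// assumes) and drop it from the function's list of assume handles.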
void AssumptionCache::unregisterAssumption(CallInst *CI) {
  SmallVector<AssumptionCache::ResultElem, 16> Affected;
  findAffectedValues(CI, Affected);

  for (auto &AV : Affected) {
    auto AVI = AffectedValues.find_as(AV.Assume);
    if (AVI == AffectedValues.end())
      continue;
    bool Found = false;
    bool HasNonnull = false;
    for (ResultElem &Elem : AVI->second) {
      if (Elem.Assume == CI) {
        Found = true;
        Elem.Assume = nullptr;
      }
      HasNonnull |= !!Elem.Assume;
      if (HasNonnull && Found)
        break;
    }
    assert(Found && "already unregistered or incorrect cache state");
    if (!HasNonnull)
      AffectedValues.erase(AVI);
  }

  AssumeHandles.erase(
      remove_if(AssumeHandles, [CI](ResultElem &RE) { return CI == RE; }),
      AssumeHandles.end());
}

void AssumptionCache::AffectedValueCallbackVH::deleted() {
  auto AVI = AC->AffectedValues.find(getValPtr());
  if (AVI != AC->AffectedValues.end())
    AC->AffectedValues.erase(AVI);
  // 'this' now dangles!
}

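// Merge the affected-value entries recorded for OV into those of NV and drop
// OV's entry; used when all uses of OV are replaced with NV so assumptions
// about OV remain reachable through NV.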
void AssumptionCache::transferAffectedValuesInCache(Value *OV, Value *NV) {
  auto &NAVV = getOrInsertAffectedValues(NV);
  auto AVI = AffectedValues.find(OV);
  if (AVI == AffectedValues.end())
    return;

  for (auto &A : AVI->second)
    if (!llvm::is_contained(NAVV, A))
      NAVV.push_back(A);
  AffectedValues.erase(OV);
}

void AssumptionCache::AffectedValueCallbackVH::allUsesReplacedWith(Value *NV) {
  if (!isa<Instruction>(NV) && !isa<Argument>(NV))
    return;

  // Any assumptions that affected this value now affect the new value.

  AC->transferAffectedValuesInCache(getValPtr(), NV);
  // 'this' now might dangle! If the AffectedValues map was resized to add an
  // entry for NV then this object might have been destroyed in favor of some
  // copy in the grown map.
}

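// Populate the cache by walking the function once and collecting every call to
// @llvm.assume; this is normally triggered lazily, the first time the cached
// assumption list is requested.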
void AssumptionCache::scanFunction() {
  assert(!Scanned && "Tried to scan the function twice!");
  assert(AssumeHandles.empty() && "Already have assumes when scanning!");

  // Go through all instructions in all blocks, add all calls to @llvm.assume
  // to this cache.
  for (BasicBlock &B : F)
    for (Instruction &II : B)
      if (match(&II, m_Intrinsic<Intrinsic::assume>()))
        AssumeHandles.push_back({&II, ExprResultIdx});

  // Mark the scan as complete.
  Scanned = true;

  // Update affected values.
  for (auto &A : AssumeHandles)
    updateAffectedValues(cast<CallInst>(A));
}

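// Register a newly created @llvm.assume call and update the affected-value map
// for it. If the function has not been scanned yet the call is dropped here;
// the later scan will pick it up.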
void AssumptionCache::registerAssumption(CallInst *CI) {
  assert(match(CI, m_Intrinsic<Intrinsic::assume>()) &&
         "Registered call does not call @llvm.assume");

  // If we haven't scanned the function yet, just drop this assumption. It will
  // be found when we scan later.
  if (!Scanned)
    return;

  AssumeHandles.push_back({CI, ExprResultIdx});

#ifndef NDEBUG
  assert(CI->getParent() &&
         "Cannot register @llvm.assume call not in a basic block");
  assert(&F == CI->getParent()->getParent() &&
         "Cannot register @llvm.assume call not in this function");

  // We expect the number of assumptions to be small, so in an asserts build
  // check that we don't accumulate duplicates and that all assumptions point
  // to the same function.
  SmallPtrSet<Value *, 16> AssumptionSet;
  for (auto &VH : AssumeHandles) {
    if (!VH)
      continue;

    assert(&F == cast<Instruction>(VH)->getParent()->getParent() &&
           "Cached assumption not inside this function!");
    assert(match(cast<CallInst>(VH), m_Intrinsic<Intrinsic::assume>()) &&
           "Cached something other than a call to @llvm.assume!");
    assert(AssumptionSet.insert(VH).second &&
           "Cache contains multiple copies of a call!");
  }
#endif

  updateAffectedValues(CI);
}

AnalysisKey AssumptionAnalysis::Key;

PreservedAnalyses AssumptionPrinterPass::run(Function &F,
                                             FunctionAnalysisManager &AM) {
  AssumptionCache &AC = AM.getResult<AssumptionAnalysis>(F);

  OS << "Cached assumptions for function: " << F.getName() << "\n";
  for (auto &VH : AC.assumptions())
    if (VH)
      OS << " " << *cast<CallInst>(VH)->getArgOperand(0) << "\n";

  return PreservedAnalyses::all();
}

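// Erase the tracker's cache entry when the Function it belongs to is deleted,
// so a stale cache is never returned for dead IR.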
void AssumptionCacheTracker::FunctionCallbackVH::deleted() {
  auto I = ACT->AssumptionCaches.find_as(cast<Function>(getValPtr()));
  if (I != ACT->AssumptionCaches.end())
    ACT->AssumptionCaches.erase(I);
  // 'this' now dangles!
}

AssumptionCache &AssumptionCacheTracker::getAssumptionCache(Function &F) {
  // We probe the function map twice to try and avoid creating a value handle
  // around the function in common cases. This makes insertion a bit slower,
  // but if we have to insert we're going to scan the whole function so that
  // shouldn't matter.
  auto I = AssumptionCaches.find_as(&F);
  if (I != AssumptionCaches.end())
    return *I->second;

  // Ok, build a new cache by scanning the function, insert it and the value
  // handle into our map, and return the newly populated cache.
  auto IP = AssumptionCaches.insert(std::make_pair(
      FunctionCallbackVH(&F, this), std::make_unique<AssumptionCache>(F)));
  assert(IP.second && "Scanning function already in the map?");
  return *IP.first->second;
}

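// Return the cache already built for F, if any; unlike getAssumptionCache this
// never creates one (and therefore never scans F).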
AssumptionCache *AssumptionCacheTracker::lookupAssumptionCache(Function &F) {
  auto I = AssumptionCaches.find_as(&F);
  if (I != AssumptionCaches.end())
    return I->second.get();
  return nullptr;
}

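// When -verify-assumption-cache is set, check that every @llvm.assume in each
// tracked function is present in that function's cache.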
void AssumptionCacheTracker::verifyAnalysis() const {
  // FIXME: In the long term the verifier should not be controllable with a
  // flag. We should either fix all passes to correctly update the assumption
  // cache and enable the verifier unconditionally or somehow arrange for the
  // assumption list to be updated automatically by passes.
  if (!VerifyAssumptionCache)
    return;

  SmallPtrSet<const CallInst *, 4> AssumptionSet;
  for (const auto &I : AssumptionCaches) {
    for (auto &VH : I.second->assumptions())
      if (VH)
        AssumptionSet.insert(cast<CallInst>(VH));

    for (const BasicBlock &B : cast<Function>(*I.first))
      for (const Instruction &II : B)
        if (match(&II, m_Intrinsic<Intrinsic::assume>()) &&
            !AssumptionSet.count(cast<CallInst>(&II)))
          report_fatal_error("Assumption in scanned function not in cache");
  }
}

AssumptionCacheTracker::AssumptionCacheTracker() : ImmutablePass(ID) {
  initializeAssumptionCacheTrackerPass(*PassRegistry::getPassRegistry());
}

AssumptionCacheTracker::~AssumptionCacheTracker() = default;

char AssumptionCacheTracker::ID = 0;

INITIALIZE_PASS(AssumptionCacheTracker, "assumption-cache-tracker",
                "Assumption Cache Tracker", false, true)