Mirror of https://github.com/RPCS3/llvm-mirror.git
[AliasAnalysis] Second prototype to cache BasicAA / anyAA state.
Summary: Adding contained caching to AliasAnalysis. BasicAA is currently the only one using it.

AA changes:
- This patch is pulling the caches from BasicAAResults to AAResults, meaning the getModRefInfo call benefits from the IsCapturedCache as well when in "batch mode".
- All AAResultBase implementations add the QueryInfo member to all APIs. AAResults APIs maintain wrapper APIs such that all alias()/getModRefInfo call sites are unchanged.
- AA now provides a BatchAAResults type as a wrapper to AAResults. It keeps the AAResults instance and a QueryInfo instantiated to batch mode. It delegates all work to the AAResults instance with the batched QueryInfo. More API wrappers may be needed in BatchAAResults; only the minimum needed is currently added.

MemorySSA changes:
- All walkers are now templated on the AA used (AliasAnalysis = AAResults or BatchAAResults).
- At build time, we optimize uses; now we create a local walker (lives only as long as OptimizeUses does) using BatchAAResults.
- All Walkers have an internal AA and only use that now, never the AA in MemorySSA. The Walkers receive the AA they will use when built.
- The walker we use for queries after the build is instantiated on AliasAnalysis and is built after building MemorySSA and setting AA.
- All static methods doing walking are now templated on AliasAnalysisType if they are used both during build and after. If used only during build, the method now only takes a BatchAAResults. If used only after build, the method now takes an AliasAnalysis.

Subscribers: sanjoy, arsenm, jvesely, nhaehnle, jlebar, george.burgess.iv, llvm-commits

Tags: #llvm

Differential Revision: https://reviews.llvm.org/D59315

llvm-svn: 356783
parent 6c3e551ea2
commit 3fc1696aca
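The core of the change is the BatchAAResults wrapper added to AliasAnalysis.h in the diff below. As a reading aid (not part of the commit), here is a minimal usage sketch; the helper function, its name, and its arguments are hypothetical, and it assumes the IR is not modified between the queries, which is the precondition the BatchAAResults doc comment states:

// Usage sketch only (not part of this commit). Assumes `AA` comes from the
// pass framework and that the IR does not change between the queries below.
#include "llvm/ADT/ArrayRef.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/IR/Instructions.h"
using namespace llvm;

static bool anyLoadMayAliasStore(AAResults &AA,
                                 ArrayRef<const LoadInst *> Loads,
                                 const StoreInst *Store) {
  // One BatchAAResults keeps a single AAQueryInfo alive, so BasicAA's
  // AliasCache and IsCapturedCache are shared by every query in this loop.
  BatchAAResults BatchAA(AA);
  MemoryLocation StoreLoc = MemoryLocation::get(Store);
  for (const LoadInst *L : Loads)
    if (BatchAA.alias(MemoryLocation::get(L), StoreLoc) != NoAlias)
      return true;
  return false;
}

Plain AAResults calls keep working unchanged; as the diff shows, they simply construct a fresh AAQueryInfo per query.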
@@ -37,6 +37,7 @@
#ifndef LLVM_ANALYSIS_ALIASANALYSIS_H
#define LLVM_ANALYSIS_ALIASANALYSIS_H

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallVector.h"
@@ -285,6 +286,28 @@ createModRefInfo(const FunctionModRefBehavior FMRB) {
return ModRefInfo(FMRB & static_cast<int>(ModRefInfo::ModRef));
}

/// This class stores info we want to provide to or retain within an alias
/// query. By default, the root query is stateless and starts with a freshly
/// constructed info object. Specific alias analyses can use this query info to
/// store per-query state that is important for recursive or nested queries to
/// avoid recomputing. To enable preserving this state across multiple queries
/// where safe (due to the IR not changing), use a `BatchAAResults` wrapper.
/// The information stored in an `AAQueryInfo` is currently limited to the
/// caches used by BasicAA, but can further be extended to fit other AA needs.
class AAQueryInfo {
public:
using LocPair = std::pair<MemoryLocation, MemoryLocation>;
using AliasCacheT = SmallDenseMap<LocPair, AliasResult, 8>;
AliasCacheT AliasCache;

using IsCapturedCacheT = SmallDenseMap<const Value *, bool, 8>;
IsCapturedCacheT IsCapturedCache;

AAQueryInfo() : AliasCache(), IsCapturedCache() {}
};

class BatchAAResults;

class AAResults {
public:
// Make these results default constructable and movable. We have to spell
@@ -599,32 +622,8 @@ public:
/// helpers above.
ModRefInfo getModRefInfo(const Instruction *I,
const Optional<MemoryLocation> &OptLoc) {
if (OptLoc == None) {
if (const auto *Call = dyn_cast<CallBase>(I)) {
return createModRefInfo(getModRefBehavior(Call));
}
}

const MemoryLocation &Loc = OptLoc.getValueOr(MemoryLocation());

switch (I->getOpcode()) {
case Instruction::VAArg: return getModRefInfo((const VAArgInst*)I, Loc);
case Instruction::Load: return getModRefInfo((const LoadInst*)I, Loc);
case Instruction::Store: return getModRefInfo((const StoreInst*)I, Loc);
case Instruction::Fence: return getModRefInfo((const FenceInst*)I, Loc);
case Instruction::AtomicCmpXchg:
return getModRefInfo((const AtomicCmpXchgInst*)I, Loc);
case Instruction::AtomicRMW:
return getModRefInfo((const AtomicRMWInst*)I, Loc);
case Instruction::Call: return getModRefInfo((const CallInst*)I, Loc);
case Instruction::Invoke: return getModRefInfo((const InvokeInst*)I,Loc);
case Instruction::CatchPad:
return getModRefInfo((const CatchPadInst *)I, Loc);
case Instruction::CatchRet:
return getModRefInfo((const CatchReturnInst *)I, Loc);
default:
return ModRefInfo::NoModRef;
}
AAQueryInfo AAQIP;
return getModRefInfo(I, OptLoc, AAQIP);
}

/// A convenience wrapper for constructing the memory location.
@@ -691,6 +690,69 @@ public:
}

private:
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
AAQueryInfo &AAQI);
bool pointsToConstantMemory(const MemoryLocation &Loc, AAQueryInfo &AAQI,
bool OrLocal = false);
ModRefInfo getModRefInfo(Instruction *I, const CallBase *Call2,
AAQueryInfo &AAQIP);
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc,
AAQueryInfo &AAQI);
ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2,
AAQueryInfo &AAQI);
ModRefInfo getModRefInfo(const VAArgInst *V, const MemoryLocation &Loc,
AAQueryInfo &AAQI);
ModRefInfo getModRefInfo(const LoadInst *L, const MemoryLocation &Loc,
AAQueryInfo &AAQI);
ModRefInfo getModRefInfo(const StoreInst *S, const MemoryLocation &Loc,
AAQueryInfo &AAQI);
ModRefInfo getModRefInfo(const FenceInst *S, const MemoryLocation &Loc,
AAQueryInfo &AAQI);
ModRefInfo getModRefInfo(const AtomicCmpXchgInst *CX,
const MemoryLocation &Loc, AAQueryInfo &AAQI);
ModRefInfo getModRefInfo(const AtomicRMWInst *RMW, const MemoryLocation &Loc,
AAQueryInfo &AAQI);
ModRefInfo getModRefInfo(const CatchPadInst *I, const MemoryLocation &Loc,
AAQueryInfo &AAQI);
ModRefInfo getModRefInfo(const CatchReturnInst *I, const MemoryLocation &Loc,
AAQueryInfo &AAQI);
ModRefInfo getModRefInfo(const Instruction *I,
const Optional<MemoryLocation> &OptLoc,
AAQueryInfo &AAQIP) {
if (OptLoc == None) {
if (const auto *Call = dyn_cast<CallBase>(I)) {
return createModRefInfo(getModRefBehavior(Call));
}
}

const MemoryLocation &Loc = OptLoc.getValueOr(MemoryLocation());

switch (I->getOpcode()) {
case Instruction::VAArg:
return getModRefInfo((const VAArgInst *)I, Loc, AAQIP);
case Instruction::Load:
return getModRefInfo((const LoadInst *)I, Loc, AAQIP);
case Instruction::Store:
return getModRefInfo((const StoreInst *)I, Loc, AAQIP);
case Instruction::Fence:
return getModRefInfo((const FenceInst *)I, Loc, AAQIP);
case Instruction::AtomicCmpXchg:
return getModRefInfo((const AtomicCmpXchgInst *)I, Loc, AAQIP);
case Instruction::AtomicRMW:
return getModRefInfo((const AtomicRMWInst *)I, Loc, AAQIP);
case Instruction::Call:
return getModRefInfo((const CallInst *)I, Loc, AAQIP);
case Instruction::Invoke:
return getModRefInfo((const InvokeInst *)I, Loc, AAQIP);
case Instruction::CatchPad:
return getModRefInfo((const CatchPadInst *)I, Loc, AAQIP);
case Instruction::CatchRet:
return getModRefInfo((const CatchReturnInst *)I, Loc, AAQIP);
default:
return ModRefInfo::NoModRef;
}
}

class Concept;

template <typename T> class Model;
@@ -702,6 +764,47 @@ private:
std::vector<std::unique_ptr<Concept>> AAs;

std::vector<AnalysisKey *> AADeps;

friend class BatchAAResults;
};

/// This class is a wrapper over an AAResults, and it is intended to be used
/// only when there are no IR changes in between queries. BatchAAResults is
/// reusing the same `AAQueryInfo` to preserve the state across queries,
/// essentially making AA work in "batch mode". The internal state cannot be
/// cleared, so to go "out-of-batch-mode", the user must either use AAResults,
/// or create a new BatchAAResults.
class BatchAAResults {
AAResults &AA;
AAQueryInfo AAQI;

public:
BatchAAResults(AAResults &AAR) : AA(AAR), AAQI() {}
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB) {
return AA.alias(LocA, LocB, AAQI);
}
bool pointsToConstantMemory(const MemoryLocation &Loc, bool OrLocal = false) {
return AA.pointsToConstantMemory(Loc, AAQI, OrLocal);
}
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc) {
return AA.getModRefInfo(Call, Loc, AAQI);
}
ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2) {
return AA.getModRefInfo(Call1, Call2, AAQI);
}
ModRefInfo getModRefInfo(const Instruction *I,
const Optional<MemoryLocation> &OptLoc) {
return AA.getModRefInfo(I, OptLoc, AAQI);
}
ModRefInfo getModRefInfo(Instruction *I, const CallBase *Call2) {
return AA.getModRefInfo(I, Call2, AAQI);
}
ModRefInfo getArgModRefInfo(const CallBase *Call, unsigned ArgIdx) {
return AA.getArgModRefInfo(Call, ArgIdx);
}
FunctionModRefBehavior getModRefBehavior(const CallBase *Call) {
return AA.getModRefBehavior(Call);
}
};

/// Temporary typedef for legacy code that uses a generic \c AliasAnalysis
@@ -734,12 +837,12 @@ public:
/// each other. This is the interface that must be implemented by specific
/// alias analysis implementations.
virtual AliasResult alias(const MemoryLocation &LocA,
const MemoryLocation &LocB) = 0;
const MemoryLocation &LocB, AAQueryInfo &AAQI) = 0;

/// Checks whether the given location points to constant memory, or if
/// \p OrLocal is true whether it points to a local alloca.
virtual bool pointsToConstantMemory(const MemoryLocation &Loc,
bool OrLocal) = 0;
AAQueryInfo &AAQI, bool OrLocal) = 0;

/// @}
//===--------------------------------------------------------------------===//
@@ -763,13 +866,14 @@ public:
/// getModRefInfo (for call sites) - Return information about whether
/// a particular call site modifies or reads the specified memory location.
virtual ModRefInfo getModRefInfo(const CallBase *Call,
const MemoryLocation &Loc) = 0;
const MemoryLocation &Loc,
AAQueryInfo &AAQI) = 0;

/// Return information about whether two call sites may refer to the same set
/// of memory locations. See the AA documentation for details:
/// http://llvm.org/docs/AliasAnalysis.html#ModRefInfo
virtual ModRefInfo getModRefInfo(const CallBase *Call1,
const CallBase *Call2) = 0;
virtual ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2,
AAQueryInfo &AAQI) = 0;

/// @}
};
@@ -791,14 +895,14 @@ public:

void setAAResults(AAResults *NewAAR) override { Result.setAAResults(NewAAR); }

AliasResult alias(const MemoryLocation &LocA,
const MemoryLocation &LocB) override {
return Result.alias(LocA, LocB);
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
AAQueryInfo &AAQI) override {
return Result.alias(LocA, LocB, AAQI);
}

bool pointsToConstantMemory(const MemoryLocation &Loc,
bool pointsToConstantMemory(const MemoryLocation &Loc, AAQueryInfo &AAQI,
bool OrLocal) override {
return Result.pointsToConstantMemory(Loc, OrLocal);
return Result.pointsToConstantMemory(Loc, AAQI, OrLocal);
}

ModRefInfo getArgModRefInfo(const CallBase *Call, unsigned ArgIdx) override {
@@ -813,14 +917,14 @@ public:
return Result.getModRefBehavior(F);
}

ModRefInfo getModRefInfo(const CallBase *Call,
const MemoryLocation &Loc) override {
return Result.getModRefInfo(Call, Loc);
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc,
AAQueryInfo &AAQI) override {
return Result.getModRefInfo(Call, Loc, AAQI);
}

ModRefInfo getModRefInfo(const CallBase *Call1,
const CallBase *Call2) override {
return Result.getModRefInfo(Call1, Call2);
ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2,
AAQueryInfo &AAQI) override {
return Result.getModRefInfo(Call1, Call2, AAQI);
}
};

@@ -866,13 +970,16 @@ protected:
AAResultsProxy(AAResults *AAR, DerivedT &CurrentResult)
: AAR(AAR), CurrentResult(CurrentResult) {}

AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB) {
return AAR ? AAR->alias(LocA, LocB) : CurrentResult.alias(LocA, LocB);
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
AAQueryInfo &AAQI) {
return AAR ? AAR->alias(LocA, LocB, AAQI)
: CurrentResult.alias(LocA, LocB, AAQI);
}

bool pointsToConstantMemory(const MemoryLocation &Loc, bool OrLocal) {
return AAR ? AAR->pointsToConstantMemory(Loc, OrLocal)
: CurrentResult.pointsToConstantMemory(Loc, OrLocal);
bool pointsToConstantMemory(const MemoryLocation &Loc, AAQueryInfo &AAQI,
bool OrLocal) {
return AAR ? AAR->pointsToConstantMemory(Loc, AAQI, OrLocal)
: CurrentResult.pointsToConstantMemory(Loc, AAQI, OrLocal);
}

ModRefInfo getArgModRefInfo(const CallBase *Call, unsigned ArgIdx) {
@@ -889,14 +996,16 @@ protected:
return AAR ? AAR->getModRefBehavior(F) : CurrentResult.getModRefBehavior(F);
}

ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc) {
return AAR ? AAR->getModRefInfo(Call, Loc)
: CurrentResult.getModRefInfo(Call, Loc);
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
return AAR ? AAR->getModRefInfo(Call, Loc, AAQI)
: CurrentResult.getModRefInfo(Call, Loc, AAQI);
}

ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2) {
return AAR ? AAR->getModRefInfo(Call1, Call2)
: CurrentResult.getModRefInfo(Call1, Call2);
ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2,
AAQueryInfo &AAQI) {
return AAR ? AAR->getModRefInfo(Call1, Call2, AAQI)
: CurrentResult.getModRefInfo(Call1, Call2, AAQI);
}
};

@@ -920,11 +1029,13 @@ protected:
AAResultsProxy getBestAAResults() { return AAResultsProxy(AAR, derived()); }

public:
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB) {
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
AAQueryInfo &AAQI) {
return MayAlias;
}

bool pointsToConstantMemory(const MemoryLocation &Loc, bool OrLocal) {
bool pointsToConstantMemory(const MemoryLocation &Loc, AAQueryInfo &AAQI,
bool OrLocal) {
return false;
}

@@ -940,11 +1051,13 @@ public:
return FMRB_UnknownModRefBehavior;
}

ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc) {
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
return ModRefInfo::ModRef;
}

ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2) {
ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2,
AAQueryInfo &AAQI) {
return ModRefInfo::ModRef;
}
};
@@ -81,14 +81,18 @@ public:
bool invalidate(Function &Fn, const PreservedAnalyses &PA,
FunctionAnalysisManager::Invalidator &Inv);

AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB);
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
AAQueryInfo &AAQI);

ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc);
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc,
AAQueryInfo &AAQI);

ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2);
ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2,
AAQueryInfo &AAQI);

/// Chases pointers until we find a (constant global) or not.
bool pointsToConstantMemory(const MemoryLocation &Loc, bool OrLocal);
bool pointsToConstantMemory(const MemoryLocation &Loc, AAQueryInfo &AAQI,
bool OrLocal);

/// Get the location associated with a pointer argument of a callsite.
ModRefInfo getArgModRefInfo(const CallBase *Call, unsigned ArgIdx);
@@ -140,13 +144,6 @@ private:
SmallVector<VariableGEPIndex, 4> VarIndices;
};

/// Track alias queries to guard against recursion.
using LocPair = std::pair<MemoryLocation, MemoryLocation>;
using AliasCacheTy = SmallDenseMap<LocPair, AliasResult, 8>;
AliasCacheTy AliasCache;
using IsCapturedCacheTy = SmallDenseMap<const Value *, bool, 8>;
IsCapturedCacheTy IsCapturedCache;

/// Tracks phi nodes we have visited.
///
/// When interpret "Value" pointer equality as value equality we need to make
@@ -201,22 +198,24 @@ private:
AliasResult aliasGEP(const GEPOperator *V1, LocationSize V1Size,
const AAMDNodes &V1AAInfo, const Value *V2,
LocationSize V2Size, const AAMDNodes &V2AAInfo,
const Value *UnderlyingV1, const Value *UnderlyingV2);
const Value *UnderlyingV1, const Value *UnderlyingV2,
AAQueryInfo &AAQI);

AliasResult aliasPHI(const PHINode *PN, LocationSize PNSize,
const AAMDNodes &PNAAInfo, const Value *V2,
LocationSize V2Size, const AAMDNodes &V2AAInfo,
const Value *UnderV2);
const Value *UnderV2, AAQueryInfo &AAQI);

AliasResult aliasSelect(const SelectInst *SI, LocationSize SISize,
const AAMDNodes &SIAAInfo, const Value *V2,
LocationSize V2Size, const AAMDNodes &V2AAInfo,
const Value *UnderV2);
const Value *UnderV2, AAQueryInfo &AAQI);

AliasResult aliasCheck(const Value *V1, LocationSize V1Size,
AAMDNodes V1AATag, const Value *V2,
LocationSize V2Size, AAMDNodes V2AATag,
const Value *O1 = nullptr, const Value *O2 = nullptr);
AAQueryInfo &AAQI, const Value *O1 = nullptr,
const Value *O2 = nullptr);
};

/// Analysis pass providing a never-invalidated alias analysis result.
@@ -60,7 +60,8 @@ public:
const cflaa::AliasSummary *getAliasSummary(const Function &);

AliasResult query(const MemoryLocation &, const MemoryLocation &);
AliasResult alias(const MemoryLocation &, const MemoryLocation &);
AliasResult alias(const MemoryLocation &, const MemoryLocation &,
AAQueryInfo &);

private:
/// Ensures that the given function is available in the cache.

@@ -69,7 +69,8 @@ public:

AliasResult query(const MemoryLocation &LocA, const MemoryLocation &LocB);

AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB) {
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
AAQueryInfo &AAQI) {
if (LocA.Ptr == LocB.Ptr)
return MustAlias;

@@ -79,11 +80,11 @@ public:
// ConstantExpr, but every query needs to have at least one Value tied to a
// Function, and neither GlobalValues nor ConstantExprs are.
if (isa<Constant>(LocA.Ptr) && isa<Constant>(LocB.Ptr))
return AAResultBase::alias(LocA, LocB);
return AAResultBase::alias(LocA, LocB, AAQI);

AliasResult QueryResult = query(LocA, LocB);
if (QueryResult == MayAlias)
return AAResultBase::alias(LocA, LocB);
return AAResultBase::alias(LocA, LocB, AAQI);

return QueryResult;
}

@@ -84,10 +84,12 @@ public:
//------------------------------------------------
// Implement the AliasAnalysis API
//
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB);
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
AAQueryInfo &AAQI);

using AAResultBase::getModRefInfo;
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc);
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc,
AAQueryInfo &AAQI);

/// getModRefBehavior - Return the behavior of the specified function if
/// called from the specified call site. The call site may be null in which
@@ -113,7 +115,7 @@ private:

bool isNonEscapingGlobalNoAlias(const GlobalValue *GV, const Value *V);
ModRefInfo getModRefInfoForArgument(const CallBase *Call,
const GlobalValue *GV);
const GlobalValue *GV, AAQueryInfo &AAQI);
};

/// Analysis pass providing a never-invalidated alias analysis result.
@@ -830,13 +830,13 @@ protected:
const MemoryUseOrDef *Template = nullptr);

private:
class ClobberWalkerBase;
class CachingWalker;
class SkipSelfWalker;
template <class AliasAnalysisType> class ClobberWalkerBase;
template <class AliasAnalysisType> class CachingWalker;
template <class AliasAnalysisType> class SkipSelfWalker;
class OptimizeUses;

CachingWalker *getWalkerImpl();
void buildMemorySSA();
CachingWalker<AliasAnalysis> *getWalkerImpl();
void buildMemorySSA(BatchAAResults &BAA);
void optimizeUses();

void prepareForMoveTo(MemoryAccess *, BasicBlock *);
@@ -850,7 +850,8 @@ private:
void markUnreachableAsLiveOnEntry(BasicBlock *BB);
bool dominatesUse(const MemoryAccess *, const MemoryAccess *) const;
MemoryPhi *createMemoryPhi(BasicBlock *BB);
MemoryUseOrDef *createNewAccess(Instruction *,
template <typename AliasAnalysisType>
MemoryUseOrDef *createNewAccess(Instruction *, AliasAnalysisType *,
const MemoryUseOrDef *Template = nullptr);
MemoryAccess *findDominatingDef(BasicBlock *, enum InsertionPlace);
void placePHINodes(const SmallPtrSetImpl<BasicBlock *> &);
@@ -886,9 +887,9 @@ private:
mutable DenseMap<const MemoryAccess *, unsigned long> BlockNumbering;

// Memory SSA building info
std::unique_ptr<ClobberWalkerBase> WalkerBase;
std::unique_ptr<CachingWalker> Walker;
std::unique_ptr<SkipSelfWalker> SkipWalker;
std::unique_ptr<ClobberWalkerBase<AliasAnalysis>> WalkerBase;
std::unique_ptr<CachingWalker<AliasAnalysis>> Walker;
std::unique_ptr<SkipSelfWalker<AliasAnalysis>> SkipWalker;
unsigned NextID;
};
@@ -52,14 +52,17 @@ public:
return false;
}

AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB);
bool pointsToConstantMemory(const MemoryLocation &Loc, bool OrLocal);
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
AAQueryInfo &AAQI);
bool pointsToConstantMemory(const MemoryLocation &Loc, AAQueryInfo &AAQI,
bool OrLocal);

using AAResultBase::getModRefBehavior;
FunctionModRefBehavior getModRefBehavior(const Function *F);

using AAResultBase::getModRefInfo;
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc);
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc,
AAQueryInfo &AAQI);
};

/// Analysis pass providing a never-invalidated alias analysis result.

@@ -30,7 +30,8 @@ public:
explicit SCEVAAResult(ScalarEvolution &SE) : AAResultBase(), SE(SE) {}
SCEVAAResult(SCEVAAResult &&Arg) : AAResultBase(std::move(Arg)), SE(Arg.SE) {}

AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB);
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
AAQueryInfo &AAQI);

private:
Value *GetBaseValue(const SCEV *S);

@@ -39,9 +39,12 @@ public:
return false;
}

AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB);
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc);
ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2);
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
AAQueryInfo &AAQI);
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc,
AAQueryInfo &AAQI);
ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2,
AAQueryInfo &AAQI);

private:
bool mayAliasInScopes(const MDNode *Scopes, const MDNode *NoAlias) const;

@@ -40,12 +40,16 @@ public:
return false;
}

AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB);
bool pointsToConstantMemory(const MemoryLocation &Loc, bool OrLocal);
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
AAQueryInfo &AAQI);
bool pointsToConstantMemory(const MemoryLocation &Loc, AAQueryInfo &AAQI,
bool OrLocal);
FunctionModRefBehavior getModRefBehavior(const CallBase *Call);
FunctionModRefBehavior getModRefBehavior(const Function *F);
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc);
ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2);
ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc,
AAQueryInfo &AAQI);
ModRefInfo getModRefInfo(const CallBase *Call1, const CallBase *Call2,
AAQueryInfo &AAQI);

private:
bool Aliases(const MDNode *A, const MDNode *B) const;
@@ -99,8 +99,14 @@ bool AAResults::invalidate(Function &F, const PreservedAnalyses &PA,

AliasResult AAResults::alias(const MemoryLocation &LocA,
const MemoryLocation &LocB) {
AAQueryInfo AAQIP;
return alias(LocA, LocB, AAQIP);
}

AliasResult AAResults::alias(const MemoryLocation &LocA,
const MemoryLocation &LocB, AAQueryInfo &AAQI) {
for (const auto &AA : AAs) {
auto Result = AA->alias(LocA, LocB);
auto Result = AA->alias(LocA, LocB, AAQI);
if (Result != MayAlias)
return Result;
}
@@ -109,8 +115,14 @@ AliasResult AAResults::alias(const MemoryLocation &LocA,

bool AAResults::pointsToConstantMemory(const MemoryLocation &Loc,
bool OrLocal) {
AAQueryInfo AAQIP;
return pointsToConstantMemory(Loc, AAQIP, OrLocal);
}

bool AAResults::pointsToConstantMemory(const MemoryLocation &Loc,
AAQueryInfo &AAQI, bool OrLocal) {
for (const auto &AA : AAs)
if (AA->pointsToConstantMemory(Loc, OrLocal))
if (AA->pointsToConstantMemory(Loc, AAQI, OrLocal))
return true;

return false;
@@ -131,10 +143,16 @@ ModRefInfo AAResults::getArgModRefInfo(const CallBase *Call, unsigned ArgIdx) {
}

ModRefInfo AAResults::getModRefInfo(Instruction *I, const CallBase *Call2) {
AAQueryInfo AAQIP;
return getModRefInfo(I, Call2, AAQIP);
}

ModRefInfo AAResults::getModRefInfo(Instruction *I, const CallBase *Call2,
AAQueryInfo &AAQI) {
// We may have two calls.
if (const auto *Call1 = dyn_cast<CallBase>(I)) {
// Check if the two calls modify the same memory.
return getModRefInfo(Call1, Call2);
return getModRefInfo(Call1, Call2, AAQI);
} else if (I->isFenceLike()) {
// If this is a fence, just return ModRef.
return ModRefInfo::ModRef;
@@ -144,7 +162,7 @@ ModRefInfo AAResults::getModRefInfo(Instruction *I, const CallBase *Call2) {
// is that if the call references what this instruction
// defines, it must be clobbered by this location.
const MemoryLocation DefLoc = MemoryLocation::get(I);
ModRefInfo MR = getModRefInfo(Call2, DefLoc);
ModRefInfo MR = getModRefInfo(Call2, DefLoc, AAQI);
if (isModOrRefSet(MR))
return setModAndRef(MR);
}
@@ -153,10 +171,17 @@ ModRefInfo AAResults::getModRefInfo(Instruction *I, const CallBase *Call2) {

ModRefInfo AAResults::getModRefInfo(const CallBase *Call,
const MemoryLocation &Loc) {
AAQueryInfo AAQIP;
return getModRefInfo(Call, Loc, AAQIP);
}

ModRefInfo AAResults::getModRefInfo(const CallBase *Call,
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
ModRefInfo Result = ModRefInfo::ModRef;

for (const auto &AA : AAs) {
Result = intersectModRef(Result, AA->getModRefInfo(Call, Loc));
Result = intersectModRef(Result, AA->getModRefInfo(Call, Loc, AAQI));

// Early-exit the moment we reach the bottom of the lattice.
if (isNoModRef(Result))
@@ -214,10 +239,16 @@ ModRefInfo AAResults::getModRefInfo(const CallBase *Call,

ModRefInfo AAResults::getModRefInfo(const CallBase *Call1,
const CallBase *Call2) {
AAQueryInfo AAQIP;
return getModRefInfo(Call1, Call2, AAQIP);
}

ModRefInfo AAResults::getModRefInfo(const CallBase *Call1,
const CallBase *Call2, AAQueryInfo &AAQI) {
ModRefInfo Result = ModRefInfo::ModRef;

for (const auto &AA : AAs) {
Result = intersectModRef(Result, AA->getModRefInfo(Call1, Call2));
Result = intersectModRef(Result, AA->getModRefInfo(Call1, Call2, AAQI));

// Early-exit the moment we reach the bottom of the lattice.
if (isNoModRef(Result))
@@ -396,6 +427,12 @@ raw_ostream &llvm::operator<<(raw_ostream &OS, AliasResult AR) {

ModRefInfo AAResults::getModRefInfo(const LoadInst *L,
const MemoryLocation &Loc) {
AAQueryInfo AAQIP;
return getModRefInfo(L, Loc, AAQIP);
}
ModRefInfo AAResults::getModRefInfo(const LoadInst *L,
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
// Be conservative in the face of atomic.
if (isStrongerThan(L->getOrdering(), AtomicOrdering::Unordered))
return ModRefInfo::ModRef;
@@ -403,7 +440,7 @@ ModRefInfo AAResults::getModRefInfo(const LoadInst *L,
// If the load address doesn't alias the given address, it doesn't read
// or write the specified memory.
if (Loc.Ptr) {
AliasResult AR = alias(MemoryLocation::get(L), Loc);
AliasResult AR = alias(MemoryLocation::get(L), Loc, AAQI);
if (AR == NoAlias)
return ModRefInfo::NoModRef;
if (AR == MustAlias)
@@ -415,12 +452,18 @@ ModRefInfo AAResults::getModRefInfo(const LoadInst *L,

ModRefInfo AAResults::getModRefInfo(const StoreInst *S,
const MemoryLocation &Loc) {
AAQueryInfo AAQIP;
return getModRefInfo(S, Loc, AAQIP);
}
ModRefInfo AAResults::getModRefInfo(const StoreInst *S,
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
// Be conservative in the face of atomic.
if (isStrongerThan(S->getOrdering(), AtomicOrdering::Unordered))
return ModRefInfo::ModRef;

if (Loc.Ptr) {
AliasResult AR = alias(MemoryLocation::get(S), Loc);
AliasResult AR = alias(MemoryLocation::get(S), Loc, AAQI);
// If the store address cannot alias the pointer in question, then the
// specified memory cannot be modified by the store.
if (AR == NoAlias)
@@ -428,7 +471,7 @@ ModRefInfo AAResults::getModRefInfo(const StoreInst *S,

// If the pointer is a pointer to constant memory, then it could not have
// been modified by this store.
if (pointsToConstantMemory(Loc))
if (pointsToConstantMemory(Loc, AAQI))
return ModRefInfo::NoModRef;

// If the store address aliases the pointer as must alias, set Must.
@@ -441,17 +484,31 @@ ModRefInfo AAResults::getModRefInfo(const StoreInst *S,
}

ModRefInfo AAResults::getModRefInfo(const FenceInst *S, const MemoryLocation &Loc) {
AAQueryInfo AAQIP;
return getModRefInfo(S, Loc, AAQIP);
}

ModRefInfo AAResults::getModRefInfo(const FenceInst *S,
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
// If we know that the location is a constant memory location, the fence
// cannot modify this location.
if (Loc.Ptr && pointsToConstantMemory(Loc))
if (Loc.Ptr && pointsToConstantMemory(Loc, AAQI))
return ModRefInfo::Ref;
return ModRefInfo::ModRef;
}

ModRefInfo AAResults::getModRefInfo(const VAArgInst *V,
const MemoryLocation &Loc) {
AAQueryInfo AAQIP;
return getModRefInfo(V, Loc, AAQIP);
}

ModRefInfo AAResults::getModRefInfo(const VAArgInst *V,
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
if (Loc.Ptr) {
AliasResult AR = alias(MemoryLocation::get(V), Loc);
AliasResult AR = alias(MemoryLocation::get(V), Loc, AAQI);
// If the va_arg address cannot alias the pointer in question, then the
// specified memory cannot be accessed by the va_arg.
if (AR == NoAlias)
@@ -459,7 +516,7 @@ ModRefInfo AAResults::getModRefInfo(const VAArgInst *V,

// If the pointer is a pointer to constant memory, then it could not have
// been modified by this va_arg.
if (pointsToConstantMemory(Loc))
if (pointsToConstantMemory(Loc, AAQI))
return ModRefInfo::NoModRef;

// If the va_arg aliases the pointer as must alias, set Must.
@@ -473,10 +530,17 @@ ModRefInfo AAResults::getModRefInfo(const VAArgInst *V,

ModRefInfo AAResults::getModRefInfo(const CatchPadInst *CatchPad,
const MemoryLocation &Loc) {
AAQueryInfo AAQIP;
return getModRefInfo(CatchPad, Loc, AAQIP);
}

ModRefInfo AAResults::getModRefInfo(const CatchPadInst *CatchPad,
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
if (Loc.Ptr) {
// If the pointer is a pointer to constant memory,
// then it could not have been modified by this catchpad.
if (pointsToConstantMemory(Loc))
if (pointsToConstantMemory(Loc, AAQI))
return ModRefInfo::NoModRef;
}

@@ -486,10 +550,17 @@ ModRefInfo AAResults::getModRefInfo(const CatchPadInst *CatchPad,

ModRefInfo AAResults::getModRefInfo(const CatchReturnInst *CatchRet,
const MemoryLocation &Loc) {
AAQueryInfo AAQIP;
return getModRefInfo(CatchRet, Loc, AAQIP);
}

ModRefInfo AAResults::getModRefInfo(const CatchReturnInst *CatchRet,
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
if (Loc.Ptr) {
// If the pointer is a pointer to constant memory,
// then it could not have been modified by this catchpad.
if (pointsToConstantMemory(Loc))
if (pointsToConstantMemory(Loc, AAQI))
return ModRefInfo::NoModRef;
}

@@ -499,12 +570,19 @@ ModRefInfo AAResults::getModRefInfo(const CatchReturnInst *CatchRet,

ModRefInfo AAResults::getModRefInfo(const AtomicCmpXchgInst *CX,
const MemoryLocation &Loc) {
AAQueryInfo AAQIP;
return getModRefInfo(CX, Loc, AAQIP);
}

ModRefInfo AAResults::getModRefInfo(const AtomicCmpXchgInst *CX,
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
// Acquire/Release cmpxchg has properties that matter for arbitrary addresses.
if (isStrongerThanMonotonic(CX->getSuccessOrdering()))
return ModRefInfo::ModRef;

if (Loc.Ptr) {
AliasResult AR = alias(MemoryLocation::get(CX), Loc);
AliasResult AR = alias(MemoryLocation::get(CX), Loc, AAQI);
// If the cmpxchg address does not alias the location, it does not access
// it.
if (AR == NoAlias)
@@ -520,12 +598,19 @@ ModRefInfo AAResults::getModRefInfo(const AtomicCmpXchgInst *CX,

ModRefInfo AAResults::getModRefInfo(const AtomicRMWInst *RMW,
const MemoryLocation &Loc) {
AAQueryInfo AAQIP;
return getModRefInfo(RMW, Loc, AAQIP);
}

ModRefInfo AAResults::getModRefInfo(const AtomicRMWInst *RMW,
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
// Acquire/Release atomicrmw has properties that matter for arbitrary addresses.
if (isStrongerThanMonotonic(RMW->getOrdering()))
return ModRefInfo::ModRef;

if (Loc.Ptr) {
AliasResult AR = alias(MemoryLocation::get(RMW), Loc);
AliasResult AR = alias(MemoryLocation::get(RMW), Loc, AAQI);
// If the atomicrmw address does not alias the location, it does not access
// it.
if (AR == NoAlias)
@ -631,7 +631,7 @@ bool BasicAAResult::DecomposeGEPExpression(const Value *V,
|
||||
/// the function, with global constants being considered local to all
|
||||
/// functions.
|
||||
bool BasicAAResult::pointsToConstantMemory(const MemoryLocation &Loc,
|
||||
bool OrLocal) {
|
||||
AAQueryInfo &AAQI, bool OrLocal) {
|
||||
assert(Visited.empty() && "Visited must be cleared after use!");
|
||||
|
||||
unsigned MaxLookup = 8;
|
||||
@ -641,7 +641,7 @@ bool BasicAAResult::pointsToConstantMemory(const MemoryLocation &Loc,
|
||||
const Value *V = GetUnderlyingObject(Worklist.pop_back_val(), DL);
|
||||
if (!Visited.insert(V).second) {
|
||||
Visited.clear();
|
||||
return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
|
||||
return AAResultBase::pointsToConstantMemory(Loc, AAQI, OrLocal);
|
||||
}
|
||||
|
||||
// An alloca instruction defines local memory.
|
||||
@ -655,7 +655,7 @@ bool BasicAAResult::pointsToConstantMemory(const MemoryLocation &Loc,
|
||||
// others. GV may even be a declaration, not a definition.
|
||||
if (!GV->isConstant()) {
|
||||
Visited.clear();
|
||||
return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
|
||||
return AAResultBase::pointsToConstantMemory(Loc, AAQI, OrLocal);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
@ -673,7 +673,7 @@ bool BasicAAResult::pointsToConstantMemory(const MemoryLocation &Loc,
|
||||
// Don't bother inspecting phi nodes with many operands.
|
||||
if (PN->getNumIncomingValues() > MaxLookup) {
|
||||
Visited.clear();
|
||||
return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
|
||||
return AAResultBase::pointsToConstantMemory(Loc, AAQI, OrLocal);
|
||||
}
|
||||
for (Value *IncValue : PN->incoming_values())
|
||||
Worklist.push_back(IncValue);
|
||||
@ -682,7 +682,7 @@ bool BasicAAResult::pointsToConstantMemory(const MemoryLocation &Loc,
|
||||
|
||||
// Otherwise be conservative.
|
||||
Visited.clear();
|
||||
return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
|
||||
return AAResultBase::pointsToConstantMemory(Loc, AAQI, OrLocal);
|
||||
} while (!Worklist.empty() && --MaxLookup);
|
||||
|
||||
Visited.clear();
|
||||
@ -817,25 +817,25 @@ static bool notDifferentParent(const Value *O1, const Value *O2) {
|
||||
#endif
|
||||
|
||||
AliasResult BasicAAResult::alias(const MemoryLocation &LocA,
|
||||
const MemoryLocation &LocB) {
|
||||
const MemoryLocation &LocB,
|
||||
AAQueryInfo &AAQI) {
|
||||
assert(notDifferentParent(LocA.Ptr, LocB.Ptr) &&
|
||||
"BasicAliasAnalysis doesn't support interprocedural queries.");
|
||||
|
||||
// If we have a directly cached entry for these locations, we have recursed
|
||||
// through this once, so just return the cached results. Notably, when this
|
||||
// happens, we don't clear the cache.
|
||||
auto CacheIt = AliasCache.find(LocPair(LocA, LocB));
|
||||
if (CacheIt != AliasCache.end())
|
||||
auto CacheIt = AAQI.AliasCache.find(AAQueryInfo::LocPair(LocA, LocB));
|
||||
if (CacheIt != AAQI.AliasCache.end())
|
||||
return CacheIt->second;
|
||||
|
||||
CacheIt = AAQI.AliasCache.find(AAQueryInfo::LocPair(LocB, LocA));
|
||||
if (CacheIt != AAQI.AliasCache.end())
|
||||
return CacheIt->second;
|
||||
|
||||
AliasResult Alias = aliasCheck(LocA.Ptr, LocA.Size, LocA.AATags, LocB.Ptr,
|
||||
LocB.Size, LocB.AATags);
|
||||
// AliasCache rarely has more than 1 or 2 elements, always use
|
||||
// shrink_and_clear so it quickly returns to the inline capacity of the
|
||||
// SmallDenseMap if it ever grows larger.
|
||||
// FIXME: This should really be shrink_to_inline_capacity_and_clear().
|
||||
AliasCache.shrink_and_clear();
|
||||
IsCapturedCache.shrink_and_clear();
|
||||
LocB.Size, LocB.AATags, AAQI);
|
||||
|
||||
VisitedPhiBBs.clear();
|
||||
return Alias;
|
||||
}
|
||||
@ -847,7 +847,8 @@ AliasResult BasicAAResult::alias(const MemoryLocation &LocA,
|
||||
/// say much about this query. We do, however, use simple "address taken"
|
||||
/// analysis on local objects.
|
||||
ModRefInfo BasicAAResult::getModRefInfo(const CallBase *Call,
|
||||
const MemoryLocation &Loc) {
|
||||
const MemoryLocation &Loc,
|
||||
AAQueryInfo &AAQI) {
|
||||
assert(notDifferentParent(Call, Loc.Ptr) &&
|
||||
"AliasAnalysis query involving multiple functions!");
|
||||
|
||||
@ -874,7 +875,7 @@ ModRefInfo BasicAAResult::getModRefInfo(const CallBase *Call,
|
||||
// then the call can not mod/ref the pointer unless the call takes the pointer
|
||||
// as an argument, and itself doesn't capture it.
|
||||
if (!isa<Constant>(Object) && Call != Object &&
|
||||
isNonEscapingLocalObject(Object)) {
|
||||
isNonEscapingLocalObject(Object, &AAQI.IsCapturedCache)) {
|
||||
|
||||
// Optimistically assume that call doesn't touch Object and check this
|
||||
// assumption in the following loop.
|
||||
@ -900,8 +901,8 @@ ModRefInfo BasicAAResult::getModRefInfo(const CallBase *Call,
|
||||
|
||||
// If this is a no-capture pointer argument, see if we can tell that it
|
||||
// is impossible to alias the pointer we're checking.
|
||||
AliasResult AR =
|
||||
getBestAAResults().alias(MemoryLocation(*CI), MemoryLocation(Object));
|
||||
AliasResult AR = getBestAAResults().alias(MemoryLocation(*CI),
|
||||
MemoryLocation(Object), AAQI);
|
||||
if (AR != MustAlias)
|
||||
IsMustAlias = false;
|
||||
// Operand doesn't alias 'Object', continue looking for other aliases
|
||||
@ -947,7 +948,7 @@ ModRefInfo BasicAAResult::getModRefInfo(const CallBase *Call,
|
||||
if (isMallocOrCallocLikeFn(Call, &TLI)) {
|
||||
// Be conservative if the accessed pointer may alias the allocation -
|
||||
// fallback to the generic handling below.
|
||||
if (getBestAAResults().alias(MemoryLocation(Call), Loc) == NoAlias)
|
||||
if (getBestAAResults().alias(MemoryLocation(Call), Loc, AAQI) == NoAlias)
|
||||
return ModRefInfo::NoModRef;
|
||||
}
|
||||
|
||||
@ -959,11 +960,11 @@ ModRefInfo BasicAAResult::getModRefInfo(const CallBase *Call,
|
||||
AliasResult SrcAA, DestAA;
|
||||
|
||||
if ((SrcAA = getBestAAResults().alias(MemoryLocation::getForSource(Inst),
|
||||
Loc)) == MustAlias)
|
||||
Loc, AAQI)) == MustAlias)
|
||||
// Loc is exactly the memcpy source thus disjoint from memcpy dest.
|
||||
return ModRefInfo::Ref;
|
||||
if ((DestAA = getBestAAResults().alias(MemoryLocation::getForDest(Inst),
|
||||
Loc)) == MustAlias)
|
||||
Loc, AAQI)) == MustAlias)
|
||||
// The converse case.
|
||||
return ModRefInfo::Mod;
|
||||
|
||||
@ -1019,11 +1020,12 @@ ModRefInfo BasicAAResult::getModRefInfo(const CallBase *Call,
|
||||
return ModRefInfo::Ref;
|
||||
|
||||
// The AAResultBase base class has some smarts, lets use them.
|
||||
return AAResultBase::getModRefInfo(Call, Loc);
|
||||
return AAResultBase::getModRefInfo(Call, Loc, AAQI);
|
||||
}
|
||||
|
||||
ModRefInfo BasicAAResult::getModRefInfo(const CallBase *Call1,
|
||||
const CallBase *Call2) {
|
||||
const CallBase *Call2,
|
||||
AAQueryInfo &AAQI) {
|
||||
// While the assume intrinsic is marked as arbitrarily writing so that
|
||||
// proper control dependencies will be maintained, it never aliases any
|
||||
// particular memory location.
|
||||
@ -1053,7 +1055,7 @@ ModRefInfo BasicAAResult::getModRefInfo(const CallBase *Call1,
|
||||
: ModRefInfo::NoModRef;
|
||||
|
||||
// The AAResultBase base class has some smarts, lets use them.
|
||||
return AAResultBase::getModRefInfo(Call1, Call2);
|
||||
return AAResultBase::getModRefInfo(Call1, Call2, AAQI);
|
||||
}
|
||||
|
||||
/// Provide ad-hoc rules to disambiguate accesses through two GEP operators,
|
||||
@ -1285,11 +1287,10 @@ bool BasicAAResult::isGEPBaseAtNegativeOffset(const GEPOperator *GEPOp,
|
||||
/// We know that V1 is a GEP, but we don't know anything about V2.
|
||||
/// UnderlyingV1 is GetUnderlyingObject(GEP1, DL), UnderlyingV2 is the same for
|
||||
/// V2.
|
||||
AliasResult
|
||||
BasicAAResult::aliasGEP(const GEPOperator *GEP1, LocationSize V1Size,
|
||||
const AAMDNodes &V1AAInfo, const Value *V2,
|
||||
LocationSize V2Size, const AAMDNodes &V2AAInfo,
|
||||
const Value *UnderlyingV1, const Value *UnderlyingV2) {
|
||||
AliasResult BasicAAResult::aliasGEP(
|
||||
const GEPOperator *GEP1, LocationSize V1Size, const AAMDNodes &V1AAInfo,
|
||||
const Value *V2, LocationSize V2Size, const AAMDNodes &V2AAInfo,
|
||||
const Value *UnderlyingV1, const Value *UnderlyingV2, AAQueryInfo &AAQI) {
|
||||
DecomposedGEP DecompGEP1, DecompGEP2;
|
||||
unsigned MaxPointerSize = getMaxPointerSize(DL);
|
||||
DecompGEP1.StructOffset = DecompGEP1.OtherOffset = APInt(MaxPointerSize, 0);
|
||||
@ -1325,14 +1326,14 @@ BasicAAResult::aliasGEP(const GEPOperator *GEP1, LocationSize V1Size,
|
||||
// Do the base pointers alias?
|
||||
AliasResult BaseAlias =
|
||||
aliasCheck(UnderlyingV1, LocationSize::unknown(), AAMDNodes(),
|
||||
UnderlyingV2, LocationSize::unknown(), AAMDNodes());
|
||||
UnderlyingV2, LocationSize::unknown(), AAMDNodes(), AAQI);
|
||||
|
||||
// Check for geps of non-aliasing underlying pointers where the offsets are
|
||||
// identical.
|
||||
if ((BaseAlias == MayAlias) && V1Size == V2Size) {
|
||||
// Do the base pointers alias assuming type and size.
|
||||
AliasResult PreciseBaseAlias = aliasCheck(UnderlyingV1, V1Size, V1AAInfo,
|
||||
UnderlyingV2, V2Size, V2AAInfo);
|
||||
AliasResult PreciseBaseAlias = aliasCheck(
|
||||
UnderlyingV1, V1Size, V1AAInfo, UnderlyingV2, V2Size, V2AAInfo, AAQI);
|
||||
if (PreciseBaseAlias == NoAlias) {
|
||||
// See if the computed offset from the common pointer tells us about the
|
||||
// relation of the resulting pointer.
|
||||
@ -1387,9 +1388,9 @@ BasicAAResult::aliasGEP(const GEPOperator *GEP1, LocationSize V1Size,
|
||||
if (V1Size == LocationSize::unknown() && V2Size == LocationSize::unknown())
|
||||
return MayAlias;
|
||||
|
||||
AliasResult R =
|
||||
aliasCheck(UnderlyingV1, LocationSize::unknown(), AAMDNodes(), V2,
|
||||
LocationSize::unknown(), V2AAInfo, nullptr, UnderlyingV2);
|
||||
AliasResult R = aliasCheck(UnderlyingV1, LocationSize::unknown(),
|
||||
AAMDNodes(), V2, LocationSize::unknown(),
|
||||
V2AAInfo, AAQI, nullptr, UnderlyingV2);
|
||||
if (R != MustAlias) {
|
||||
// If V2 may alias GEP base pointer, conservatively returns MayAlias.
|
||||
// If V2 is known not to alias GEP base pointer, then the two values
|
||||
@ -1523,37 +1524,35 @@ static AliasResult MergeAliasResults(AliasResult A, AliasResult B) {
|
||||
|
||||
/// Provides a bunch of ad-hoc rules to disambiguate a Select instruction
|
||||
/// against another.
|
||||
AliasResult BasicAAResult::aliasSelect(const SelectInst *SI,
|
||||
LocationSize SISize,
|
||||
const AAMDNodes &SIAAInfo,
|
||||
const Value *V2, LocationSize V2Size,
|
||||
const AAMDNodes &V2AAInfo,
|
||||
const Value *UnderV2) {
|
||||
AliasResult
|
||||
BasicAAResult::aliasSelect(const SelectInst *SI, LocationSize SISize,
|
||||
const AAMDNodes &SIAAInfo, const Value *V2,
|
||||
LocationSize V2Size, const AAMDNodes &V2AAInfo,
|
||||
const Value *UnderV2, AAQueryInfo &AAQI) {
|
||||
// If the values are Selects with the same condition, we can do a more precise
|
||||
// check: just check for aliases between the values on corresponding arms.
|
||||
if (const SelectInst *SI2 = dyn_cast<SelectInst>(V2))
|
||||
if (SI->getCondition() == SI2->getCondition()) {
|
||||
AliasResult Alias = aliasCheck(SI->getTrueValue(), SISize, SIAAInfo,
|
||||
SI2->getTrueValue(), V2Size, V2AAInfo);
|
||||
AliasResult Alias =
|
||||
aliasCheck(SI->getTrueValue(), SISize, SIAAInfo, SI2->getTrueValue(),
|
||||
V2Size, V2AAInfo, AAQI);
|
||||
if (Alias == MayAlias)
|
||||
return MayAlias;
|
||||
AliasResult ThisAlias =
|
||||
aliasCheck(SI->getFalseValue(), SISize, SIAAInfo,
|
||||
SI2->getFalseValue(), V2Size, V2AAInfo);
|
||||
SI2->getFalseValue(), V2Size, V2AAInfo, AAQI);
|
||||
return MergeAliasResults(ThisAlias, Alias);
|
||||
}
|
||||
|
||||
// If both arms of the Select node NoAlias or MustAlias V2, then returns
|
||||
// NoAlias / MustAlias. Otherwise, returns MayAlias.
|
||||
AliasResult Alias =
|
||||
aliasCheck(V2, V2Size, V2AAInfo, SI->getTrueValue(),
|
||||
SISize, SIAAInfo, UnderV2);
|
||||
AliasResult Alias = aliasCheck(V2, V2Size, V2AAInfo, SI->getTrueValue(),
|
||||
SISize, SIAAInfo, AAQI, UnderV2);
|
||||
if (Alias == MayAlias)
|
||||
return MayAlias;
|
||||
|
||||
AliasResult ThisAlias =
|
||||
aliasCheck(V2, V2Size, V2AAInfo, SI->getFalseValue(), SISize, SIAAInfo,
|
||||
UnderV2);
|
||||
AliasResult ThisAlias = aliasCheck(V2, V2Size, V2AAInfo, SI->getFalseValue(),
|
||||
SISize, SIAAInfo, AAQI, UnderV2);
|
||||
return MergeAliasResults(ThisAlias, Alias);
|
||||
}
|
||||
|
||||
@ -1563,7 +1562,7 @@ AliasResult BasicAAResult::aliasPHI(const PHINode *PN, LocationSize PNSize,
|
||||
const AAMDNodes &PNAAInfo, const Value *V2,
|
||||
LocationSize V2Size,
|
||||
const AAMDNodes &V2AAInfo,
|
||||
const Value *UnderV2) {
|
||||
const Value *UnderV2, AAQueryInfo &AAQI) {
|
||||
// Track phi nodes we have visited. We use this information when we determine
|
||||
// value equivalence.
|
||||
VisitedPhiBBs.insert(PN->getParent());
|
||||
@ -1573,8 +1572,8 @@ AliasResult BasicAAResult::aliasPHI(const PHINode *PN, LocationSize PNSize,
|
||||
// on corresponding edges.
|
||||
if (const PHINode *PN2 = dyn_cast<PHINode>(V2))
|
||||
if (PN2->getParent() == PN->getParent()) {
|
||||
LocPair Locs(MemoryLocation(PN, PNSize, PNAAInfo),
|
||||
MemoryLocation(V2, V2Size, V2AAInfo));
|
||||
AAQueryInfo::LocPair Locs(MemoryLocation(PN, PNSize, PNAAInfo),
|
||||
MemoryLocation(V2, V2Size, V2AAInfo));
|
||||
if (PN > V2)
|
||||
std::swap(Locs.first, Locs.second);
|
||||
// Analyse the PHIs' inputs under the assumption that the PHIs are
|
||||
@ -1588,8 +1587,8 @@ AliasResult BasicAAResult::aliasPHI(const PHINode *PN, LocationSize PNSize,
|
||||
AliasResult OrigAliasResult;
|
||||
{
|
||||
// Limited lifetime iterator invalidated by the aliasCheck call below.
|
||||
auto CacheIt = AliasCache.find(Locs);
|
||||
assert((CacheIt != AliasCache.end()) &&
|
||||
auto CacheIt = AAQI.AliasCache.find(Locs);
|
||||
assert((CacheIt != AAQI.AliasCache.end()) &&
|
||||
"There must exist an entry for the phi node");
|
||||
OrigAliasResult = CacheIt->second;
|
||||
CacheIt->second = NoAlias;
|
||||
@ -1599,7 +1598,7 @@ AliasResult BasicAAResult::aliasPHI(const PHINode *PN, LocationSize PNSize,
|
||||
AliasResult ThisAlias =
|
||||
aliasCheck(PN->getIncomingValue(i), PNSize, PNAAInfo,
|
||||
PN2->getIncomingValueForBlock(PN->getIncomingBlock(i)),
|
||||
V2Size, V2AAInfo);
|
||||
V2Size, V2AAInfo, AAQI);
|
||||
Alias = MergeAliasResults(ThisAlias, Alias);
|
||||
if (Alias == MayAlias)
|
||||
break;
|
||||
@ -1607,7 +1606,8 @@ AliasResult BasicAAResult::aliasPHI(const PHINode *PN, LocationSize PNSize,
|
||||
|
||||
// Reset if speculation failed.
|
||||
if (Alias != NoAlias) {
|
||||
auto Pair = AliasCache.insert(std::make_pair(Locs, OrigAliasResult));
|
||||
auto Pair =
|
||||
AAQI.AliasCache.insert(std::make_pair(Locs, OrigAliasResult));
|
||||
assert(!Pair.second && "Entry must have existed");
|
||||
Pair.first->second = OrigAliasResult;
|
||||
}
|
||||
@@ -1684,9 +1684,8 @@ AliasResult BasicAAResult::aliasPHI(const PHINode *PN, LocationSize PNSize,
if (isRecursive)
PNSize = LocationSize::unknown();

AliasResult Alias =
aliasCheck(V2, V2Size, V2AAInfo, V1Srcs[0],
PNSize, PNAAInfo, UnderV2);
AliasResult Alias = aliasCheck(V2, V2Size, V2AAInfo, V1Srcs[0], PNSize,
PNAAInfo, AAQI, UnderV2);

// Early exit if the check of the first PHI source against V2 is MayAlias.
// Other results are not possible.
@@ -1699,7 +1698,7 @@ AliasResult BasicAAResult::aliasPHI(const PHINode *PN, LocationSize PNSize,
Value *V = V1Srcs[i];

AliasResult ThisAlias =
aliasCheck(V2, V2Size, V2AAInfo, V, PNSize, PNAAInfo, UnderV2);
aliasCheck(V2, V2Size, V2AAInfo, V, PNSize, PNAAInfo, AAQI, UnderV2);
Alias = MergeAliasResults(ThisAlias, Alias);
if (Alias == MayAlias)
break;
@@ -1713,7 +1712,8 @@ AliasResult BasicAAResult::aliasPHI(const PHINode *PN, LocationSize PNSize,
AliasResult BasicAAResult::aliasCheck(const Value *V1, LocationSize V1Size,
AAMDNodes V1AAInfo, const Value *V2,
LocationSize V2Size, AAMDNodes V2AAInfo,
const Value *O1, const Value *O2) {
AAQueryInfo &AAQI, const Value *O1,
const Value *O2) {
// If either of the memory references is empty, it doesn't matter what the
// pointer values are.
if (V1Size.isZero() || V2Size.isZero())
@@ -1781,9 +1781,11 @@ AliasResult BasicAAResult::aliasCheck(const Value *V1, LocationSize V1Size,
// temporary store the nocapture argument's value in a temporary memory
// location if that memory location doesn't escape. Or it may pass a
// nocapture value to other functions as long as they don't capture it.
if (isEscapeSource(O1) && isNonEscapingLocalObject(O2, &IsCapturedCache))
if (isEscapeSource(O1) &&
isNonEscapingLocalObject(O2, &AAQI.IsCapturedCache))
return NoAlias;
if (isEscapeSource(O2) && isNonEscapingLocalObject(O1, &IsCapturedCache))
if (isEscapeSource(O2) &&
isNonEscapingLocalObject(O1, &AAQI.IsCapturedCache))
return NoAlias;
}

@@ -1798,12 +1800,12 @@ AliasResult BasicAAResult::aliasCheck(const Value *V1, LocationSize V1Size,

// Check the cache before climbing up use-def chains. This also terminates
// otherwise infinitely recursive queries.
LocPair Locs(MemoryLocation(V1, V1Size, V1AAInfo),
MemoryLocation(V2, V2Size, V2AAInfo));
AAQueryInfo::LocPair Locs(MemoryLocation(V1, V1Size, V1AAInfo),
MemoryLocation(V2, V2Size, V2AAInfo));
if (V1 > V2)
std::swap(Locs.first, Locs.second);
std::pair<AliasCacheTy::iterator, bool> Pair =
AliasCache.try_emplace(Locs, MayAlias);
std::pair<AAQueryInfo::AliasCacheT::iterator, bool> Pair =
AAQI.AliasCache.try_emplace(Locs, MayAlias);
if (!Pair.second)
return Pair.first->second;

@@ -1817,9 +1819,13 @@ AliasResult BasicAAResult::aliasCheck(const Value *V1, LocationSize V1Size,
}
if (const GEPOperator *GV1 = dyn_cast<GEPOperator>(V1)) {
AliasResult Result =
aliasGEP(GV1, V1Size, V1AAInfo, V2, V2Size, V2AAInfo, O1, O2);
if (Result != MayAlias)
return AliasCache[Locs] = Result;
aliasGEP(GV1, V1Size, V1AAInfo, V2, V2Size, V2AAInfo, O1, O2, AAQI);
if (Result != MayAlias) {
auto ItInsPair = AAQI.AliasCache.insert(std::make_pair(Locs, Result));
assert(!ItInsPair.second && "Entry must have existed");
ItInsPair.first->second = Result;
return Result;
}
}

if (isa<PHINode>(V2) && !isa<PHINode>(V1)) {
@@ -1829,10 +1835,10 @@ AliasResult BasicAAResult::aliasCheck(const Value *V1, LocationSize V1Size,
std::swap(V1AAInfo, V2AAInfo);
}
if (const PHINode *PN = dyn_cast<PHINode>(V1)) {
AliasResult Result = aliasPHI(PN, V1Size, V1AAInfo,
V2, V2Size, V2AAInfo, O2);
AliasResult Result =
aliasPHI(PN, V1Size, V1AAInfo, V2, V2Size, V2AAInfo, O2, AAQI);
if (Result != MayAlias) {
Pair = AliasCache.try_emplace(Locs, Result);
Pair = AAQI.AliasCache.try_emplace(Locs, Result);
assert(!Pair.second && "Entry must have existed");
return Pair.first->second = Result;
}
@@ -1846,9 +1852,9 @@ AliasResult BasicAAResult::aliasCheck(const Value *V1, LocationSize V1Size,
}
if (const SelectInst *S1 = dyn_cast<SelectInst>(V1)) {
AliasResult Result =
aliasSelect(S1, V1Size, V1AAInfo, V2, V2Size, V2AAInfo, O2);
aliasSelect(S1, V1Size, V1AAInfo, V2, V2Size, V2AAInfo, O2, AAQI);
if (Result != MayAlias) {
Pair = AliasCache.try_emplace(Locs, Result);
Pair = AAQI.AliasCache.try_emplace(Locs, Result);
assert(!Pair.second && "Entry must have existed");
return Pair.first->second = Result;
}
@@ -1860,7 +1866,7 @@ AliasResult BasicAAResult::aliasCheck(const Value *V1, LocationSize V1Size,
if (V1Size.isPrecise() && V2Size.isPrecise() &&
(isObjectSize(O1, V1Size.getValue(), DL, TLI, NullIsValidLocation) ||
isObjectSize(O2, V2Size.getValue(), DL, TLI, NullIsValidLocation))) {
Pair = AliasCache.try_emplace(Locs, PartialAlias);
Pair = AAQI.AliasCache.try_emplace(Locs, PartialAlias);
assert(!Pair.second && "Entry must have existed");
return Pair.first->second = PartialAlias;
}
@@ -1868,8 +1874,8 @@ AliasResult BasicAAResult::aliasCheck(const Value *V1, LocationSize V1Size,
// Recurse back into the best AA results we have, potentially with refined
// memory locations. We have already ensured that BasicAA has a MayAlias
// cache result for these, so any recursion back into BasicAA won't loop.
AliasResult Result = getBestAAResults().alias(Locs.first, Locs.second);
Pair = AliasCache.try_emplace(Locs, Result);
AliasResult Result = getBestAAResults().alias(Locs.first, Locs.second, AAQI);
Pair = AAQI.AliasCache.try_emplace(Locs, Result);
assert(!Pair.second && "Entry must have existed");
return Pair.first->second = Result;
}
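The hunks above all follow one caching discipline that AAQueryInfo now makes explicit: seed the cache with the conservative MayAlias answer for a location pair before decomposing it, so that a recursive query on the same pair terminates, then overwrite the seeded entry with the final result. Below is a simplified, self-contained sketch of that discipline; the types and the decomposition step are stand-ins for illustration, not the LLVM classes used in the diff.

#include <map>
#include <utility>

// Stand-in types for illustration only; they are not the LLVM classes.
enum Result { NoAlias, MayAlias };
using Loc = const void *;
using LocPair = std::pair<Loc, Loc>;

struct QueryInfo {
  // Per-root-query state, analogous in spirit to AAQueryInfo::AliasCache.
  std::map<LocPair, Result> AliasCache;
};

Result aliasCheck(Loc A, Loc B, QueryInfo &Q) {
  LocPair Key(A, B);
  if (Key.second < Key.first)
    std::swap(Key.first, Key.second); // canonicalize, mirroring the V1 > V2 swap

  // Seed the cache with the conservative answer first; a recursive query on
  // the same pair hits this entry and terminates instead of looping forever.
  auto Seeded = Q.AliasCache.try_emplace(Key, MayAlias);
  if (!Seeded.second)
    return Seeded.first->second;

  Result R = MayAlias;
  // ... decompose A and B (GEPs, PHIs, selects) and recurse with the same Q ...

  // Overwrite the seeded entry with the final answer before returning, so
  // later queries in the same query tree see the refined result.
  Seeded.first->second = R;
  return R;
}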
@@ -875,7 +875,8 @@ AliasResult CFLAndersAAResult::query(const MemoryLocation &LocA,
}

AliasResult CFLAndersAAResult::alias(const MemoryLocation &LocA,
const MemoryLocation &LocB) {
const MemoryLocation &LocB,
AAQueryInfo &AAQI) {
if (LocA.Ptr == LocB.Ptr)
return MustAlias;

@@ -885,11 +886,11 @@ AliasResult CFLAndersAAResult::alias(const MemoryLocation &LocA,
// ConstantExpr, but every query needs to have at least one Value tied to a
// Function, and neither GlobalValues nor ConstantExprs are.
if (isa<Constant>(LocA.Ptr) && isa<Constant>(LocB.Ptr))
return AAResultBase::alias(LocA, LocB);
return AAResultBase::alias(LocA, LocB, AAQI);

AliasResult QueryResult = query(LocA, LocB);
if (QueryResult == MayAlias)
return AAResultBase::alias(LocA, LocB);
return AAResultBase::alias(LocA, LocB, AAQI);

return QueryResult;
}
@@ -806,7 +806,8 @@ bool GlobalsAAResult::isNonEscapingGlobalNoAlias(const GlobalValue *GV,
/// other is some random pointer, we know there cannot be an alias, because the
/// address of the global isn't taken.
AliasResult GlobalsAAResult::alias(const MemoryLocation &LocA,
const MemoryLocation &LocB) {
const MemoryLocation &LocB,
AAQueryInfo &AAQI) {
// Get the base object these pointers point to.
const Value *UV1 = GetUnderlyingObject(LocA.Ptr, DL);
const Value *UV2 = GetUnderlyingObject(LocB.Ptr, DL);
@@ -881,11 +882,12 @@ AliasResult GlobalsAAResult::alias(const MemoryLocation &LocA,
if ((GV1 || GV2) && GV1 != GV2)
return NoAlias;

return AAResultBase::alias(LocA, LocB);
return AAResultBase::alias(LocA, LocB, AAQI);
}

ModRefInfo GlobalsAAResult::getModRefInfoForArgument(const CallBase *Call,
const GlobalValue *GV) {
const GlobalValue *GV,
AAQueryInfo &AAQI) {
if (Call->doesNotAccessMemory())
return ModRefInfo::NoModRef;
ModRefInfo ConservativeResult =
@@ -901,7 +903,8 @@ ModRefInfo GlobalsAAResult::getModRefInfoForArgument(const CallBase *Call,
if (!all_of(Objects, isIdentifiedObject) &&
// Try ::alias to see if all objects are known not to alias GV.
!all_of(Objects, [&](Value *V) {
return this->alias(MemoryLocation(V), MemoryLocation(GV)) == NoAlias;
return this->alias(MemoryLocation(V), MemoryLocation(GV), AAQI) ==
NoAlias;
}))
return ConservativeResult;

@@ -914,7 +917,8 @@ ModRefInfo GlobalsAAResult::getModRefInfoForArgument(const CallBase *Call,
}

ModRefInfo GlobalsAAResult::getModRefInfo(const CallBase *Call,
const MemoryLocation &Loc) {
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
ModRefInfo Known = ModRefInfo::ModRef;

// If we are asking for mod/ref info of a direct call with a pointer to a
@@ -926,11 +930,11 @@ ModRefInfo GlobalsAAResult::getModRefInfo(const CallBase *Call,
if (NonAddressTakenGlobals.count(GV))
if (const FunctionInfo *FI = getFunctionInfo(F))
Known = unionModRef(FI->getModRefInfoForGlobal(*GV),
getModRefInfoForArgument(Call, GV));
getModRefInfoForArgument(Call, GV, AAQI));

if (!isModOrRefSet(Known))
return ModRefInfo::NoModRef; // No need to query other mod/ref analyses
return intersectModRef(Known, AAResultBase::getModRefInfo(Call, Loc));
return intersectModRef(Known, AAResultBase::getModRefInfo(Call, Loc, AAQI));
}

GlobalsAAResult::GlobalsAAResult(const DataLayout &DL,
@@ -251,10 +251,10 @@ struct ClobberAlias {

// Return a pair of {IsClobber (bool), AR (AliasResult)}. It relies on AR being
// ignored if IsClobber = false.
static ClobberAlias instructionClobbersQuery(const MemoryDef *MD,
const MemoryLocation &UseLoc,
const Instruction *UseInst,
AliasAnalysis &AA) {
template <typename AliasAnalysisType>
static ClobberAlias
instructionClobbersQuery(const MemoryDef *MD, const MemoryLocation &UseLoc,
const Instruction *UseInst, AliasAnalysisType &AA) {
Instruction *DefInst = MD->getMemoryInst();
assert(DefInst && "Defining instruction not actually an instruction");
const auto *UseCall = dyn_cast<CallBase>(UseInst);
@@ -299,10 +299,11 @@ static ClobberAlias instructionClobbersQuery(const MemoryDef *MD,
return {isModSet(I), AR};
}

template <typename AliasAnalysisType>
static ClobberAlias instructionClobbersQuery(MemoryDef *MD,
const MemoryUseOrDef *MU,
const MemoryLocOrCall &UseMLOC,
AliasAnalysis &AA) {
AliasAnalysisType &AA) {
// FIXME: This is a temporary hack to allow a single instructionClobbersQuery
// to exist while MemoryLocOrCall is pushed through places.
if (UseMLOC.IsCall)
@@ -345,12 +346,12 @@ struct UpwardsMemoryQuery {
} // end anonymous namespace

static bool lifetimeEndsAt(MemoryDef *MD, const MemoryLocation &Loc,
AliasAnalysis &AA) {
BatchAAResults &AA) {
Instruction *Inst = MD->getMemoryInst();
if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst)) {
switch (II->getIntrinsicID()) {
case Intrinsic::lifetime_end:
return AA.isMustAlias(MemoryLocation(II->getArgOperand(1)), Loc);
return AA.alias(MemoryLocation(II->getArgOperand(1)), Loc) == MustAlias;
default:
return false;
}
@@ -358,13 +359,14 @@ static bool lifetimeEndsAt(MemoryDef *MD, const MemoryLocation &Loc,
return false;
}

static bool isUseTriviallyOptimizableToLiveOnEntry(AliasAnalysis &AA,
template <typename AliasAnalysisType>
static bool isUseTriviallyOptimizableToLiveOnEntry(AliasAnalysisType &AA,
const Instruction *I) {
// If the memory can't be changed, then loads of the memory can't be
// clobbered.
return isa<LoadInst>(I) && (I->getMetadata(LLVMContext::MD_invariant_load) ||
AA.pointsToConstantMemory(cast<LoadInst>(I)->
getPointerOperand()));
AA.pointsToConstantMemory(MemoryLocation(
cast<LoadInst>(I)->getPointerOperand())));
}

/// Verifies that `Start` is clobbered by `ClobberAt`, and that nothing
@@ -380,10 +382,12 @@ static bool isUseTriviallyOptimizableToLiveOnEntry(AliasAnalysis &AA,
/// \param Query The UpwardsMemoryQuery we used for our search.
/// \param AA The AliasAnalysis we used for our search.
/// \param AllowImpreciseClobber Always false, unless we do relaxed verify.

template <typename AliasAnalysisType>
LLVM_ATTRIBUTE_UNUSED static void
checkClobberSanity(const MemoryAccess *Start, MemoryAccess *ClobberAt,
const MemoryLocation &StartLoc, const MemorySSA &MSSA,
const UpwardsMemoryQuery &Query, AliasAnalysis &AA,
const UpwardsMemoryQuery &Query, AliasAnalysisType &AA,
bool AllowImpreciseClobber = false) {
assert(MSSA.dominates(ClobberAt, Start) && "Clobber doesn't dominate start?");

@@ -473,7 +477,7 @@ namespace {

/// Our algorithm for walking (and trying to optimize) clobbers, all wrapped up
/// in one class.
class ClobberWalker {
template <class AliasAnalysisType> class ClobberWalker {
/// Save a few bytes by using unsigned instead of size_t.
using ListIndex = unsigned;

@@ -497,7 +501,7 @@ class ClobberWalker {
};

const MemorySSA &MSSA;
AliasAnalysis &AA;
AliasAnalysisType &AA;
DominatorTree &DT;
UpwardsMemoryQuery *Query;

@@ -886,9 +890,10 @@ class ClobberWalker {
}

public:
ClobberWalker(const MemorySSA &MSSA, AliasAnalysis &AA, DominatorTree &DT)
ClobberWalker(const MemorySSA &MSSA, AliasAnalysisType &AA, DominatorTree &DT)
: MSSA(MSSA), AA(AA), DT(DT) {}

AliasAnalysisType *getAA() { return &AA; }
/// Finds the nearest clobber for the given query, optimizing phis if
/// possible.
MemoryAccess *findClobber(MemoryAccess *Start, UpwardsMemoryQuery &Q) {
@@ -944,12 +949,12 @@ struct RenamePassData {

namespace llvm {

class MemorySSA::ClobberWalkerBase {
ClobberWalker Walker;
template <class AliasAnalysisType> class MemorySSA::ClobberWalkerBase {
ClobberWalker<AliasAnalysisType> Walker;
MemorySSA *MSSA;

public:
ClobberWalkerBase(MemorySSA *M, AliasAnalysis *A, DominatorTree *D)
ClobberWalkerBase(MemorySSA *M, AliasAnalysisType *A, DominatorTree *D)
: Walker(*M, *A, *D), MSSA(M) {}

MemoryAccess *getClobberingMemoryAccessBase(MemoryAccess *,
@@ -966,19 +971,24 @@ public:
/// A MemorySSAWalker that does AA walks to disambiguate accesses. It no
/// longer does caching on its own, but the name has been retained for the
/// moment.
template <class AliasAnalysisType>
class MemorySSA::CachingWalker final : public MemorySSAWalker {
ClobberWalkerBase *Walker;
ClobberWalkerBase<AliasAnalysisType> *Walker;

public:
CachingWalker(MemorySSA *M, ClobberWalkerBase *W)
CachingWalker(MemorySSA *M, ClobberWalkerBase<AliasAnalysisType> *W)
: MemorySSAWalker(M), Walker(W) {}
~CachingWalker() override = default;

using MemorySSAWalker::getClobberingMemoryAccess;

MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA) override;
MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA) override {
return Walker->getClobberingMemoryAccessBase(MA, false);
}
MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA,
const MemoryLocation &Loc) override;
const MemoryLocation &Loc) override {
return Walker->getClobberingMemoryAccessBase(MA, Loc);
}

void invalidateInfo(MemoryAccess *MA) override {
if (auto *MUD = dyn_cast<MemoryUseOrDef>(MA))
@@ -986,19 +996,24 @@ public:
}
};

template <class AliasAnalysisType>
class MemorySSA::SkipSelfWalker final : public MemorySSAWalker {
ClobberWalkerBase *Walker;
ClobberWalkerBase<AliasAnalysisType> *Walker;

public:
SkipSelfWalker(MemorySSA *M, ClobberWalkerBase *W)
SkipSelfWalker(MemorySSA *M, ClobberWalkerBase<AliasAnalysisType> *W)
: MemorySSAWalker(M), Walker(W) {}
~SkipSelfWalker() override = default;

using MemorySSAWalker::getClobberingMemoryAccess;

MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA) override;
MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA) override {
return Walker->getClobberingMemoryAccessBase(MA, true);
}
MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA,
const MemoryLocation &Loc) override;
const MemoryLocation &Loc) override {
return Walker->getClobberingMemoryAccessBase(MA, Loc);
}

void invalidateInfo(MemoryAccess *MA) override {
if (auto *MUD = dyn_cast<MemoryUseOrDef>(MA))
@@ -1140,9 +1155,20 @@ void MemorySSA::markUnreachableAsLiveOnEntry(BasicBlock *BB) {
}

MemorySSA::MemorySSA(Function &Func, AliasAnalysis *AA, DominatorTree *DT)
: AA(AA), DT(DT), F(Func), LiveOnEntryDef(nullptr), Walker(nullptr),
: AA(nullptr), DT(DT), F(Func), LiveOnEntryDef(nullptr), Walker(nullptr),
SkipWalker(nullptr), NextID(0) {
buildMemorySSA();
// Build MemorySSA using a batch alias analysis. This reuses the internal
// state that AA collects during an alias()/getModRefInfo() call. This is
// safe because there are no CFG changes while building MemorySSA, and it can
// significantly reduce the time spent by the compiler in AA, because we will
// make queries about all the instructions in the Function.
BatchAAResults BatchAA(*AA);
buildMemorySSA(BatchAA);
// Intentionally leave AA to nullptr while building so we don't accidentally
// use non-batch AliasAnalysis.
this->AA = AA;
// Also create the walker here.
getWalker();
}

MemorySSA::~MemorySSA() {
@@ -1179,9 +1205,9 @@ namespace llvm {
/// which is walking bottom-up.
class MemorySSA::OptimizeUses {
public:
OptimizeUses(MemorySSA *MSSA, MemorySSAWalker *Walker, AliasAnalysis *AA,
OptimizeUses(MemorySSA *MSSA, MemorySSAWalker *Walker, BatchAAResults *BAA,
DominatorTree *DT)
: MSSA(MSSA), Walker(Walker), AA(AA), DT(DT) {}
: MSSA(MSSA), Walker(Walker), AA(BAA), DT(DT) {}

void optimizeUses();

@@ -1210,7 +1236,7 @@ private:

MemorySSA *MSSA;
MemorySSAWalker *Walker;
AliasAnalysis *AA;
BatchAAResults *AA;
DominatorTree *DT;
};

@@ -1407,7 +1433,7 @@ void MemorySSA::placePHINodes(
createMemoryPhi(BB);
}

void MemorySSA::buildMemorySSA() {
void MemorySSA::buildMemorySSA(BatchAAResults &BAA) {
// We create an access to represent "live on entry", for things like
// arguments or users of globals, where the memory they use is defined before
// the beginning of the function. We do not actually insert it into the IR.
@@ -1429,7 +1455,7 @@ void MemorySSA::buildMemorySSA() {
AccessList *Accesses = nullptr;
DefsList *Defs = nullptr;
for (Instruction &I : B) {
MemoryUseOrDef *MUD = createNewAccess(&I);
MemoryUseOrDef *MUD = createNewAccess(&I, &BAA);
if (!MUD)
continue;

@@ -1453,9 +1479,9 @@ void MemorySSA::buildMemorySSA() {
SmallPtrSet<BasicBlock *, 16> Visited;
renamePass(DT->getRootNode(), LiveOnEntryDef.get(), Visited);

CachingWalker *Walker = getWalkerImpl();

OptimizeUses(this, Walker, AA, DT).optimizeUses();
ClobberWalkerBase<BatchAAResults> WalkerBase(this, &BAA, DT);
CachingWalker<BatchAAResults> WalkerLocal(this, &WalkerBase);
OptimizeUses(this, &WalkerLocal, &BAA, DT).optimizeUses();

// Mark the uses in unreachable blocks as live on entry, so that they go
// somewhere.
@@ -1466,14 +1492,16 @@ void MemorySSA::buildMemorySSA() {

MemorySSAWalker *MemorySSA::getWalker() { return getWalkerImpl(); }

MemorySSA::CachingWalker *MemorySSA::getWalkerImpl() {
MemorySSA::CachingWalker<AliasAnalysis> *MemorySSA::getWalkerImpl() {
if (Walker)
return Walker.get();

if (!WalkerBase)
WalkerBase = llvm::make_unique<ClobberWalkerBase>(this, AA, DT);
WalkerBase =
llvm::make_unique<ClobberWalkerBase<AliasAnalysis>>(this, AA, DT);

Walker = llvm::make_unique<CachingWalker>(this, WalkerBase.get());
Walker =
llvm::make_unique<CachingWalker<AliasAnalysis>>(this, WalkerBase.get());
return Walker.get();
}

@@ -1482,9 +1510,11 @@ MemorySSAWalker *MemorySSA::getSkipSelfWalker() {
return SkipWalker.get();

if (!WalkerBase)
WalkerBase = llvm::make_unique<ClobberWalkerBase>(this, AA, DT);
WalkerBase =
llvm::make_unique<ClobberWalkerBase<AliasAnalysis>>(this, AA, DT);

SkipWalker = llvm::make_unique<SkipSelfWalker>(this, WalkerBase.get());
SkipWalker =
llvm::make_unique<SkipSelfWalker<AliasAnalysis>>(this, WalkerBase.get());
return SkipWalker.get();
}

@@ -1603,7 +1633,7 @@ MemoryUseOrDef *MemorySSA::createDefinedAccess(Instruction *I,
MemoryAccess *Definition,
const MemoryUseOrDef *Template) {
assert(!isa<PHINode>(I) && "Cannot create a defined access for a PHI");
MemoryUseOrDef *NewAccess = createNewAccess(I, Template);
MemoryUseOrDef *NewAccess = createNewAccess(I, AA, Template);
assert(
NewAccess != nullptr &&
"Tried to create a memory access for a non-memory touching instruction");
@@ -1626,7 +1656,9 @@ static inline bool isOrdered(const Instruction *I) {
}

/// Helper function to create new memory accesses
template <typename AliasAnalysisType>
MemoryUseOrDef *MemorySSA::createNewAccess(Instruction *I,
AliasAnalysisType *AAP,
const MemoryUseOrDef *Template) {
// The assume intrinsic has a control dependency which we model by claiming
// that it writes arbitrarily. Ignore that fake memory dependency here.
@@ -1641,7 +1673,7 @@ MemoryUseOrDef *MemorySSA::createNewAccess(Instruction *I,
Def = dyn_cast_or_null<MemoryDef>(Template) != nullptr;
Use = dyn_cast_or_null<MemoryUse>(Template) != nullptr;
#if !defined(NDEBUG)
ModRefInfo ModRef = AA->getModRefInfo(I, None);
ModRefInfo ModRef = AAP->getModRefInfo(I, None);
bool DefCheck, UseCheck;
DefCheck = isModSet(ModRef) || isOrdered(I);
UseCheck = isRefSet(ModRef);
@@ -1649,7 +1681,7 @@ MemoryUseOrDef *MemorySSA::createNewAccess(Instruction *I,
#endif
} else {
// Find out what effect this instruction has on memory.
ModRefInfo ModRef = AA->getModRefInfo(I, None);
ModRefInfo ModRef = AAP->getModRefInfo(I, None);
// The isOrdered check is used to ensure that volatiles end up as defs
// (atomics end up as ModRef right now anyway). Until we separate the
// ordering chain from the memory chain, this enables people to see at least
@@ -1702,7 +1734,7 @@ void MemorySSA::removeFromLookups(MemoryAccess *MA) {
MUD->setDefiningAccess(nullptr);
// Invalidate our walker's cache if necessary
if (!isa<MemoryUse>(MA))
Walker->invalidateInfo(MA);
getWalker()->invalidateInfo(MA);

Value *MemoryInst;
if (const auto *MUD = dyn_cast<MemoryUseOrDef>(MA))
@@ -2175,7 +2207,9 @@ MemorySSAWalker::MemorySSAWalker(MemorySSA *M) : MSSA(M) {}
/// the MemoryAccess that actually clobbers Loc.
///
/// \returns our clobbering memory access
MemoryAccess *MemorySSA::ClobberWalkerBase::getClobberingMemoryAccessBase(
template <typename AliasAnalysisType>
MemoryAccess *
MemorySSA::ClobberWalkerBase<AliasAnalysisType>::getClobberingMemoryAccessBase(
MemoryAccess *StartingAccess, const MemoryLocation &Loc) {
if (isa<MemoryPhi>(StartingAccess))
return StartingAccess;
@@ -2212,9 +2246,10 @@ MemoryAccess *MemorySSA::ClobberWalkerBase::getClobberingMemoryAccessBase(
return Clobber;
}

template <typename AliasAnalysisType>
MemoryAccess *
MemorySSA::ClobberWalkerBase::getClobberingMemoryAccessBase(MemoryAccess *MA,
bool SkipSelf) {
MemorySSA::ClobberWalkerBase<AliasAnalysisType>::getClobberingMemoryAccessBase(
MemoryAccess *MA, bool SkipSelf) {
auto *StartingAccess = dyn_cast<MemoryUseOrDef>(MA);
// If this is a MemoryPhi, we can't do anything.
if (!StartingAccess)
@@ -2240,7 +2275,7 @@ MemorySSA::ClobberWalkerBase::getClobberingMemoryAccessBase(MemoryAccess *MA,

UpwardsMemoryQuery Q(I, StartingAccess);

if (isUseTriviallyOptimizableToLiveOnEntry(*MSSA->AA, I)) {
if (isUseTriviallyOptimizableToLiveOnEntry(*Walker.getAA(), I)) {
MemoryAccess *LiveOnEntry = MSSA->getLiveOnEntryDef();
StartingAccess->setOptimized(LiveOnEntry);
StartingAccess->setOptimizedAccessType(None);
@@ -2289,28 +2324,6 @@ MemorySSA::ClobberWalkerBase::getClobberingMemoryAccessBase(MemoryAccess *MA,
return Result;
}

MemoryAccess *
MemorySSA::CachingWalker::getClobberingMemoryAccess(MemoryAccess *MA) {
return Walker->getClobberingMemoryAccessBase(MA, false);
}

MemoryAccess *
MemorySSA::CachingWalker::getClobberingMemoryAccess(MemoryAccess *MA,
const MemoryLocation &Loc) {
return Walker->getClobberingMemoryAccessBase(MA, Loc);
}

MemoryAccess *
MemorySSA::SkipSelfWalker::getClobberingMemoryAccess(MemoryAccess *MA) {
return Walker->getClobberingMemoryAccessBase(MA, true);
}

MemoryAccess *
MemorySSA::SkipSelfWalker::getClobberingMemoryAccess(MemoryAccess *MA,
const MemoryLocation &Loc) {
return Walker->getClobberingMemoryAccessBase(MA, Loc);
}

MemoryAccess *
DoNothingMemorySSAWalker::getClobberingMemoryAccess(MemoryAccess *MA) {
if (auto *Use = dyn_cast<MemoryUseOrDef>(MA))
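The constructor and buildMemorySSA() hunks above show the intended batching pattern: for a phase in which the IR is known not to change, wrap the AAResults in a BatchAAResults so every query funnels through one shared AAQueryInfo and BasicAA's caches survive from one query to the next. The function below is a hedged, illustrative sketch of that pattern outside MemorySSA; countMustAliasPairs and its setup are hypothetical and not part of this patch, and only the BatchAAResults constructor and alias() wrapper that the diff relies on are used.

#include "llvm/ADT/ArrayRef.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/IR/Instructions.h"
using namespace llvm;

// Count must-alias pairs among some loads, reusing AA state across queries.
static unsigned countMustAliasPairs(AAResults &AA,
                                    ArrayRef<const LoadInst *> Loads) {
  // All queries below share the AAQueryInfo held by BatchAA, so work BasicAA
  // does for one pair (e.g. capture checks) is reused for later pairs. This
  // is only safe while the IR is guaranteed not to change, mirroring the
  // guarantee MemorySSA relies on during its build.
  BatchAAResults BatchAA(AA);
  unsigned Count = 0;
  for (const LoadInst *L1 : Loads)
    for (const LoadInst *L2 : Loads)
      if (L1 != L2 &&
          BatchAA.alias(MemoryLocation::get(L1), MemoryLocation::get(L2)) ==
              MustAlias)
        ++Count;
  return Count;
}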
@@ -37,9 +37,10 @@ using namespace llvm;
using namespace llvm::objcarc;

AliasResult ObjCARCAAResult::alias(const MemoryLocation &LocA,
const MemoryLocation &LocB) {
const MemoryLocation &LocB,
AAQueryInfo &AAQI) {
if (!EnableARCOpts)
return AAResultBase::alias(LocA, LocB);
return AAResultBase::alias(LocA, LocB, AAQI);

// First, strip off no-ops, including ObjC-specific no-ops, and try making a
// precise alias query.
@@ -47,7 +48,7 @@ AliasResult ObjCARCAAResult::alias(const MemoryLocation &LocA,
const Value *SB = GetRCIdentityRoot(LocB.Ptr);
AliasResult Result =
AAResultBase::alias(MemoryLocation(SA, LocA.Size, LocA.AATags),
MemoryLocation(SB, LocB.Size, LocB.AATags));
MemoryLocation(SB, LocB.Size, LocB.AATags), AAQI);
if (Result != MayAlias)
return Result;

@@ -56,7 +57,7 @@ AliasResult ObjCARCAAResult::alias(const MemoryLocation &LocA,
const Value *UA = GetUnderlyingObjCPtr(SA, DL);
const Value *UB = GetUnderlyingObjCPtr(SB, DL);
if (UA != SA || UB != SB) {
Result = AAResultBase::alias(MemoryLocation(UA), MemoryLocation(UB));
Result = AAResultBase::alias(MemoryLocation(UA), MemoryLocation(UB), AAQI);
// We can't use MustAlias or PartialAlias results here because
// GetUnderlyingObjCPtr may return an offsetted pointer value.
if (Result == NoAlias)
@@ -69,22 +70,23 @@ AliasResult ObjCARCAAResult::alias(const MemoryLocation &LocA,
}

bool ObjCARCAAResult::pointsToConstantMemory(const MemoryLocation &Loc,
bool OrLocal) {
AAQueryInfo &AAQI, bool OrLocal) {
if (!EnableARCOpts)
return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
return AAResultBase::pointsToConstantMemory(Loc, AAQI, OrLocal);

// First, strip off no-ops, including ObjC-specific no-ops, and try making
// a precise alias query.
const Value *S = GetRCIdentityRoot(Loc.Ptr);
if (AAResultBase::pointsToConstantMemory(
MemoryLocation(S, Loc.Size, Loc.AATags), OrLocal))
MemoryLocation(S, Loc.Size, Loc.AATags), AAQI, OrLocal))
return true;

// If that failed, climb to the underlying object, including climbing through
// ObjC-specific no-ops, and try making an imprecise alias query.
const Value *U = GetUnderlyingObjCPtr(S, DL);
if (U != S)
return AAResultBase::pointsToConstantMemory(MemoryLocation(U), OrLocal);
return AAResultBase::pointsToConstantMemory(MemoryLocation(U), AAQI,
OrLocal);

// If that failed, fail. We don't need to chain here, since that's covered
// by the earlier precise query.
@@ -106,9 +108,10 @@ FunctionModRefBehavior ObjCARCAAResult::getModRefBehavior(const Function *F) {
}

ModRefInfo ObjCARCAAResult::getModRefInfo(const CallBase *Call,
const MemoryLocation &Loc) {
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
if (!EnableARCOpts)
return AAResultBase::getModRefInfo(Call, Loc);
return AAResultBase::getModRefInfo(Call, Loc, AAQI);

switch (GetBasicARCInstKind(Call)) {
case ARCInstKind::Retain:
@@ -127,7 +130,7 @@ ModRefInfo ObjCARCAAResult::getModRefInfo(const CallBase *Call,
break;
}

return AAResultBase::getModRefInfo(Call, Loc);
return AAResultBase::getModRefInfo(Call, Loc, AAQI);
}

ObjCARCAAResult ObjCARCAA::run(Function &F, FunctionAnalysisManager &AM) {
@@ -22,7 +22,7 @@
using namespace llvm;

AliasResult SCEVAAResult::alias(const MemoryLocation &LocA,
const MemoryLocation &LocB) {
const MemoryLocation &LocB, AAQueryInfo &AAQI) {
// If either of the memory references is empty, it doesn't matter what the
// pointer values are. This allows the code below to ignore this special
// case.
@@ -85,11 +85,12 @@ AliasResult SCEVAAResult::alias(const MemoryLocation &LocA,
AO ? AAMDNodes() : LocA.AATags),
MemoryLocation(BO ? BO : LocB.Ptr,
BO ? LocationSize::unknown() : LocB.Size,
BO ? AAMDNodes() : LocB.AATags)) == NoAlias)
BO ? AAMDNodes() : LocB.AATags),
AAQI) == NoAlias)
return NoAlias;

// Forward the query to the next analysis.
return AAResultBase::alias(LocA, LocB);
return AAResultBase::alias(LocA, LocB, AAQI);
}

/// Given an expression, try to find a base value.
@@ -75,9 +75,10 @@ public:
} // end anonymous namespace

AliasResult ScopedNoAliasAAResult::alias(const MemoryLocation &LocA,
const MemoryLocation &LocB) {
const MemoryLocation &LocB,
AAQueryInfo &AAQI) {
if (!EnableScopedNoAlias)
return AAResultBase::alias(LocA, LocB);
return AAResultBase::alias(LocA, LocB, AAQI);

// Get the attached MDNodes.
const MDNode *AScopes = LocA.AATags.Scope, *BScopes = LocB.AATags.Scope;
@@ -91,13 +92,14 @@ AliasResult ScopedNoAliasAAResult::alias(const MemoryLocation &LocA,
return NoAlias;

// If they may alias, chain to the next AliasAnalysis.
return AAResultBase::alias(LocA, LocB);
return AAResultBase::alias(LocA, LocB, AAQI);
}

ModRefInfo ScopedNoAliasAAResult::getModRefInfo(const CallBase *Call,
const MemoryLocation &Loc) {
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
if (!EnableScopedNoAlias)
return AAResultBase::getModRefInfo(Call, Loc);
return AAResultBase::getModRefInfo(Call, Loc, AAQI);

if (!mayAliasInScopes(Loc.AATags.Scope,
Call->getMetadata(LLVMContext::MD_noalias)))
@@ -107,13 +109,14 @@ ModRefInfo ScopedNoAliasAAResult::getModRefInfo(const CallBase *Call,
Loc.AATags.NoAlias))
return ModRefInfo::NoModRef;

return AAResultBase::getModRefInfo(Call, Loc);
return AAResultBase::getModRefInfo(Call, Loc, AAQI);
}

ModRefInfo ScopedNoAliasAAResult::getModRefInfo(const CallBase *Call1,
const CallBase *Call2) {
const CallBase *Call2,
AAQueryInfo &AAQI) {
if (!EnableScopedNoAlias)
return AAResultBase::getModRefInfo(Call1, Call2);
return AAResultBase::getModRefInfo(Call1, Call2, AAQI);

if (!mayAliasInScopes(Call1->getMetadata(LLVMContext::MD_alias_scope),
Call2->getMetadata(LLVMContext::MD_noalias)))
@@ -123,7 +126,7 @@ ModRefInfo ScopedNoAliasAAResult::getModRefInfo(const CallBase *Call1,
Call1->getMetadata(LLVMContext::MD_noalias)))
return ModRefInfo::NoModRef;

return AAResultBase::getModRefInfo(Call1, Call2);
return AAResultBase::getModRefInfo(Call1, Call2, AAQI);
}

static void collectMDInDomain(const MDNode *List, const MDNode *Domain,
@@ -367,26 +367,28 @@ static bool isStructPathTBAA(const MDNode *MD) {
}

AliasResult TypeBasedAAResult::alias(const MemoryLocation &LocA,
const MemoryLocation &LocB) {
const MemoryLocation &LocB,
AAQueryInfo &AAQI) {
if (!EnableTBAA)
return AAResultBase::alias(LocA, LocB);
return AAResultBase::alias(LocA, LocB, AAQI);

// If accesses may alias, chain to the next AliasAnalysis.
if (Aliases(LocA.AATags.TBAA, LocB.AATags.TBAA))
return AAResultBase::alias(LocA, LocB);
return AAResultBase::alias(LocA, LocB, AAQI);

// Otherwise return a definitive result.
return NoAlias;
}

bool TypeBasedAAResult::pointsToConstantMemory(const MemoryLocation &Loc,
AAQueryInfo &AAQI,
bool OrLocal) {
if (!EnableTBAA)
return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
return AAResultBase::pointsToConstantMemory(Loc, AAQI, OrLocal);

const MDNode *M = Loc.AATags.TBAA;
if (!M)
return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
return AAResultBase::pointsToConstantMemory(Loc, AAQI, OrLocal);

// If this is an "immutable" type, we can assume the pointer is pointing
// to constant memory.
@@ -394,7 +396,7 @@ bool TypeBasedAAResult::pointsToConstantMemory(const MemoryLocation &Loc,
(isStructPathTBAA(M) && TBAAStructTagNode(M).isTypeImmutable()))
return true;

return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
return AAResultBase::pointsToConstantMemory(Loc, AAQI, OrLocal);
}

FunctionModRefBehavior
@@ -420,29 +422,31 @@ FunctionModRefBehavior TypeBasedAAResult::getModRefBehavior(const Function *F) {
}

ModRefInfo TypeBasedAAResult::getModRefInfo(const CallBase *Call,
const MemoryLocation &Loc) {
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
if (!EnableTBAA)
return AAResultBase::getModRefInfo(Call, Loc);
return AAResultBase::getModRefInfo(Call, Loc, AAQI);

if (const MDNode *L = Loc.AATags.TBAA)
if (const MDNode *M = Call->getMetadata(LLVMContext::MD_tbaa))
if (!Aliases(L, M))
return ModRefInfo::NoModRef;

return AAResultBase::getModRefInfo(Call, Loc);
return AAResultBase::getModRefInfo(Call, Loc, AAQI);
}

ModRefInfo TypeBasedAAResult::getModRefInfo(const CallBase *Call1,
const CallBase *Call2) {
const CallBase *Call2,
AAQueryInfo &AAQI) {
if (!EnableTBAA)
return AAResultBase::getModRefInfo(Call1, Call2);
return AAResultBase::getModRefInfo(Call1, Call2, AAQI);

if (const MDNode *M1 = Call1->getMetadata(LLVMContext::MD_tbaa))
if (const MDNode *M2 = Call2->getMetadata(LLVMContext::MD_tbaa))
if (!Aliases(M1, M2))
return ModRefInfo::NoModRef;

return AAResultBase::getModRefInfo(Call1, Call2);
return AAResultBase::getModRefInfo(Call1, Call2, AAQI);
}

bool MDNode::isTBAAVtableAccess() const {
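Every analysis in the hunks above (CFL-Anders, GlobalsModRef, ObjCARC, SCEV, scoped-noalias, TBAA, and the AMDGPU target AA below) is updated in the same way: each override gains an AAQueryInfo & parameter and forwards it unchanged whenever it chains to AAResultBase, so nested queries keep sharing one per-query state. A minimal sketch of that shape, assuming the AAResultBase signatures introduced by this patch, follows; ExampleAAResult is a hypothetical analysis, not in-tree code.

#include "llvm/Analysis/AliasAnalysis.h"
using namespace llvm;

class ExampleAAResult : public AAResultBase<ExampleAAResult> {
  friend AAResultBase<ExampleAAResult>;

public:
  AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
                    AAQueryInfo &AAQI) {
    // A real analysis would try to prove a definitive result here first.
    // When it cannot, it must chain with the same AAQI, never a fresh one,
    // so the recursion shares the caches of the current root query.
    return AAResultBase::alias(LocA, LocB, AAQI);
  }
};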
@@ -76,7 +76,8 @@ static AliasResult getAliasResult(unsigned AS1, unsigned AS2) {
}

AliasResult AMDGPUAAResult::alias(const MemoryLocation &LocA,
const MemoryLocation &LocB) {
const MemoryLocation &LocB,
AAQueryInfo &AAQI) {
unsigned asA = LocA.Ptr->getType()->getPointerAddressSpace();
unsigned asB = LocB.Ptr->getType()->getPointerAddressSpace();

@@ -85,11 +86,11 @@ AliasResult AMDGPUAAResult::alias(const MemoryLocation &LocA,
return Result;

// Forward the query to the next alias analysis.
return AAResultBase::alias(LocA, LocB);
return AAResultBase::alias(LocA, LocB, AAQI);
}

bool AMDGPUAAResult::pointsToConstantMemory(const MemoryLocation &Loc,
bool OrLocal) {
AAQueryInfo &AAQI, bool OrLocal) {
const Value *Base = GetUnderlyingObject(Loc.Ptr, DL);
unsigned AS = Base->getType()->getPointerAddressSpace();
if (AS == AMDGPUAS::CONSTANT_ADDRESS ||
@@ -106,7 +107,7 @@ bool AMDGPUAAResult::pointsToConstantMemory(const MemoryLocation &Loc,
// Only assume constant memory for arguments on kernels.
switch (F->getCallingConv()) {
default:
return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
return AAResultBase::pointsToConstantMemory(Loc, AAQI, OrLocal);
case CallingConv::AMDGPU_LS:
case CallingConv::AMDGPU_HS:
case CallingConv::AMDGPU_ES:
@@ -133,5 +134,5 @@ bool AMDGPUAAResult::pointsToConstantMemory(const MemoryLocation &Loc,
return true;
}
}
return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
return AAResultBase::pointsToConstantMemory(Loc, AAQI, OrLocal);
}
@@ -44,8 +44,10 @@ public:
/// By definition, this result is stateless and so remains valid.
bool invalidate(Function &, const PreservedAnalyses &) { return false; }

AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB);
bool pointsToConstantMemory(const MemoryLocation &Loc, bool OrLocal);
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
AAQueryInfo &AAQI);
bool pointsToConstantMemory(const MemoryLocation &Loc, AAQueryInfo &AAQI,
bool OrLocal);

private:
bool Aliases(const MDNode *A, const MDNode *B) const;
@@ -85,7 +85,8 @@ struct TestCustomAAResult : AAResultBase<TestCustomAAResult> {

bool invalidate(Function &, const PreservedAnalyses &) { return false; }

AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB) {
AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB,
AAQueryInfo &AAQI) {
CB();
return MayAlias;
}
@@ -44,17 +44,19 @@ protected:
DominatorTree DT;
AssumptionCache AC;
BasicAAResult BAA;
AAQueryInfo AAQI;

TestAnalyses(BasicAATest &Test)
: DT(*Test.F), AC(*Test.F), BAA(Test.DL, *Test.F, Test.TLI, AC, &DT) {}
: DT(*Test.F), AC(*Test.F), BAA(Test.DL, *Test.F, Test.TLI, AC, &DT),
AAQI() {}
};

llvm::Optional<TestAnalyses> Analyses;

BasicAAResult &setupAnalyses() {
TestAnalyses &setupAnalyses() {
assert(F);
Analyses.emplace(*this);
return Analyses->BAA;
return Analyses.getValue();
}

public:
@@ -83,15 +85,17 @@ TEST_F(BasicAATest, AliasInstWithObjectOfImpreciseSize) {
GlobalPtr->setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
GlobalPtr->setInitializer(B.getInt8(0));

BasicAAResult &BasicAA = setupAnalyses();
auto &AllAnalyses = setupAnalyses();
BasicAAResult &BasicAA = AllAnalyses.BAA;
AAQueryInfo &AAQI = AllAnalyses.AAQI;
ASSERT_EQ(
BasicAA.alias(MemoryLocation(IncomingI32Ptr, LocationSize::precise(4)),
MemoryLocation(GlobalPtr, LocationSize::precise(1))),
MemoryLocation(GlobalPtr, LocationSize::precise(1)), AAQI),
AliasResult::NoAlias);

ASSERT_EQ(
BasicAA.alias(MemoryLocation(IncomingI32Ptr, LocationSize::upperBound(4)),
MemoryLocation(GlobalPtr, LocationSize::precise(1))),
MemoryLocation(GlobalPtr, LocationSize::precise(1)), AAQI),
AliasResult::MayAlias);
}

@@ -110,14 +114,18 @@ TEST_F(BasicAATest, AliasInstWithFullObjectOfImpreciseSize) {
auto *I8AtUncertainOffset =
cast<GetElementPtrInst>(B.CreateGEP(B.getInt8Ty(), I8, ArbitraryI32));

BasicAAResult &BasicAA = setupAnalyses();
auto &AllAnalyses = setupAnalyses();
BasicAAResult &BasicAA = AllAnalyses.BAA;
AAQueryInfo &AAQI = AllAnalyses.AAQI;
ASSERT_EQ(BasicAA.alias(
MemoryLocation(I8, LocationSize::precise(2)),
MemoryLocation(I8AtUncertainOffset, LocationSize::precise(1))),
MemoryLocation(I8AtUncertainOffset, LocationSize::precise(1)),
AAQI),
AliasResult::PartialAlias);

ASSERT_EQ(BasicAA.alias(
MemoryLocation(I8, LocationSize::upperBound(2)),
MemoryLocation(I8AtUncertainOffset, LocationSize::precise(1))),
MemoryLocation(I8AtUncertainOffset, LocationSize::precise(1)),
AAQI),
AliasResult::MayAlias);
}
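The unit-test hunks above illustrate the other side of the API change: a direct call into BasicAAResult now has to supply an AAQueryInfo explicitly. The small hypothetical helper below (not part of the patch) makes the two usage modes clear: constructing a fresh AAQueryInfo per call preserves the old stateless root-query behaviour, while reusing one object across calls opts into batch-style caching.

#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/BasicAliasAnalysis.h"
#include "llvm/Analysis/MemoryLocation.h"
using namespace llvm;

// One root query with its own, short-lived per-query state.
static AliasResult aliasOnce(BasicAAResult &BAA, const MemoryLocation &A,
                             const MemoryLocation &B) {
  AAQueryInfo AAQI; // caches live only for this one query tree
  return BAA.alias(A, B, AAQI);
}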