//===- AliasSetTracker.cpp - Alias Sets Tracker implementation-------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the AliasSetTracker and AliasSet classes.
//
//===----------------------------------------------------------------------===//
#include "llvm/Analysis/AliasSetTracker.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Type.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;
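// Typical usage (a minimal sketch, not taken from this file; the helper name
// and surrounding set-up are assumptions): a client constructs a tracker over
// an AliasAnalysis, feeds it IR, and then walks the resulting sets, much like
// the AliasSetPrinter pass at the end of this file does.
//
//   void collectAliasSets(AliasAnalysis &AA, Function &F) {
//     AliasSetTracker Tracker(AA);
//     for (BasicBlock &BB : F)
//       Tracker.add(BB);            // dispatches to the typed add() overloads
//     for (const AliasSet &AS : Tracker)
//       AS.print(errs());           // each set is one may/must-alias partition
//   }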
/// mergeSetIn - Merge the specified alias set into this alias set.
///
void AliasSet::mergeSetIn(AliasSet &AS, AliasSetTracker &AST) {
  assert(!AS.Forward && "Alias set is already forwarding!");
  assert(!Forward && "This set is a forwarding set!!");

  // Update the alias and access types of this set...
  Access |= AS.Access;
  Alias |= AS.Alias;
  Volatile |= AS.Volatile;

  if (Alias == SetMustAlias) {
    // Check that these two merged sets really are must aliases. Since both
    // used to be must-alias sets, we can just check any pointer from each set
    // for aliasing.
    AliasAnalysis &AA = AST.getAliasAnalysis();
    PointerRec *L = getSomePointer();
    PointerRec *R = AS.getSomePointer();

    // If the pointers are not a must-alias pair, this set becomes a may alias.
    if (AA.alias(MemoryLocation(L->getValue(), L->getSize(), L->getAAInfo()),
                 MemoryLocation(R->getValue(), R->getSize(), R->getAAInfo())) !=
        MustAlias)
      Alias = SetMayAlias;
  }

  bool ASHadUnknownInsts = !AS.UnknownInsts.empty();
  if (UnknownInsts.empty()) { // Merge call sites...
    if (ASHadUnknownInsts) {
      std::swap(UnknownInsts, AS.UnknownInsts);
      addRef();
    }
  } else if (ASHadUnknownInsts) {
    UnknownInsts.insert(UnknownInsts.end(), AS.UnknownInsts.begin(),
                        AS.UnknownInsts.end());
    AS.UnknownInsts.clear();
  }

  AS.Forward = this; // Forward across AS now...
  addRef();          // AS is now pointing to us...

  // Merge the list of constituent pointers...
  if (AS.PtrList) {
    *PtrListEnd = AS.PtrList;
    AS.PtrList->setPrevInList(PtrListEnd);
    PtrListEnd = AS.PtrListEnd;

    AS.PtrList = nullptr;
    AS.PtrListEnd = &AS.PtrList;
    assert(*AS.PtrListEnd == nullptr && "End of list is not null?");
  }
  if (ASHadUnknownInsts)
    AS.dropRef(AST);
}
void AliasSetTracker::removeAliasSet(AliasSet *AS) {
  if (AliasSet *Fwd = AS->Forward) {
    Fwd->dropRef(*this);
    AS->Forward = nullptr;
  }
  AliasSets.erase(AS);
}
void AliasSet::removeFromTracker(AliasSetTracker &AST) {
  assert(RefCount == 0 && "Cannot remove non-dead alias set from tracker!");
  AST.removeAliasSet(this);
}
void AliasSet::addPointer(AliasSetTracker &AST, PointerRec &Entry,
                          uint64_t Size, const AAMDNodes &AAInfo,
                          ModRefInfo MR, bool KnownMustAlias) {
  assert(!Entry.hasAliasSet() && "Entry already in set!");

  // Check to see if we have to downgrade to _may_ alias.
  if (isMustAlias() && !KnownMustAlias)
    if (PointerRec *P = getSomePointer()) {
      AliasAnalysis &AA = AST.getAliasAnalysis();
      AliasResult Result =
          AA.alias(MemoryLocation(P->getValue(), P->getSize(), P->getAAInfo()),
                   MemoryLocation(Entry.getValue(), Size, AAInfo));
      if (Result != MustAlias)
        Alias = SetMayAlias;
      else // First entry of must alias must have maximum size!
        P->updateSizeAndAAInfo(Size, AAInfo);
      assert(Result != NoAlias && "Cannot be part of must set!");
    }

  // Upgrade the access kind if an existing UnknownInst has Mod/Ref effects on
  // the new pointer.
  Access |= MR;

  Entry.setAliasSet(this);
  Entry.updateSizeAndAAInfo(Size, AAInfo);

  // Add it to the end of the list...
  assert(*PtrListEnd == nullptr && "End of list is not null?");
  *PtrListEnd = &Entry;
  PtrListEnd = Entry.setPrevInList(PtrListEnd);
  assert(*PtrListEnd == nullptr && "End of list is not null?");
  addRef(); // Entry points to alias set.
}
void AliasSet::addUnknownInst(Instruction *I, AliasAnalysis &AA,
                              ModRefInfo MR) {
  if (UnknownInsts.empty())
    addRef();
  UnknownInsts.emplace_back(I);

  if (!I->mayWriteToMemory()) {
    Alias = SetMayAlias;
    Access |= MRI_Ref;
    return;
  }

  Alias = SetMayAlias;
  Access |= MR;
}
namespace {
/// Returns true when no worst-case ModRefInfo summary was requested
/// (MRcommonPtr is null) or when the maximum ModRefInfo (MRI_ModRef) has
/// already been accumulated, so the caller can stop querying early.
bool processMR(ModRefInfo MR, ModRefInfo *MRcommonPtr, ModRefInfo &MRcommon) {
  MRcommon = ModRefInfo(MRcommon | MR);
  return !MRcommonPtr || (MRcommon == MRI_ModRef);
}

/// Writes the accumulated ModRefInfo back to the caller (if requested) and
/// returns true if any mod/ref relation was found.
bool fillExitMR(ModRefInfo *MRcommonPtr, ModRefInfo &MRcommon) {
  if (MRcommonPtr)
    *MRcommonPtr = MRcommon;
  return MRcommon != MRI_NoModRef;
}
} // end anonymous namespace
/// aliasesPointer - Return true if the specified pointer "may" (or must)
/// alias one of the members in the set.
///
bool AliasSet::aliasesPointer(const Value *Ptr, uint64_t Size,
                              const AAMDNodes &AAInfo,
                              AliasAnalysis &AA,
                              ModRefInfo *MRcommonPtr) const {
  if (Alias == SetMustAlias) {
    assert(UnknownInsts.empty() && "Illegal must alias set!");

    // If this is a set of MustAliases, only check to see if the pointer aliases
    // SOME value in the set.
    PointerRec *SomePtr = getSomePointer();
    assert(SomePtr && "Empty must-alias set??");
    return AA.alias(MemoryLocation(SomePtr->getValue(), SomePtr->getSize(),
                                   SomePtr->getAAInfo()),
                    MemoryLocation(Ptr, Size, AAInfo));
  }

  // If this is a may-alias set, we have to check all of the pointers in the set
  // to be sure it doesn't alias the set...
  for (iterator I = begin(), E = end(); I != E; ++I)
    if (AA.alias(MemoryLocation(Ptr, Size, AAInfo),
                 MemoryLocation(I.getPointer(), I.getSize(), I.getAAInfo())))
      return true;

  // Gather the worst-case ModRefInfo seen across the unknown instructions.
  ModRefInfo MRcommon = MRI_NoModRef;

  // Check the unknown instructions...
  if (!UnknownInsts.empty()) {
    for (unsigned i = 0, e = UnknownInsts.size(); i != e; ++i)
      if (processMR(AA.getModRefInfo(UnknownInsts[i],
                                     MemoryLocation(Ptr, Size, AAInfo)),
                    MRcommonPtr, MRcommon))
        return fillExitMR(MRcommonPtr, MRcommon);
  }

  return fillExitMR(MRcommonPtr, MRcommon);
}
bool AliasSet::aliasesUnknownInst(const Instruction *Inst,
                                  AliasAnalysis &AA,
                                  ModRefInfo *MRcommonPtr) const {
  if (!Inst->mayReadOrWriteMemory())
    return false;

  // Gather the worst-case ModRefInfo seen across the queries below.
  ModRefInfo MRcommon = MRI_NoModRef;

  for (unsigned i = 0, e = UnknownInsts.size(); i != e; ++i) {
    ImmutableCallSite C1(getUnknownInst(i)), C2(Inst);
    if (!C1 || !C2 ||
        processMR(AA.getModRefInfo(C1, C2), MRcommonPtr, MRcommon) ||
        processMR(AA.getModRefInfo(C2, C1), MRcommonPtr, MRcommon))
      return fillExitMR(MRcommonPtr, MRcommon);
  }

  for (iterator I = begin(), E = end(); I != E; ++I) {
    ModRefInfo MR = AA.getModRefInfo(
        Inst, MemoryLocation(I.getPointer(), I.getSize(), I.getAAInfo()));
    if (processMR(MR, MRcommonPtr, MRcommon))
      return fillExitMR(MRcommonPtr, MRcommon);
  }

  return fillExitMR(MRcommonPtr, MRcommon);
}
void AliasSetTracker::clear() {
  // Delete all the PointerRec entries.
  for (PointerMapType::iterator I = PointerMap.begin(), E = PointerMap.end();
       I != E; ++I)
    I->second->eraseFromList();

  PointerMap.clear();

  // The alias sets should all be clear now.
  AliasSets.clear();
}
/// findAliasSetForPointer - Given a pointer, find the one alias set to put the
/// instruction referring to the pointer into. If there are multiple alias sets
/// that may alias the pointer, merge them together and return the unified set.
///
AliasSet *AliasSetTracker::findAliasSetForPointer(const Value *Ptr,
                                                  uint64_t Size,
                                                  const AAMDNodes &AAInfo,
                                                  ModRefInfo *MRcommonPtr) {
  AliasSet *FoundSet = nullptr;
  for (iterator I = begin(), E = end(); I != E;) {
    iterator Cur = I++;
    ModRefInfo MR = MRI_NoModRef;
    if (Cur->Forward || !Cur->aliasesPointer(Ptr, Size, AAInfo, AA, &MR))
      continue;

    // MRcommonPtr may be null when the caller has no use for the mod/ref
    // summary (e.g. the remove() paths), so guard the write-back.
    if (MRcommonPtr)
      *MRcommonPtr = ModRefInfo(*MRcommonPtr | MR);

    if (!FoundSet) {      // If this is the first alias set ptr can go into.
      FoundSet = &*Cur;   // Remember it.
    } else {              // Otherwise, we must merge the sets.
      FoundSet->mergeSetIn(*Cur, *this); // Merge in contents.
    }
  }
  return FoundSet;
}
/// containsPointer - Return true if the specified location is represented by
/// this alias set, false otherwise. This does not modify the AST object or
/// alias sets.
bool AliasSetTracker::containsPointer(const Value *Ptr, uint64_t Size,
                                      const AAMDNodes &AAInfo) const {
  for (const_iterator I = begin(), E = end(); I != E; ++I)
    if (!I->Forward && I->aliasesPointer(Ptr, Size, AAInfo, AA))
      return true;
  return false;
}
bool AliasSetTracker::containsUnknown(const Instruction *Inst) const {
  for (const_iterator I = begin(), E = end(); I != E; ++I)
    if (!I->Forward && I->aliasesUnknownInst(Inst, AA))
      return true;
  return false;
}
AliasSet *AliasSetTracker::findAliasSetForUnknownInst(Instruction *Inst,
                                                      ModRefInfo *MRcommonPtr) {
  AliasSet *FoundSet = nullptr;
  for (iterator I = begin(), E = end(); I != E;) {
    iterator Cur = I++;
    ModRefInfo MR = MRI_NoModRef;
    if (Cur->Forward || !Cur->aliasesUnknownInst(Inst, AA, &MR))
      continue;

    // MRcommonPtr may be null when the caller has no use for the mod/ref
    // summary (e.g. removeUnknown()), so guard the write-back.
    if (MRcommonPtr)
      *MRcommonPtr = ModRefInfo(*MRcommonPtr | MR);

    if (!FoundSet)           // If this is the first alias set ptr can go into.
      FoundSet = &*Cur;      // Remember it.
    else if (!Cur->Forward)  // Otherwise, we must merge the sets.
      FoundSet->mergeSetIn(*Cur, *this); // Merge in contents.
  }
  return FoundSet;
}
/// getAliasSetForPointer - Return the alias set that the specified pointer
/// lives in.
AliasSet &AliasSetTracker::getAliasSetForPointer(Value *Pointer, uint64_t Size,
                                                 const AAMDNodes &AAInfo,
                                                 bool *New) {
  AliasSet::PointerRec &Entry = getEntryFor(Pointer);

  // Check to see if the pointer is already known.
  if (Entry.hasAliasSet()) {
    Entry.updateSizeAndAAInfo(Size, AAInfo);
    // Return the set!
    return *Entry.getAliasSet(*this)->getForwardedTarget(*this);
  }

  ModRefInfo MR = MRI_NoModRef;
  if (AliasSet *AS = findAliasSetForPointer(Pointer, Size, AAInfo, &MR)) {
    // Add it to the alias set it aliases.
    AS->addPointer(*this, Entry, Size, AAInfo, MR);
    return *AS;
  }

  if (New) *New = true;

  // Otherwise create a new alias set to hold the loaded pointer.
  AliasSets.push_back(new AliasSet());
  AliasSets.back().addPointer(*this, Entry, Size, AAInfo, MRI_NoModRef);
  return AliasSets.back();
}
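// For reference, a sketch of how a caller typically uses the method above
// (variable names are illustrative, not from this file):
//
//   bool NewSet = false;
//   AliasSet &AS = Tracker.getAliasSetForPointer(Ptr, Size, AAInfo, &NewSet);
//   if (NewSet) {
//     // Ptr did not alias anything the tracker had seen before this call.
//   }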
bool AliasSetTracker::add(Value *Ptr, uint64_t Size, const AAMDNodes &AAInfo) {
  bool NewPtr;
  addPointer(Ptr, Size, AAInfo, MRI_NoModRef, NewPtr);
  return NewPtr;
}
bool AliasSetTracker::add(LoadInst *LI) {
  if (LI->getOrdering() > Monotonic) return addUnknown(LI);
  AAMDNodes AAInfo;
  LI->getAAMetadata(AAInfo);

  AliasSet::AccessLattice Access = MRI_Ref;
  bool NewPtr;
  const DataLayout &DL = LI->getModule()->getDataLayout();
  AliasSet &AS = addPointer(LI->getOperand(0),
                            DL.getTypeStoreSize(LI->getType()),
                            AAInfo, Access, NewPtr);
  if (LI->isVolatile()) AS.setVolatile();
  return NewPtr;
}
bool AliasSetTracker::add(StoreInst *SI) {
  if (SI->getOrdering() > Monotonic) return addUnknown(SI);
  AAMDNodes AAInfo;
  SI->getAAMetadata(AAInfo);

  AliasSet::AccessLattice Access = MRI_Mod;
  bool NewPtr;
  const DataLayout &DL = SI->getModule()->getDataLayout();
  Value *Val = SI->getOperand(0);
  AliasSet &AS = addPointer(SI->getOperand(1),
                            DL.getTypeStoreSize(Val->getType()),
                            AAInfo, Access, NewPtr);
  if (SI->isVolatile()) AS.setVolatile();
  return NewPtr;
}
bool AliasSetTracker::add(VAArgInst *VAAI) {
  AAMDNodes AAInfo;
  VAAI->getAAMetadata(AAInfo);

  bool NewPtr;
  addPointer(VAAI->getOperand(0), MemoryLocation::UnknownSize, AAInfo,
             MRI_ModRef, NewPtr);
  return NewPtr;
}
bool AliasSetTracker::addUnknown(Instruction *Inst) {
  if (isa<DbgInfoIntrinsic>(Inst))
    return true; // Ignore DbgInfo Intrinsics.
  if (!Inst->mayReadOrWriteMemory())
    return true; // doesn't alias anything

  ModRefInfo MR = MRI_NoModRef;
  AliasSet *AS = findAliasSetForUnknownInst(Inst, &MR);
  if (AS) {
    AS->addUnknownInst(Inst, AA, MR);
    return false;
  }
  AliasSets.push_back(new AliasSet());
  AS = &AliasSets.back();
  AS->addUnknownInst(Inst, AA, MR);
  return true;
}
bool AliasSetTracker::add(Instruction *I) {
  // Dispatch to one of the other add methods.
  if (LoadInst *LI = dyn_cast<LoadInst>(I))
    return add(LI);
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return add(SI);
  if (VAArgInst *VAAI = dyn_cast<VAArgInst>(I))
    return add(VAAI);
  return addUnknown(I);
}
void AliasSetTracker::add(BasicBlock &BB) {
  for (auto &I : BB)
    add(&I);
}
void AliasSetTracker::add(const AliasSetTracker &AST) {
  assert(&AA == &AST.AA &&
         "Merging AliasSetTracker objects with different Alias Analyses!");

  // Loop over all of the alias sets in AST, adding the pointers contained
  // therein into the current alias sets. This can cause alias sets to be
  // merged together in the current AST.
  for (const_iterator I = AST.begin(), E = AST.end(); I != E; ++I) {
    if (I->Forward) continue; // Ignore forwarding alias sets

    AliasSet &AS = const_cast<AliasSet&>(*I);

    // If there are any call sites in the alias set, add them to this AST.
    for (unsigned i = 0, e = AS.UnknownInsts.size(); i != e; ++i)
      add(AS.UnknownInsts[i]);

    // Loop over all of the pointers in this alias set.
    bool X;
    for (AliasSet::iterator ASI = AS.begin(), E = AS.end(); ASI != E; ++ASI) {
      AliasSet &NewAS = addPointer(ASI.getPointer(), ASI.getSize(),
                                   ASI.getAAInfo(),
                                   (AliasSet::AccessLattice)AS.Access, X);
      if (AS.isVolatile()) NewAS.setVolatile();
    }
  }
}
/// remove - Remove the specified (potentially non-empty) alias set from the
/// tracker.
void AliasSetTracker::remove(AliasSet &AS) {
  // Drop all call sites.
  if (!AS.UnknownInsts.empty())
    AS.dropRef(*this);
  AS.UnknownInsts.clear();

  // Clear the alias set.
  unsigned NumRefs = 0;
  while (!AS.empty()) {
    AliasSet::PointerRec *P = AS.PtrList;

    Value *ValToRemove = P->getValue();

    // Unlink and delete entry from the list of values.
    P->eraseFromList();

    // Remember how many references need to be dropped.
    ++NumRefs;

    // Finally, remove the entry.
    PointerMap.erase(ValToRemove);
  }

  // Stop using the alias set, removing it.
  AS.RefCount -= NumRefs;
  if (AS.RefCount == 0)
    AS.removeFromTracker(*this);
}
bool
AliasSetTracker::remove(Value *Ptr, uint64_t Size, const AAMDNodes &AAInfo) {
  AliasSet *AS = findAliasSetForPointer(Ptr, Size, AAInfo);
  if (!AS) return false;
  remove(*AS);
  return true;
}
bool AliasSetTracker::remove(LoadInst *LI) {
  const DataLayout &DL = LI->getModule()->getDataLayout();
  uint64_t Size = DL.getTypeStoreSize(LI->getType());
  AAMDNodes AAInfo;
  LI->getAAMetadata(AAInfo);
  AliasSet *AS = findAliasSetForPointer(LI->getOperand(0), Size, AAInfo);
  if (!AS) return false;
  remove(*AS);
  return true;
}
bool AliasSetTracker::remove(StoreInst *SI) {
  const DataLayout &DL = SI->getModule()->getDataLayout();
  uint64_t Size = DL.getTypeStoreSize(SI->getOperand(0)->getType());
  AAMDNodes AAInfo;
  SI->getAAMetadata(AAInfo);
  AliasSet *AS = findAliasSetForPointer(SI->getOperand(1), Size, AAInfo);
  if (!AS) return false;
  remove(*AS);
  return true;
}
bool AliasSetTracker::remove(VAArgInst *VAAI) {
  AAMDNodes AAInfo;
  VAAI->getAAMetadata(AAInfo);
  AliasSet *AS = findAliasSetForPointer(VAAI->getOperand(0),
                                        MemoryLocation::UnknownSize, AAInfo);
  if (!AS) return false;
  remove(*AS);
  return true;
}
bool AliasSetTracker::removeUnknown(Instruction *I) {
  if (!I->mayReadOrWriteMemory())
    return false; // doesn't alias anything

  AliasSet *AS = findAliasSetForUnknownInst(I);
  if (!AS) return false;
  remove(*AS);
  return true;
}
bool AliasSetTracker::remove(Instruction *I) {
  // Dispatch to one of the other remove methods...
  if (LoadInst *LI = dyn_cast<LoadInst>(I))
    return remove(LI);
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return remove(SI);
  if (VAArgInst *VAAI = dyn_cast<VAArgInst>(I))
    return remove(VAAI);
  return removeUnknown(I);
}
// deleteValue method - This method is used to remove a pointer value from the
// AliasSetTracker entirely. It should be used when an instruction is deleted
// from the program to update the AST. If you don't use this, you would have
// dangling pointers to deleted instructions.
//
void AliasSetTracker::deleteValue(Value *PtrVal) {
  // If this is a call instruction, remove the callsite from the appropriate
  // AliasSet (if present).
  if (Instruction *Inst = dyn_cast<Instruction>(PtrVal)) {
    if (Inst->mayReadOrWriteMemory()) {
      // Scan all the alias sets to see if this call site is contained.
      for (iterator I = begin(), E = end(); I != E;) {
        iterator Cur = I++;
        if (!Cur->Forward)
          Cur->removeUnknownInst(*this, Inst);
      }
    }
  }

  // First, look up the PointerRec for this pointer.
  PointerMapType::iterator I = PointerMap.find_as(PtrVal);
  if (I == PointerMap.end()) return; // Noop

  // If we found one, remove the pointer from the alias set it is in.
  AliasSet::PointerRec *PtrValEnt = I->second;
  AliasSet *AS = PtrValEnt->getAliasSet(*this);

  // Unlink and delete from the list of values.
  PtrValEnt->eraseFromList();

  // Stop using the alias set.
  AS->dropRef(*this);

  PointerMap.erase(I);
}
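// For reference, a sketch of the intended calling pattern (names are
// illustrative, not from this file): notify the tracker before erasing a
// tracked instruction so that no dangling PointerRec survives.
//
//   AST.deleteValue(DeadInst);     // drop it from its alias set / PointerMap
//   DeadInst->eraseFromParent();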
// copyValue - This method should be used whenever a preexisting value in the
// program is copied or cloned, introducing a new value. Note that it is ok for
// clients that use this method to introduce the same value multiple times: if
// the tracker already knows about a value, it will ignore the request.
//
void AliasSetTracker::copyValue(Value *From, Value *To) {
  // First, look up the PointerRec for this pointer.
  PointerMapType::iterator I = PointerMap.find_as(From);
  if (I == PointerMap.end())
    return; // Noop
  assert(I->second->hasAliasSet() && "Dead entry?");

  AliasSet::PointerRec &Entry = getEntryFor(To);
  if (Entry.hasAliasSet()) return; // Already in the tracker!

  // Add it to the alias set it aliases...
  I = PointerMap.find_as(From);
  AliasSet *AS = I->second->getAliasSet(*this);
  AS->addPointer(*this, Entry, I->second->getSize(),
                 I->second->getAAInfo(), MRI_NoModRef, true);
}
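// For reference, a sketch of the intended calling pattern (names are
// illustrative, not from this file): after cloning a tracked pointer value,
// register the clone as a known copy so it lands in the same alias set.
//
//   Instruction *NewPtr = OrigPtr->clone();
//   NewPtr->insertBefore(InsertPt);
//   AST.copyValue(OrigPtr, NewPtr);  // NewPtr joins OrigPtr's alias set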
//===----------------------------------------------------------------------===//
// AliasSet/AliasSetTracker Printing Support
//===----------------------------------------------------------------------===//
void AliasSet::print(raw_ostream &OS) const {
OS << " AliasSet[" << (const void*)this << ", " << RefCount << "] ";
OS << (Alias == SetMustAlias ? "must" : "may") << " alias, ";
switch (Access) {
case MRI_NoModRef: OS << "No access "; break;
case MRI_Ref: OS << "Ref "; break;
case MRI_Mod: OS << "Mod "; break;
case MRI_ModRef: OS << "Mod/Ref "; break;
default: llvm_unreachable("Bad value for Access!");
}
if (isVolatile()) OS << "[volatile] ";
if (Forward)
OS << " forwarding to " << (void*)Forward;
if (!empty()) {
OS << "Pointers: ";
for (iterator I = begin(), E = end(); I != E; ++I) {
if (I != begin()) OS << ", ";
I.getPointer()->printAsOperand(OS << "(");
OS << ", " << I.getSize() << ")";
}
}
if (!UnknownInsts.empty()) {
OS << "\n " << UnknownInsts.size() << " Unknown instructions: ";
for (unsigned i = 0, e = UnknownInsts.size(); i != e; ++i) {
if (i) OS << ", ";
UnknownInsts[i]->printAsOperand(OS);
}
}
OS << "\n";
}
void AliasSetTracker::print(raw_ostream &OS) const {
OS << "Alias Set Tracker: " << AliasSets.size() << " alias sets for "
<< PointerMap.size() << " pointer values.\n";
for (const_iterator I = begin(), E = end(); I != E; ++I)
I->print(OS);
OS << "\n";
}
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void AliasSet::dump() const { print(dbgs()); }
void AliasSetTracker::dump() const { print(dbgs()); }
#endif
//===----------------------------------------------------------------------===//
// ASTCallbackVH Class Implementation
//===----------------------------------------------------------------------===//
void AliasSetTracker::ASTCallbackVH::deleted() {
  assert(AST && "ASTCallbackVH called with a null AliasSetTracker!");
  AST->deleteValue(getValPtr());
  // this now dangles!
}
void AliasSetTracker::ASTCallbackVH::allUsesReplacedWith(Value *V) {
  AST->copyValue(getValPtr(), V);
}
AliasSetTracker::ASTCallbackVH::ASTCallbackVH(Value *V, AliasSetTracker *ast)
  : CallbackVH(V), AST(ast) {}
AliasSetTracker::ASTCallbackVH &
AliasSetTracker::ASTCallbackVH::operator=(Value *V) {
  return *this = ASTCallbackVH(V, AST);
}
//===----------------------------------------------------------------------===//
// AliasSetPrinter Pass
//===----------------------------------------------------------------------===//
namespace {
  class AliasSetPrinter : public FunctionPass {
    AliasSetTracker *Tracker;

  public:
    static char ID; // Pass identification, replacement for typeid
    AliasSetPrinter() : FunctionPass(ID) {
      initializeAliasSetPrinterPass(*PassRegistry::getPassRegistry());
    }

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.setPreservesAll();
      AU.addRequired<AAResultsWrapperPass>();
    }

    bool runOnFunction(Function &F) override {
      auto &AAWP = getAnalysis<AAResultsWrapperPass>();
      Tracker = new AliasSetTracker(AAWP.getAAResults());

      for (inst_iterator I = inst_begin(F), E = inst_end(F); I != E; ++I)
        Tracker->add(&*I);
      Tracker->print(errs());
      delete Tracker;
      return false;
    }
  };
} // end anonymous namespace
char AliasSetPrinter::ID = 0;
INITIALIZE_PASS_BEGIN(AliasSetPrinter, "print-alias-sets",
                      "Alias Set Printer", false, true)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_END(AliasSetPrinter, "print-alias-sets",
"Alias Set Printer", false, true)