
[PM/AA] Split the location computation out of getArgLocation so the

virtual interface on AliasAnalysis only deals with ModRef information.

This interface was both computing memory locations by using TLI and
other tricks to estimate the size of memory referenced by an operand,
and computing ModRef information through similar investigations. This
change narrows the scope of the virtual interface on AliasAnalysis
slightly.
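
To make the new shape of the interface concrete, here is a minimal
sketch of how the two narrowed queries compose. The helper name
queryArgument and its parameters are illustrative only; getForArgument
and getArgModRefInfo are the interfaces this patch introduces, and the
same pattern appears in AliasAnalysis::getModRefInfo in the diff below.

#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/CallSite.h"
using namespace llvm;

// Illustrative helper, not part of this patch.
static AliasAnalysis::ModRefResult
queryArgument(AliasAnalysis &AA, ImmutableCallSite CS, unsigned ArgIdx,
              const AliasAnalysis::Location &Loc,
              const TargetLibraryInfo &TLI) {
  // Step 1: compute the argument's memory location with the static
  // helper; no virtual dispatch into the AA chain is involved.
  AliasAnalysis::Location ArgLoc =
      MemoryLocation::getForArgument(CS, ArgIdx, TLI);
  if (AA.isNoAlias(ArgLoc, Loc))
    return AliasAnalysis::NoModRef;
  // Step 2: only the ModRef question goes through the virtual interface.
  return AA.getArgModRefInfo(CS, ArgIdx);
}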

Note that all of this code could live in BasicAA, and be done with
a single investigation of the argument, if it weren't for the fact that
the generic code in AliasAnalysis::getModRefInfo for a callsite calls
into the virtual aspect of (now) getArgModRefInfo. But this patch's
arrangement seems like a not-terrible way to go for now.

The other interesting wrinkle is how we could reasonably extend LLVM
with support for custom memory location sizes and mod/ref behavior for
library routines. After discussions with Hal on the review, the
conclusion is that this would be best done by fleshing out the
much-desired support for extensions to TLI, and supporting these types
of queries in that interface, where we would likely be doing other
library API recognition and analysis.
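
As a hedged sketch of that direction (purely hypothetical; no such TLI
interface exists in this commit), a per-argument library query might
take roughly this shape:

// Hypothetical TLI extension point -- none of these names exist in
// LLVM; this only illustrates the kind of interface discussed above.
struct LibCallArgBehavior {
  uint64_t Size;                  // access size, or MemoryLocation::UnknownSize
  AliasAnalysis::ModRefResult MR; // e.g. Ref for a source pointer argument
};
// Optional<LibCallArgBehavior>
// TargetLibraryInfo::getArgBehavior(const Function &F, unsigned ArgIdx);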

Differential Revision: http://reviews.llvm.org/D10259

llvm-svn: 239884
Chandler Carruth 2015-06-17 07:12:40 +00:00
parent 53746a75e4
commit 6ac32c5879
6 changed files with 133 additions and 98 deletions


@@ -228,13 +228,12 @@ public:
UnknownModRefBehavior = Anywhere | ModRef
};
/// Get the location associated with a pointer argument of a callsite.
/// The mask bits are set to indicate the allowed aliasing ModRef kinds.
/// Note that these mask bits do not necessarily account for the overall
/// behavior of the function, but rather only provide additional
/// per-argument information.
virtual Location getArgLocation(ImmutableCallSite CS, unsigned ArgIdx,
ModRefResult &Mask);
/// Get the ModRef info associated with a pointer argument of a callsite. The
/// result's bits are set to indicate the allowed aliasing ModRef kinds. Note
/// that these bits do not necessarily account for the overall behavior of
/// the function, but rather only provide additional per-argument
/// information.
virtual ModRefResult getArgModRefInfo(ImmutableCallSite CS, unsigned ArgIdx);
/// getModRefBehavior - Return the behavior when calling the given call site.
virtual ModRefBehavior getModRefBehavior(ImmutableCallSite CS);


@@ -26,6 +26,7 @@ class LoadInst;
class StoreInst;
class MemTransferInst;
class MemIntrinsic;
class TargetLibraryInfo;
/// Representation for a specific memory location.
///
@@ -87,6 +88,10 @@ public:
/// transfer.
static MemoryLocation getForDest(const MemIntrinsic *MI);
/// Return a location representing a particular argument of a call.
static MemoryLocation getForArgument(ImmutableCallSite CS, unsigned ArgIdx,
const TargetLibraryInfo &TLI);
explicit MemoryLocation(const Value *Ptr = nullptr,
uint64_t Size = UnknownSize,
const AAMDNodes &AATags = AAMDNodes())


@@ -60,11 +60,10 @@ bool AliasAnalysis::pointsToConstantMemory(const Location &Loc,
return AA->pointsToConstantMemory(Loc, OrLocal);
}
AliasAnalysis::Location
AliasAnalysis::getArgLocation(ImmutableCallSite CS, unsigned ArgIdx,
AliasAnalysis::ModRefResult &Mask) {
AliasAnalysis::ModRefResult
AliasAnalysis::getArgModRefInfo(ImmutableCallSite CS, unsigned ArgIdx) {
assert(AA && "AA didn't call InitializeAliasAnalysis in its run method!");
return AA->getArgLocation(CS, ArgIdx, Mask);
return AA->getArgModRefInfo(CS, ArgIdx);
}
void AliasAnalysis::deleteValue(Value *V) {
@@ -122,11 +121,10 @@ AliasAnalysis::getModRefInfo(ImmutableCallSite CS,
const Value *Arg = *AI;
if (!Arg->getType()->isPointerTy())
continue;
ModRefResult ArgMask;
Location CSLoc =
getArgLocation(CS, (unsigned) std::distance(CS.arg_begin(), AI),
ArgMask);
if (!isNoAlias(CSLoc, Loc)) {
unsigned ArgIdx = std::distance(CS.arg_begin(), AI);
Location ArgLoc = MemoryLocation::getForArgument(CS, ArgIdx, *TLI);
if (!isNoAlias(ArgLoc, Loc)) {
ModRefResult ArgMask = getArgModRefInfo(CS, ArgIdx);
doesAlias = true;
AllArgsMask = ModRefResult(AllArgsMask | ArgMask);
}
@@ -183,18 +181,18 @@ AliasAnalysis::getModRefInfo(ImmutableCallSite CS1, ImmutableCallSite CS2) {
const Value *Arg = *I;
if (!Arg->getType()->isPointerTy())
continue;
ModRefResult ArgMask;
Location CS2Loc =
getArgLocation(CS2, (unsigned) std::distance(CS2.arg_begin(), I),
ArgMask);
// ArgMask indicates what CS2 might do to CS2Loc, and the dependence of
unsigned CS2ArgIdx = std::distance(CS2.arg_begin(), I);
Location CS2ArgLoc = MemoryLocation::getForArgument(CS2, CS2ArgIdx, *TLI);
// ArgMask indicates what CS2 might do to CS2ArgLoc, and the dependence of
// CS1 on that location is the inverse.
ModRefResult ArgMask = getArgModRefInfo(CS2, CS2ArgIdx);
if (ArgMask == Mod)
ArgMask = ModRef;
else if (ArgMask == Ref)
ArgMask = Mod;
R = ModRefResult((R | (getModRefInfo(CS1, CS2Loc) & ArgMask)) & Mask);
R = ModRefResult((R | (getModRefInfo(CS1, CS2ArgLoc) & ArgMask)) & Mask);
if (R == Mask)
break;
}
@@ -212,13 +210,14 @@ AliasAnalysis::getModRefInfo(ImmutableCallSite CS1, ImmutableCallSite CS2) {
const Value *Arg = *I;
if (!Arg->getType()->isPointerTy())
continue;
ModRefResult ArgMask;
Location CS1Loc = getArgLocation(
CS1, (unsigned)std::distance(CS1.arg_begin(), I), ArgMask);
// ArgMask indicates what CS1 might do to CS1Loc; if CS1 might Mod
// CS1Loc, then we care about either a Mod or a Ref by CS2. If CS1
unsigned CS1ArgIdx = std::distance(CS1.arg_begin(), I);
Location CS1ArgLoc = MemoryLocation::getForArgument(CS1, CS1ArgIdx, *TLI);
// ArgMask indicates what CS1 might do to CS1ArgLoc; if CS1 might Mod
// CS1ArgLoc, then we care about either a Mod or a Ref by CS2. If CS1
// might Ref, then we care only about a Mod by CS2.
ModRefResult ArgR = getModRefInfo(CS2, CS1Loc);
ModRefResult ArgMask = getArgModRefInfo(CS1, CS1ArgIdx);
ModRefResult ArgR = getModRefInfo(CS2, CS1ArgLoc);
if (((ArgMask & Mod) != NoModRef && (ArgR & ModRef) != NoModRef) ||
((ArgMask & Ref) != NoModRef && (ArgR & Mod) != NoModRef))
R = ModRefResult((R | ArgMask) & Mask);


@@ -485,8 +485,8 @@ namespace {
bool pointsToConstantMemory(const Location &Loc, bool OrLocal) override;
/// Get the location associated with a pointer argument of a callsite.
Location getArgLocation(ImmutableCallSite CS, unsigned ArgIdx,
ModRefResult &Mask) override;
ModRefResult getArgModRefInfo(ImmutableCallSite CS,
unsigned ArgIdx) override;
/// getModRefBehavior - Return the behavior when calling the given
/// call site.
@@ -652,6 +652,8 @@ BasicAliasAnalysis::pointsToConstantMemory(const Location &Loc, bool OrLocal) {
return Worklist.empty();
}
// FIXME: This code is duplicated with MemoryLocation and should be hoisted to
// some common utility location.
static bool isMemsetPattern16(const Function *MS,
const TargetLibraryInfo &TLI) {
if (TLI.has(LibFunc::memset_pattern16) &&
@@ -715,84 +717,33 @@ BasicAliasAnalysis::getModRefBehavior(const Function *F) {
return ModRefBehavior(AliasAnalysis::getModRefBehavior(F) & Min);
}
AliasAnalysis::Location
BasicAliasAnalysis::getArgLocation(ImmutableCallSite CS, unsigned ArgIdx,
ModRefResult &Mask) {
Location Loc = AliasAnalysis::getArgLocation(CS, ArgIdx, Mask);
const TargetLibraryInfo &TLI =
getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();
const IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction());
if (II != nullptr)
AliasAnalysis::ModRefResult
BasicAliasAnalysis::getArgModRefInfo(ImmutableCallSite CS, unsigned ArgIdx) {
if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction()))
switch (II->getIntrinsicID()) {
default: break;
default:
break;
case Intrinsic::memset:
case Intrinsic::memcpy:
case Intrinsic::memmove: {
case Intrinsic::memmove:
assert((ArgIdx == 0 || ArgIdx == 1) &&
"Invalid argument index for memory intrinsic");
if (ConstantInt *LenCI = dyn_cast<ConstantInt>(II->getArgOperand(2)))
Loc.Size = LenCI->getZExtValue();
assert(Loc.Ptr == II->getArgOperand(ArgIdx) &&
"Memory intrinsic location pointer not argument?");
Mask = ArgIdx ? Ref : Mod;
break;
}
case Intrinsic::lifetime_start:
case Intrinsic::lifetime_end:
case Intrinsic::invariant_start: {
assert(ArgIdx == 1 && "Invalid argument index");
assert(Loc.Ptr == II->getArgOperand(ArgIdx) &&
"Intrinsic location pointer not argument?");
Loc.Size = cast<ConstantInt>(II->getArgOperand(0))->getZExtValue();
break;
}
case Intrinsic::invariant_end: {
assert(ArgIdx == 2 && "Invalid argument index");
assert(Loc.Ptr == II->getArgOperand(ArgIdx) &&
"Intrinsic location pointer not argument?");
Loc.Size = cast<ConstantInt>(II->getArgOperand(1))->getZExtValue();
break;
}
case Intrinsic::arm_neon_vld1: {
assert(ArgIdx == 0 && "Invalid argument index");
assert(Loc.Ptr == II->getArgOperand(ArgIdx) &&
"Intrinsic location pointer not argument?");
// LLVM's vld1 and vst1 intrinsics currently only support a single
// vector register.
if (DL)
Loc.Size = DL->getTypeStoreSize(II->getType());
break;
}
case Intrinsic::arm_neon_vst1: {
assert(ArgIdx == 0 && "Invalid argument index");
assert(Loc.Ptr == II->getArgOperand(ArgIdx) &&
"Intrinsic location pointer not argument?");
if (DL)
Loc.Size = DL->getTypeStoreSize(II->getArgOperand(1)->getType());
break;
}
return ArgIdx ? Ref : Mod;
}
// We can bound the aliasing properties of memset_pattern16 just as we can
// for memcpy/memset. This is particularly important because the
// LoopIdiomRecognizer likes to turn loops into calls to memset_pattern16
// whenever possible.
else if (CS.getCalledFunction() &&
isMemsetPattern16(CS.getCalledFunction(), TLI)) {
if (CS.getCalledFunction() &&
isMemsetPattern16(CS.getCalledFunction(), *TLI)) {
assert((ArgIdx == 0 || ArgIdx == 1) &&
"Invalid argument index for memset_pattern16");
if (ArgIdx == 1)
Loc.Size = 16;
else if (const ConstantInt *LenCI =
dyn_cast<ConstantInt>(CS.getArgument(2)))
Loc.Size = LenCI->getZExtValue();
assert(Loc.Ptr == CS.getArgument(ArgIdx) &&
"memset_pattern16 location pointer not argument?");
Mask = ArgIdx ? Ref : Mod;
return ArgIdx ? Ref : Mod;
}
// FIXME: Handle memset_pattern4 and memset_pattern8 also.
return Loc;
return AliasAnalysis::getArgModRefInfo(CS, ArgIdx);
}
static bool isAssumeIntrinsic(ImmutableCallSite CS) {


@@ -8,6 +8,7 @@
//===----------------------------------------------------------------------===//
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Instructions.h"
@@ -88,3 +89,86 @@ MemoryLocation MemoryLocation::getForDest(const MemIntrinsic *MTI) {
return MemoryLocation(MTI->getRawDest(), Size, AATags);
}
// FIXME: This code is duplicated with BasicAliasAnalysis and should be hoisted
// to some common utility location.
static bool isMemsetPattern16(const Function *MS,
const TargetLibraryInfo &TLI) {
if (TLI.has(LibFunc::memset_pattern16) &&
MS->getName() == "memset_pattern16") {
FunctionType *MemsetType = MS->getFunctionType();
if (!MemsetType->isVarArg() && MemsetType->getNumParams() == 3 &&
isa<PointerType>(MemsetType->getParamType(0)) &&
isa<PointerType>(MemsetType->getParamType(1)) &&
isa<IntegerType>(MemsetType->getParamType(2)))
return true;
}
return false;
}
MemoryLocation MemoryLocation::getForArgument(ImmutableCallSite CS,
unsigned ArgIdx,
const TargetLibraryInfo &TLI) {
AAMDNodes AATags;
CS->getAAMetadata(AATags);
const Value *Arg = CS.getArgument(ArgIdx);
// We may be able to produce an exact size for known intrinsics.
if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
const DataLayout &DL = II->getModule()->getDataLayout();
switch (II->getIntrinsicID()) {
default:
break;
case Intrinsic::memset:
case Intrinsic::memcpy:
case Intrinsic::memmove:
assert((ArgIdx == 0 || ArgIdx == 1) &&
"Invalid argument index for memory intrinsic");
if (ConstantInt *LenCI = dyn_cast<ConstantInt>(II->getArgOperand(2)))
return MemoryLocation(Arg, LenCI->getZExtValue(), AATags);
break;
case Intrinsic::lifetime_start:
case Intrinsic::lifetime_end:
case Intrinsic::invariant_start:
assert(ArgIdx == 1 && "Invalid argument index");
return MemoryLocation(
Arg, cast<ConstantInt>(II->getArgOperand(0))->getZExtValue(), AATags);
case Intrinsic::invariant_end:
assert(ArgIdx == 2 && "Invalid argument index");
return MemoryLocation(
Arg, cast<ConstantInt>(II->getArgOperand(1))->getZExtValue(), AATags);
case Intrinsic::arm_neon_vld1:
assert(ArgIdx == 0 && "Invalid argument index");
// LLVM's vld1 and vst1 intrinsics currently only support a single
// vector register.
return MemoryLocation(Arg, DL.getTypeStoreSize(II->getType()), AATags);
case Intrinsic::arm_neon_vst1:
assert(ArgIdx == 0 && "Invalid argument index");
return MemoryLocation(
Arg, DL.getTypeStoreSize(II->getArgOperand(1)->getType()), AATags);
}
}
// We can bound the aliasing properties of memset_pattern16 just as we can
// for memcpy/memset. This is particularly important because the
// LoopIdiomRecognizer likes to turn loops into calls to memset_pattern16
// whenever possible.
if (CS.getCalledFunction() &&
isMemsetPattern16(CS.getCalledFunction(), TLI)) {
assert((ArgIdx == 0 || ArgIdx == 1) &&
"Invalid argument index for memset_pattern16");
if (ArgIdx == 1)
return MemoryLocation(Arg, 16, AATags);
if (const ConstantInt *LenCI = dyn_cast<ConstantInt>(CS.getArgument(2)))
return MemoryLocation(Arg, LenCI->getZExtValue(), AATags);
}
// FIXME: Handle memset_pattern4 and memset_pattern8 also.
return MemoryLocation(CS.getArgument(ArgIdx), UnknownSize, AATags);
}
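
For illustration, a usage sketch of the new helper at a memcpy call
site, assuming CS wraps a call to @llvm.memcpy with a constant length
of 16 and TLI is the caller's TargetLibraryInfo:

// memcpy(dst, src, 16): argument 0 is the destination, argument 1 the source.
MemoryLocation DstLoc = MemoryLocation::getForArgument(CS, /*ArgIdx=*/0, TLI);
MemoryLocation SrcLoc = MemoryLocation::getForArgument(CS, /*ArgIdx=*/1, TLI);
// Both carry Size == 16 because the length operand is a ConstantInt; with
// a non-constant length the Size would be MemoryLocation::UnknownSize.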


@@ -55,12 +55,9 @@ namespace {
bool pointsToConstantMemory(const Location &Loc, bool OrLocal) override {
return false;
}
Location getArgLocation(ImmutableCallSite CS, unsigned ArgIdx,
ModRefResult &Mask) override {
Mask = ModRef;
AAMDNodes AATags;
CS->getAAMetadata(AATags);
return Location(CS.getArgument(ArgIdx), UnknownSize, AATags);
ModRefResult getArgModRefInfo(ImmutableCallSite CS,
unsigned ArgIdx) override {
return ModRef;
}
ModRefResult getModRefInfo(ImmutableCallSite CS,