mirror of https://github.com/RPCS3/llvm-mirror.git
synced 2024-11-22 02:33:06 +01:00

Revert "Allow invokable sub-classes of IntrinsicInst"

This reverts commit d87b9b81ccb95217181ce75515c6c68bbb408ca4. Post-commit review raised concerns; reverting while the discussion happens.

This commit is contained in:
parent 2608e4a8c5
commit 5209f18c1c
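
Before the diff, a note on what is actually being undone: the reverted commit changed the base class of IntrinsicInst so that intrinsics could also be invoked. A minimal sketch of the two states (simplified, not the full declarations from llvm/include/llvm/IR/IntrinsicInst.h):

  // Pre-revert (d87b9b81): an intrinsic call site may be a call or an invoke,
  // so the wrapper sits on the common call-site base class.
  class IntrinsicInst : public CallBase { /* ... */ };

  // Post-revert (this commit): intrinsic call sites are plain calls again.
  class IntrinsicInst : public CallInst { /* ... */ };

Every hunk below follows from that one change: explicit casts to CallInst become redundant again, and InstVisitor goes back to a CallInst-level delegation helper.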
@@ -154,8 +154,8 @@ public:
   // instruction to a special delegation helper.
 #define HANDLE_INST(NUM, OPCODE, CLASS) \
     RetTy visit##OPCODE(CLASS &I) { \
-      if (NUM == Instruction::Call || NUM == Instruction::Invoke) \
-        return delegateCallBase(I); \
+      if (NUM == Instruction::Call) \
+        return delegateCallInst(I); \
       else \
         DELEGATE(CLASS); \
     }
@@ -215,12 +215,7 @@ public:
   RetTy visitVAStartInst(VAStartInst &I) { DELEGATE(IntrinsicInst); }
   RetTy visitVAEndInst(VAEndInst &I) { DELEGATE(IntrinsicInst); }
   RetTy visitVACopyInst(VACopyInst &I) { DELEGATE(IntrinsicInst); }
-  RetTy visitIntrinsicInst(IntrinsicInst &I) {
-    if (isa<CallInst>(I))
-      return static_cast<SubClass*>(this)->visitCallInst(cast<CallInst>(I));
-    else
-      return static_cast<SubClass*>(this)->visitInvokeInst(cast<InvokeInst>(I));
-  }
+  RetTy visitIntrinsicInst(IntrinsicInst &I) { DELEGATE(CallInst); }
   RetTy visitCallInst(CallInst &I) { DELEGATE(CallBase); }
   RetTy visitInvokeInst(InvokeInst &I) { DELEGATE(CallBase); }
   RetTy visitCallBrInst(CallBrInst &I) { DELEGATE(CallBase); }
@@ -285,7 +280,7 @@ public:
 
 private:
   // Special helper function to delegate to CallInst subclass visitors.
-  RetTy delegateCallBase(CallBase &I) {
+  RetTy delegateCallInst(CallInst &I) {
     if (const Function *F = I.getCalledFunction()) {
       switch (F->getIntrinsicID()) {
       default: DELEGATE(IntrinsicInst);
@@ -301,16 +296,13 @@ private:
       case Intrinsic::not_intrinsic: break;
       }
     }
-    if (isa<CallInst>(I))
-      return static_cast<SubClass*>(this)->visitCallInst(cast<CallInst>(I));
-    else
-      return static_cast<SubClass*>(this)->visitInvokeInst(cast<InvokeInst>(I));
+    DELEGATE(CallInst);
   }
 
   // An overload that will never actually be called, it is used only from dead
   // code in the dispatching from opcodes to instruction subclasses.
-  RetTy delegateCallBase(Instruction &I) {
-    llvm_unreachable("delegateCallBase called for non-CallBase");
+  RetTy delegateCallInst(Instruction &I) {
+    llvm_unreachable("delegateCallInst called for non-CallInst");
   }
 };
 
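This hunk appears to be llvm/include/llvm/IR/InstVisitor.h. The comments describe the dispatch trick: HANDLE_INST expands one visit##OPCODE method per opcode, call instructions detour through delegateCallInst so they can reach the most specific intrinsic visitor, and the Instruction& overload exists only to satisfy the dead branch of the macro expansion. A hedged sketch of what this buys a visitor author (MemCpyCounter is illustrative, not part of the patch):

  #include "llvm/IR/InstVisitor.h"
  #include "llvm/IR/IntrinsicInst.h"
  using namespace llvm;

  // Counts llvm.memcpy calls. Visiting a call to llvm.memcpy routes
  // visitCall -> delegateCallInst -> visitMemCpyInst, so overriding the
  // most specific hook is enough; everything else falls through to the
  // default visitInstruction.
  struct MemCpyCounter : public InstVisitor<MemCpyCounter> {
    unsigned Count = 0;
    void visitMemCpyInst(MemCpyInst &MCI) { ++Count; }
  };

Usage would be along the lines of MemCpyCounter C; C.visit(F); for a Function F.
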
@@ -13,10 +13,10 @@
 //     if (MemCpyInst *MCI = dyn_cast<MemCpyInst>(Inst))
 //       ... MCI->getDest() ... MCI->getSource() ...
 //
-// All intrinsic function calls are instances of the call or invoke instruction,
-// so these are all subclasses of the CallBase class. Note that none of these
-// classes has state or virtual methods, which is an important part of this
-// gross/neat hack working.
+// All intrinsic function calls are instances of the call instruction, so these
+// are all subclasses of the CallInst class. Note that none of these classes
+// has state or virtual methods, which is an important part of this gross/neat
+// hack working.
 //
 //===----------------------------------------------------------------------===//
 
@@ -42,7 +42,7 @@ namespace llvm {
 /// A wrapper class for inspecting calls to intrinsic functions.
 /// This allows the standard isa/dyncast/cast functionality to work with calls
 /// to intrinsic functions.
-class IntrinsicInst : public CallBase {
+class IntrinsicInst : public CallInst {
 public:
   IntrinsicInst() = delete;
   IntrinsicInst(const IntrinsicInst &) = delete;
@@ -107,13 +107,13 @@ public:
   }
 
   // Methods for support type inquiry through isa, cast, and dyn_cast:
-  static bool classof(const CallBase *I) {
+  static bool classof(const CallInst *I) {
     if (const Function *CF = I->getCalledFunction())
       return CF->isIntrinsic();
     return false;
   }
   static bool classof(const Value *V) {
-    return isa<CallBase>(V) && classof(cast<CallBase>(V));
+    return isa<CallInst>(V) && classof(cast<CallInst>(V));
   }
 };
 
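These hunks appear to be llvm/include/llvm/IR/IntrinsicInst.h. The classof hooks restored above are what make the "gross/neat hack" work: isa/dyn_cast treat any CallInst whose callee is an intrinsic function as an IntrinsicInst, even though no object of that type is ever constructed. A small usage sketch (countIntrinsics is a made-up helper, not from the patch):

  #include "llvm/IR/Function.h"
  #include "llvm/IR/IntrinsicInst.h"
  using namespace llvm;

  // isa/dyn_cast succeed exactly when classof does, i.e. when the called
  // function isIntrinsic(); no IntrinsicInst object exists anywhere.
  static unsigned countIntrinsics(const Function &F) {
    unsigned N = 0;
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB)
        if (isa<IntrinsicInst>(&I))
          ++N;
    return N;
  }
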
@@ -83,7 +83,7 @@ void llvm::findDevirtualizableCallsForTypeTest(
   // Find llvm.assume intrinsics for this llvm.type.test call.
   for (const Use &CIU : CI->uses())
     if (auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
-      Assumes.push_back(cast<CallInst>(Assume));
+      Assumes.push_back(Assume);
 
   // If we found any, search for virtual calls based on %p and add them to
   // DevirtCalls.
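The dropped cast here (likely llvm/lib/Analysis/TypeMetadataUtils.cpp) is a direct consequence of the hierarchy change: with the revert applied, AssumeInst derives from IntrinsicInst, which again derives from CallInst, so an AssumeInst* converts to CallInst* implicitly. A reduced sketch of the shape involved (the collect helper is illustrative; in the real function Assumes is a SmallVectorImpl<CallInst *> parameter):

  #include "llvm/ADT/SmallVector.h"
  #include "llvm/IR/IntrinsicInst.h"
  using namespace llvm;

  // AssumeInst -> IntrinsicInst -> CallInst, so push_back is an implicit
  // upcast and needs no cast<CallInst>.
  static void collect(SmallVectorImpl<CallInst *> &Assumes, AssumeInst *A) {
    Assumes.push_back(A);
  }
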
@@ -1338,15 +1338,15 @@ bool FastISel::selectIntrinsicCall(const IntrinsicInst *II) {
     return true;
   }
   case Intrinsic::experimental_stackmap:
-    return selectStackmap(cast<CallInst>(II));
+    return selectStackmap(II);
   case Intrinsic::experimental_patchpoint_void:
   case Intrinsic::experimental_patchpoint_i64:
-    return selectPatchpoint(cast<CallInst>(II));
+    return selectPatchpoint(II);
 
   case Intrinsic::xray_customevent:
-    return selectXRayCustomEvent(cast<CallInst>(II));
+    return selectXRayCustomEvent(II);
   case Intrinsic::xray_typedevent:
-    return selectXRayTypedEvent(cast<CallInst>(II));
+    return selectXRayTypedEvent(II);
   }
 
   return fastLowerIntrinsicCall(II);
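The same mechanical change repeats in this FastISel hunk and in the ShadowStackGC, StackProtector, Interpreter, AArch64, Mips, and X86 hunks that follow: the select*/lowerCallTo helpers take a const CallInst *, and once IntrinsicInst derives from CallInst again the explicit cast<CallInst>(II) is just a derived-to-base conversion the compiler performs itself. A sketch under that assumption (selectStackmapLike and its body are stand-ins, not LLVM API):

  #include "llvm/IR/IntrinsicInst.h"
  using namespace llvm;

  static bool selectStackmapLike(const CallInst *I) { return I != nullptr; }

  static bool dispatch(const IntrinsicInst *II) {
    // Post-revert, IntrinsicInst is-a CallInst: no cast<CallInst>(II) needed.
    return selectStackmapLike(II);
  }
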
@@ -244,7 +244,7 @@ void ShadowStackGCLowering::CollectRoots(Function &F) {
       if (Function *F = CI->getCalledFunction())
         if (F->getIntrinsicID() == Intrinsic::gcroot) {
           std::pair<CallInst *, AllocaInst *> Pair = std::make_pair(
-              cast<CallInst>(CI),
+              CI,
               cast<AllocaInst>(CI->getArgOperand(0)->stripPointerCasts()));
           if (IsNullValue(CI->getArgOperand(1)))
             Roots.push_back(Pair);
@@ -255,7 +255,7 @@ static const CallInst *findStackProtectorIntrinsic(Function &F) {
     for (const Instruction &I : BB)
       if (const auto *II = dyn_cast<IntrinsicInst>(&I))
         if (II->getIntrinsicID() == Intrinsic::stackprotector)
-          return cast<CallInst>(II);
+          return II;
   return nullptr;
 }
 
@@ -1143,7 +1143,7 @@ void Interpreter::visitIntrinsicInst(IntrinsicInst &I) {
   bool atBegin(Parent->begin() == Me);
   if (!atBegin)
     --Me;
-  IL->LowerIntrinsicCall(cast<CallInst>(&I));
+  IL->LowerIntrinsicCall(&I);
 
   // Restore the CurInst pointer to the first instruction newly inserted, if
   // any.
@@ -3482,8 +3482,7 @@ bool AArch64FastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
       return false;
 
     const char *IntrMemName = isa<MemCpyInst>(II) ? "memcpy" : "memmove";
-    return lowerCallTo(cast<CallInst>(II), IntrMemName,
-                       II->getNumArgOperands() - 1);
+    return lowerCallTo(II, IntrMemName, II->getNumArgOperands() - 1);
   }
   case Intrinsic::memset: {
     const MemSetInst *MSI = cast<MemSetInst>(II);
@@ -3499,8 +3498,7 @@ bool AArch64FastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
     // address spaces.
     return false;
 
-    return lowerCallTo(cast<CallInst>(II), "memset",
-                       II->getNumArgOperands() - 1);
+    return lowerCallTo(II, "memset", II->getNumArgOperands() - 1);
   }
   case Intrinsic::sin:
   case Intrinsic::cos:
@@ -1660,7 +1660,7 @@ bool MipsFastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
     if (!MTI->getLength()->getType()->isIntegerTy(32))
       return false;
     const char *IntrMemName = isa<MemCpyInst>(II) ? "memcpy" : "memmove";
-    return lowerCallTo(cast<CallInst>(II), IntrMemName, II->getNumArgOperands() - 1);
+    return lowerCallTo(II, IntrMemName, II->getNumArgOperands() - 1);
   }
   case Intrinsic::memset: {
     const MemSetInst *MSI = cast<MemSetInst>(II);
@@ -1669,7 +1669,7 @@ bool MipsFastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
       return false;
     if (!MSI->getLength()->getType()->isIntegerTy(32))
       return false;
-    return lowerCallTo(cast<CallInst>(II), "memset", II->getNumArgOperands() - 1);
+    return lowerCallTo(II, "memset", II->getNumArgOperands() - 1);
   }
   }
   return false;
@@ -2738,7 +2738,7 @@ bool X86FastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
     if (MCI->getSourceAddressSpace() > 255 || MCI->getDestAddressSpace() > 255)
       return false;
 
-    return lowerCallTo(cast<CallInst>(II), "memcpy", II->getNumArgOperands() - 1);
+    return lowerCallTo(II, "memcpy", II->getNumArgOperands() - 1);
   }
   case Intrinsic::memset: {
     const MemSetInst *MSI = cast<MemSetInst>(II);
@@ -2753,7 +2753,7 @@ bool X86FastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
     if (MSI->getDestAddressSpace() > 255)
       return false;
 
-    return lowerCallTo(cast<CallInst>(II), "memset", II->getNumArgOperands() - 1);
+    return lowerCallTo(II, "memset", II->getNumArgOperands() - 1);
  }
   case Intrinsic::stackprotector: {
     // Emit code to store the stack guard onto the stack.
@@ -140,7 +140,6 @@ public:
   Instruction *visitAddrSpaceCast(AddrSpaceCastInst &CI);
   Instruction *foldItoFPtoI(CastInst &FI);
   Instruction *visitSelectInst(SelectInst &SI);
-  Instruction *visitCallBase(CallBase &Call);
   Instruction *visitCallInst(CallInst &CI);
   Instruction *visitInvokeInst(InvokeInst &II);
   Instruction *visitCallBrInst(CallBrInst &CBI);
@@ -223,6 +222,7 @@ private:
                              Instruction &CtxI, Value *&OperationResult,
                              Constant *&OverflowResult);
 
+  Instruction *visitCallBase(CallBase &Call);
   Instruction *tryOptimizeCall(CallInst *CI);
   bool transformConstExprCastCall(CallBase &Call);
   Instruction *transformCallThroughTrampoline(CallBase &Call,
@@ -4142,7 +4142,7 @@ struct VarArgAMD64Helper : public VarArgHelper {
   Value *VAArgTLSOriginCopy = nullptr;
   Value *VAArgOverflowSize = nullptr;
 
-  SmallVector<IntrinsicInst*, 16> VAStartInstrumentationList;
+  SmallVector<CallInst*, 16> VAStartInstrumentationList;
 
   enum ArgKind { AK_GeneralPurpose, AK_FloatingPoint, AK_Memory };
 
@@ -4351,7 +4351,7 @@ struct VarArgAMD64Helper : public VarArgHelper {
     // Instrument va_start.
     // Copy va_list shadow from the backup copy of the TLS contents.
     for (size_t i = 0, n = VAStartInstrumentationList.size(); i < n; i++) {
-      auto *OrigInst = VAStartInstrumentationList[i];
+      CallInst *OrigInst = VAStartInstrumentationList[i];
       IRBuilder<> IRB(OrigInst->getNextNode());
       Value *VAListTag = OrigInst->getArgOperand(0);
 
@@ -4405,7 +4405,7 @@ struct VarArgMIPS64Helper : public VarArgHelper {
   Value *VAArgTLSCopy = nullptr;
   Value *VAArgSize = nullptr;
 
-  SmallVector<IntrinsicInst*, 16> VAStartInstrumentationList;
+  SmallVector<CallInst*, 16> VAStartInstrumentationList;
 
   VarArgMIPS64Helper(Function &F, MemorySanitizer &MS,
                      MemorySanitizerVisitor &MSV) : F(F), MS(MS), MSV(MSV) {}
@@ -4494,7 +4494,7 @@ struct VarArgMIPS64Helper : public VarArgHelper {
     // Instrument va_start.
     // Copy va_list shadow from the backup copy of the TLS contents.
     for (size_t i = 0, n = VAStartInstrumentationList.size(); i < n; i++) {
-      auto *OrigInst = VAStartInstrumentationList[i];
+      CallInst *OrigInst = VAStartInstrumentationList[i];
       IRBuilder<> IRB(OrigInst->getNextNode());
       Value *VAListTag = OrigInst->getArgOperand(0);
       Type *RegSaveAreaPtrTy = Type::getInt64PtrTy(*MS.C);
@@ -4533,7 +4533,7 @@ struct VarArgAArch64Helper : public VarArgHelper {
   Value *VAArgTLSCopy = nullptr;
   Value *VAArgOverflowSize = nullptr;
 
-  SmallVector<IntrinsicInst*, 16> VAStartInstrumentationList;
+  SmallVector<CallInst*, 16> VAStartInstrumentationList;
 
   enum ArgKind { AK_GeneralPurpose, AK_FloatingPoint, AK_Memory };
 
@@ -4688,7 +4688,7 @@ struct VarArgAArch64Helper : public VarArgHelper {
     // Instrument va_start, copy va_list shadow from the backup copy of
    // the TLS contents.
     for (size_t i = 0, n = VAStartInstrumentationList.size(); i < n; i++) {
-      auto *OrigInst = VAStartInstrumentationList[i];
+      CallInst *OrigInst = VAStartInstrumentationList[i];
       IRBuilder<> IRB(OrigInst->getNextNode());
 
       Value *VAListTag = OrigInst->getArgOperand(0);
@@ -4783,7 +4783,7 @@ struct VarArgPowerPC64Helper : public VarArgHelper {
   Value *VAArgTLSCopy = nullptr;
   Value *VAArgSize = nullptr;
 
-  SmallVector<IntrinsicInst*, 16> VAStartInstrumentationList;
+  SmallVector<CallInst*, 16> VAStartInstrumentationList;
 
   VarArgPowerPC64Helper(Function &F, MemorySanitizer &MS,
                         MemorySanitizerVisitor &MSV) : F(F), MS(MS), MSV(MSV) {}
@@ -4932,7 +4932,7 @@ struct VarArgPowerPC64Helper : public VarArgHelper {
     // Instrument va_start.
     // Copy va_list shadow from the backup copy of the TLS contents.
     for (size_t i = 0, n = VAStartInstrumentationList.size(); i < n; i++) {
-      auto *OrigInst = VAStartInstrumentationList[i];
+      CallInst *OrigInst = VAStartInstrumentationList[i];
       IRBuilder<> IRB(OrigInst->getNextNode());
       Value *VAListTag = OrigInst->getArgOperand(0);
       Type *RegSaveAreaPtrTy = Type::getInt64PtrTy(*MS.C);
@@ -4972,7 +4972,7 @@ struct VarArgSystemZHelper : public VarArgHelper {
   Value *VAArgTLSOriginCopy = nullptr;
   Value *VAArgOverflowSize = nullptr;
 
-  SmallVector<IntrinsicInst *, 16> VAStartInstrumentationList;
+  SmallVector<CallInst *, 16> VAStartInstrumentationList;
 
   enum class ArgKind {
     GeneralPurpose,
@@ -5255,7 +5255,7 @@ struct VarArgSystemZHelper : public VarArgHelper {
     // Copy va_list shadow from the backup copy of the TLS contents.
     for (size_t VaStartNo = 0, VaStartNum = VAStartInstrumentationList.size();
          VaStartNo < VaStartNum; VaStartNo++) {
-      auto *OrigInst = VAStartInstrumentationList[VaStartNo];
+      CallInst *OrigInst = VAStartInstrumentationList[VaStartNo];
       IRBuilder<> IRB(OrigInst->getNextNode());
       Value *VAListTag = OrigInst->getArgOperand(0);
       copyRegSaveArea(IRB, VAListTag);
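
All the MemorySanitizer hunks are one change: each per-ABI VarArg helper records the va_start calls it must later instrument, and the element type of that list tracks the hierarchy. Pre-revert the list was widened to IntrinsicInst* (under the reverted scheme an intrinsic call site was not necessarily a CallInst); the revert narrows it back to CallInst*. A reduced sketch of the collection side (the struct and method shape are assumed from the surrounding diff, not copied from the file):

  #include "llvm/ADT/SmallVector.h"
  #include "llvm/IR/IntrinsicInst.h"
  using namespace llvm;

  struct VarArgHelperSketch {
    SmallVector<CallInst *, 16> VAStartInstrumentationList;

    void visitVAStartInst(VAStartInst &I) {
      // VAStartInst -> IntrinsicInst -> CallInst: implicit upcast on insert.
      VAStartInstrumentationList.push_back(&I);
    }
  };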