From 5209f18c1c349de7a79130bf9c08a8e83608d7fd Mon Sep 17 00:00:00 2001
From: Philip Reames
Date: Tue, 20 Apr 2021 15:37:49 -0700
Subject: [PATCH] Revert "Allow invokable sub-classes of IntrinsicInst"

This reverts commit d87b9b81ccb95217181ce75515c6c68bbb408ca4.

Post-commit review raised concerns, reverting while discussion happens.
---
 include/llvm/IR/InstVisitor.h                 | 22 ++++++-------------
 include/llvm/IR/IntrinsicInst.h               | 14 ++++++------
 lib/Analysis/TypeMetadataUtils.cpp            |  2 +-
 lib/CodeGen/SelectionDAG/FastISel.cpp         |  8 +++----
 lib/CodeGen/ShadowStackGCLowering.cpp         |  2 +-
 lib/CodeGen/StackProtector.cpp                |  2 +-
 lib/ExecutionEngine/Interpreter/Execution.cpp |  2 +-
 lib/Target/AArch64/AArch64FastISel.cpp        |  6 ++---
 lib/Target/Mips/MipsFastISel.cpp              |  4 ++--
 lib/Target/X86/X86FastISel.cpp                |  4 ++--
 .../InstCombine/InstCombineInternal.h         |  2 +-
 .../Instrumentation/MemorySanitizer.cpp       | 20 ++++++++---------
 12 files changed, 39 insertions(+), 49 deletions(-)

diff --git a/include/llvm/IR/InstVisitor.h b/include/llvm/IR/InstVisitor.h
index d42eceea135..4dbdc66d136 100644
--- a/include/llvm/IR/InstVisitor.h
+++ b/include/llvm/IR/InstVisitor.h
@@ -154,8 +154,8 @@ public:
   // instruction to a special delegation helper.
 #define HANDLE_INST(NUM, OPCODE, CLASS) \
     RetTy visit##OPCODE(CLASS &I) { \
-      if (NUM == Instruction::Call || NUM == Instruction::Invoke) \
-        return delegateCallBase(I); \
+      if (NUM == Instruction::Call) \
+        return delegateCallInst(I); \
       else \
         DELEGATE(CLASS); \
     }
@@ -215,12 +215,7 @@ public:
   RetTy visitVAStartInst(VAStartInst &I) { DELEGATE(IntrinsicInst); }
   RetTy visitVAEndInst(VAEndInst &I) { DELEGATE(IntrinsicInst); }
   RetTy visitVACopyInst(VACopyInst &I) { DELEGATE(IntrinsicInst); }
-  RetTy visitIntrinsicInst(IntrinsicInst &I) {
-    if (isa<CallInst>(I))
-      return static_cast<SubClass *>(this)->visitCallInst(cast<CallInst>(I));
-    else
-      return static_cast<SubClass *>(this)->visitInvokeInst(cast<InvokeInst>(I));
-  }
+  RetTy visitIntrinsicInst(IntrinsicInst &I) { DELEGATE(CallInst); }
   RetTy visitCallInst(CallInst &I) { DELEGATE(CallBase); }
   RetTy visitInvokeInst(InvokeInst &I) { DELEGATE(CallBase); }
   RetTy visitCallBrInst(CallBrInst &I) { DELEGATE(CallBase); }
@@ -285,7 +280,7 @@ public:
 
 private:
   // Special helper function to delegate to CallInst subclass visitors.
-  RetTy delegateCallBase(CallBase &I) {
+  RetTy delegateCallInst(CallInst &I) {
     if (const Function *F = I.getCalledFunction()) {
       switch (F->getIntrinsicID()) {
       default: DELEGATE(IntrinsicInst);
@@ -301,16 +296,13 @@ private:
       case Intrinsic::not_intrinsic: break;
       }
     }
-    if (isa<CallInst>(I))
-      return static_cast<SubClass *>(this)->visitCallInst(cast<CallInst>(I));
-    else
-      return static_cast<SubClass *>(this)->visitInvokeInst(cast<InvokeInst>(I));
+    DELEGATE(CallInst);
   }
 
   // An overload that will never actually be called, it is used only from dead
   // code in the dispatching from opcodes to instruction subclasses.
-  RetTy delegateCallBase(Instruction &I) {
-    llvm_unreachable("delegateCallBase called for non-CallBase");
+  RetTy delegateCallInst(Instruction &I) {
+    llvm_unreachable("delegateCallInst called for non-CallInst");
   }
 };

diff --git a/include/llvm/IR/IntrinsicInst.h b/include/llvm/IR/IntrinsicInst.h
index bcafda577e0..e217f973b4b 100644
--- a/include/llvm/IR/IntrinsicInst.h
+++ b/include/llvm/IR/IntrinsicInst.h
@@ -13,10 +13,10 @@
 //     if (MemCpyInst *MCI = dyn_cast<MemCpyInst>(Inst))
 //        ... MCI->getDest() ... MCI->getSource() ...
 //
-// All intrinsic function calls are instances of the call or invoke instruction,
-// so these are all subclasses of the CallBase class. Note that none of these
-// classes has state or virtual methods, which is an important part of this
-// gross/neat hack working.
+// All intrinsic function calls are instances of the call instruction, so these
+// are all subclasses of the CallInst class. Note that none of these classes
+// has state or virtual methods, which is an important part of this gross/neat
+// hack working.
 //
 //===----------------------------------------------------------------------===//

@@ -42,7 +42,7 @@ namespace llvm {
 /// A wrapper class for inspecting calls to intrinsic functions.
 /// This allows the standard isa/dyncast/cast functionality to work with calls
 /// to intrinsic functions.
-class IntrinsicInst : public CallBase {
+class IntrinsicInst : public CallInst {
 public:
   IntrinsicInst() = delete;
   IntrinsicInst(const IntrinsicInst &) = delete;
@@ -107,13 +107,13 @@ public:
   }
 
   // Methods for support type inquiry through isa, cast, and dyn_cast:
-  static bool classof(const CallBase *I) {
+  static bool classof(const CallInst *I) {
     if (const Function *CF = I->getCalledFunction())
       return CF->isIntrinsic();
     return false;
   }
   static bool classof(const Value *V) {
-    return isa<CallBase>(V) && classof(cast<CallBase>(V));
+    return isa<CallInst>(V) && classof(cast<CallInst>(V));
   }
 };

diff --git a/lib/Analysis/TypeMetadataUtils.cpp b/lib/Analysis/TypeMetadataUtils.cpp
index 1c31d16d65f..f015ba9a09c 100644
--- a/lib/Analysis/TypeMetadataUtils.cpp
+++ b/lib/Analysis/TypeMetadataUtils.cpp
@@ -83,7 +83,7 @@ void llvm::findDevirtualizableCallsForTypeTest(
   // Find llvm.assume intrinsics for this llvm.type.test call.
   for (const Use &CIU : CI->uses())
     if (auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
-      Assumes.push_back(cast<CallInst>(Assume));
+      Assumes.push_back(Assume);
 
   // If we found any, search for virtual calls based on %p and add them to
   // DevirtCalls.
diff --git a/lib/CodeGen/SelectionDAG/FastISel.cpp b/lib/CodeGen/SelectionDAG/FastISel.cpp
index b9547bb7223..faf3a848a69 100644
--- a/lib/CodeGen/SelectionDAG/FastISel.cpp
+++ b/lib/CodeGen/SelectionDAG/FastISel.cpp
@@ -1338,15 +1338,15 @@ bool FastISel::selectIntrinsicCall(const IntrinsicInst *II) {
     return true;
   }
   case Intrinsic::experimental_stackmap:
-    return selectStackmap(cast<CallInst>(II));
+    return selectStackmap(II);
   case Intrinsic::experimental_patchpoint_void:
   case Intrinsic::experimental_patchpoint_i64:
-    return selectPatchpoint(cast<CallInst>(II));
+    return selectPatchpoint(II);
   case Intrinsic::xray_customevent:
-    return selectXRayCustomEvent(cast<CallInst>(II));
+    return selectXRayCustomEvent(II);
   case Intrinsic::xray_typedevent:
-    return selectXRayTypedEvent(cast<CallInst>(II));
+    return selectXRayTypedEvent(II);
   }
 
   return fastLowerIntrinsicCall(II);
 }
diff --git a/lib/CodeGen/ShadowStackGCLowering.cpp b/lib/CodeGen/ShadowStackGCLowering.cpp
index ae59b0bea9e..36752ef8652 100644
--- a/lib/CodeGen/ShadowStackGCLowering.cpp
+++ b/lib/CodeGen/ShadowStackGCLowering.cpp
@@ -244,7 +244,7 @@ void ShadowStackGCLowering::CollectRoots(Function &F) {
         if (Function *F = CI->getCalledFunction())
           if (F->getIntrinsicID() == Intrinsic::gcroot) {
             std::pair<CallInst *, AllocaInst *> Pair = std::make_pair(
-                cast<CallInst>(CI),
+                CI,
                 cast<AllocaInst>(CI->getArgOperand(0)->stripPointerCasts()));
             if (IsNullValue(CI->getArgOperand(1)))
               Roots.push_back(Pair);
diff --git a/lib/CodeGen/StackProtector.cpp b/lib/CodeGen/StackProtector.cpp
index b0629b31899..ff6ff6dfe03 100644
--- a/lib/CodeGen/StackProtector.cpp
+++ b/lib/CodeGen/StackProtector.cpp
@@ -255,7 +255,7 @@ static const CallInst *findStackProtectorIntrinsic(Function &F) {
     for (const Instruction &I : BB)
       if (const auto *II = dyn_cast<IntrinsicInst>(&I))
         if (II->getIntrinsicID() == Intrinsic::stackprotector)
-          return cast<CallInst>(II);
+          return II;
   return nullptr;
 }
diff --git a/lib/ExecutionEngine/Interpreter/Execution.cpp b/lib/ExecutionEngine/Interpreter/Execution.cpp
index 0add020ce49..62e1ea6e0f0 100644
--- a/lib/ExecutionEngine/Interpreter/Execution.cpp
+++ b/lib/ExecutionEngine/Interpreter/Execution.cpp
@@ -1143,7 +1143,7 @@ void Interpreter::visitIntrinsicInst(IntrinsicInst &I) {
   bool atBegin(Parent->begin() == Me);
   if (!atBegin)
     --Me;
-  IL->LowerIntrinsicCall(cast<CallInst>(&I));
+  IL->LowerIntrinsicCall(&I);
 
   // Restore the CurInst pointer to the first instruction newly inserted, if
   // any.
diff --git a/lib/Target/AArch64/AArch64FastISel.cpp b/lib/Target/AArch64/AArch64FastISel.cpp
index 09fa49a9127..95b5699552b 100644
--- a/lib/Target/AArch64/AArch64FastISel.cpp
+++ b/lib/Target/AArch64/AArch64FastISel.cpp
@@ -3482,8 +3482,7 @@ bool AArch64FastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
       return false;
 
     const char *IntrMemName = isa<MemCpyInst>(II) ? "memcpy" : "memmove";
-    return lowerCallTo(cast<CallInst>(II), IntrMemName,
-                       II->getNumArgOperands() - 1);
+    return lowerCallTo(II, IntrMemName, II->getNumArgOperands() - 1);
   }
   case Intrinsic::memset: {
     const MemSetInst *MSI = cast<MemSetInst>(II);
@@ -3499,8 +3498,7 @@ bool AArch64FastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
     // address spaces.
       return false;
 
-    return lowerCallTo(cast<CallInst>(II), "memset",
-                       II->getNumArgOperands() - 1);
+    return lowerCallTo(II, "memset", II->getNumArgOperands() - 1);
   }
   case Intrinsic::sin:
   case Intrinsic::cos:
diff --git a/lib/Target/Mips/MipsFastISel.cpp b/lib/Target/Mips/MipsFastISel.cpp
index b7a8a06ccfc..e963185eaea 100644
--- a/lib/Target/Mips/MipsFastISel.cpp
+++ b/lib/Target/Mips/MipsFastISel.cpp
@@ -1660,7 +1660,7 @@ bool MipsFastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
     if (!MTI->getLength()->getType()->isIntegerTy(32))
       return false;
     const char *IntrMemName = isa<MemCpyInst>(II) ? "memcpy" : "memmove";
-    return lowerCallTo(cast<CallInst>(II), IntrMemName, II->getNumArgOperands() - 1);
+    return lowerCallTo(II, IntrMemName, II->getNumArgOperands() - 1);
   }
   case Intrinsic::memset: {
     const MemSetInst *MSI = cast<MemSetInst>(II);
@@ -1669,7 +1669,7 @@ bool MipsFastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
       return false;
     if (!MSI->getLength()->getType()->isIntegerTy(32))
       return false;
-    return lowerCallTo(cast<CallInst>(II), "memset", II->getNumArgOperands() - 1);
+    return lowerCallTo(II, "memset", II->getNumArgOperands() - 1);
   }
   }
   return false;
diff --git a/lib/Target/X86/X86FastISel.cpp b/lib/Target/X86/X86FastISel.cpp
index e41f6ee4b6e..bd08af81e67 100644
--- a/lib/Target/X86/X86FastISel.cpp
+++ b/lib/Target/X86/X86FastISel.cpp
@@ -2738,7 +2738,7 @@ bool X86FastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
     if (MCI->getSourceAddressSpace() > 255 || MCI->getDestAddressSpace() > 255)
       return false;
 
-    return lowerCallTo(cast<CallInst>(II), "memcpy", II->getNumArgOperands() - 1);
+    return lowerCallTo(II, "memcpy", II->getNumArgOperands() - 1);
   }
   case Intrinsic::memset: {
     const MemSetInst *MSI = cast<MemSetInst>(II);
@@ -2753,7 +2753,7 @@ bool X86FastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
     if (MSI->getDestAddressSpace() > 255)
       return false;
 
-    return lowerCallTo(cast<CallInst>(II), "memset", II->getNumArgOperands() - 1);
+    return lowerCallTo(II, "memset", II->getNumArgOperands() - 1);
   }
   case Intrinsic::stackprotector: {
     // Emit code to store the stack guard onto the stack.
diff --git a/lib/Transforms/InstCombine/InstCombineInternal.h b/lib/Transforms/InstCombine/InstCombineInternal.h
index b8ba0f7c59b..15152bb6f11 100644
--- a/lib/Transforms/InstCombine/InstCombineInternal.h
+++ b/lib/Transforms/InstCombine/InstCombineInternal.h
@@ -140,7 +140,6 @@ public:
   Instruction *visitAddrSpaceCast(AddrSpaceCastInst &CI);
   Instruction *foldItoFPtoI(CastInst &FI);
   Instruction *visitSelectInst(SelectInst &SI);
-  Instruction *visitCallBase(CallBase &Call);
   Instruction *visitCallInst(CallInst &CI);
   Instruction *visitInvokeInst(InvokeInst &II);
   Instruction *visitCallBrInst(CallBrInst &CBI);
@@ -223,6 +222,7 @@ private:
                                  Instruction &CtxI, Value *&OperationResult,
                                  Constant *&OverflowResult);
 
+  Instruction *visitCallBase(CallBase &Call);
   Instruction *tryOptimizeCall(CallInst *CI);
   bool transformConstExprCastCall(CallBase &Call);
   Instruction *transformCallThroughTrampoline(CallBase &Call,
diff --git a/lib/Transforms/Instrumentation/MemorySanitizer.cpp b/lib/Transforms/Instrumentation/MemorySanitizer.cpp
index caf6f29c02e..3e4fae586aa 100644
--- a/lib/Transforms/Instrumentation/MemorySanitizer.cpp
+++ b/lib/Transforms/Instrumentation/MemorySanitizer.cpp
@@ -4142,7 +4142,7 @@ struct VarArgAMD64Helper : public VarArgHelper {
   Value *VAArgTLSOriginCopy = nullptr;
   Value *VAArgOverflowSize = nullptr;
 
-  SmallVector<CallBase *, 16> VAStartInstrumentationList;
+  SmallVector<CallInst *, 16> VAStartInstrumentationList;
 
   enum ArgKind { AK_GeneralPurpose, AK_FloatingPoint, AK_Memory };
 
@@ -4351,7 +4351,7 @@ struct VarArgAMD64Helper : public VarArgHelper {
     // Instrument va_start.
     // Copy va_list shadow from the backup copy of the TLS contents.
     for (size_t i = 0, n = VAStartInstrumentationList.size(); i < n; i++) {
-      auto *OrigInst = VAStartInstrumentationList[i];
+      CallInst *OrigInst = VAStartInstrumentationList[i];
       IRBuilder<> IRB(OrigInst->getNextNode());
       Value *VAListTag = OrigInst->getArgOperand(0);
 
@@ -4405,7 +4405,7 @@ struct VarArgMIPS64Helper : public VarArgHelper {
   Value *VAArgTLSCopy = nullptr;
   Value *VAArgSize = nullptr;
 
-  SmallVector<CallBase *, 16> VAStartInstrumentationList;
+  SmallVector<CallInst *, 16> VAStartInstrumentationList;
 
   VarArgMIPS64Helper(Function &F, MemorySanitizer &MS,
                      MemorySanitizerVisitor &MSV) : F(F), MS(MS), MSV(MSV) {}
@@ -4494,7 +4494,7 @@ struct VarArgMIPS64Helper : public VarArgHelper {
     // Instrument va_start.
     // Copy va_list shadow from the backup copy of the TLS contents.
     for (size_t i = 0, n = VAStartInstrumentationList.size(); i < n; i++) {
-      auto *OrigInst = VAStartInstrumentationList[i];
+      CallInst *OrigInst = VAStartInstrumentationList[i];
       IRBuilder<> IRB(OrigInst->getNextNode());
       Value *VAListTag = OrigInst->getArgOperand(0);
       Type *RegSaveAreaPtrTy = Type::getInt64PtrTy(*MS.C);
@@ -4533,7 +4533,7 @@ struct VarArgAArch64Helper : public VarArgHelper {
   Value *VAArgTLSCopy = nullptr;
   Value *VAArgOverflowSize = nullptr;
 
-  SmallVector<CallBase *, 16> VAStartInstrumentationList;
+  SmallVector<CallInst *, 16> VAStartInstrumentationList;
 
   enum ArgKind { AK_GeneralPurpose, AK_FloatingPoint, AK_Memory };
 
@@ -4688,7 +4688,7 @@ struct VarArgAArch64Helper : public VarArgHelper {
     // Instrument va_start, copy va_list shadow from the backup copy of
     // the TLS contents.
     for (size_t i = 0, n = VAStartInstrumentationList.size(); i < n; i++) {
-      auto *OrigInst = VAStartInstrumentationList[i];
+      CallInst *OrigInst = VAStartInstrumentationList[i];
       IRBuilder<> IRB(OrigInst->getNextNode());
       Value *VAListTag = OrigInst->getArgOperand(0);
 
@@ -4783,7 +4783,7 @@ struct VarArgPowerPC64Helper : public VarArgHelper {
   Value *VAArgTLSCopy = nullptr;
   Value *VAArgSize = nullptr;
 
-  SmallVector<CallBase *, 16> VAStartInstrumentationList;
+  SmallVector<CallInst *, 16> VAStartInstrumentationList;
 
   VarArgPowerPC64Helper(Function &F, MemorySanitizer &MS,
                         MemorySanitizerVisitor &MSV) : F(F), MS(MS), MSV(MSV) {}
@@ -4932,7 +4932,7 @@ struct VarArgPowerPC64Helper : public VarArgHelper {
     // Instrument va_start.
     // Copy va_list shadow from the backup copy of the TLS contents.
    for (size_t i = 0, n = VAStartInstrumentationList.size(); i < n; i++) {
-      auto *OrigInst = VAStartInstrumentationList[i];
+      CallInst *OrigInst = VAStartInstrumentationList[i];
       IRBuilder<> IRB(OrigInst->getNextNode());
       Value *VAListTag = OrigInst->getArgOperand(0);
       Type *RegSaveAreaPtrTy = Type::getInt64PtrTy(*MS.C);
@@ -4972,7 +4972,7 @@ struct VarArgSystemZHelper : public VarArgHelper {
   Value *VAArgTLSOriginCopy = nullptr;
   Value *VAArgOverflowSize = nullptr;
 
-  SmallVector<CallBase *, 16> VAStartInstrumentationList;
+  SmallVector<CallInst *, 16> VAStartInstrumentationList;
 
   enum class ArgKind {
     GeneralPurpose,
@@ -5255,7 +5255,7 @@ struct VarArgSystemZHelper : public VarArgHelper {
     // Copy va_list shadow from the backup copy of the TLS contents.
     for (size_t VaStartNo = 0, VaStartNum = VAStartInstrumentationList.size();
          VaStartNo < VaStartNum; VaStartNo++) {
-      auto *OrigInst = VAStartInstrumentationList[VaStartNo];
+      CallInst *OrigInst = VAStartInstrumentationList[VaStartNo];
       IRBuilder<> IRB(OrigInst->getNextNode());
       Value *VAListTag = OrigInst->getArgOperand(0);
       copyRegSaveArea(IRB, VAListTag);
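As a quick illustration of what this revert means for client code: a minimal sketch, not part of the patch, assuming the standard LLVM C++ APIs of this era; llvm.donothing and the names used below are chosen purely for illustration. With IntrinsicInst deriving from CallInst again, dyn_cast<IntrinsicInst> matches only direct calls to intrinsic functions, an IntrinsicInst pointer converts implicitly to CallInst *, and the cast<CallInst>(II) adjustments removed above become unnecessary; an invoke of an intrinsic is once more never classified as an IntrinsicInst.

// Sketch only: exercises the post-revert IntrinsicInst/CallInst relationship.
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

int main() {
  LLVMContext Ctx;
  Module M("intrinsic-example", Ctx);
  IRBuilder<> B(Ctx);

  // Build: define void @f() { entry: call void @llvm.donothing() ret void }
  Function *F = Function::Create(FunctionType::get(B.getVoidTy(), false),
                                 Function::ExternalLinkage, "f", &M);
  BasicBlock *Entry = BasicBlock::Create(Ctx, "entry", F);
  B.SetInsertPoint(Entry);
  Function *DoNothing = Intrinsic::getDeclaration(&M, Intrinsic::donothing);
  CallInst *Call = B.CreateCall(DoNothing);
  B.CreateRetVoid();

  // A direct call to an intrinsic is an IntrinsicInst, and because
  // IntrinsicInst derives from CallInst again, II can be used anywhere a
  // CallInst * is expected -- no cast<CallInst>(II) required.
  if (auto *II = dyn_cast<IntrinsicInst>(Call)) {
    CallInst *AsCall = II; // implicit upcast, the point of the revert
    outs() << "intrinsic call: " << AsCall->getCalledFunction()->getName()
           << "\n";
  }
  return 0;
}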