Do not call replaceAllUsesWith to upgrade calls to ARC runtime functions to intrinsic calls

This fixes a bug in r368311. It turns out that the ARC runtime functions in the IR can have pointer parameter types that are not i8* or i8**. Instead of RAUWing normal functions with intrinsics, manually bitcast the arguments before passing them to the intrinsic functions and bitcast the return value back to the type of the original call instruction.

This recommits r368634, which was reverted in r368637. The loop in that patch was iterating over uses of a function and deleting function calls inside it, which caused bots to crash.

rdar://problem/54125406

Differential Revision: https://reviews.llvm.org/D66047

llvm-svn: 368646
commit 3381f8fbc6 (parent e62b2c77ca)
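To illustrate the new upgrade behavior, here is a minimal before/after sketch in LLVM IR, based on the objc_initWeak case exercised by the updated test below; the temporary value names (%t0 and so on) are illustrative and not taken from the commit itself.

; Before upgrade: a runtime call whose pointer types are not i8*/i8**.
  %v3 = tail call i32* @objc_initWeak(i32** %e, i32* %d)

; After upgrade: the arguments are bitcast to the intrinsic's parameter types,
; the intrinsic is called, and the result is bitcast back to the original type.
  %t0 = bitcast i32** %e to i8**
  %t1 = bitcast i32* %d to i8*
  %t2 = tail call i8* @llvm.objc.initWeak(i8** %t0, i8* %t1)
  %t3 = bitcast i8* %t2 to i32*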
llvm/lib/IR/AutoUpgrade.cpp

@@ -3855,6 +3855,8 @@ bool llvm::UpgradeRetainReleaseMarker(Module &M) {
 }
 
 void llvm::UpgradeARCRuntimeCalls(Module &M) {
+  // This lambda converts normal function calls to ARC runtime functions to
+  // intrinsic calls.
   auto UpgradeToIntrinsic = [&](const char *OldFunc,
                                 llvm::Intrinsic::ID IntrinsicFunc) {
     Function *Fn = M.getFunction(OldFunc);
@@ -3863,11 +3865,44 @@ void llvm::UpgradeARCRuntimeCalls(Module &M) {
       return;
 
     Function *NewFn = llvm::Intrinsic::getDeclaration(&M, IntrinsicFunc);
-    Fn->replaceAllUsesWith(NewFn);
-    Fn->eraseFromParent();
+
+    for (auto I = Fn->user_begin(), E = Fn->user_end(); I != E;) {
+      CallInst *CI = dyn_cast<CallInst>(*I++);
+      if (!CI || CI->getCalledFunction() != Fn)
+        continue;
+
+      IRBuilder<> Builder(CI->getParent(), CI->getIterator());
+      FunctionType *NewFuncTy = NewFn->getFunctionType();
+      SmallVector<Value *, 2> Args;
+
+      for (unsigned I = 0, E = CI->getNumArgOperands(); I != E; ++I) {
+        Value *Arg = CI->getArgOperand(I);
+        // Bitcast argument to the parameter type of the new function if it's
+        // not a variadic argument.
+        if (I < NewFuncTy->getNumParams())
+          Arg = Builder.CreateBitCast(Arg, NewFuncTy->getParamType(I));
+        Args.push_back(Arg);
+      }
+
+      // Create a call instruction that calls the new function.
+      CallInst *NewCall = Builder.CreateCall(NewFuncTy, NewFn, Args);
+      NewCall->setTailCallKind(cast<CallInst>(CI)->getTailCallKind());
+      NewCall->setName(CI->getName());
+
+      // Bitcast the return value back to the type of the old call.
+      Value *NewRetVal = Builder.CreateBitCast(NewCall, CI->getType());
+
+      if (!CI->use_empty())
+        CI->replaceAllUsesWith(NewRetVal);
+      CI->eraseFromParent();
+    }
+
+    if (Fn->use_empty())
+      Fn->eraseFromParent();
   };
 
-  // Unconditionally convert "clang.arc.use" to "llvm.objc.clang.arc.use".
+  // Unconditionally convert a call to "clang.arc.use" to a call to
+  // "llvm.objc.clang.arc.use".
   UpgradeToIntrinsic("clang.arc.use", llvm::Intrinsic::objc_clang_arc_use);
 
   // Return if the bitcode doesn't have the arm64 retainAutoreleasedReturnValue
Binary file llvm/test/Bitcode/upgrade-arc-runtime-calls.bc not shown.
llvm/test/Bitcode/upgrade-arc-runtime-calls.ll

@@ -8,14 +8,62 @@
 ; RUN: llvm-dis < %S/upgrade-arc-runtime-calls.bc | FileCheck -check-prefixes=ARC %s
 ; RUN: llvm-dis < %S/upgrade-mrr-runtime-calls.bc | FileCheck -check-prefixes=MRR %s
 
-// ARC: define void @testRuntimeCalls(i8* %[[A:.*]], i8** %[[B:.*]], i8** %[[C:.*]]) {
+define void @testRuntimeCalls(i8* %a, i8** %b, i8** %c, i32* %d, i32** %e) personality i32 (...)* @__gxx_personality_v0 {
+entry:
+  %v0 = tail call i8* @objc_autorelease(i8* %a) #0
+  tail call void @objc_autoreleasePoolPop(i8* %a) #0
+  %v1 = tail call i8* @objc_autoreleasePoolPush() #0
+  %v2 = tail call i8* @objc_autoreleaseReturnValue(i8* %a) #0
+  tail call void @objc_copyWeak(i8** %b, i8** %c) #0
+  tail call void @objc_destroyWeak(i8** %b) #0
+  %v3 = tail call i32* @objc_initWeak(i32** %e, i32* %d) #0
+  %v4 = tail call i8* @objc_loadWeak(i8** %b) #0
+  %v5 = tail call i8* @objc_loadWeakRetained(i8** %b) #0
+  tail call void @objc_moveWeak(i8** %b, i8** %c) #0
+  tail call void @objc_release(i8* %a) #0
+  %v6 = tail call i8* @objc_retain(i8* %a) #0
+  %v7 = tail call i8* @objc_retainAutorelease(i8* %a) #0
+  %v8 = tail call i8* @objc_retainAutoreleaseReturnValue(i8* %a) #0
+  %v9 = tail call i8* @objc_retainAutoreleasedReturnValue(i8* %a) #0
+  %v10 = tail call i8* @objc_retainBlock(i8* %a) #0
+  tail call void @objc_storeStrong(i8** %b, i8* %a) #0
+  %v11 = tail call i8* @objc_storeWeak(i8** %b, i8* %a) #0
+  tail call void (...) @clang.arc.use(i8* %a) #0
+  %v12 = tail call i8* @objc_unsafeClaimAutoreleasedReturnValue(i8* %a) #0
+  %v13 = tail call i8* @objc_retainedObject(i8* %a) #0
+  %v14 = tail call i8* @objc_unretainedObject(i8* %a) #0
+  %v15 = tail call i8* @objc_unretainedPointer(i8* %a) #0
+  %v16 = tail call i8* @objc_retain.autorelease(i8* %a) #0
+  %v17 = tail call i32 @objc_sync.enter(i8* %a) #0
+  %v18 = tail call i32 @objc_sync.exit(i8* %a) #0
+  tail call void @objc_arc_annotation_topdown_bbstart(i8** %b, i8** %c) #0
+  tail call void @objc_arc_annotation_topdown_bbend(i8** %b, i8** %c) #0
+  tail call void @objc_arc_annotation_bottomup_bbstart(i8** %b, i8** %c) #0
+  tail call void @objc_arc_annotation_bottomup_bbend(i8** %b, i8** %c) #0
+  invoke void @objc_autoreleasePoolPop(i8* %a)
+          to label %normalBlock unwind label %unwindBlock
+normalBlock:
+  ret void
+unwindBlock:
+  %ll = landingpad { i8*, i32 }
+           cleanup
+  ret void
+}
+
+// Check that auto-upgrader converts function calls to intrinsic calls. Note that
+// the auto-upgrader doesn't touch invoke instructions.
+
+// ARC: define void @testRuntimeCalls(i8* %[[A:.*]], i8** %[[B:.*]], i8** %[[C:.*]], i32* %[[D:.*]], i32** %[[E:.*]]) personality
 // ARC: %[[V0:.*]] = tail call i8* @llvm.objc.autorelease(i8* %[[A]])
 // ARC-NEXT: tail call void @llvm.objc.autoreleasePoolPop(i8* %[[A]])
 // ARC-NEXT: %[[V1:.*]] = tail call i8* @llvm.objc.autoreleasePoolPush()
 // ARC-NEXT: %[[V2:.*]] = tail call i8* @llvm.objc.autoreleaseReturnValue(i8* %[[A]])
 // ARC-NEXT: tail call void @llvm.objc.copyWeak(i8** %[[B]], i8** %[[C]])
 // ARC-NEXT: tail call void @llvm.objc.destroyWeak(i8** %[[B]])
-// ARC-NEXT: %[[V3:.*]] = tail call i8* @llvm.objc.initWeak(i8** %[[B]], i8* %[[A]])
+// ARC-NEXT: %[[V100:.*]] = bitcast i32** %[[E]] to i8**
+// ARC-NEXT: %[[V101:.*]] = bitcast i32* %[[D]] to i8*
+// ARC-NEXT: %[[V102:.*]] = tail call i8* @llvm.objc.initWeak(i8** %[[V100]], i8* %[[V101]])
+// ARC-NEXT: %[[V103:.*]] = bitcast i8* %[[V102]] to i32*
 // ARC-NEXT: %[[V4:.*]] = tail call i8* @llvm.objc.loadWeak(i8** %[[B]])
 // ARC-NEXT: %[[V5:.*]] = tail call i8* @llvm.objc.loadWeakRetained(i8** %[[B]])
 // ARC-NEXT: tail call void @llvm.objc.moveWeak(i8** %[[B]], i8** %[[C]])
@@ -39,16 +87,16 @@
 // ARC-NEXT: tail call void @llvm.objc.arc.annotation.topdown.bbend(i8** %[[B]], i8** %[[C]])
 // ARC-NEXT: tail call void @llvm.objc.arc.annotation.bottomup.bbstart(i8** %[[B]], i8** %[[C]])
 // ARC-NEXT: tail call void @llvm.objc.arc.annotation.bottomup.bbend(i8** %[[B]], i8** %[[C]])
-// ARC-NEXT: ret void
+// ARC-NEXT: invoke void @objc_autoreleasePoolPop(i8* %[[A]])
 
-// MRR: define void @testRuntimeCalls(i8* %[[A:.*]], i8** %[[B:.*]], i8** %[[C:.*]]) {
+// MRR: define void @testRuntimeCalls(i8* %[[A:.*]], i8** %[[B:.*]], i8** %[[C:.*]], i32* %[[D:.*]], i32** %[[E:.*]]) personality
 // MRR: %[[V0:.*]] = tail call i8* @objc_autorelease(i8* %[[A]])
 // MRR-NEXT: tail call void @objc_autoreleasePoolPop(i8* %[[A]])
 // MRR-NEXT: %[[V1:.*]] = tail call i8* @objc_autoreleasePoolPush()
 // MRR-NEXT: %[[V2:.*]] = tail call i8* @objc_autoreleaseReturnValue(i8* %[[A]])
 // MRR-NEXT: tail call void @objc_copyWeak(i8** %[[B]], i8** %[[C]])
 // MRR-NEXT: tail call void @objc_destroyWeak(i8** %[[B]])
-// MRR-NEXT: %[[V3:.*]] = tail call i8* @objc_initWeak(i8** %[[B]], i8* %[[A]])
+// MRR-NEXT: %[[V3:.*]] = tail call i32* @objc_initWeak(i32** %[[E]], i32* %[[D]])
 // MRR-NEXT: %[[V4:.*]] = tail call i8* @objc_loadWeak(i8** %[[B]])
 // MRR-NEXT: %[[V5:.*]] = tail call i8* @objc_loadWeakRetained(i8** %[[B]])
 // MRR-NEXT: tail call void @objc_moveWeak(i8** %[[B]], i8** %[[C]])
@@ -72,4 +120,4 @@
 // MRR-NEXT: tail call void @objc_arc_annotation_topdown_bbend(i8** %[[B]], i8** %[[C]])
 // MRR-NEXT: tail call void @objc_arc_annotation_bottomup_bbstart(i8** %[[B]], i8** %[[C]])
 // MRR-NEXT: tail call void @objc_arc_annotation_bottomup_bbend(i8** %[[B]], i8** %[[C]])
-// MRR-NEXT: ret void
+// MRR-NEXT: invoke void @objc_autoreleasePoolPop(i8* %[[A]])
Binary file llvm/test/Bitcode/upgrade-mrr-runtime-calls.bc not shown.