mirror of https://github.com/RPCS3/llvm-mirror.git
synced 2024-11-01 08:23:21 +01:00
fcbd79805b
The normal dataflow sequence in the ARC optimizer consists of the following states:

    Retain -> CanRelease -> Use -> Release

Before this patch, the optimizer stored the uses that determine the lifetime of the retainable object pointer when, bottom up, it hits a retain or when, top down, it hits a release. This is correct for the imprecise lifetime scenario, since what we are trying to do is remove retains/releases while making sure that no "CanRelease" (which is usually a call) deallocates the given pointer before we get to the "Use" (since that would cause a segfault).

If we are considering the precise lifetime scenario, though, this is not correct. In such a situation we *DO* care about the previous sequence, but additionally we wish to track the uses resulting from the following incomplete sequences:

    Retain -> CanRelease -> Release   (TopDown)
    Retain <- Use <- Release          (BottomUp)

*NOTE* This patch looks large, but most of it consists of updating test cases. Additionally, this fix exposed another bug. I removed the test case that expressed said bug and will recommit it with the fix in a little bit.

llvm-svn: 178921
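To make the precise/imprecise distinction concrete, here is a minimal hand-written IR sketch (not part of this commit; @precise_example, @imprecise_example, and their bodies are illustrative only). The two functions differ only in the !clang.imprecise_release metadata on the release, which is the same marker that distinguishes the paired test cases in the file below:

declare i8* @objc_retain(i8*)
declare void @objc_release(i8*)

define void @precise_example(i8* %x) {
entry:
  %0 = tail call i8* @objc_retain(i8* %x)
  ; ... CanRelease and Use instructions would appear here ...
  ; Precise lifetime: the release carries no metadata.
  call void @objc_release(i8* %x)
  ret void
}

define void @imprecise_example(i8* %x) {
entry:
  %0 = tail call i8* @objc_retain(i8* %x)
  ; ... CanRelease and Use instructions would appear here ...
  ; Imprecise lifetime: the release is tagged with !clang.imprecise_release.
  call void @objc_release(i8* %x), !clang.imprecise_release !0
  ret void
}

!0 = metadata !{}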
216 lines
7.8 KiB
LLVM
; RUN: opt -S -objc-arc < %s | FileCheck %s

declare i8* @objc_retain(i8*) nonlazybind
declare void @objc_release(i8*) nonlazybind
declare i8* @objc_retainBlock(i8*)

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Use by an instruction which copies the value is an escape if the ;
; result is an escape. The current instructions with this property are: ;
; ;
; 1. BitCast. ;
; 2. GEP. ;
; 3. PhiNode. ;
; 4. SelectInst. ;
; ;
; Make sure that such instructions do not confuse the optimizer into removing ;
; an objc_retainBlock that is needed. ;
; ;
; rdar://13273675. (With extra test cases to handle bitcast, phi, and select.) ;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
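; Each test below comes in two variants that differ only in the release: the
; plain variant uses a plain (precise) objc_release, while the matching '_a'
; variant marks its objc_release with !clang.imprecise_release.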

define void @bitcasttest(i8* %storage, void (...)* %block) {
; CHECK: define void @bitcasttest
entry:
  %t1 = bitcast void (...)* %block to i8*
; CHECK: tail call i8* @objc_retain
  %t2 = tail call i8* @objc_retain(i8* %t1)
; CHECK: tail call i8* @objc_retainBlock
  %t3 = tail call i8* @objc_retainBlock(i8* %t1), !clang.arc.copy_on_escape !0
  %t4 = bitcast i8* %storage to void (...)**
  %t5 = bitcast i8* %t3 to void (...)*
  store void (...)* %t5, void (...)** %t4, align 8
; CHECK: call void @objc_release
  call void @objc_release(i8* %t1)
  ret void
; CHECK: }
}

define void @bitcasttest_a(i8* %storage, void (...)* %block) {
; CHECK: define void @bitcasttest_a
entry:
  %t1 = bitcast void (...)* %block to i8*
; CHECK-NOT: tail call i8* @objc_retain
  %t2 = tail call i8* @objc_retain(i8* %t1)
; CHECK: tail call i8* @objc_retainBlock
  %t3 = tail call i8* @objc_retainBlock(i8* %t1), !clang.arc.copy_on_escape !0
  %t4 = bitcast i8* %storage to void (...)**
  %t5 = bitcast i8* %t3 to void (...)*
  store void (...)* %t5, void (...)** %t4, align 8
; CHECK-NOT: call void @objc_release
  call void @objc_release(i8* %t1), !clang.imprecise_release !0
  ret void
; CHECK: }
}

define void @geptest(void (...)** %storage_array, void (...)* %block) {
; CHECK: define void @geptest
entry:
  %t1 = bitcast void (...)* %block to i8*
; CHECK: tail call i8* @objc_retain
  %t2 = tail call i8* @objc_retain(i8* %t1)
; CHECK: tail call i8* @objc_retainBlock
  %t3 = tail call i8* @objc_retainBlock(i8* %t1), !clang.arc.copy_on_escape !0
  %t4 = bitcast i8* %t3 to void (...)*

  %storage = getelementptr inbounds void (...)** %storage_array, i64 0

  store void (...)* %t4, void (...)** %storage, align 8
; CHECK: call void @objc_release
  call void @objc_release(i8* %t1)
  ret void
; CHECK: }
}

define void @geptest_a(void (...)** %storage_array, void (...)* %block) {
; CHECK: define void @geptest_a
entry:
  %t1 = bitcast void (...)* %block to i8*
; CHECK-NOT: tail call i8* @objc_retain
  %t2 = tail call i8* @objc_retain(i8* %t1)
; CHECK: tail call i8* @objc_retainBlock
  %t3 = tail call i8* @objc_retainBlock(i8* %t1), !clang.arc.copy_on_escape !0
  %t4 = bitcast i8* %t3 to void (...)*

  %storage = getelementptr inbounds void (...)** %storage_array, i64 0

  store void (...)* %t4, void (...)** %storage, align 8
; CHECK-NOT: call void @objc_release
  call void @objc_release(i8* %t1), !clang.imprecise_release !0
  ret void
; CHECK: }
}

define void @selecttest(void (...)** %store1, void (...)** %store2,
                        void (...)* %block) {
; CHECK: define void @selecttest
entry:
  %t1 = bitcast void (...)* %block to i8*
; CHECK: tail call i8* @objc_retain
  %t2 = tail call i8* @objc_retain(i8* %t1)
; CHECK: tail call i8* @objc_retainBlock
  %t3 = tail call i8* @objc_retainBlock(i8* %t1), !clang.arc.copy_on_escape !0
  %t4 = bitcast i8* %t3 to void (...)*
  %store = select i1 undef, void (...)** %store1, void (...)** %store2
  store void (...)* %t4, void (...)** %store, align 8
; CHECK: call void @objc_release
  call void @objc_release(i8* %t1)
  ret void
; CHECK: }
}

define void @selecttest_a(void (...)** %store1, void (...)** %store2,
                          void (...)* %block) {
; CHECK: define void @selecttest_a
entry:
  %t1 = bitcast void (...)* %block to i8*
; CHECK-NOT: tail call i8* @objc_retain
  %t2 = tail call i8* @objc_retain(i8* %t1)
; CHECK: tail call i8* @objc_retainBlock
  %t3 = tail call i8* @objc_retainBlock(i8* %t1), !clang.arc.copy_on_escape !0
  %t4 = bitcast i8* %t3 to void (...)*
  %store = select i1 undef, void (...)** %store1, void (...)** %store2
  store void (...)* %t4, void (...)** %store, align 8
; CHECK-NOT: call void @objc_release
  call void @objc_release(i8* %t1), !clang.imprecise_release !0
  ret void
; CHECK: }
}

define void @phinodetest(void (...)** %storage1,
                         void (...)** %storage2,
                         void (...)* %block) {
; CHECK: define void @phinodetest
entry:
  %t1 = bitcast void (...)* %block to i8*
; CHECK: tail call i8* @objc_retain
  %t2 = tail call i8* @objc_retain(i8* %t1)
; CHECK: tail call i8* @objc_retainBlock
  %t3 = tail call i8* @objc_retainBlock(i8* %t1), !clang.arc.copy_on_escape !0
  %t4 = bitcast i8* %t3 to void (...)*
  br i1 undef, label %store1_set, label %store2_set
; CHECK: store1_set:

store1_set:
  br label %end

store2_set:
  br label %end

end:
; CHECK: end:
  %storage = phi void (...)** [ %storage1, %store1_set ], [ %storage2, %store2_set ]
  store void (...)* %t4, void (...)** %storage, align 8
; CHECK: call void @objc_release
  call void @objc_release(i8* %t1)
  ret void
; CHECK: }
}

define void @phinodetest_a(void (...)** %storage1,
                           void (...)** %storage2,
                           void (...)* %block) {
; CHECK: define void @phinodetest_a
entry:
  %t1 = bitcast void (...)* %block to i8*
; CHECK-NOT: tail call i8* @objc_retain
  %t2 = tail call i8* @objc_retain(i8* %t1)
; CHECK: tail call i8* @objc_retainBlock
  %t3 = tail call i8* @objc_retainBlock(i8* %t1), !clang.arc.copy_on_escape !0
  %t4 = bitcast i8* %t3 to void (...)*
  br i1 undef, label %store1_set, label %store2_set

store1_set:
  br label %end

store2_set:
  br label %end

end:
  %storage = phi void (...)** [ %storage1, %store1_set ], [ %storage2, %store2_set ]
  store void (...)* %t4, void (...)** %storage, align 8
; CHECK-NOT: call void @objc_release
  call void @objc_release(i8* %t1), !clang.imprecise_release !0
  ret void
}

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; This test makes sure that we do not hang clang when visiting a use ;
; cycle caused by phi nodes during objc-arc analysis. *NOTE* This ;
; test case looks a little convoluted since it was produced by ;
; bugpoint. ;
; ;
; bugzilla://14551 ;
; rdar://12851911 ;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

define void @phinode_use_cycle(i8* %block) uwtable optsize ssp {
; CHECK: define void @phinode_use_cycle(i8* %block)
entry:
  br label %for.body

for.body:                                         ; preds = %if.then, %for.body, %entry
  %block.05 = phi void (...)* [ null, %entry ], [ %1, %if.then ], [ %block.05, %for.body ]
  br i1 undef, label %for.body, label %if.then

if.then:                                          ; preds = %for.body
  %0 = call i8* @objc_retainBlock(i8* %block), !clang.arc.copy_on_escape !0
  %1 = bitcast i8* %0 to void (...)*
  %2 = bitcast void (...)* %block.05 to i8*
  call void @objc_release(i8* %2) nounwind, !clang.imprecise_release !0
  br label %for.body
}

!0 = metadata !{}