
[Statepoints] Disable VReg lowering for values used on exception path of invoke.

Currently we lower invokes the same way as regular calls, e.g.:

V1 = STATEPOINT ... V (tied-def 0)

But this is incorrect if V1 is used on the exceptional path.
By LLVM rules V1 neither dominates its uses in the landing pad,
nor is its live range live on entry to the landing pad. So the
compiler is allowed to perform various surprising transformations,
such as splitting the live range after the statepoint and using
the split live range in the catch block.
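
For illustration only, here is a minimal IR sketch of the problematic
shape (@sketch, @callee and @pers_fn are made-up names, but the pattern
matches the tests updated below): %obj is relocated on both paths, and
a vreg def produced at the statepoint would not dominate the relocation
in the landing pad.

declare void @callee()
declare i32* @pers_fn()
declare token @llvm.experimental.gc.statepoint.p0f_isVoidf(i64, i32, void ()*, i32, i32, ...)
declare i64 addrspace(1)* @llvm.experimental.gc.relocate.p1i64(token, i32, i32)

define i64 addrspace(1)* @sketch(i64 addrspace(1)* %obj) gc "statepoint-example" personality i32* ()* @pers_fn {
entry:
  %tok = invoke token (i64, i32, void ()*, i32, i32, ...)
             @llvm.experimental.gc.statepoint.p0f_isVoidf(i64 0, i32 0, void ()* @callee, i32 0, i32 0, i32 0, i32 0)
             ["gc-live"(i64 addrspace(1)* %obj)]
         to label %normal unwind label %lpad

normal:
  ; relocation on the normal path uses the statepoint token;
  ; this use is compatible with a vreg (tied-def) lowering
  %obj.n = call i64 addrspace(1)* @llvm.experimental.gc.relocate.p1i64(token %tok, i32 0, i32 0)
  ret i64 addrspace(1)* %obj.n

lpad:
  ; relocation on the exceptional path uses the landingpad token;
  ; a vreg defined by the STATEPOINT would not dominate this use
  %lp = landingpad token
          cleanup
  %obj.e = call i64 addrspace(1)* @llvm.experimental.gc.relocate.p1i64(token %lp, i32 0, i32 0)
  ret i64 addrspace(1)* %obj.e
}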

Until (and unless) we find a better solution to this problem, let's
use the old lowering (spilling) for those values which are used on
the exceptional path, and allow VReg lowering for values used only
on the normal path.

Differential Revision: https://reviews.llvm.org/D93449
Denis Antrushin 2020-12-17 16:36:57 +07:00
parent 52b2d862f9
commit ff5b573256
2 changed files with 47 additions and 22 deletions


@@ -546,6 +546,18 @@ lowerStatepointMetaArgs(SmallVectorImpl<SDValue> &Ops,
// Decide which derived pointers will go on VRegs
unsigned MaxVRegPtrs = MaxRegistersForGCPointers.getValue();
// Pointers used on exceptional path of invoke statepoint.
// We cannot assign them to VRegs.
SmallSet<SDValue, 8> LPadPointers;
if (auto *StInvoke = dyn_cast_or_null<InvokeInst>(SI.StatepointInstr)) {
LandingPadInst *LPI = StInvoke->getLandingPadInst();
for (auto *Relocate : SI.GCRelocates)
if (Relocate->getOperand(0) == LPI) {
LPadPointers.insert(Builder.getValue(Relocate->getBasePtr()));
LPadPointers.insert(Builder.getValue(Relocate->getDerivedPtr()));
}
}
LLVM_DEBUG(dbgs() << "Deciding how to lower GC Pointers:\n");
// List of unique lowered GC Pointer values.
@@ -555,6 +567,14 @@ lowerStatepointMetaArgs(SmallVectorImpl<SDValue> &Ops,
unsigned CurNumVRegs = 0;
auto canPassGCPtrOnVReg = [&](SDValue SD) {
if (SD.getValueType().isVector())
return false;
if (LPadPointers.count(SD))
return false;
return !willLowerDirectly(SD);
};
auto processGCPtr = [&](const Value *V) {
SDValue PtrSD = Builder.getValue(V);
if (!LoweredGCPtrs.insert(PtrSD))
@@ -564,7 +584,9 @@ lowerStatepointMetaArgs(SmallVectorImpl<SDValue> &Ops,
assert(!LowerAsVReg.count(PtrSD) && "must not have been seen");
if (LowerAsVReg.size() == MaxVRegPtrs)
return;
if (willLowerDirectly(PtrSD) || V->getType()->isVectorTy()) {
assert(V->getType()->isVectorTy() == PtrSD.getValueType().isVector() &&
"IR and SD types disagree");
if (!canPassGCPtrOnVReg(PtrSD)) {
LLVM_DEBUG(dbgs() << "direct/spill "; PtrSD.dump(&Builder.DAG));
return;
}


@@ -10,14 +10,16 @@ declare dso_local i32* @personality_function()
define i64 addrspace(1)* @test_basic_invoke(i64 addrspace(1)* %obj, i64 addrspace(1)* %obj1)
; CHECK-LABEL: name: test_basic_invoke
; CHECK: bb.0.entry:
; CHECK: renamable $r14, renamable $rbx = STATEPOINT 0, 0, 1, @some_call, $rdi, 2, 0, 2, 0, 2, 5, 2, 0, 2, -1, 2, 0, 2, 0, 2, 0, 2, 2, killed renamable $r14(tied-def 0), killed renamable $rbx(tied-def 1), 2, 0, 2, 2, 0, 0, 1, 1, csr_64, implicit-def $rsp, implicit-def $ssp
; CHECK: MOV64mr %stack.1, 1, $noreg, 0, $noreg, renamable $rdi :: (store 8 into %stack.1)
; CHECK: MOV64mr %stack.0, 1, $noreg, 0, $noreg, killed renamable $rsi :: (store 8 into %stack.0)
; CHECK: STATEPOINT 0, 0, 1, @some_call, $rdi, 2, 0, 2, 0, 2, 5, 2, 0, 2, -1, 2, 0, 2, 0, 2, 0, 2, 2, 1, 8, %stack.0, 0, 1, 8, %stack.1, 0, 2, 0, 2, 2, 0, 0, 1, 1, csr_64, implicit-def $rsp, implicit-def $ssp :: (volatile load store 8 on %stack.0), (volatile load store 8 on %stack.1)
; CHECK: JMP_1 %bb.1
; CHECK: bb.1.safepoint_normal_dest:
; CHECK: renamable $rax = MOV64rm %stack.1, 1, $noreg, 0, $noreg :: (load 8 from %stack.1)
; CHECK: bb.2.normal_return:
; CHECK: $rax = COPY killed renamable $rbx
; CHECK: RET 0, $rax
; CHECK: bb.3.exceptional_return (landing-pad):
; CHECK: $rax = COPY killed renamable $r14
; CHECK: renamable $rax = MOV64rm %stack.0, 1, $noreg, 0, $noreg :: (load 8 from %stack.0)
; CHECK: RET 0, $rax
gc "statepoint-example" personality i32* ()* @"personality_function" {
entry:
@@ -43,35 +45,37 @@ exceptional_return:
define i64 addrspace(1)* @test_invoke_same_val(i1 %cond, i64 addrspace(1)* %val1, i64 addrspace(1)* %val2, i64 addrspace(1)* %val3)
; CHECK-LABEL: name: test_invoke_same_val
; CHECK: bb.0.entry:
; CHECK: renamable $r15 = COPY $rcx
; CHECK: renamable $rbx = COPY $rdx
; CHECK: renamable $rbp = COPY $rsi
; CHECK: renamable $rbx = COPY $rcx
; CHECK: renamable $rbp = COPY $rdx
; CHECK: renamable $r14d = COPY $edi
; CHECK: TEST8ri renamable $r14b, 1, implicit-def $eflags
; CHECK: JCC_1 %bb.3, 4, implicit killed $eflags
; CHECK: JMP_1 %bb.1
; CHECK: bb.1.left:
; CHECK: $rdi = COPY renamable $rbp
; CHECK: renamable $rbx, renamable $rbp = STATEPOINT 0, 0, 1, @some_call, $rdi, 2, 0, 2, 0, 2, 0, 2, 2, killed renamable $rbx(tied-def 0), killed renamable $rbp(tied-def 1), 2, 0, 2, 2, 0, 0, 1, 1, csr_64, implicit-def $rsp, implicit-def $ssp
; CHECK: MOV64mr %stack.0, 1, $noreg, 0, $noreg, renamable $rsi :: (store 8 into %stack.0)
; CHECK: $rdi = COPY killed renamable $rsi
; CHECK: renamable $rbp = STATEPOINT 0, 0, 1, @some_call, $rdi, 2, 0, 2, 0, 2, 0, 2, 2, killed renamable $rbp(tied-def 0), 1, 8, %stack.0, 0, 2, 0, 2, 2, 0, 0, 1, 1, csr_64, implicit-def $rsp, implicit-def $ssp :: (volatile load store 8 on %stack.0)
; CHECK: JMP_1 %bb.2
; CHECK: bb.2.left.relocs:
; CHECK: renamable $rbx = MOV64rm %stack.0, 1, $noreg, 0, $noreg :: (load 8 from %stack.0)
; CHECK: JMP_1 %bb.5
; CHECK: bb.3.right:
; CHECK: $rdi = COPY killed renamable $rbp
; CHECK: renamable $r15, renamable $rbx = STATEPOINT 0, 0, 1, @some_call, $rdi, 2, 0, 2, 0, 2, 0, 2, 2, killed renamable $r15(tied-def 0), killed renamable $rbx(tied-def 1), 2, 0, 2, 2, 0, 0, 1, 1, csr_64, implicit-def $rsp, implicit-def $ssp
; CHECK: MOV64mr %stack.0, 1, $noreg, 0, $noreg, killed renamable $rbp :: (store 8 into %stack.0)
; CHECK: $rdi = COPY killed renamable $rsi
; CHECK: renamable $rbx = STATEPOINT 0, 0, 1, @some_call, $rdi, 2, 0, 2, 0, 2, 0, 2, 2, killed renamable $rbx(tied-def 0), 1, 8, %stack.0, 0, 2, 0, 2, 2, 0, 0, 1, 1, csr_64, implicit-def $rsp, implicit-def $ssp :: (volatile load store 8 on %stack.0)
; CHECK: JMP_1 %bb.4
; CHECK: bb.4.right.relocs:
; CHECK: renamable $rbp = COPY killed renamable $r15
; CHECK: renamable $rbp = MOV64rm %stack.0, 1, $noreg, 0, $noreg :: (load 8 from %stack.0)
; CHECK: bb.5.normal_return:
; CHECK: TEST8ri renamable $r14b, 1, implicit-def $eflags, implicit killed $r14d
; CHECK: renamable $rbp = CMOV64rr killed renamable $rbp, killed renamable $rbx, 4, implicit killed $eflags
; CHECK: $rax = COPY killed renamable $rbp
; CHECK: renamable $rbx = CMOV64rr killed renamable $rbx, killed renamable $rbp, 4, implicit killed $eflags
; CHECK: $rax = COPY killed renamable $rbx
; CHECK: RET 0, $rax
; CHECK: bb.6.exceptional_return.left (landing-pad):
; CHECK: $rax = COPY killed renamable $rbp
; CHECK: renamable $rax = MOV64rm %stack.0, 1, $noreg, 0, $noreg :: (load 8 from %stack.0)
; CHECK: RET 0, $rax
; CHECK: bb.7.exceptional_return.right (landing-pad):
; CHECK: $rax = COPY killed renamable $rbx
; CHECK: renamable $rax = MOV64rm %stack.0, 1, $noreg, 0, $noreg :: (load 8 from %stack.0)
; CHECK: RET 0, $rax
gc "statepoint-example" personality i32* ()* @"personality_function" {
entry:
@@ -117,17 +121,16 @@ exceptional_return.right:
define void @test_duplicate_ir_values() gc "statepoint-example" personality i32* ()* @personality_function {
; CHECK-LABEL: name: test_duplicate_ir_values
; CHECK: bb.0.entry:
; CHECK: renamable $rbx = MOV64rm undef renamable $rax, 1, $noreg, 0, $noreg :: (load 8 from `i8 addrspace(1)* addrspace(1)* undef`, addrspace 1)
; CHECK: renamable $rbx = STATEPOINT 1, 16, 5, undef renamable $rax, undef $edi, undef $rsi, undef $edx, undef $ecx, undef $r8d, 2, 0, 2, 0, 2, 0, 2, 1, killed renamable $rbx(tied-def 0), 2, 0, 2, 1, 0, 0, csr_64, implicit-def $rsp, implicit-def $ssp, implicit-def dead $eax
; CHECK: renamable $rax = MOV64rm undef renamable $rax, 1, $noreg, 0, $noreg :: (load 8 from `i8 addrspace(1)* addrspace(1)* undef`, addrspace 1)
; CHECK: MOV64mr %stack.0, 1, $noreg, 0, $noreg, killed renamable $rax :: (store 8 into %stack.0)
; CHECK: STATEPOINT 1, 16, 5, undef renamable $rax, undef $edi, undef $rsi, undef $edx, undef $ecx, undef $r8d, 2, 0, 2, 0, 2, 0, 2, 1, 1, 8, %stack.0, 0, 2, 0, 2, 1, 0, 0, csr_64, implicit-def $rsp, implicit-def $ssp, implicit-def dead $eax :: (volatile load store 8 on %stack.0)
; CHECK: JMP_1 %bb.1
; CHECK: bb.1.normal_continue:
; CHECK: MOV64mr %stack.0, 1, $noreg, 0, $noreg, killed renamable $rbx :: (store 8 into %stack.0)
; CHECK: $edi = MOV32ri 10
; CHECK: STATEPOINT 2882400000, 0, 1, target-flags(x86-plt) @__llvm_deoptimize, killed $edi, 2, 0, 2, 2, 2, 2, 1, 8, %stack.0, 0, 1, 8, %stack.0, 0, 2, 0, 2, 0, 2, 0, csr_64, implicit-def $rsp, implicit-def $ssp :: (volatile load store 8 on %stack.0)
; CHECK: STATEPOINT 2882400000, 0, 1, target-flags(x86-plt) @__llvm_deoptimize, killed $edi, 2, 0, 2, 2, 2, 2, 1, 8, %stack.0, 0, 1, 8, %stack.0, 0, 2, 0, 2, 0, 2, 0, csr_64, implicit-def $rsp, implicit-def $ssp
; CHECK: bb.2.exceptional_return (landing-pad):
; CHECK: MOV64mr %stack.0, 1, $noreg, 0, $noreg, killed renamable $rbx :: (store 8 into %stack.0)
; CHECK: $edi = MOV32ri -271
; CHECK: STATEPOINT 2882400000, 0, 1, target-flags(x86-plt) @__llvm_deoptimize, killed $edi, 2, 0, 2, 0, 2, 1, 1, 8, %stack.0, 0, 2, 0, 2, 0, 2, 0, csr_64, implicit-def $rsp, implicit-def $ssp :: (volatile load store 8 on %stack.0)
; CHECK: STATEPOINT 2882400000, 0, 1, target-flags(x86-plt) @__llvm_deoptimize, killed $edi, 2, 0, 2, 0, 2, 1, 1, 8, %stack.0, 0, 2, 0, 2, 0, 2, 0, csr_64, implicit-def $rsp, implicit-def $ssp
entry:
%val1 = load i8 addrspace(1)*, i8 addrspace(1)* addrspace(1)* undef, align 8
%val2 = load i8 addrspace(1)*, i8 addrspace(1)* addrspace(1)* undef, align 8