
Revert "Temporarily do not drop volatile stores before unreachable"

This reverts commit 4e413e16216d0c94ada2171f3c59e0a85f4fa4b6,
which landed almost 10 months ago under the premise that the original behavior
didn't match reality and was breaking users, even though it was correct per
the LangRef. But the LangRef change still hasn't appeared, which might suggest
that the affected parties aren't really worried about this problem.
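For illustration, this is the kind of pattern at stake — a minimal C sketch
(the function name and the null-store idiom are illustrative, not taken from
this patch):

    /* A deliberate-crash idiom: a volatile store that the programmer
       expects to fault, immediately followed by unreachable. */
    __attribute__((noreturn)) static void force_crash(void) {
      *(volatile int *)0 = 0;  /* store expected to trap at run time */
      __builtin_unreachable(); /* control is asserted never to get here */
    }

With this revert, the optimizer may again erase such a volatile store before
unreachable (or, for stores to null, replace it with a trap, as the test
updates below show), since the LangRef does not permit volatile stores to trap.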

Please refer to discussion in:
* https://reviews.llvm.org/D87399 (`Revert "[InstCombine] erase instructions leading up to unreachable"`)
* https://reviews.llvm.org/D53184 (`[LangRef] Clarify semantics of volatile operations.`)
* https://reviews.llvm.org/D87149 (`[InstCombine] erase instructions leading up to unreachable`)

clang has `-Wnull-dereference`, which diagnoses the obvious cases
of null dereference; it was adjusted in f4877c78c0fc98be47b926439bbfe33d5e1d1b6d.
However, it only catches the cases where the pointer is a null literal;
it will not catch the cases where an arbitrary store is expected to trap.
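To illustrate the diagnostic's coverage, a small hedged sketch in C
(the function names are invented):

    void caught(void) {
      /* The pointer is a null literal, so -Wnull-dereference can
         diagnose this store. */
      *(volatile int *)0 = 0;
    }

    void not_caught(volatile int *p) {
      /* An arbitrary pointer: no diagnostic is emitted, even if the
         caller passes a pointer whose store is expected to trap. */
      *p = 0;
      __builtin_unreachable();
    }

Code that needs a guaranteed trap can use `__builtin_trap()` instead.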

Differential Revision: https://reviews.llvm.org/D105338
Roman Lebedev 2021-07-09 13:36:21 +03:00
parent eeea5b4c4a
commit ff629c6563
6 changed files with 36 additions and 72 deletions


@@ -2888,14 +2888,6 @@ Instruction *InstCombinerImpl::visitUnreachableInst(UnreachableInst &I) {
     // Otherwise, this instruction can be freely erased,
     // even if it is not side-effect free.

-    // Temporarily disable removal of volatile stores preceding unreachable,
-    // pending a potential LangRef change permitting volatile stores to trap.
-    // TODO: Either remove this code, or properly integrate the check into
-    // isGuaranteedToTransferExecutionToSuccessor().
-    if (auto *SI = dyn_cast<StoreInst>(Prev))
-      if (SI->isVolatile())
-        return nullptr; // Can not drop this instruction. We're done here.
-
     // A value may still have uses before we process it here (for example, in
     // another unreachable block), so convert those to poison.
     replaceInstUsesWith(*Prev, PoisonValue::get(Prev->getType()));


@@ -2297,9 +2297,6 @@ static bool markAliveBlocks(Function &F,
         // that they should be changed to unreachable by passes that can't
         // modify the CFG.

-        // Don't touch volatile stores.
-        if (SI->isVolatile()) continue;
-
         Value *Ptr = SI->getOperand(1);

         if (isa<UndefValue>(Ptr) ||


@@ -4672,14 +4672,6 @@ bool SimplifyCFGOpt::simplifyUnreachable(UnreachableInst *UI) {
     // Otherwise, this instruction can be freely erased,
     // even if it is not side-effect free.

-    // Temporarily disable removal of volatile stores preceding unreachable,
-    // pending a potential LangRef change permitting volatile stores to trap.
-    // TODO: Either remove this code, or properly integrate the check into
-    // isGuaranteedToTransferExecutionToSuccessor().
-    if (auto *SI = dyn_cast<StoreInst>(&*BBI))
-      if (SI->isVolatile())
-        break; // Can not drop this instruction. We're done here.
-
     // Note that deleting EH's here is in fact okay, although it involves a bit
     // of subtle reasoning. If this inst is an EH, all the predecessors of this
     // block will be the unwind edges of Invoke/CatchSwitch/CleanupReturn,


@@ -3,51 +3,38 @@
 ; NUM-COUNT-3: endbr64
-;SJLJ: main: # @main
-;SJLJ-NEXT: .Lfunc_begin0:
-;SJLJ-NEXT: # %bb.0: # %entry
-;SJLJ-NEXT: endbr64
-;SJLJ-NEXT: pushq %rbp
-;SJLJ: callq _Unwind_SjLj_Register
-;SJLJ-NEXT: .Ltmp0:
-;SJLJ-NEXT: callq _Z3foov
-;SJLJ-NEXT: .Ltmp1:
-;SJLJ-NEXT: # %bb.1: # %invoke.cont
-;SJLJ-NEXT: movl
-;SJLJ-NEXT: .LBB0_7: # %return
-;SJLJ: callq _Unwind_SjLj_Unregister
-;SJLJ: retq
-;SJLJ-NEXT: .LBB0_9:
-;SJLJ-NEXT: endbr64
-;SJLJ-NEXT: movl
-;SJLJ-NEXT: cmpl
-;SJLJ-NEXT: jb .LBB0_10
-;SJLJ-NEXT: # %bb.11:
-;SJLJ-NEXT: ud2
-;SJLJ-NEXT: .LBB0_10:
-;SJLJ-NEXT: leaq .LJTI0_0(%rip), %rcx
-;SJLJ-NEXT: jmpq *(%rcx,%rax,8)
-;SJLJ-NEXT: .LBB0_2: # %lpad
-;SJLJ-NEXT: .Ltmp2:
-;SJLJ-NEXT: endbr64
-;SJLJ: jne .LBB0_4
-;SJLJ-NEXT: # %bb.3: # %catch3
-;SJLJ: callq __cxa_begin_catch
-;SJLJ: jmp .LBB0_6
-;SJLJ-NEXT: .LBB0_4: # %catch.fallthrough
-;SJLJ-NEXT: cmpl
-;SJLJ-NEXT: jne .LBB0_8
-;SJLJ-NEXT: # %bb.5: # %catch
-;SJLJ: callq __cxa_begin_catch
-;SJLJ: cmpb
-;SJLJ-NEXT: .LBB0_6: # %return
-;SJLJ: callq __cxa_end_catch
-;SJLJ-NEXT: jmp .LBB0_7
-;SJLJ-NEXT: .LBB0_8: # %eh.resume
-;SJLJ-NEXT: movl
-;SJLJ-NEXT: .Lfunc_end0:
-;SJLJ: .LJTI0_0:
-;SJLJ-NEXT: .quad .LBB0_2
+; SJLJ-LABEL: main:
+; SJLJ: # %bb.0: # %entry
+; SJLJ-NEXT: endbr64
+; SJLJ: callq _Unwind_SjLj_Register@PLT
+; SJLJ-NEXT: .Ltmp0:
+; SJLJ-NEXT: callq _Z3foov
+; SJLJ-NEXT: .Ltmp1:
+; SJLJ-NEXT: # %bb.1: # %invoke.cont
+; SJLJ: .LBB0_6: # %return
+; SJLJ: callq _Unwind_SjLj_Unregister@PLT
+; SJLJ: retq
+; SJLJ-NEXT: .LBB0_7:
+; SJLJ-NEXT: endbr64
+; SJLJ: jb .LBB0_8
+; SJLJ-NEXT: # %bb.9:
+; SJLJ-NEXT: ud2
+; SJLJ-NEXT: .LBB0_8:
+; SJLJ: jmpq *(%rcx,%rax,8)
+; SJLJ-NEXT: .LBB0_2: # %lpad
+; SJLJ-NEXT: .Ltmp2:
+; SJLJ-NEXT: endbr64
+; SJLJ: jne .LBB0_4
+; SJLJ-NEXT: # %bb.3: # %catch3
+; SJLJ: callq __cxa_begin_catch
+; SJLJ: jmp .LBB0_5
+; SJLJ-NEXT: .LBB0_4: # %catch
+; SJLJ: callq __cxa_begin_catch
+; SJLJ: cmpb $3, %al
+; SJLJ-NEXT: .LBB0_5: # %return
+; SJLJ-NEXT: setne %cl
+; SJLJ: callq __cxa_end_catch
+; SJLJ-NEXT: jmp .LBB0_6
 @_ZTIi = external dso_local constant i8*
 @_ZTIc = external dso_local constant i8*


@@ -25,7 +25,6 @@ define void @volatile_store_before_unreachable(i1 %c, i8* %p) {
 ; CHECK-LABEL: @volatile_store_before_unreachable(
 ; CHECK-NEXT:    br i1 [[C:%.*]], label [[TRUE:%.*]], label [[FALSE:%.*]]
 ; CHECK:       true:
-; CHECK-NEXT:    store volatile i8 0, i8* [[P:%.*]], align 1
 ; CHECK-NEXT:    unreachable
 ; CHECK:       false:
 ; CHECK-NEXT:    ret void


@@ -76,8 +76,8 @@ entry:
 define void @test3() nounwind {
 ; CHECK-LABEL: @test3(
 ; CHECK-NEXT:  entry:
-; CHECK-NEXT:    store volatile i32 4, i32* null, align 4
-; CHECK-NEXT:    ret void
+; CHECK-NEXT:    call void @llvm.trap()
+; CHECK-NEXT:    unreachable
 ;
 entry:
   store volatile i32 4, i32* null
@@ -101,11 +101,8 @@ entry:
 define void @test4(i1 %C, i32* %P) {
 ; CHECK-LABEL: @test4(
 ; CHECK-NEXT:  entry:
-; CHECK-NEXT:    br i1 [[C:%.*]], label [[T:%.*]], label [[F:%.*]]
-; CHECK:       T:
-; CHECK-NEXT:    store volatile i32 0, i32* [[P:%.*]], align 4
-; CHECK-NEXT:    unreachable
-; CHECK:       F:
+; CHECK-NEXT:    [[TMP0:%.*]] = xor i1 [[C:%.*]], true
+; CHECK-NEXT:    call void @llvm.assume(i1 [[TMP0]])
 ; CHECK-NEXT:    ret void
 ;
 entry: