
[LLVM IR] Allow volatile stores to trap.

Proposed alternative to D105338.

This is ugly, but short-term I think it's the best way forward: first,
let's formalize the hacks into a coherent model. Then we can consider
extensions of that model (we could have different flavors of volatile
with different rules).

Differential Revision: https://reviews.llvm.org/D106309
Eli Friedman 2021-07-26 10:51:00 -07:00
parent d0d4c1578a
commit 2211738092
8 changed files with 18 additions and 22 deletions


@@ -2867,6 +2867,12 @@ The compiler may assume execution will continue after a volatile operation,
 so operations which modify memory or may have undefined behavior can be
 hoisted past a volatile operation.
 
+As an exception to the preceding rule, the compiler may not assume execution
+will continue after a volatile store operation. This restriction is necessary
+to support the somewhat common pattern in C of intentionally storing to an
+invalid pointer to crash the program. In the future, it might make sense to
+allow frontends to control this behavior.
+
 IR-level volatile loads and stores cannot safely be optimized into llvm.memcpy
 or llvm.memmove intrinsics even when those intrinsics are flagged volatile.
 Likewise, the backend should never split or merge target-legal volatile
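For context, a hedged illustration (not taken from the patch; the function name is made up) of the C/C++ idiom the new wording is meant to keep working:

// Illustrative only: deliberately crash by storing through an invalid pointer.
// Under the new rule the optimizer may not assume execution continues past the
// volatile store, so the store cannot be deleted even though nothing after it
// is reachable.
[[noreturn]] static void fatal_trap() {
  *(volatile int *)0 = 0;   // expected to fault at run time
  __builtin_unreachable();  // Clang/GCC builtin: control never reaches here
}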


@@ -5274,6 +5274,10 @@ bool llvm::isGuaranteedToTransferExecutionToSuccessor(const Instruction *I) {
   if (isa<UnreachableInst>(I))
     return false;
 
+  // Note: Do not add new checks here; instead, change Instruction::mayThrow or
+  // Instruction::willReturn.
+  //
+  // FIXME: Move this check into Instruction::willReturn.
   if (isa<CatchPadInst>(I)) {
     switch (classifyEHPersonality(I->getFunction()->getPersonalityFn())) {
     default:
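A hedged sketch of why the new note points at those two hooks rather than at this function: after the special cases above, the generic path of isGuaranteedToTransferExecutionToSuccessor essentially reduces to composing them (simplified here; the real function has additional cases):

#include "llvm/IR/Instruction.h"

// Simplified sketch, not the full ValueTracking implementation: an instruction
// that cannot throw and is guaranteed to return must transfer execution to its
// successor, so a rule added to mayThrow() or willReturn() is picked up here
// automatically.
static bool transfersToSuccessor(const llvm::Instruction *I) {
  return !I->mayThrow() && I->willReturn();
}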


@@ -673,6 +673,10 @@ bool Instruction::isSafeToRemove() const {
 }
 
 bool Instruction::willReturn() const {
+  // Volatile store isn't guaranteed to return; see LangRef.
+  if (auto *SI = dyn_cast<StoreInst>(this))
+    return !SI->isVolatile();
+
   if (const auto *CB = dyn_cast<CallBase>(this))
     // FIXME: Temporarily assume that all side-effect free intrinsics will
     // return. Remove this workaround once all intrinsics are appropriately
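The new check is observable through the public Instruction API. A minimal sketch (an assumed standalone snippet, not part of the patch; only public IRBuilder calls are used) showing that the same store now answers willReturn() differently depending on volatility:

#include <cassert>
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
using namespace llvm;

void demoWillReturn() {
  LLVMContext Ctx;
  Module M("demo", Ctx);
  auto *FTy = FunctionType::get(Type::getVoidTy(Ctx), /*isVarArg=*/false);
  Function *F = Function::Create(FTy, Function::ExternalLinkage, "f", M);
  BasicBlock *BB = BasicBlock::Create(Ctx, "entry", F);
  IRBuilder<> B(BB);
  Value *Ptr = B.CreateAlloca(B.getInt32Ty());
  StoreInst *Plain = B.CreateStore(B.getInt32(0), Ptr);
  StoreInst *Volatile = B.CreateStore(B.getInt32(0), Ptr, /*isVolatile=*/true);
  assert(Plain->willReturn());     // unchanged: a plain store always returns
  assert(!Volatile->willReturn()); // new: a volatile store may trap instead
}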


@@ -2928,14 +2928,6 @@ Instruction *InstCombinerImpl::visitUnreachableInst(UnreachableInst &I) {
     // Otherwise, this instruction can be freely erased,
     // even if it is not side-effect free.
 
-    // Temporarily disable removal of volatile stores preceding unreachable,
-    // pending a potential LangRef change permitting volatile stores to trap.
-    // TODO: Either remove this code, or properly integrate the check into
-    // isGuaranteedToTransferExecutionToSuccessor().
-    if (auto *SI = dyn_cast<StoreInst>(Prev))
-      if (SI->isVolatile())
-        return nullptr; // Can not drop this instruction. We're done here.
-
     // A value may still have uses before we process it here (for example, in
     // another unreachable block), so convert those to poison.
     replaceInstUsesWith(*Prev, PoisonValue::get(Prev->getType()));


@@ -4699,14 +4699,6 @@ bool SimplifyCFGOpt::simplifyUnreachable(UnreachableInst *UI) {
     // Otherwise, this instruction can be freely erased,
     // even if it is not side-effect free.
 
-    // Temporarily disable removal of volatile stores preceding unreachable,
-    // pending a potential LangRef change permitting volatile stores to trap.
-    // TODO: Either remove this code, or properly integrate the check into
-    // isGuaranteedToTransferExecutionToSuccessor().
-    if (auto *SI = dyn_cast<StoreInst>(&*BBI))
-      if (SI->isVolatile())
-        break; // Can not drop this instruction. We're done here.
-
     // Note that deleting EH's here is in fact okay, although it involves a bit
     // of subtle reasoning. If this inst is an EH, all the predecessors of this
     // block will be the unwind edges of Invoke/CatchSwitch/CleanupReturn,
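Both deletions above rely on the same generic guard: once volatile stores report willReturn() == false, the reachability check these loops already perform keeps them without a store-specific special case. A hedged sketch of the shape of that guard (simplified; the real loops in visitUnreachableInst and simplifyUnreachable carry more conditions, and the helper name here is made up):

#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Instructions.h"
using namespace llvm;

// Simplified sketch, not the actual pass code: erase instructions that feed
// straight into an unreachable, stopping at the first one that might not
// reach it -- which, after this patch, includes volatile stores.
static void pruneBeforeUnreachable(UnreachableInst &UI) {
  while (Instruction *Prev = UI.getPrevNonDebugInstruction()) {
    if (!isGuaranteedToTransferExecutionToSuccessor(Prev))
      break; // e.g. a volatile store: it may trap, so it must stay
    Prev->replaceAllUsesWith(PoisonValue::get(Prev->getType()));
    Prev->eraseFromParent();
  }
}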


@@ -161,7 +161,7 @@ define void @store_unordered(i32* nocapture %0) norecurse nounwind uwtable {
 ; negative, should not deduce nosync
 ; atomic load with release ordering
 define void @load_release(i32* nocapture %0) norecurse nounwind uwtable {
-; CHECK: Function Attrs: mustprogress nofree norecurse nounwind uwtable willreturn
+; CHECK: Function Attrs: nofree norecurse nounwind uwtable
 ; CHECK-LABEL: @load_release(
 ; CHECK-NEXT: store atomic volatile i32 10, i32* [[TMP0:%.*]] release, align 4
 ; CHECK-NEXT: ret void
@@ -172,7 +172,7 @@ define void @load_release(i32* nocapture %0) norecurse nounwind uwtable {
 
 ; negative volatile, relaxed atomic
 define void @load_volatile_release(i32* nocapture %0) norecurse nounwind uwtable {
-; CHECK: Function Attrs: mustprogress nofree norecurse nounwind uwtable willreturn
+; CHECK: Function Attrs: nofree norecurse nounwind uwtable
 ; CHECK-LABEL: @load_volatile_release(
 ; CHECK-NEXT: store atomic volatile i32 10, i32* [[TMP0:%.*]] release, align 4
 ; CHECK-NEXT: ret void
@@ -183,7 +183,7 @@ define void @load_volatile_release(i32* nocapture %0) norecurse nounwind uwtable
 
 ; volatile store.
 define void @volatile_store(i32* %0) norecurse nounwind uwtable {
-; CHECK: Function Attrs: mustprogress nofree norecurse nounwind uwtable willreturn
+; CHECK: Function Attrs: nofree norecurse nounwind uwtable
 ; CHECK-LABEL: @volatile_store(
 ; CHECK-NEXT: store volatile i32 14, i32* [[TMP0:%.*]], align 4
 ; CHECK-NEXT: ret void
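The three check-line changes above share one cause: with volatile stores no longer willReturn(), the willreturn attribute (and the mustprogress inferred alongside it) can no longer be deduced for these functions. A rough sketch of the per-instruction scan that kind of inference performs (the helper name is made up; the real FunctionAttrs logic also reasons about calls and loops):

#include "llvm/IR/Function.h"
using namespace llvm;

// Illustrative only: a function containing any instruction that is not
// guaranteed to return cannot be marked willreturn. After this patch a
// volatile store is such an instruction.
static bool allInstructionsWillReturn(const Function &F) {
  for (const BasicBlock &BB : F)
    for (const Instruction &I : BB)
      if (!I.willReturn())
        return false; // e.g. the volatile stores in the tests above
  return true;
}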


@@ -68,8 +68,6 @@ bb16: ; preds = %bb13
 
 bb17: ; preds = %bb16, %bb13
   %i18 = load volatile i32, i32* @c, align 4, !dbg !57, !tbaa !40
-  %i19 = add nsw i32 %i18, 1, !dbg !57
-  store volatile i32 %i19, i32* @c, align 4, !dbg !57, !tbaa !40
   %i20 = load volatile i32, i32* @c, align 4, !dbg !46, !tbaa !40
   %i21 = icmp slt i32 %i20, 6, !dbg !47
   br i1 %i21, label %bb13, label %bb22, !dbg !48, !llvm.loop !58


@@ -34,7 +34,7 @@ define void @caller1(i1 %c, i64* align 1 %ptr) {
 ; ASSUMPTIONS-ON-LABEL: @caller1(
 ; ASSUMPTIONS-ON-NEXT: br i1 [[C:%.*]], label [[COMMON_RET:%.*]], label [[FALSE1:%.*]]
 ; ASSUMPTIONS-ON: false1:
-; ASSUMPTIONS-ON-NEXT: store volatile i64 1, i64* [[PTR:%.*]], align 8
+; ASSUMPTIONS-ON-NEXT: store volatile i64 1, i64* [[PTR:%.*]], align 4
 ; ASSUMPTIONS-ON-NEXT: br label [[COMMON_RET]]
 ; ASSUMPTIONS-ON: common.ret:
 ; ASSUMPTIONS-ON-NEXT: [[DOTSINK:%.*]] = phi i64 [ 3, [[FALSE1]] ], [ 2, [[TMP0:%.*]] ]