
Rename hasVolatileMemoryRef() to hasOrderedMemoryRef().

Ordered memory operations are more constrained than volatile loads and
stores because they must be ordered with respect to all other memory
operations.

llvm-svn: 162861
Jakob Stoklund Olesen 2012-08-29 21:19:21 +00:00
parent 0841c7b2e7
commit 50309198d1
4 changed files with 16 additions and 16 deletions
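To make the constraint concrete, here is a minimal sketch, not part of this
patch and with an invented helper name, of the kind of decision the renamed
query supports. An atomic load need not be volatile, yet it may not be freely
reordered with other memory operations, so a pass that asked only about
volatility could move a plain load across it.

    #include "llvm/CodeGen/MachineInstr.h"
    using namespace llvm;

    // Hypothetical helper, for illustration only: may a plain load be hoisted
    // above the instruction Other?
    static bool canHoistLoadPast(const MachineInstr &Load,
                                 const MachineInstr &Other) {
      // Ordered or volatile references on either side pin the current order.
      if (Load.hasOrderedMemoryRef() || Other.hasOrderedMemoryRef())
        return false;
      // Conservatively refuse to cross stores and calls as well.
      if (Other.mayStore() || Other.isCall())
        return false;
      return true;
    }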

View File

@@ -857,11 +857,11 @@ public:
   bool isSafeToReMat(const TargetInstrInfo *TII, AliasAnalysis *AA,
                      unsigned DstReg) const;
 
-  /// hasVolatileMemoryRef - Return true if this instruction may have a
-  /// volatile memory reference, or if the information describing the
-  /// memory reference is not available. Return false if it is known to
-  /// have no volatile memory references.
-  bool hasVolatileMemoryRef() const;
+  /// hasOrderedMemoryRef - Return true if this instruction may have an ordered
+  /// or volatile memory reference, or if the information describing the memory
+  /// reference is not available. Return false if it is known to have no
+  /// ordered or volatile memory references.
+  bool hasOrderedMemoryRef() const;
 
   /// isInvariantLoad - Return true if this instruction is loading from a
   /// location whose value is invariant across the function. For example,

View File

@@ -1348,7 +1348,7 @@ bool MachineInstr::isSafeToMove(const TargetInstrInfo *TII,
   // volatiles, but it is required for atomic loads. It is not allowed to move
   // a load across an atomic load with Ordering > Monotonic.
   if (mayStore() || isCall() ||
-      (mayLoad() && hasVolatileMemoryRef())) {
+      (mayLoad() && hasOrderedMemoryRef())) {
     SawStore = true;
     return false;
   }
@ -1396,11 +1396,11 @@ bool MachineInstr::isSafeToReMat(const TargetInstrInfo *TII,
return true;
}
/// hasVolatileMemoryRef - Return true if this instruction may have a
/// volatile memory reference, or if the information describing the
/// memory reference is not available. Return false if it is known to
/// have no volatile memory references.
bool MachineInstr::hasVolatileMemoryRef() const {
/// hasOrderedMemoryRef - Return true if this instruction may have an ordered
/// or volatile memory reference, or if the information describing the memory
/// reference is not available. Return false if it is known to have no ordered
/// memory references.
bool MachineInstr::hasOrderedMemoryRef() const {
// An instruction known never to access memory won't have a volatile access.
if (!mayStore() &&
!mayLoad() &&
@@ -1413,9 +1413,9 @@ bool MachineInstr::hasVolatileMemoryRef() const {
   if (memoperands_empty())
     return true;
 
-  // Check the memory reference information for volatile references.
+  // Check the memory reference information for ordered references.
   for (mmo_iterator I = memoperands_begin(), E = memoperands_end(); I != E; ++I)
-    if ((*I)->isVolatile())
+    if (!(*I)->isUnordered())
       return true;
 
   return false;
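
For comparison, the per-operand predicates behind the old and the new query can
be written side by side; the two helpers below are illustration only and are
not part of the patch. A volatile operand is never unordered, so everything the
old check reported is still reported, and references that carry an atomic
ordering are what the new name is meant to cover as well.

    #include "llvm/CodeGen/MachineMemOperand.h"
    using namespace llvm;

    // What hasVolatileMemoryRef() tested for each memory operand.
    static bool oldPerOperandTest(const MachineMemOperand &MMO) {
      return MMO.isVolatile();
    }

    // What hasOrderedMemoryRef() tests for each memory operand: a superset of
    // the old condition, since a volatile reference is never unordered.
    static bool newPerOperandTest(const MachineMemOperand &MMO) {
      return !MMO.isUnordered();
    }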

View File

@@ -502,7 +502,7 @@ void ScheduleDAGInstrs::addVRegUseDeps(SUnit *SU, unsigned OperIdx) {
 /// (like a call or something with unmodeled side effects).
 static inline bool isGlobalMemoryObject(AliasAnalysis *AA, MachineInstr *MI) {
   if (MI->isCall() || MI->hasUnmodeledSideEffects() ||
-      (MI->hasVolatileMemoryRef() &&
+      (MI->hasOrderedMemoryRef() &&
        (!MI->mayLoad() || !MI->isInvariantLoad(AA))))
     return true;
   return false;

View File

@@ -3474,8 +3474,8 @@ bool HexagonPacketizerList::isLegalToPacketizeTogether(SUnit *SUI, SUnit *SUJ) {
     // 1. Two loads unless they are volatile.
     // 2. Two stores in V4 unless they are volatile.
     else if ((DepType == SDep::Order) &&
-             !I->hasVolatileMemoryRef() &&
-             !J->hasVolatileMemoryRef()) {
+             !I->hasOrderedMemoryRef() &&
+             !J->hasOrderedMemoryRef()) {
       if (QRI->Subtarget.hasV4TOps() &&
           // hexagonv4 allows dual store.
          MCIDI.mayStore() && MCIDJ.mayStore()) {