Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions llvm/include/llvm/CodeGen/MachineInstr.h
Original file line number Diff line number Diff line change
Expand Up @@ -1774,6 +1774,9 @@ class MachineInstr
/// ordered or volatile memory references.
LLVM_ABI bool hasOrderedMemoryRef() const;

/// Like hasOrderedMemoryRef, but allows for same-address ordering.
LLVM_ABI bool hasDifferentAddressOrderedMemoryRef() const;

/// Return true if this load instruction never traps and points to a memory
/// location whose value doesn't change during the execution of this function.
///
Expand Down
10 changes: 10 additions & 0 deletions llvm/include/llvm/CodeGen/MachineMemOperand.h
Original file line number Diff line number Diff line change
Expand Up @@ -318,6 +318,16 @@ class MachineMemOperand {
!isVolatile();
}

// Return true if the only ordering constraint on this operation is
// same-address ordering -- basically the same as isUnordered(), but allow
// Monotonic as well.
bool isDifferentAddressUnordered() const {
return (getSuccessOrdering() == AtomicOrdering::NotAtomic ||
getSuccessOrdering() == AtomicOrdering::Unordered ||
getSuccessOrdering() == AtomicOrdering::Monotonic) &&
!isVolatile();
}

/// Update this MachineMemOperand to reflect the alignment of MMO, if it has a
/// greater alignment. This must only be used when the new alignment applies
/// to all users of this MachineMemOperand.
Expand Down
18 changes: 18 additions & 0 deletions llvm/lib/CodeGen/MachineInstr.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1587,6 +1587,24 @@ bool MachineInstr::hasOrderedMemoryRef() const {
});
}

/// hasDifferentAddressOrderedMemoryRef - Like hasOrderedMemoryRef, but allows
/// same-address orderings (i.e. Monotonic atomics do not count as ordered).
bool MachineInstr::hasDifferentAddressOrderedMemoryRef() const {
  // An instruction known never to access memory cannot carry an ordered or
  // volatile access.
  if (!mayStore() && !mayLoad() && !isCall() && !hasUnmodeledSideEffects())
    return false;

  // With no memory reference information attached, conservatively assume
  // the access was ordered.
  if (memoperands_empty())
    return true;

  // The instruction is ordered unless every memory operand is unordered up to
  // same-address constraints.
  return !llvm::all_of(memoperands(), [](const MachineMemOperand *MMO) {
    return MMO->isDifferentAddressUnordered();
  });
}

/// isDereferenceableInvariantLoad - Return true if this instruction will never
/// trap and is loading from a location whose value is invariant across a run of
/// this function.
Expand Down
2 changes: 1 addition & 1 deletion llvm/lib/Target/AArch64/AArch64InstrInfo.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2919,7 +2919,7 @@ bool AArch64InstrInfo::isCandidateToMergeOrPair(const MachineInstr &MI) const {
bool IsPreLdSt = isPreLdSt(MI);

// If this is a volatile load/store, don't mess with it.
if (MI.hasOrderedMemoryRef())
if (MI.hasDifferentAddressOrderedMemoryRef())
return false;

// Make sure this is a reg/fi+imm (as opposed to an address reloc).
Expand Down
6 changes: 3 additions & 3 deletions llvm/lib/Target/AArch64/AArch64LoadStoreOptimizer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1631,11 +1631,11 @@ static bool areCandidatesToMergeOrPair(MachineInstr &FirstMI, MachineInstr &MI,
LdStPairFlags &Flags,
const AArch64InstrInfo *TII) {
// If this is volatile or if pairing is suppressed, not a candidate.
if (MI.hasOrderedMemoryRef() || TII->isLdStPairSuppressed(MI))
if (MI.hasDifferentAddressOrderedMemoryRef() || TII->isLdStPairSuppressed(MI))
return false;

// We should have already checked FirstMI for pair suppression and volatility.
assert(!FirstMI.hasOrderedMemoryRef() &&
assert(!FirstMI.hasDifferentAddressOrderedMemoryRef() &&
!TII->isLdStPairSuppressed(FirstMI) &&
"FirstMI shouldn't get here if either of these checks are true.");

Expand Down Expand Up @@ -2744,7 +2744,7 @@ bool AArch64LoadStoreOpt::tryToPromoteLoadFromStore(
MachineBasicBlock::iterator &MBBI) {
MachineInstr &MI = *MBBI;
// If this is a volatile load, don't mess with it.
if (MI.hasOrderedMemoryRef())
if (MI.hasDifferentAddressOrderedMemoryRef())
return false;

if (needsWinCFI(MI.getMF()) && MI.getFlag(MachineInstr::FrameDestroy))
Expand Down
14 changes: 14 additions & 0 deletions llvm/test/CodeGen/AArch64/ldst-opt.ll
Original file line number Diff line number Diff line change
Expand Up @@ -1697,3 +1697,17 @@ define void @trunc_splat(ptr %ptr) {
store <2 x i16> <i16 42, i16 42>, ptr %ptr, align 4
ret void
}

; Monotonic atomics only order accesses to the same address, so two adjacent
; monotonic 8-byte loads (and stores) may still be merged into ldp/stp by the
; load/store optimizer; verify the pairing happens for both the loads and the
; stores.
; CHECK-LABEL: pair_monotonic
; CHECK: ldp x{{[0-9]+}}, x{{[0-9]+}}, [x{{[0-9]+}}]
; CHECK: stp x{{[0-9]+}}, x{{[0-9]+}}, [x{{[0-9]+}}]
define void @pair_monotonic(ptr %i, ptr %o) {
entry:
  %0 = load atomic i64, ptr %i monotonic, align 8
  %hi = getelementptr inbounds nuw i8, ptr %i, i64 8
  %1 = load atomic i64, ptr %hi monotonic, align 8
  store atomic i64 %0, ptr %o monotonic, align 8
  %hi5 = getelementptr inbounds nuw i8, ptr %o, i64 8
  store atomic i64 %1, ptr %hi5 monotonic, align 8
  ret void
}
Loading