diff --git a/llvm/include/llvm/Target/TargetSelectionDAG.td b/llvm/include/llvm/Target/TargetSelectionDAG.td index a807ce267aacf..bec025fc5add4 100644 --- a/llvm/include/llvm/Target/TargetSelectionDAG.td +++ b/llvm/include/llvm/Target/TargetSelectionDAG.td @@ -1852,6 +1852,13 @@ def atomic_load_sext : let IsSignExtLoad = true; } +/// Atomic load which any extends the excess high bits. +def atomic_load_aext : + PatFrag<(ops node:$ptr), (atomic_load node:$ptr)> { + let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic? + let IsAnyExtLoad = true; +} + def atomic_load_8 : PatFrag<(ops node:$ptr), (atomic_load node:$ptr)> { @@ -1891,6 +1898,12 @@ def atomic_load_zext_16 : let MemoryVT = i16; } +def atomic_load_zext_32 : + PatFrag<(ops node:$ptr), (atomic_load_zext node:$ptr)> { + let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic? + let MemoryVT = i32; +} + def atomic_load_sext_8 : PatFrag<(ops node:$ptr), (atomic_load_sext node:$ptr)> { let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic? @@ -1903,15 +1916,54 @@ def atomic_load_sext_16 : let MemoryVT = i16; } +def atomic_load_sext_32 : + PatFrag<(ops node:$ptr), (atomic_load_sext node:$ptr)> { + let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic? + let MemoryVT = i32; +} + +def atomic_load_aext_8 : + PatFrag<(ops node:$ptr), (atomic_load_aext node:$ptr)> { + let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic? + let MemoryVT = i8; +} + +def atomic_load_aext_16 : + PatFrag<(ops node:$ptr), (atomic_load_aext node:$ptr)> { + let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic? + let MemoryVT = i16; +} + +def atomic_load_aext_32 : + PatFrag<(ops node:$ptr), (atomic_load_aext node:$ptr)> { + let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic? + let MemoryVT = i32; +} + // Atomic load which zeroes or anyextends the high bits. 
-def atomic_load_az_8 : PatFrags<(ops node:$op), - [(atomic_load_8 node:$op), - (atomic_load_zext_8 node:$op)]>; +def atomic_load_azext_8 : PatFrags<(ops node:$op), + [(atomic_load_aext_8 node:$op), + (atomic_load_zext_8 node:$op)]>; // Atomic load which zeroes or anyextends the high bits. -def atomic_load_az_16 : PatFrags<(ops node:$op), - [(atomic_load_16 node:$op), - (atomic_load_zext_16 node:$op)]>; +def atomic_load_azext_16 : PatFrags<(ops node:$op), + [(atomic_load_aext_16 node:$op), + (atomic_load_zext_16 node:$op)]>; + +// Atomic load which sign extends or anyextends the high bits. +def atomic_load_asext_8 : PatFrags<(ops node:$op), + [(atomic_load_aext_8 node:$op), + (atomic_load_sext_8 node:$op)]>; + +// Atomic load which sign extends or anyextends the high bits. +def atomic_load_asext_16 : PatFrags<(ops node:$op), + [(atomic_load_aext_16 node:$op), + (atomic_load_sext_16 node:$op)]>; + +// Atomic load which sign extends or anyextends the high bits. +def atomic_load_asext_32 : PatFrags<(ops node:$op), + [(atomic_load_aext_32 node:$op), + (atomic_load_sext_32 node:$op)]>; def nonext_masked_gather : PatFrag<(ops node:$def, node:$pred, node:$ptr, node:$idx), diff --git a/llvm/lib/Target/AArch64/AArch64InstrAtomics.td b/llvm/lib/Target/AArch64/AArch64InstrAtomics.td index 2d7a9d6f00bd0..a5f7b384b3e5d 100644 --- a/llvm/lib/Target/AArch64/AArch64InstrAtomics.td +++ b/llvm/lib/Target/AArch64/AArch64InstrAtomics.td @@ -61,34 +61,34 @@ let Predicates = [HasRCPC] in { } // 8-bit loads -def : Pat<(seq_cst_load<atomic_load_az_8> GPR64sp:$ptr), (LDARB GPR64sp:$ptr)>; -def : Pat<(acquiring_load<atomic_load_az_8> GPR64sp:$ptr), (LDARB GPR64sp:$ptr)>; -def : Pat<(relaxed_load<atomic_load_az_8> (ro_Windexed8 GPR64sp:$Rn, GPR32:$Rm, - ro_Wextend8:$offset)), +def : Pat<(seq_cst_load<atomic_load_azext_8> GPR64sp:$ptr), (LDARB GPR64sp:$ptr)>; +def : Pat<(acquiring_load<atomic_load_azext_8> GPR64sp:$ptr), (LDARB GPR64sp:$ptr)>; +def : Pat<(relaxed_load<atomic_load_azext_8> (ro_Windexed8 GPR64sp:$Rn, GPR32:$Rm, + ro_Wextend8:$offset)), (LDRBBroW GPR64sp:$Rn, GPR32:$Rm, ro_Wextend8:$offset)>;
-def : Pat<(relaxed_load<atomic_load_az_8> (ro_Xindexed8 GPR64sp:$Rn, GPR64:$Rm, - ro_Xextend8:$offset)), +def : Pat<(relaxed_load<atomic_load_azext_8> (ro_Xindexed8 GPR64sp:$Rn, GPR64:$Rm, + ro_Xextend8:$offset)), (LDRBBroX GPR64sp:$Rn, GPR64:$Rm, ro_Xextend8:$offset)>; -def : Pat<(relaxed_load<atomic_load_az_8> (am_indexed8 GPR64sp:$Rn, - uimm12s1:$offset)), +def : Pat<(relaxed_load<atomic_load_azext_8> (am_indexed8 GPR64sp:$Rn, + uimm12s1:$offset)), (LDRBBui GPR64sp:$Rn, uimm12s1:$offset)>; -def : Pat<(relaxed_load<atomic_load_az_8> +def : Pat<(relaxed_load<atomic_load_azext_8> (am_unscaled8 GPR64sp:$Rn, simm9:$offset)), (LDURBBi GPR64sp:$Rn, simm9:$offset)>; // 16-bit loads -def : Pat<(seq_cst_load<atomic_load_az_16> GPR64sp:$ptr), (LDARH GPR64sp:$ptr)>; -def : Pat<(acquiring_load<atomic_load_az_16> GPR64sp:$ptr), (LDARH GPR64sp:$ptr)>; -def : Pat<(relaxed_load<atomic_load_az_16> (ro_Windexed16 GPR64sp:$Rn, GPR32:$Rm, - ro_Wextend16:$extend)), +def : Pat<(seq_cst_load<atomic_load_azext_16> GPR64sp:$ptr), (LDARH GPR64sp:$ptr)>; +def : Pat<(acquiring_load<atomic_load_azext_16> GPR64sp:$ptr), (LDARH GPR64sp:$ptr)>; +def : Pat<(relaxed_load<atomic_load_azext_16> (ro_Windexed16 GPR64sp:$Rn, GPR32:$Rm, + ro_Wextend16:$extend)), (LDRHHroW GPR64sp:$Rn, GPR32:$Rm, ro_Wextend16:$extend)>; -def : Pat<(relaxed_load<atomic_load_az_16> (ro_Xindexed16 GPR64sp:$Rn, GPR64:$Rm, - ro_Xextend16:$extend)), +def : Pat<(relaxed_load<atomic_load_azext_16> (ro_Xindexed16 GPR64sp:$Rn, GPR64:$Rm, + ro_Xextend16:$extend)), (LDRHHroX GPR64sp:$Rn, GPR64:$Rm, ro_Xextend16:$extend)>; -def : Pat<(relaxed_load<atomic_load_az_16> (am_indexed16 GPR64sp:$Rn, - uimm12s2:$offset)), +def : Pat<(relaxed_load<atomic_load_azext_16> (am_indexed16 GPR64sp:$Rn, + uimm12s2:$offset)), (LDRHHui GPR64sp:$Rn, uimm12s2:$offset)>; -def : Pat<(relaxed_load<atomic_load_az_16> +def : Pat<(relaxed_load<atomic_load_azext_16> (am_unscaled16 GPR64sp:$Rn, simm9:$offset)), (LDURHHi GPR64sp:$Rn, simm9:$offset)>; @@ -591,10 +591,10 @@ let Predicates = [HasRCPC3, HasNEON] in { // v8.4a FEAT_LRCPC2 patterns let Predicates = [HasRCPC_IMMO, UseLDAPUR] in { // Load-Acquire RCpc Register unscaled loads - def : Pat<(acquiring_load<atomic_load_az_8> + def : Pat<(acquiring_load<atomic_load_azext_8> (am_unscaled8 GPR64sp:$Rn, simm9:$offset)), (LDAPURBi GPR64sp:$Rn, simm9:$offset)>; - def : Pat<(acquiring_load<atomic_load_az_16> + def : 
Pat<(acquiring_load<atomic_load_azext_16> (am_unscaled16 GPR64sp:$Rn, simm9:$offset)), (LDAPURHi GPR64sp:$Rn, simm9:$offset)>; def : Pat<(acquiring_load<atomic_load_32> diff --git a/llvm/lib/Target/BPF/BPFInstrInfo.td b/llvm/lib/Target/BPF/BPFInstrInfo.td index 2dcf1eae086be..e717ac1a1d209 100644 --- a/llvm/lib/Target/BPF/BPFInstrInfo.td +++ b/llvm/lib/Target/BPF/BPFInstrInfo.td @@ -1342,11 +1342,11 @@ let Predicates = [BPFHasALU32] in { let Predicates = [BPFHasLoadAcqStoreRel] in { foreach P = [[relaxed_load<atomic_load_32>, LDW32], - [relaxed_load<atomic_load_az_16>, LDH32], - [relaxed_load<atomic_load_az_8>, LDB32], + [relaxed_load<atomic_load_azext_16>, LDH32], + [relaxed_load<atomic_load_azext_8>, LDB32], [acquiring_load<atomic_load_32>, LDWACQ32], - [acquiring_load<atomic_load_az_16>, LDHACQ32], - [acquiring_load<atomic_load_az_8>, LDBACQ32], + [acquiring_load<atomic_load_azext_16>, LDHACQ32], + [acquiring_load<atomic_load_azext_8>, LDBACQ32], ] in { def : Pat<(P[0] ADDRri:$addr), (P[1] ADDRri:$addr)>; } diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoA.td b/llvm/lib/Target/RISCV/RISCVInstrInfoA.td index 6600b33d638c3..b348e774d50b8 100644 --- a/llvm/lib/Target/RISCV/RISCVInstrInfoA.td +++ b/llvm/lib/Target/RISCV/RISCVInstrInfoA.td @@ -118,29 +118,6 @@ defm AMOMAXU_D : AMO_rr_aq_rl<0b11100, 0b011, "amomaxu.d">, // Pseudo-instructions and codegen patterns //===----------------------------------------------------------------------===// -def riscv_atomic_asextload : PatFrag<(ops node:$ptr), (atomic_load node:$ptr), [{ - ISD::LoadExtType ETy = cast<AtomicSDNode>(N)->getExtensionType(); - return ETy == ISD::EXTLOAD || ETy == ISD::SEXTLOAD; -}]>; - -def riscv_atomic_asextload_8 : PatFrag<(ops node:$ptr), - (riscv_atomic_asextload node:$ptr)> { - let IsAtomic = true; - let MemoryVT = i8; -} - -def riscv_atomic_asextload_16 : PatFrag<(ops node:$ptr), - (riscv_atomic_asextload node:$ptr)> { - let IsAtomic = true; - let MemoryVT = i16; -} - -def riscv_atomic_asextload_32 : PatFrag<(ops node:$ptr), - (riscv_atomic_asextload node:$ptr)> { - let IsAtomic = true; - let MemoryVT = i32; -} - let IsAtomic = 1 in { // An atomic load operation that does not need either acquire or release // semantics.
@@ -188,8 +165,8 @@ class seq_cst_store<PatFrag base> // any ordering. This is necessary because AtomicExpandPass has added fences to // atomic load/stores and changed them to unordered ones. let Predicates = [HasAtomicLdSt] in { - def : LdPat<relaxed_load<riscv_atomic_asextload_8>, LB>; - def : LdPat<relaxed_load<riscv_atomic_asextload_16>, LH>; + def : LdPat<relaxed_load<atomic_load_asext_8>, LB>; + def : LdPat<relaxed_load<atomic_load_asext_16>, LH>; def : StPat<releasing_store<atomic_store_8>, SB, GPR, XLenVT>; def : StPat<releasing_store<atomic_store_16>, SH, GPR, XLenVT>; @@ -201,7 +178,7 @@ let Predicates = [HasAtomicLdSt, IsRV32] in { } let Predicates = [HasAtomicLdSt, IsRV64] in { - def : LdPat<relaxed_load<riscv_atomic_asextload_32>, LW>; + def : LdPat<relaxed_load<atomic_load_asext_32>, LW>; def : LdPat<relaxed_load<atomic_load_64>, LD, i64>; def : StPat<releasing_store<atomic_store_64>, SD, GPR, i64>; } diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td b/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td index f42352d1716b0..837aa7f1005af 100644 --- a/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td +++ b/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td @@ -76,11 +76,11 @@ class PatSRL<SDPatternOperator OpNode, RVInst Inst, ValueType vt = XLenVT> let Predicates = [HasStdExtZalasr] in { // the sequentially consistent loads use // .aq instead of .aqrl to match the psABI/A.7 - def : PatLAQ<acquiring_load<riscv_atomic_asextload_8>, LB_AQ>; - def : PatLAQ<seq_cst_load<riscv_atomic_asextload_8>, LB_AQ>; + def : PatLAQ<acquiring_load<atomic_load_asext_8>, LB_AQ>; + def : PatLAQ<seq_cst_load<atomic_load_asext_8>, LB_AQ>; - def : PatLAQ<acquiring_load<riscv_atomic_asextload_16>, LH_AQ>; - def : PatLAQ<seq_cst_load<riscv_atomic_asextload_16>, LH_AQ>; + def : PatLAQ<acquiring_load<atomic_load_asext_16>, LH_AQ>; + def : PatLAQ<seq_cst_load<atomic_load_asext_16>, LH_AQ>; // the sequentially consistent stores use // .rl instead of .aqrl to match the psABI/A.7 @@ -101,8 +101,8 @@ let Predicates = [HasStdExtZalasr, IsRV32] in { } // Predicates = [HasStdExtZalasr, IsRV64] let Predicates = [HasStdExtZalasr, IsRV64] in { - def : PatLAQ<acquiring_load<riscv_atomic_asextload_32>, LW_AQ>; - def : PatLAQ<seq_cst_load<riscv_atomic_asextload_32>, LW_AQ>; + def : PatLAQ<acquiring_load<atomic_load_asext_32>, LW_AQ>; + def : PatLAQ<seq_cst_load<atomic_load_asext_32>, LW_AQ>; def : PatLAQ<acquiring_load<atomic_load_64>, LD_AQ>; def : PatLAQ<seq_cst_load<atomic_load_64>, LD_AQ>; diff --git a/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp b/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp index 7f58c4a88c76d..a4fa063ae61cb 100644 --- a/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp +++ b/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp @@ -910,7 +910,7 @@ std::string TreePredicateFn::getPredCode() const { if (!isLoad() && !isStore() && !isAtomic() && getMemoryVT())
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(), - "MemoryVT requires IsLoad or IsStore"); + "MemoryVT requires IsLoad or IsStore or IsAtomic"); if (!isLoad() && !isStore()) { if (isUnindexed()) @@ -937,11 +937,10 @@ std::string TreePredicateFn::getPredCode() const { if (isNonExtLoad()) PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(), "IsNonExtLoad requires IsLoad"); - if (isAnyExtLoad()) - PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(), - "IsAnyExtLoad requires IsLoad"); - if (!isAtomic()) { + if (isAnyExtLoad()) + PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(), + "IsAnyExtLoad requires IsLoad or IsAtomic"); if (isSignExtLoad()) PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(), "IsSignExtLoad requires IsLoad or IsAtomic"); @@ -970,8 +969,9 @@ std::string TreePredicateFn::getPredCode() const { if (getMemoryVT() == nullptr && !isAtomicOrderingMonotonic() && getAddressSpaces() == nullptr && // FIXME: Should atomic loads be IsLoad, IsAtomic, or both? - !isZeroExtLoad() && !isSignExtLoad() && !isAtomicOrderingAcquire() && - !isAtomicOrderingRelease() && !isAtomicOrderingAcquireRelease() && + !isAnyExtLoad() && !isZeroExtLoad() && !isSignExtLoad() && + !isAtomicOrderingAcquire() && !isAtomicOrderingRelease() && + !isAtomicOrderingAcquireRelease() && !isAtomicOrderingSequentiallyConsistent() && !isAtomicOrderingAcquireOrStronger() && !isAtomicOrderingReleaseOrStronger() && @@ -1075,9 +1075,22 @@ std::string TreePredicateFn::getPredCode() const { "if (isReleaseOrStronger(cast<AtomicSDNode>(N)->getMergedOrdering())) " "return false;\n"; - // TODO: Handle atomic sextload/zextload normally when ATOMIC_LOAD is removed.
- if (isAtomic() && (isZeroExtLoad() || isSignExtLoad())) - Code += "return false;\n"; + if (isAtomic()) { + if ((isAnyExtLoad() + isSignExtLoad() + isZeroExtLoad()) > 1) + PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(), + "IsAnyExtLoad, IsSignExtLoad, and IsZeroExtLoad are " + "mutually exclusive"); + + if (isAnyExtLoad()) + Code += "if (cast<AtomicSDNode>(N)->getExtensionType() != ISD::EXTLOAD) " + "return false;\n"; + if (isSignExtLoad()) + Code += "if (cast<AtomicSDNode>(N)->getExtensionType() != ISD::SEXTLOAD) " + "return false;\n"; + if (isZeroExtLoad()) + Code += "if (cast<AtomicSDNode>(N)->getExtensionType() != ISD::ZEXTLOAD) " + "return false;\n"; + } if (isLoad() || isStore()) { StringRef SDNodeName = isLoad() ? "LoadSDNode" : "StoreSDNode"; diff --git a/llvm/utils/TableGen/GlobalISelEmitter.cpp b/llvm/utils/TableGen/GlobalISelEmitter.cpp index ccc4c00fca047..affecc34468ec 100644 --- a/llvm/utils/TableGen/GlobalISelEmitter.cpp +++ b/llvm/utils/TableGen/GlobalISelEmitter.cpp @@ -624,7 +624,8 @@ Expected<InstructionMatcher &> GlobalISelEmitter::addBuiltinPredicates( 0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0); return InsnMatcher; } - if (Predicate.isLoad() && Predicate.isAnyExtLoad()) { + if ((Predicate.isLoad() || Predicate.isAtomic()) && + Predicate.isAnyExtLoad()) { InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>( 0, MemoryVsLLTSizePredicateMatcher::LessThan, 0); return InsnMatcher;