diff --git a/llvm/lib/Target/ARM/ARMISelLowering.cpp b/llvm/lib/Target/ARM/ARMISelLowering.cpp
index 11c3c3192eb3fd..256ae98f548b98 100644
--- a/llvm/lib/Target/ARM/ARMISelLowering.cpp
+++ b/llvm/lib/Target/ARM/ARMISelLowering.cpp
@@ -20976,8 +20976,16 @@ Instruction *ARMTargetLowering::emitTrailingFence(IRBuilderBase &Builder,
 // things go wrong. Cortex M doesn't have ldrexd/strexd though, so don't emit
 // anything for those.
 bool ARMTargetLowering::shouldExpandAtomicStoreInIR(StoreInst *SI) const {
+  bool has64BitAtomicStore;
+  if (Subtarget->isMClass())
+    has64BitAtomicStore = false;
+  else if (Subtarget->isThumb())
+    has64BitAtomicStore = Subtarget->hasV7Ops();
+  else
+    has64BitAtomicStore = Subtarget->hasV6Ops();
+
   unsigned Size = SI->getValueOperand()->getType()->getPrimitiveSizeInBits();
-  return (Size == 64) && !Subtarget->isMClass();
+  return Size == 64 && has64BitAtomicStore;
 }
 
 // Loads and stores less than 64-bits are already atomic; ones above that
@@ -20989,9 +20997,17 @@ bool ARMTargetLowering::shouldExpandAtomicStoreInIR(StoreInst *SI) const {
 // sections A8.8.72-74 LDRD)
 TargetLowering::AtomicExpansionKind
 ARMTargetLowering::shouldExpandAtomicLoadInIR(LoadInst *LI) const {
+  bool has64BitAtomicLoad;
+  if (Subtarget->isMClass())
+    has64BitAtomicLoad = false;
+  else if (Subtarget->isThumb())
+    has64BitAtomicLoad = Subtarget->hasV7Ops();
+  else
+    has64BitAtomicLoad = Subtarget->hasV6Ops();
+
   unsigned Size = LI->getType()->getPrimitiveSizeInBits();
-  return ((Size == 64) && !Subtarget->isMClass()) ? AtomicExpansionKind::LLOnly
-                                                  : AtomicExpansionKind::None;
+  return (Size == 64 && has64BitAtomicLoad) ? AtomicExpansionKind::LLOnly
+                                            : AtomicExpansionKind::None;
 }
 
 // For the real atomic operations, we have ldrex/strex up to 32 bits,
diff --git a/llvm/test/CodeGen/ARM/atomic-load-store.ll b/llvm/test/CodeGen/ARM/atomic-load-store.ll
index 876d6124ed211a..ac33e4c65ca839 100644
--- a/llvm/test/CodeGen/ARM/atomic-load-store.ll
+++ b/llvm/test/CodeGen/ARM/atomic-load-store.ll
@@ -270,8 +270,15 @@ define i64 @test_old_load_64bit(i64* %p) {
 ;
 ; THUMBONE-LABEL: test_old_load_64bit:
 ; THUMBONE:       @ %bb.0:
-; THUMBONE-NEXT:    ldaexd r0, r1, [r0]
-; THUMBONE-NEXT:    bx lr
+; THUMBONE-NEXT:    push {r7, lr}
+; THUMBONE-NEXT:    sub sp, #8
+; THUMBONE-NEXT:    movs r2, #0
+; THUMBONE-NEXT:    str r2, [sp]
+; THUMBONE-NEXT:    str r2, [sp, #4]
+; THUMBONE-NEXT:    mov r3, r2
+; THUMBONE-NEXT:    bl __sync_val_compare_and_swap_8
+; THUMBONE-NEXT:    add sp, #8
+; THUMBONE-NEXT:    pop {r7, pc}
 ;
 ; ARMV4-LABEL: test_old_load_64bit:
 ; ARMV4:       @ %bb.0:
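
For reference, the check added to both functions above amounts to a single subtarget predicate: 64-bit atomic loads and stores are only treated as natively supported on non-M-class cores, and then only with ARMv7 or later in Thumb mode or ARMv6 or later in ARM mode. Below is a minimal standalone sketch of that predicate, assuming the existing ARMSubtarget query methods; the helper name is illustrative only and is not part of this patch.

#include "ARMSubtarget.h"

// Sketch only: restates the condition added to shouldExpandAtomicStoreInIR
// and shouldExpandAtomicLoadInIR in this patch (hypothetical helper name).
static bool has64BitAtomicLoadStore(const ARMSubtarget *Subtarget) {
  if (Subtarget->isMClass())
    return false;                 // per the comment above, no ldrexd/strexd on M-class
  if (Subtarget->isThumb())
    return Subtarget->hasV7Ops(); // Thumb mode: ARMv7 or later
  return Subtarget->hasV6Ops();   // ARM mode: ARMv6 or later
}

Under the old check any non-M-class target qualified, so the THUMBONE checks expected an ldaexd pair; with the new predicate a pre-v7 Thumb target no longer qualifies, and the 64-bit load is lowered through a __sync_val_compare_and_swap_8 call instead, as the updated CHECK lines show.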