diff --git a/llvm/lib/Target/LoongArch/LoongArchTargetMachine.h b/llvm/lib/Target/LoongArch/LoongArchTargetMachine.h
index 7d39d47e86b36..fa9bc7608e7d2 100644
--- a/llvm/lib/Target/LoongArch/LoongArchTargetMachine.h
+++ b/llvm/lib/Target/LoongArch/LoongArchTargetMachine.h
@@ -45,6 +45,11 @@ class LoongArchTargetMachine : public LLVMTargetMachine {
   MachineFunctionInfo *
   createMachineFunctionInfo(BumpPtrAllocator &Allocator, const Function &F,
                             const TargetSubtargetInfo *STI) const override;
+
+  // Addrspacecasts are always noops.
+  bool isNoopAddrSpaceCast(unsigned SrcAS, unsigned DestAS) const override {
+    return true;
+  }
 };
 
 } // end namespace llvm
diff --git a/llvm/test/CodeGen/LoongArch/addrspacecast.ll b/llvm/test/CodeGen/LoongArch/addrspacecast.ll
new file mode 100644
index 0000000000000..7875562331be0
--- /dev/null
+++ b/llvm/test/CodeGen/LoongArch/addrspacecast.ll
@@ -0,0 +1,47 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc --mtriple=loongarch32 --verify-machineinstrs < %s | FileCheck %s --check-prefix=LA32
+; RUN: llc --mtriple=loongarch64 --verify-machineinstrs < %s | FileCheck %s --check-prefix=LA64
+
+define void @cast0(ptr addrspace(1) %ptr) {
+; LA32-LABEL: cast0:
+; LA32:       # %bb.0:
+; LA32-NEXT:    st.w $zero, $a0, 0
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: cast0:
+; LA64:       # %bb.0:
+; LA64-NEXT:    st.w $zero, $a0, 0
+; LA64-NEXT:    ret
+  %ptr0 = addrspacecast ptr addrspace(1) %ptr to ptr addrspace(0)
+  store i32 0, ptr %ptr0
+  ret void
+}
+
+define void @cast1(ptr %ptr) {
+; LA32-LABEL: cast1:
+; LA32:       # %bb.0:
+; LA32-NEXT:    addi.w $sp, $sp, -16
+; LA32-NEXT:    .cfi_def_cfa_offset 16
+; LA32-NEXT:    st.w $ra, $sp, 12 # 4-byte Folded Spill
+; LA32-NEXT:    .cfi_offset 1, -4
+; LA32-NEXT:    bl %plt(foo)
+; LA32-NEXT:    ld.w $ra, $sp, 12 # 4-byte Folded Reload
+; LA32-NEXT:    addi.w $sp, $sp, 16
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: cast1:
+; LA64:       # %bb.0:
+; LA64-NEXT:    addi.d $sp, $sp, -16
+; LA64-NEXT:    .cfi_def_cfa_offset 16
+; LA64-NEXT:    st.d $ra, $sp, 8 # 8-byte Folded Spill
+; LA64-NEXT:    .cfi_offset 1, -8
+; LA64-NEXT:    bl %plt(foo)
+; LA64-NEXT:    ld.d $ra, $sp, 8 # 8-byte Folded Reload
+; LA64-NEXT:    addi.d $sp, $sp, 16
+; LA64-NEXT:    ret
+  %castptr = addrspacecast ptr %ptr to ptr addrspace(10)
+  call void @foo(ptr addrspace(10) %castptr)
+  ret void
+}
+
+declare void @foo(ptr addrspace(10))