diff --git a/llvm/lib/Target/AArch64/AArch64InstrInfo.td b/llvm/lib/Target/AArch64/AArch64InstrInfo.td
index e32209655ba95..fb0ed425b1098 100644
--- a/llvm/lib/Target/AArch64/AArch64InstrInfo.td
+++ b/llvm/lib/Target/AArch64/AArch64InstrInfo.td
@@ -3926,6 +3926,11 @@ defm STURBB : StoreUnscaled<0b00, 0, 0b00, GPR32z, "sturb",
                          [(truncstorei8 GPR32z:$Rt,
                                         (am_unscaled8 GPR64sp:$Rn, simm9:$offset))]>;
 
+// bf16 store pattern
+def : Pat<(store (bf16 FPR16Op:$Rt),
+                 (am_unscaled16 GPR64sp:$Rn, simm9:$offset)),
+          (STURHi FPR16:$Rt, GPR64sp:$Rn, simm9:$offset)>;
+
 // Armv8.4 Weaker Release Consistency enhancements
 // LDAPR & STLR with Immediate Offset instructions
 let Predicates = [HasRCPC_IMMO] in {
diff --git a/llvm/test/CodeGen/AArch64/bf16.ll b/llvm/test/CodeGen/AArch64/bf16.ll
index 14ce317e7e383..7a171c6702e1f 100644
--- a/llvm/test/CodeGen/AArch64/bf16.ll
+++ b/llvm/test/CodeGen/AArch64/bf16.ll
@@ -45,6 +45,16 @@ define void @test_store(ptr %a, bfloat %b) nounwind {
   ret void
 }
 
+define void @test_store_negative_offset(ptr %a, bfloat %b) nounwind {
+; CHECK-LABEL: test_store_negative_offset:
+; CHECK-NEXT: stur h0, [x0, #-4]
+; CHECK-NEXT: ret
+entry:
+  %0 = getelementptr inbounds bfloat, ptr %a, i64 -2
+  store bfloat %b, ptr %0, align 2
+  ret void
+}
+
 ; Simple store of v4bf16
 define void @test_vec_store(ptr %a, <4 x bfloat> %b) nounwind {
 ; CHECK-LABEL: test_vec_store: