diff --git a/llvm/test/CodeGen/RISCV/rvv/rv32-vsetvli-intrinsics.ll b/llvm/test/CodeGen/RISCV/rvv/rv32-vsetvli-intrinsics.ll
index 5e97df06470c6a..6975cf7909a3a9 100644
--- a/llvm/test/CodeGen/RISCV/rvv/rv32-vsetvli-intrinsics.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/rv32-vsetvli-intrinsics.ll
@@ -30,3 +30,36 @@ define void @test_vsetvlimax_e64m8() nounwind {
   call i32 @llvm.riscv.vsetvlimax.i32(i32 3, i32 3)
   ret void
 }
+
+declare <vscale x 4 x i32> @llvm.riscv.vle.nxv4i32.i32(<vscale x 4 x i32>*, i32)
+
+; Check that we remove the redundant vsetvli when followed by another operation
+; FIXME: We don't
+define <vscale x 4 x i32> @redundant_vsetvli(i32 %avl, <vscale x 4 x i32>* %ptr) nounwind {
+; CHECK-LABEL: redundant_vsetvli:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetvli a0, a0, e32,m2,ta,mu
+; CHECK-NEXT:    vsetvli a0, a0, e32,m2,ta,mu
+; CHECK-NEXT:    vle32.v v8, (a1)
+; CHECK-NEXT:    ret
+  %vl = call i32 @llvm.riscv.vsetvli.i32(i32 %avl, i32 2, i32 1)
+  %x = call <vscale x 4 x i32> @llvm.riscv.vle.nxv4i32.i32(<vscale x 4 x i32>* %ptr, i32 %vl)
+  ret <vscale x 4 x i32> %x
+}
+
+; Check that we remove the repeated/redundant vsetvli when followed by another
+; operation
+; FIXME: We don't
+define <vscale x 4 x i32> @repeated_vsetvli(i32 %avl, <vscale x 4 x i32>* %ptr) nounwind {
+; CHECK-LABEL: repeated_vsetvli:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetvli a0, a0, e32,m2,ta,mu
+; CHECK-NEXT:    vsetvli a0, a0, e32,m2,ta,mu
+; CHECK-NEXT:    vsetvli a0, a0, e32,m2,ta,mu
+; CHECK-NEXT:    vle32.v v8, (a1)
+; CHECK-NEXT:    ret
+  %vl0 = call i32 @llvm.riscv.vsetvli.i32(i32 %avl, i32 2, i32 1)
+  %vl1 = call i32 @llvm.riscv.vsetvli.i32(i32 %vl0, i32 2, i32 1)
+  %x = call <vscale x 4 x i32> @llvm.riscv.vle.nxv4i32.i32(<vscale x 4 x i32>* %ptr, i32 %vl1)
+  ret <vscale x 4 x i32> %x
+}
diff --git a/llvm/test/CodeGen/RISCV/rvv/rv64-vsetvli-intrinsics.ll b/llvm/test/CodeGen/RISCV/rvv/rv64-vsetvli-intrinsics.ll
index 78d1008ce28b54..9e693a876de8c0 100644
--- a/llvm/test/CodeGen/RISCV/rvv/rv64-vsetvli-intrinsics.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/rv64-vsetvli-intrinsics.ll
@@ -48,3 +48,36 @@ define void @test_vsetvlimax_e64m4() nounwind {
   call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 2)
   ret void
 }
+
+declare <vscale x 4 x i32> @llvm.riscv.vle.nxv4i32.i64(<vscale x 4 x i32>*, i64)
+
+; Check that we remove the redundant vsetvli when followed by another operation
+; FIXME: We don't
+define <vscale x 4 x i32> @redundant_vsetvli(i64 %avl, <vscale x 4 x i32>* %ptr) nounwind {
+; CHECK-LABEL: redundant_vsetvli:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetvli a0, a0, e32,m2,ta,mu
+; CHECK-NEXT:    vsetvli a0, a0, e32,m2,ta,mu
+; CHECK-NEXT:    vle32.v v8, (a1)
+; CHECK-NEXT:    ret
+  %vl = call i64 @llvm.riscv.vsetvli.i64(i64 %avl, i64 2, i64 1)
+  %x = call <vscale x 4 x i32> @llvm.riscv.vle.nxv4i32.i64(<vscale x 4 x i32>* %ptr, i64 %vl)
+  ret <vscale x 4 x i32> %x
+}
+
+; Check that we remove the repeated/redundant vsetvli when followed by another
+; operation
+; FIXME: We don't
+define <vscale x 4 x i32> @repeated_vsetvli(i64 %avl, <vscale x 4 x i32>* %ptr) nounwind {
+; CHECK-LABEL: repeated_vsetvli:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetvli a0, a0, e32,m2,ta,mu
+; CHECK-NEXT:    vsetvli a0, a0, e32,m2,ta,mu
+; CHECK-NEXT:    vsetvli a0, a0, e32,m2,ta,mu
+; CHECK-NEXT:    vle32.v v8, (a1)
+; CHECK-NEXT:    ret
+  %vl0 = call i64 @llvm.riscv.vsetvli.i64(i64 %avl, i64 2, i64 1)
+  %vl1 = call i64 @llvm.riscv.vsetvli.i64(i64 %vl0, i64 2, i64 1)
+  %x = call <vscale x 4 x i32> @llvm.riscv.vle.nxv4i32.i64(<vscale x 4 x i32>* %ptr, i64 %vl1)
+  ret <vscale x 4 x i32> %x
+}