diff --git a/llvm/lib/Target/RISCV/RISCVSubtarget.cpp b/llvm/lib/Target/RISCV/RISCVSubtarget.cpp
index 7589b44b81d3c..0446edefa979a 100644
--- a/llvm/lib/Target/RISCV/RISCVSubtarget.cpp
+++ b/llvm/lib/Target/RISCV/RISCVSubtarget.cpp
@@ -202,11 +202,9 @@ bool RISCVSubtarget::useRVVForFixedLengthVectors() const {
 }
 
 bool RISCVSubtarget::enableSubRegLiveness() const {
-  if (EnableSubRegLiveness.getNumOccurrences())
-    return EnableSubRegLiveness;
-  // Enable subregister liveness for RVV to better handle LMUL>1 and segment
-  // load/store.
-  return hasVInstructions();
+  // FIXME: Enable subregister liveness by default for RVV to better handle
+  // LMUL>1 and segment load/store.
+  return EnableSubRegLiveness;
 }
 
 void RISCVSubtarget::getPostRAMutations(
diff --git a/llvm/test/CodeGen/RISCV/rvv/extract-subvector.ll b/llvm/test/CodeGen/RISCV/rvv/extract-subvector.ll
index 4bdda69d97ddf..4dd0c3afa1d7e 100644
--- a/llvm/test/CodeGen/RISCV/rvv/extract-subvector.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/extract-subvector.ll
@@ -5,6 +5,7 @@
 define <vscale x 4 x i32> @extract_nxv8i32_nxv4i32_0(<vscale x 8 x i32> %vec) {
 ; CHECK-LABEL: extract_nxv8i32_nxv4i32_0:
 ; CHECK:       # %bb.0:
+; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v8m4
 ; CHECK-NEXT:    ret
   %c = call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv8i32(<vscale x 8 x i32> %vec, i64 0)
   ret <vscale x 4 x i32> %c
@@ -22,6 +23,7 @@ define <vscale x 4 x i32> @extract_nxv8i32_nxv4i32_4(<vscale x 8 x i32> %vec) {
 define <vscale x 2 x i32> @extract_nxv8i32_nxv2i32_0(<vscale x 8 x i32> %vec) {
 ; CHECK-LABEL: extract_nxv8i32_nxv2i32_0:
 ; CHECK:       # %bb.0:
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v8m4
 ; CHECK-NEXT:    ret
   %c = call <vscale x 2 x i32> @llvm.vector.extract.nxv2i32.nxv8i32(<vscale x 8 x i32> %vec, i64 0)
   ret <vscale x 2 x i32> %c
@@ -57,6 +59,7 @@ define <vscale x 2 x i32> @extract_nxv8i32_nxv2i32_6(<vscale x 8 x i32> %vec) {
 define <vscale x 8 x i32> @extract_nxv16i32_nxv8i32_0(<vscale x 16 x i32> %vec) {
 ; CHECK-LABEL: extract_nxv16i32_nxv8i32_0:
 ; CHECK:       # %bb.0:
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m8
 ; CHECK-NEXT:    ret
   %c = call <vscale x 8 x i32> @llvm.vector.extract.nxv8i32.nxv16i32(<vscale x 16 x i32> %vec, i64 0)
   ret <vscale x 8 x i32> %c
@@ -74,6 +77,7 @@ define <vscale x 8 x i32> @extract_nxv16i32_nxv8i32_8(<vscale x 16 x i32> %vec)
 define <vscale x 4 x i32> @extract_nxv16i32_nxv4i32_0(<vscale x 16 x i32> %vec) {
 ; CHECK-LABEL: extract_nxv16i32_nxv4i32_0:
 ; CHECK:       # %bb.0:
+; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v8m8
 ; CHECK-NEXT:    ret
   %c = call <vscale x 4 x i32> @llvm.vector.extract.nxv4i32.nxv16i32(<vscale x 16 x i32> %vec, i64 0)
   ret <vscale x 4 x i32> %c
@@ -109,6 +113,7 @@ define <vscale x 4 x i32> @extract_nxv16i32_nxv4i32_12(<vscale x 16 x i32> %vec)
 define <vscale x 2 x i32> @extract_nxv16i32_nxv2i32_0(<vscale x 16 x i32> %vec) {
 ; CHECK-LABEL: extract_nxv16i32_nxv2i32_0:
 ; CHECK:       # %bb.0:
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v8m8
 ; CHECK-NEXT:    ret
   %c = call <vscale x 2 x i32> @llvm.vector.extract.nxv2i32.nxv16i32(<vscale x 16 x i32> %vec, i64 0)
   ret <vscale x 2 x i32> %c
@@ -180,6 +185,7 @@ define <vscale x 2 x i32> @extract_nxv16i32_nxv2i32_14(<vscale x 16 x i32> %vec)
 define <vscale x 1 x i32> @extract_nxv16i32_nxv1i32_0(<vscale x 16 x i32> %vec) {
 ; CHECK-LABEL: extract_nxv16i32_nxv1i32_0:
 ; CHECK:       # %bb.0:
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v8m8
 ; CHECK-NEXT:    ret
   %c = call <vscale x 1 x i32> @llvm.vector.extract.nxv1i32.nxv16i32(<vscale x 16 x i32> %vec, i64 0)
   ret <vscale x 1 x i32> %c
@@ -241,6 +247,7 @@ define <vscale x 1 x i32> @extract_nxv2i32_nxv1i32_0(<vscale x 2 x i32> %vec) {
 define <vscale x 2 x i8> @extract_nxv32i8_nxv2i8_0(<vscale x 32 x i8> %vec) {
 ; CHECK-LABEL: extract_nxv32i8_nxv2i8_0:
 ; CHECK:       # %bb.0:
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v8m4
 ; CHECK-NEXT:    ret
   %c = call <vscale x 2 x i8> @llvm.vector.extract.nxv2i8.nxv32i8(<vscale x 32 x i8> %vec, i64 0)
   ret <vscale x 2 x i8> %c
@@ -337,6 +344,7 @@ define <vscale x 1 x i8> @extract_nxv4i8_nxv1i8_3(<vscale x 4 x i8> %vec) {
 define <vscale x 2 x half> @extract_nxv2f16_nxv16f16_0(<vscale x 16 x half> %vec) {
 ; CHECK-LABEL: extract_nxv2f16_nxv16f16_0:
 ; CHECK:       # %bb.0:
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v8m4
 ; CHECK-NEXT:    ret
   %c = call <vscale x 2 x half> @llvm.vector.extract.nxv2f16.nxv16f16(<vscale x 16 x half> %vec, i64 0)
   ret <vscale x 2 x half> %c
@@ -459,6 +467,7 @@ define <vscale x 16 x i1> @extract_nxv16i1_nxv32i1_16(<vscale x 32 x i1> %x) {
 define <vscale x 6 x half> @extract_nxv6f16_nxv12f16_0(<vscale x 12 x half> %in) {
 ; CHECK-LABEL: extract_nxv6f16_nxv12f16_0:
 ; CHECK:       # %bb.0:
+; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v8m4
 ; CHECK-NEXT:    ret
   %res = call <vscale x 6 x half> @llvm.vector.extract.nxv6f16.nxv12f16(<vscale x 12 x half> %in, i64 0)
   ret <vscale x 6 x half> %res
@@ -470,13 +479,14 @@ define <vscale x 6 x half> @extract_nxv6f16_nxv12f16_6(<vscale x 12 x half> %in)
 ; CHECK-NEXT:    csrr a0, vlenb
 ; CHECK-NEXT:    srli a0, a0, 2
 ; CHECK-NEXT:    vsetvli a1, zero, e16, m1, ta, mu
-; CHECK-NEXT:    vslidedown.vx v11, v10, a0
-; CHECK-NEXT:    vslidedown.vx v8, v9, a0
+; CHECK-NEXT:    vslidedown.vx v14, v10, a0
+; CHECK-NEXT:    vslidedown.vx v12, v9, a0
 ; CHECK-NEXT:    vsetvli zero, a0, e16, m1, tu, mu
-; CHECK-NEXT:    vslideup.vi v9, v11, 0
+; CHECK-NEXT:    vslideup.vi v13, v14, 0
 ; CHECK-NEXT:    add a1, a0, a0
 ; CHECK-NEXT:    vsetvli zero, a1, e16, m1, tu, mu
-; CHECK-NEXT:    vslideup.vx v8, v10, a0
+; CHECK-NEXT:    vslideup.vx v12, v10, a0
+; CHECK-NEXT:    vmv2r.v v8, v12
 ; CHECK-NEXT:    ret
   %res = call <vscale x 6 x half> @llvm.vector.extract.nxv6f16.nxv12f16(<vscale x 12 x half> %in, i64 6)
   ret <vscale x 6 x half> %res
diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vector-segN-load.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vector-segN-load.ll
index 46be147b25935..c6edc79f96368 100644
--- a/llvm/test/CodeGen/RISCV/rvv/fixed-vector-segN-load.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vector-segN-load.ll
@@ -7,6 +7,7 @@ define <8 x i8> @load_factor2(<16 x i8>* %ptr) {
 ; CHECK:       # %bb.0:
 ; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
 ; CHECK-NEXT:    vlseg2e8.v v7, (a0)
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT:    ret
   %1 = bitcast <16 x i8>* %ptr to i8*
   %2 = call { <8 x i8>, <8 x i8> } @llvm.riscv.seg2.load.v8i8.p0i8.i64(i8* %1, i64 8)
@@ -20,6 +21,7 @@ define <8 x i8> @load_factor3(<24 x i8>* %ptr) {
 ; CHECK:       # %bb.0:
 ; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
 ; CHECK-NEXT:    vlseg3e8.v v6, (a0)
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v6_v7_v8
 ; CHECK-NEXT:    ret
   %1 = bitcast <24 x i8>* %ptr to i8*
   %2 = call { <8 x i8>, <8 x i8>, <8 x i8> } @llvm.riscv.seg3.load.v8i8.p0i8.i64(i8* %1, i64 8)
@@ -34,6 +36,7 @@ define <8 x i8> @load_factor4(<32 x i8>* %ptr) {
 ; CHECK:       # %bb.0:
 ; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
 ; CHECK-NEXT:    vlseg4e8.v v5, (a0)
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v5_v6_v7_v8
 ; CHECK-NEXT:    ret
   %1 = bitcast <32 x i8>* %ptr to i8*
   %2 = call { <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8> } @llvm.riscv.seg4.load.v8i8.p0i8.i64(i8* %1, i64 8)
@@ -49,6 +52,7 @@ define <8 x i8> @load_factor5(<40 x i8>* %ptr) {
 ; CHECK:       # %bb.0:
 ; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
 ; CHECK-NEXT:    vlseg5e8.v v4, (a0)
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v4_v5_v6_v7_v8
 ; CHECK-NEXT:    ret
   %1 = bitcast <40 x i8>* %ptr to i8*
   %2 = call { <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8> } @llvm.riscv.seg5.load.v8i8.p0i8.i64(i8* %1, i64 8)
@@ -65,6 +69,7 @@ define <8 x i8> @load_factor6(<48 x i8>* %ptr) {
 ; CHECK:       # %bb.0:
 ; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
 ; CHECK-NEXT:    vlseg6e8.v v3, (a0)
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v3_v4_v5_v6_v7_v8
 ; CHECK-NEXT:    ret
   %1 = bitcast <48 x i8>* %ptr to i8*
   %2 = call { <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8> } @llvm.riscv.seg6.load.v8i8.p0i8.i64(i8* %1, i64 8)
@@ -82,6 +87,7 @@ define <8 x i8> @load_factor7(<56 x i8>* %ptr) {
 ; CHECK:       # %bb.0:
 ; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
 ; CHECK-NEXT:    vlseg7e8.v v2, (a0)
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v2_v3_v4_v5_v6_v7_v8
 ; CHECK-NEXT:    ret
   %1 = bitcast <56 x i8>* %ptr to i8*
   %2 = call { <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8> }
@llvm.riscv.seg7.load.v8i8.p0i8.i64(i8* %1, i64 8) @@ -100,6 +106,7 @@ define <8 x i8> @load_factor8(<64 x i8>* %ptr) { ; CHECK: # %bb.0: ; CHECK-NEXT: vsetivli zero, 8, e8, mf2, ta, mu ; CHECK-NEXT: vlseg8e8.v v1, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v1_v2_v3_v4_v5_v6_v7_v8 ; CHECK-NEXT: ret %1 = bitcast <64 x i8>* %ptr to i8* %2 = call { <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8>, <8 x i8> } @llvm.riscv.seg8.load.v8i8.p0i8.i64(i8* %1, i64 8) diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vector-trunc-vp.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vector-trunc-vp.ll index 046252755d203..8d7eb7406b5fb 100644 --- a/llvm/test/CodeGen/RISCV/rvv/fixed-vector-trunc-vp.ll +++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vector-trunc-vp.ll @@ -267,7 +267,7 @@ define <128 x i32> @vtrunc_nxv128i32_nxv128i64(<128 x i64> %a, <128 x i1> %m, i3 ; CHECK-NEXT: mv a4, a3 ; CHECK-NEXT: .LBB16_2: ; CHECK-NEXT: vsetivli zero, 4, e8, mf2, ta, mu -; CHECK-NEXT: vslidedown.vi v28, v2, 4 +; CHECK-NEXT: vslidedown.vi v3, v2, 4 ; CHECK-NEXT: addi a6, a4, -32 ; CHECK-NEXT: addi a3, a1, 640 ; CHECK-NEXT: mv a5, a2 @@ -276,7 +276,7 @@ define <128 x i32> @vtrunc_nxv128i32_nxv128i64(<128 x i64> %a, <128 x i1> %m, i3 ; CHECK-NEXT: mv a5, a6 ; CHECK-NEXT: .LBB16_4: ; CHECK-NEXT: vsetivli zero, 2, e8, mf4, ta, mu -; CHECK-NEXT: vslidedown.vi v0, v28, 2 +; CHECK-NEXT: vslidedown.vi v0, v3, 2 ; CHECK-NEXT: vsetivli zero, 16, e64, m8, ta, mu ; CHECK-NEXT: vle64.v v16, (a3) ; CHECK-NEXT: addi t0, a5, -16 @@ -301,7 +301,7 @@ define <128 x i32> @vtrunc_nxv128i32_nxv128i64(<128 x i64> %a, <128 x i1> %m, i3 ; CHECK-NEXT: .LBB16_8: ; CHECK-NEXT: vsetvli zero, a5, e32, m4, ta, mu ; CHECK-NEXT: li a5, 64 -; CHECK-NEXT: vmv1r.v v0, v28 +; CHECK-NEXT: vmv1r.v v0, v3 ; CHECK-NEXT: vncvt.x.x.w v16, v8, v0.t ; CHECK-NEXT: csrr a6, vlenb ; CHECK-NEXT: li t0, 48 @@ -314,7 +314,7 @@ define <128 x i32> @vtrunc_nxv128i32_nxv128i64(<128 x i64> %a, <128 x i1> %m, i3 ; CHECK-NEXT: li a7, 64 ; CHECK-NEXT: .LBB16_10: ; CHECK-NEXT: vsetivli zero, 4, e8, mf2, ta, mu -; CHECK-NEXT: vslidedown.vi v28, v1, 4 +; CHECK-NEXT: vslidedown.vi v3, v1, 4 ; CHECK-NEXT: addi t0, a7, -32 ; CHECK-NEXT: addi a5, a1, 128 ; CHECK-NEXT: mv a6, a2 @@ -323,7 +323,7 @@ define <128 x i32> @vtrunc_nxv128i32_nxv128i64(<128 x i64> %a, <128 x i1> %m, i3 ; CHECK-NEXT: mv a6, t0 ; CHECK-NEXT: .LBB16_12: ; CHECK-NEXT: vsetivli zero, 2, e8, mf4, ta, mu -; CHECK-NEXT: vslidedown.vi v0, v28, 2 +; CHECK-NEXT: vslidedown.vi v0, v3, 2 ; CHECK-NEXT: vsetivli zero, 16, e64, m8, ta, mu ; CHECK-NEXT: vle64.v v16, (a5) ; CHECK-NEXT: addi a5, a6, -16 @@ -347,7 +347,7 @@ define <128 x i32> @vtrunc_nxv128i32_nxv128i64(<128 x i64> %a, <128 x i1> %m, i3 ; CHECK-NEXT: .LBB16_16: ; CHECK-NEXT: addi t0, a1, 384 ; CHECK-NEXT: vsetvli zero, a6, e32, m4, ta, mu -; CHECK-NEXT: vmv1r.v v0, v28 +; CHECK-NEXT: vmv1r.v v0, v3 ; CHECK-NEXT: vncvt.x.x.w v16, v8, v0.t ; CHECK-NEXT: csrr a6, vlenb ; CHECK-NEXT: li t1, 40 diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-fp-interleave.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-fp-interleave.ll index 328ebf86b8f4a..20350200f9944 100644 --- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-fp-interleave.ll +++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-fp-interleave.ll @@ -39,20 +39,22 @@ define <4 x double> @interleave_v2f64(<2 x double> %x, <2 x double> %y) { ; RV32-V128-LABEL: interleave_v2f64: ; RV32-V128: # %bb.0: ; RV32-V128-NEXT: vmv1r.v v12, v9 +; RV32-V128-NEXT: # kill: def $v8 killed $v8 def $v8m2 ; RV32-V128-NEXT: 
vsetivli zero, 4, e16, mf2, ta, mu -; RV32-V128-NEXT: vid.v v9 -; RV32-V128-NEXT: vsrl.vi v9, v9, 1 +; RV32-V128-NEXT: vid.v v10 +; RV32-V128-NEXT: vsrl.vi v14, v10, 1 ; RV32-V128-NEXT: vsetvli zero, zero, e64, m2, ta, mu ; RV32-V128-NEXT: li a0, 10 ; RV32-V128-NEXT: vmv.s.x v0, a0 -; RV32-V128-NEXT: vrgatherei16.vv v10, v8, v9 -; RV32-V128-NEXT: vrgatherei16.vv v10, v12, v9, v0.t +; RV32-V128-NEXT: vrgatherei16.vv v10, v8, v14 +; RV32-V128-NEXT: vrgatherei16.vv v10, v12, v14, v0.t ; RV32-V128-NEXT: vmv.v.v v8, v10 ; RV32-V128-NEXT: ret ; ; RV64-V128-LABEL: interleave_v2f64: ; RV64-V128: # %bb.0: ; RV64-V128-NEXT: vmv1r.v v12, v9 +; RV64-V128-NEXT: # kill: def $v8 killed $v8 def $v8m2 ; RV64-V128-NEXT: vsetivli zero, 4, e64, m2, ta, mu ; RV64-V128-NEXT: vid.v v10 ; RV64-V128-NEXT: vsrl.vi v14, v10, 1 @@ -267,9 +269,9 @@ define <64 x float> @interleave_v32f32(<32 x float> %x, <32 x float> %y) { ; RV32-V128-NEXT: vsetvli zero, a1, e32, m8, ta, mu ; RV32-V128-NEXT: vle32.v v0, (a0) ; RV32-V128-NEXT: vmv8r.v v24, v8 -; RV32-V128-NEXT: vrgather.vv v8, v24, v0 ; RV32-V128-NEXT: addi a0, sp, 16 -; RV32-V128-NEXT: vs8r.v v24, (a0) # Unknown-size Folded Spill +; RV32-V128-NEXT: vs8r.v v8, (a0) # Unknown-size Folded Spill +; RV32-V128-NEXT: vrgather.vv v8, v24, v0 ; RV32-V128-NEXT: lui a0, %hi(.LCPI10_1) ; RV32-V128-NEXT: addi a0, a0, %lo(.LCPI10_1) ; RV32-V128-NEXT: vle32.v v24, (a0) @@ -317,9 +319,9 @@ define <64 x float> @interleave_v32f32(<32 x float> %x, <32 x float> %y) { ; RV64-V128-NEXT: vsetvli zero, a1, e32, m8, ta, mu ; RV64-V128-NEXT: vle32.v v0, (a0) ; RV64-V128-NEXT: vmv8r.v v24, v8 -; RV64-V128-NEXT: vrgather.vv v8, v24, v0 ; RV64-V128-NEXT: addi a0, sp, 16 -; RV64-V128-NEXT: vs8r.v v24, (a0) # Unknown-size Folded Spill +; RV64-V128-NEXT: vs8r.v v8, (a0) # Unknown-size Folded Spill +; RV64-V128-NEXT: vrgather.vv v8, v24, v0 ; RV64-V128-NEXT: lui a0, %hi(.LCPI10_1) ; RV64-V128-NEXT: addi a0, a0, %lo(.LCPI10_1) ; RV64-V128-NEXT: vle32.v v24, (a0) diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-int-exttrunc.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-int-exttrunc.ll index d0f71f05a0901..7ea275b31aab0 100644 --- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-int-exttrunc.ll +++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-int-exttrunc.ll @@ -92,16 +92,16 @@ define void @sext_v32i8_v32i32(<32 x i8>* %x, <32 x i32>* %z) { ; LMULMAX2-NEXT: vsetivli zero, 16, e8, m2, ta, mu ; LMULMAX2-NEXT: vslidedown.vi v10, v8, 16 ; LMULMAX2-NEXT: vsetivli zero, 8, e8, m1, ta, mu -; LMULMAX2-NEXT: vslidedown.vi v9, v10, 8 +; LMULMAX2-NEXT: vslidedown.vi v14, v10, 8 ; LMULMAX2-NEXT: vsetivli zero, 8, e32, m2, ta, mu -; LMULMAX2-NEXT: vsext.vf4 v14, v9 -; LMULMAX2-NEXT: vsext.vf4 v16, v8 +; LMULMAX2-NEXT: vsext.vf4 v16, v14 +; LMULMAX2-NEXT: vsext.vf4 v14, v8 ; LMULMAX2-NEXT: vsext.vf4 v8, v10 ; LMULMAX2-NEXT: addi a0, a1, 64 ; LMULMAX2-NEXT: vse32.v v8, (a0) -; LMULMAX2-NEXT: vse32.v v16, (a1) +; LMULMAX2-NEXT: vse32.v v14, (a1) ; LMULMAX2-NEXT: addi a0, a1, 96 -; LMULMAX2-NEXT: vse32.v v14, (a0) +; LMULMAX2-NEXT: vse32.v v16, (a0) ; LMULMAX2-NEXT: addi a0, a1, 32 ; LMULMAX2-NEXT: vse32.v v12, (a0) ; LMULMAX2-NEXT: ret diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-int-interleave.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-int-interleave.ll index fc2c6cdf32e2f..d39b6a81a70bf 100644 --- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-int-interleave.ll +++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-int-interleave.ll @@ -52,20 +52,22 @@ define <4 x i64> @interleave_v2i64(<2 x i64> %x, <2 x i64> 
%y) { ; RV32-V128-LABEL: interleave_v2i64: ; RV32-V128: # %bb.0: ; RV32-V128-NEXT: vmv1r.v v12, v9 +; RV32-V128-NEXT: # kill: def $v8 killed $v8 def $v8m2 ; RV32-V128-NEXT: vsetivli zero, 4, e16, mf2, ta, mu -; RV32-V128-NEXT: vid.v v9 -; RV32-V128-NEXT: vsrl.vi v9, v9, 1 +; RV32-V128-NEXT: vid.v v10 +; RV32-V128-NEXT: vsrl.vi v14, v10, 1 ; RV32-V128-NEXT: vsetvli zero, zero, e64, m2, ta, mu ; RV32-V128-NEXT: li a0, 10 ; RV32-V128-NEXT: vmv.s.x v0, a0 -; RV32-V128-NEXT: vrgatherei16.vv v10, v8, v9 -; RV32-V128-NEXT: vrgatherei16.vv v10, v12, v9, v0.t +; RV32-V128-NEXT: vrgatherei16.vv v10, v8, v14 +; RV32-V128-NEXT: vrgatherei16.vv v10, v12, v14, v0.t ; RV32-V128-NEXT: vmv.v.v v8, v10 ; RV32-V128-NEXT: ret ; ; RV64-V128-LABEL: interleave_v2i64: ; RV64-V128: # %bb.0: ; RV64-V128-NEXT: vmv1r.v v12, v9 +; RV64-V128-NEXT: # kill: def $v8 killed $v8 def $v8m2 ; RV64-V128-NEXT: vsetivli zero, 4, e64, m2, ta, mu ; RV64-V128-NEXT: vid.v v10 ; RV64-V128-NEXT: vsrl.vi v14, v10, 1 @@ -373,9 +375,9 @@ define <64 x i32> @interleave_v32i32(<32 x i32> %x, <32 x i32> %y) { ; RV32-V128-NEXT: vsetvli zero, a1, e32, m8, ta, mu ; RV32-V128-NEXT: vle32.v v0, (a0) ; RV32-V128-NEXT: vmv8r.v v24, v8 -; RV32-V128-NEXT: vrgather.vv v8, v24, v0 ; RV32-V128-NEXT: addi a0, sp, 16 -; RV32-V128-NEXT: vs8r.v v24, (a0) # Unknown-size Folded Spill +; RV32-V128-NEXT: vs8r.v v8, (a0) # Unknown-size Folded Spill +; RV32-V128-NEXT: vrgather.vv v8, v24, v0 ; RV32-V128-NEXT: lui a0, %hi(.LCPI15_1) ; RV32-V128-NEXT: addi a0, a0, %lo(.LCPI15_1) ; RV32-V128-NEXT: vle32.v v24, (a0) @@ -423,9 +425,9 @@ define <64 x i32> @interleave_v32i32(<32 x i32> %x, <32 x i32> %y) { ; RV64-V128-NEXT: vsetvli zero, a1, e32, m8, ta, mu ; RV64-V128-NEXT: vle32.v v0, (a0) ; RV64-V128-NEXT: vmv8r.v v24, v8 -; RV64-V128-NEXT: vrgather.vv v8, v24, v0 ; RV64-V128-NEXT: addi a0, sp, 16 -; RV64-V128-NEXT: vs8r.v v24, (a0) # Unknown-size Folded Spill +; RV64-V128-NEXT: vs8r.v v8, (a0) # Unknown-size Folded Spill +; RV64-V128-NEXT: vrgather.vv v8, v24, v0 ; RV64-V128-NEXT: lui a0, %hi(.LCPI15_1) ; RV64-V128-NEXT: addi a0, a0, %lo(.LCPI15_1) ; RV64-V128-NEXT: vle32.v v24, (a0) diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-masked-gather.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-masked-gather.ll index dfeebbcf11879..01d8ad8b23d66 100644 --- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-masked-gather.ll +++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-masked-gather.ll @@ -12819,8 +12819,8 @@ define <32 x i8> @mgather_baseidx_v32i8(i8* %base, <32 x i8> %idxs, <32 x i1> %m ; RV64ZVE32F-NEXT: beqz a2, .LBB98_23 ; RV64ZVE32F-NEXT: # %bb.22: # %cond.load37 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v9, v12, 1 -; RV64ZVE32F-NEXT: vmv.x.s a2, v9 +; RV64ZVE32F-NEXT: vslidedown.vi v13, v12, 1 +; RV64ZVE32F-NEXT: vmv.x.s a2, v13 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: lb a2, 0(a2) ; RV64ZVE32F-NEXT: li a3, 32 @@ -12832,25 +12832,25 @@ define <32 x i8> @mgather_baseidx_v32i8(i8* %base, <32 x i8> %idxs, <32 x i1> %m ; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, ta, mu ; RV64ZVE32F-NEXT: lui a2, 4 ; RV64ZVE32F-NEXT: and a2, a1, a2 -; RV64ZVE32F-NEXT: vslidedown.vi v9, v12, 2 +; RV64ZVE32F-NEXT: vslidedown.vi v12, v12, 2 ; RV64ZVE32F-NEXT: beqz a2, .LBB98_25 ; RV64ZVE32F-NEXT: # %bb.24: # %cond.load40 -; RV64ZVE32F-NEXT: vmv.x.s a2, v9 +; RV64ZVE32F-NEXT: vmv.x.s a2, v12 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: lb a2, 0(a2) ; RV64ZVE32F-NEXT: li a3, 32 ; RV64ZVE32F-NEXT: vsetvli zero, a3, e8, m2, 
ta, mu -; RV64ZVE32F-NEXT: vmv.s.x v12, a2 +; RV64ZVE32F-NEXT: vmv.s.x v14, a2 ; RV64ZVE32F-NEXT: vsetivli zero, 15, e8, m2, tu, mu -; RV64ZVE32F-NEXT: vslideup.vi v10, v12, 14 +; RV64ZVE32F-NEXT: vslideup.vi v10, v14, 14 ; RV64ZVE32F-NEXT: .LBB98_25: # %else41 ; RV64ZVE32F-NEXT: lui a2, 8 ; RV64ZVE32F-NEXT: and a2, a1, a2 ; RV64ZVE32F-NEXT: beqz a2, .LBB98_27 ; RV64ZVE32F-NEXT: # %bb.26: # %cond.load43 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v9, v9, 1 -; RV64ZVE32F-NEXT: vmv.x.s a2, v9 +; RV64ZVE32F-NEXT: vslidedown.vi v12, v12, 1 +; RV64ZVE32F-NEXT: vmv.x.s a2, v12 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: lb a2, 0(a2) ; RV64ZVE32F-NEXT: li a3, 32 @@ -12878,8 +12878,8 @@ define <32 x i8> @mgather_baseidx_v32i8(i8* %base, <32 x i8> %idxs, <32 x i1> %m ; RV64ZVE32F-NEXT: beqz a2, .LBB98_31 ; RV64ZVE32F-NEXT: # %bb.30: # %cond.load49 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v9, v8, 1 -; RV64ZVE32F-NEXT: vmv.x.s a2, v9 +; RV64ZVE32F-NEXT: vslidedown.vi v12, v8, 1 +; RV64ZVE32F-NEXT: vmv.x.s a2, v12 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: lb a2, 0(a2) ; RV64ZVE32F-NEXT: li a3, 32 @@ -12891,10 +12891,10 @@ define <32 x i8> @mgather_baseidx_v32i8(i8* %base, <32 x i8> %idxs, <32 x i1> %m ; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, ta, mu ; RV64ZVE32F-NEXT: lui a2, 64 ; RV64ZVE32F-NEXT: and a2, a1, a2 -; RV64ZVE32F-NEXT: vslidedown.vi v12, v8, 2 +; RV64ZVE32F-NEXT: vslidedown.vi v13, v8, 2 ; RV64ZVE32F-NEXT: beqz a2, .LBB98_33 ; RV64ZVE32F-NEXT: # %bb.32: # %cond.load52 -; RV64ZVE32F-NEXT: vmv.x.s a2, v12 +; RV64ZVE32F-NEXT: vmv.x.s a2, v13 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: lb a2, 0(a2) ; RV64ZVE32F-NEXT: li a3, 32 @@ -12906,55 +12906,55 @@ define <32 x i8> @mgather_baseidx_v32i8(i8* %base, <32 x i8> %idxs, <32 x i1> %m ; RV64ZVE32F-NEXT: vsetivli zero, 4, e8, mf2, ta, mu ; RV64ZVE32F-NEXT: lui a2, 128 ; RV64ZVE32F-NEXT: and a2, a1, a2 -; RV64ZVE32F-NEXT: vslidedown.vi v9, v8, 4 +; RV64ZVE32F-NEXT: vslidedown.vi v12, v8, 4 ; RV64ZVE32F-NEXT: beqz a2, .LBB98_35 ; RV64ZVE32F-NEXT: # %bb.34: # %cond.load55 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v12, v12, 1 -; RV64ZVE32F-NEXT: vmv.x.s a2, v12 +; RV64ZVE32F-NEXT: vslidedown.vi v13, v13, 1 +; RV64ZVE32F-NEXT: vmv.x.s a2, v13 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: lb a2, 0(a2) ; RV64ZVE32F-NEXT: li a3, 32 ; RV64ZVE32F-NEXT: vsetvli zero, a3, e8, m2, ta, mu -; RV64ZVE32F-NEXT: vmv.s.x v12, a2 +; RV64ZVE32F-NEXT: vmv.s.x v14, a2 ; RV64ZVE32F-NEXT: vsetivli zero, 20, e8, m2, tu, mu -; RV64ZVE32F-NEXT: vslideup.vi v10, v12, 19 +; RV64ZVE32F-NEXT: vslideup.vi v10, v14, 19 ; RV64ZVE32F-NEXT: .LBB98_35: # %else56 ; RV64ZVE32F-NEXT: lui a2, 256 ; RV64ZVE32F-NEXT: and a2, a1, a2 ; RV64ZVE32F-NEXT: beqz a2, .LBB98_37 ; RV64ZVE32F-NEXT: # %bb.36: # %cond.load58 ; RV64ZVE32F-NEXT: vsetivli zero, 0, e8, mf4, ta, mu -; RV64ZVE32F-NEXT: vmv.x.s a2, v9 +; RV64ZVE32F-NEXT: vmv.x.s a2, v12 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: lb a2, 0(a2) ; RV64ZVE32F-NEXT: li a3, 32 ; RV64ZVE32F-NEXT: vsetvli zero, a3, e8, m2, ta, mu -; RV64ZVE32F-NEXT: vmv.s.x v12, a2 +; RV64ZVE32F-NEXT: vmv.s.x v14, a2 ; RV64ZVE32F-NEXT: vsetivli zero, 21, e8, m2, tu, mu -; RV64ZVE32F-NEXT: vslideup.vi v10, v12, 20 +; RV64ZVE32F-NEXT: vslideup.vi v10, v14, 20 ; RV64ZVE32F-NEXT: .LBB98_37: # %else59 ; RV64ZVE32F-NEXT: lui a2, 512 ; RV64ZVE32F-NEXT: and a2, a1, a2 ; RV64ZVE32F-NEXT: beqz a2, 
.LBB98_39 ; RV64ZVE32F-NEXT: # %bb.38: # %cond.load61 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v12, v9, 1 -; RV64ZVE32F-NEXT: vmv.x.s a2, v12 +; RV64ZVE32F-NEXT: vslidedown.vi v13, v12, 1 +; RV64ZVE32F-NEXT: vmv.x.s a2, v13 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: lb a2, 0(a2) ; RV64ZVE32F-NEXT: li a3, 32 ; RV64ZVE32F-NEXT: vsetvli zero, a3, e8, m2, ta, mu -; RV64ZVE32F-NEXT: vmv.s.x v12, a2 +; RV64ZVE32F-NEXT: vmv.s.x v14, a2 ; RV64ZVE32F-NEXT: vsetivli zero, 22, e8, m2, tu, mu -; RV64ZVE32F-NEXT: vslideup.vi v10, v12, 21 +; RV64ZVE32F-NEXT: vslideup.vi v10, v14, 21 ; RV64ZVE32F-NEXT: .LBB98_39: # %else62 ; RV64ZVE32F-NEXT: vsetivli zero, 8, e8, m1, ta, mu ; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 8 ; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, ta, mu ; RV64ZVE32F-NEXT: lui a2, 1024 ; RV64ZVE32F-NEXT: and a2, a1, a2 -; RV64ZVE32F-NEXT: vslidedown.vi v9, v9, 2 +; RV64ZVE32F-NEXT: vslidedown.vi v9, v12, 2 ; RV64ZVE32F-NEXT: beqz a2, .LBB98_41 ; RV64ZVE32F-NEXT: # %bb.40: # %cond.load64 ; RV64ZVE32F-NEXT: vmv.x.s a2, v9 diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-masked-scatter.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-masked-scatter.ll index d245d048e6ee7..5b9f800e16d91 100644 --- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-masked-scatter.ll +++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-masked-scatter.ll @@ -11157,8 +11157,8 @@ define void @mscatter_baseidx_v32i8(<32 x i8> %val, i8* %base, <32 x i8> %idxs, ; RV64ZVE32F-NEXT: beqz a2, .LBB92_23 ; RV64ZVE32F-NEXT: # %bb.22: # %cond.store25 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v11, v12, 1 -; RV64ZVE32F-NEXT: vmv.x.s a2, v11 +; RV64ZVE32F-NEXT: vslidedown.vi v13, v12, 1 +; RV64ZVE32F-NEXT: vmv.x.s a2, v13 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, m2, ta, mu ; RV64ZVE32F-NEXT: vslidedown.vi v14, v8, 13 @@ -11167,22 +11167,22 @@ define void @mscatter_baseidx_v32i8(<32 x i8> %val, i8* %base, <32 x i8> %idxs, ; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, ta, mu ; RV64ZVE32F-NEXT: lui a2, 4 ; RV64ZVE32F-NEXT: and a2, a1, a2 -; RV64ZVE32F-NEXT: vslidedown.vi v11, v12, 2 +; RV64ZVE32F-NEXT: vslidedown.vi v12, v12, 2 ; RV64ZVE32F-NEXT: beqz a2, .LBB92_25 ; RV64ZVE32F-NEXT: # %bb.24: # %cond.store27 -; RV64ZVE32F-NEXT: vmv.x.s a2, v11 +; RV64ZVE32F-NEXT: vmv.x.s a2, v12 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, m2, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v12, v8, 14 -; RV64ZVE32F-NEXT: vse8.v v12, (a2) +; RV64ZVE32F-NEXT: vslidedown.vi v14, v8, 14 +; RV64ZVE32F-NEXT: vse8.v v14, (a2) ; RV64ZVE32F-NEXT: .LBB92_25: # %else28 ; RV64ZVE32F-NEXT: lui a2, 8 ; RV64ZVE32F-NEXT: and a2, a1, a2 ; RV64ZVE32F-NEXT: beqz a2, .LBB92_27 ; RV64ZVE32F-NEXT: # %bb.26: # %cond.store29 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v11, v11, 1 -; RV64ZVE32F-NEXT: vmv.x.s a2, v11 +; RV64ZVE32F-NEXT: vslidedown.vi v12, v12, 1 +; RV64ZVE32F-NEXT: vmv.x.s a2, v12 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, m2, ta, mu ; RV64ZVE32F-NEXT: vslidedown.vi v12, v8, 15 @@ -11204,8 +11204,8 @@ define void @mscatter_baseidx_v32i8(<32 x i8> %val, i8* %base, <32 x i8> %idxs, ; RV64ZVE32F-NEXT: beqz a2, .LBB92_31 ; RV64ZVE32F-NEXT: # %bb.30: # %cond.store33 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v11, v10, 1 -; RV64ZVE32F-NEXT: vmv.x.s a2, v11 +; RV64ZVE32F-NEXT: vslidedown.vi v12, 
v10, 1 +; RV64ZVE32F-NEXT: vmv.x.s a2, v12 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, m2, ta, mu ; RV64ZVE32F-NEXT: vslidedown.vi v12, v8, 17 @@ -11214,10 +11214,10 @@ define void @mscatter_baseidx_v32i8(<32 x i8> %val, i8* %base, <32 x i8> %idxs, ; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, ta, mu ; RV64ZVE32F-NEXT: lui a2, 64 ; RV64ZVE32F-NEXT: and a2, a1, a2 -; RV64ZVE32F-NEXT: vslidedown.vi v12, v10, 2 +; RV64ZVE32F-NEXT: vslidedown.vi v13, v10, 2 ; RV64ZVE32F-NEXT: beqz a2, .LBB92_33 ; RV64ZVE32F-NEXT: # %bb.32: # %cond.store35 -; RV64ZVE32F-NEXT: vmv.x.s a2, v12 +; RV64ZVE32F-NEXT: vmv.x.s a2, v13 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, m2, ta, mu ; RV64ZVE32F-NEXT: vslidedown.vi v14, v8, 18 @@ -11226,46 +11226,46 @@ define void @mscatter_baseidx_v32i8(<32 x i8> %val, i8* %base, <32 x i8> %idxs, ; RV64ZVE32F-NEXT: vsetivli zero, 4, e8, mf2, ta, mu ; RV64ZVE32F-NEXT: lui a2, 128 ; RV64ZVE32F-NEXT: and a2, a1, a2 -; RV64ZVE32F-NEXT: vslidedown.vi v11, v10, 4 +; RV64ZVE32F-NEXT: vslidedown.vi v12, v10, 4 ; RV64ZVE32F-NEXT: beqz a2, .LBB92_35 ; RV64ZVE32F-NEXT: # %bb.34: # %cond.store37 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v12, v12, 1 -; RV64ZVE32F-NEXT: vmv.x.s a2, v12 +; RV64ZVE32F-NEXT: vslidedown.vi v13, v13, 1 +; RV64ZVE32F-NEXT: vmv.x.s a2, v13 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, m2, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v12, v8, 19 -; RV64ZVE32F-NEXT: vse8.v v12, (a2) +; RV64ZVE32F-NEXT: vslidedown.vi v14, v8, 19 +; RV64ZVE32F-NEXT: vse8.v v14, (a2) ; RV64ZVE32F-NEXT: .LBB92_35: # %else38 ; RV64ZVE32F-NEXT: lui a2, 256 ; RV64ZVE32F-NEXT: and a2, a1, a2 ; RV64ZVE32F-NEXT: beqz a2, .LBB92_37 ; RV64ZVE32F-NEXT: # %bb.36: # %cond.store39 ; RV64ZVE32F-NEXT: vsetivli zero, 0, e8, mf4, ta, mu -; RV64ZVE32F-NEXT: vmv.x.s a2, v11 +; RV64ZVE32F-NEXT: vmv.x.s a2, v12 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, m2, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v12, v8, 20 -; RV64ZVE32F-NEXT: vse8.v v12, (a2) +; RV64ZVE32F-NEXT: vslidedown.vi v14, v8, 20 +; RV64ZVE32F-NEXT: vse8.v v14, (a2) ; RV64ZVE32F-NEXT: .LBB92_37: # %else40 ; RV64ZVE32F-NEXT: lui a2, 512 ; RV64ZVE32F-NEXT: and a2, a1, a2 ; RV64ZVE32F-NEXT: beqz a2, .LBB92_39 ; RV64ZVE32F-NEXT: # %bb.38: # %cond.store41 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v12, v11, 1 -; RV64ZVE32F-NEXT: vmv.x.s a2, v12 +; RV64ZVE32F-NEXT: vslidedown.vi v13, v12, 1 +; RV64ZVE32F-NEXT: vmv.x.s a2, v13 ; RV64ZVE32F-NEXT: add a2, a0, a2 ; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, m2, ta, mu -; RV64ZVE32F-NEXT: vslidedown.vi v12, v8, 21 -; RV64ZVE32F-NEXT: vse8.v v12, (a2) +; RV64ZVE32F-NEXT: vslidedown.vi v14, v8, 21 +; RV64ZVE32F-NEXT: vse8.v v14, (a2) ; RV64ZVE32F-NEXT: .LBB92_39: # %else42 ; RV64ZVE32F-NEXT: vsetivli zero, 8, e8, m1, ta, mu ; RV64ZVE32F-NEXT: vslidedown.vi v10, v10, 8 ; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, ta, mu ; RV64ZVE32F-NEXT: lui a2, 1024 ; RV64ZVE32F-NEXT: and a2, a1, a2 -; RV64ZVE32F-NEXT: vslidedown.vi v11, v11, 2 +; RV64ZVE32F-NEXT: vslidedown.vi v11, v12, 2 ; RV64ZVE32F-NEXT: beqz a2, .LBB92_41 ; RV64ZVE32F-NEXT: # %bb.40: # %cond.store43 ; RV64ZVE32F-NEXT: vmv.x.s a2, v11 diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-reduction-fp.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-reduction-fp.ll index 8cebb8f5bddfa..7dccf2c9d7df6 100644 --- 
a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-reduction-fp.ll +++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-reduction-fp.ll @@ -747,15 +747,15 @@ define float @vreduce_ord_fwadd_v64f32(<64 x half>* %x, float %s) { ; CHECK-NEXT: vsetvli zero, a0, e16, m8, ta, mu ; CHECK-NEXT: vslidedown.vx v8, v16, a0 ; CHECK-NEXT: vsetivli zero, 1, e32, m1, ta, mu -; CHECK-NEXT: vfmv.s.f v12, fa0 +; CHECK-NEXT: vfmv.s.f v24, fa0 ; CHECK-NEXT: vsetvli zero, a0, e16, m4, ta, mu -; CHECK-NEXT: vfwredosum.vs v12, v16, v12 +; CHECK-NEXT: vfwredosum.vs v16, v16, v24 ; CHECK-NEXT: vsetivli zero, 0, e32, m1, ta, mu -; CHECK-NEXT: vfmv.f.s ft0, v12 +; CHECK-NEXT: vfmv.f.s ft0, v16 ; CHECK-NEXT: vsetivli zero, 1, e32, m1, ta, mu -; CHECK-NEXT: vfmv.s.f v12, ft0 +; CHECK-NEXT: vfmv.s.f v16, ft0 ; CHECK-NEXT: vsetvli zero, a0, e16, m4, ta, mu -; CHECK-NEXT: vfwredosum.vs v8, v8, v12 +; CHECK-NEXT: vfwredosum.vs v8, v8, v16 ; CHECK-NEXT: vsetivli zero, 0, e32, m1, ta, mu ; CHECK-NEXT: vfmv.f.s fa0, v8 ; CHECK-NEXT: ret @@ -1195,15 +1195,15 @@ define double @vreduce_ord_fwadd_v32f64(<32 x float>* %x, double %s) { ; CHECK-NEXT: vsetivli zero, 16, e32, m8, ta, mu ; CHECK-NEXT: vslidedown.vi v8, v16, 16 ; CHECK-NEXT: vsetivli zero, 1, e64, m1, ta, mu -; CHECK-NEXT: vfmv.s.f v12, fa0 +; CHECK-NEXT: vfmv.s.f v24, fa0 ; CHECK-NEXT: vsetivli zero, 16, e32, m4, ta, mu -; CHECK-NEXT: vfwredosum.vs v12, v16, v12 +; CHECK-NEXT: vfwredosum.vs v16, v16, v24 ; CHECK-NEXT: vsetivli zero, 0, e64, m1, ta, mu -; CHECK-NEXT: vfmv.f.s ft0, v12 +; CHECK-NEXT: vfmv.f.s ft0, v16 ; CHECK-NEXT: vsetivli zero, 1, e64, m1, ta, mu -; CHECK-NEXT: vfmv.s.f v12, ft0 +; CHECK-NEXT: vfmv.s.f v16, ft0 ; CHECK-NEXT: vsetivli zero, 16, e32, m4, ta, mu -; CHECK-NEXT: vfwredosum.vs v8, v8, v12 +; CHECK-NEXT: vfwredosum.vs v8, v8, v16 ; CHECK-NEXT: vsetivli zero, 0, e64, m1, ta, mu ; CHECK-NEXT: vfmv.f.s fa0, v8 ; CHECK-NEXT: ret diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-reduction-int.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-reduction-int.ll index 8cc2298379a83..967689ca10976 100644 --- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-reduction-int.ll +++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-reduction-int.ll @@ -1596,11 +1596,11 @@ define i64 @vwreduce_add_v64i64(<64 x i32>* %x) { ; RV32-NEXT: li a2, 32 ; RV32-NEXT: vsetvli zero, a2, e32, m8, ta, mu ; RV32-NEXT: vle32.v v8, (a0) +; RV32-NEXT: addi a0, sp, 16 +; RV32-NEXT: vs8r.v v8, (a0) # Unknown-size Folded Spill ; RV32-NEXT: vle32.v v16, (a1) ; RV32-NEXT: vsetivli zero, 16, e32, m8, ta, mu ; RV32-NEXT: vslidedown.vi v24, v8, 16 -; RV32-NEXT: addi a0, sp, 16 -; RV32-NEXT: vs8r.v v8, (a0) # Unknown-size Folded Spill ; RV32-NEXT: csrr a0, vlenb ; RV32-NEXT: li a1, 24 ; RV32-NEXT: mul a0, a0, a1 @@ -1664,11 +1664,11 @@ define i64 @vwreduce_add_v64i64(<64 x i32>* %x) { ; RV64-NEXT: li a2, 32 ; RV64-NEXT: vsetvli zero, a2, e32, m8, ta, mu ; RV64-NEXT: vle32.v v8, (a0) +; RV64-NEXT: addi a0, sp, 16 +; RV64-NEXT: vs8r.v v8, (a0) # Unknown-size Folded Spill ; RV64-NEXT: vle32.v v16, (a1) ; RV64-NEXT: vsetivli zero, 16, e32, m8, ta, mu ; RV64-NEXT: vslidedown.vi v24, v8, 16 -; RV64-NEXT: addi a0, sp, 16 -; RV64-NEXT: vs8r.v v8, (a0) # Unknown-size Folded Spill ; RV64-NEXT: csrr a0, vlenb ; RV64-NEXT: li a1, 24 ; RV64-NEXT: mul a0, a0, a1 @@ -1735,11 +1735,11 @@ define i64 @vwreduce_uadd_v64i64(<64 x i32>* %x) { ; RV32-NEXT: li a2, 32 ; RV32-NEXT: vsetvli zero, a2, e32, m8, ta, mu ; RV32-NEXT: vle32.v v8, (a0) +; RV32-NEXT: addi a0, sp, 16 +; RV32-NEXT: vs8r.v v8, (a0) # Unknown-size Folded 
Spill ; RV32-NEXT: vle32.v v16, (a1) ; RV32-NEXT: vsetivli zero, 16, e32, m8, ta, mu ; RV32-NEXT: vslidedown.vi v24, v8, 16 -; RV32-NEXT: addi a0, sp, 16 -; RV32-NEXT: vs8r.v v8, (a0) # Unknown-size Folded Spill ; RV32-NEXT: csrr a0, vlenb ; RV32-NEXT: li a1, 24 ; RV32-NEXT: mul a0, a0, a1 @@ -1803,11 +1803,11 @@ define i64 @vwreduce_uadd_v64i64(<64 x i32>* %x) { ; RV64-NEXT: li a2, 32 ; RV64-NEXT: vsetvli zero, a2, e32, m8, ta, mu ; RV64-NEXT: vle32.v v8, (a0) +; RV64-NEXT: addi a0, sp, 16 +; RV64-NEXT: vs8r.v v8, (a0) # Unknown-size Folded Spill ; RV64-NEXT: vle32.v v16, (a1) ; RV64-NEXT: vsetivli zero, 16, e32, m8, ta, mu ; RV64-NEXT: vslidedown.vi v24, v8, 16 -; RV64-NEXT: addi a0, sp, 16 -; RV64-NEXT: vs8r.v v8, (a0) # Unknown-size Folded Spill ; RV64-NEXT: csrr a0, vlenb ; RV64-NEXT: li a1, 24 ; RV64-NEXT: mul a0, a0, a1 diff --git a/llvm/test/CodeGen/RISCV/rvv/insert-subvector.ll b/llvm/test/CodeGen/RISCV/rvv/insert-subvector.ll index 02551c887b65b..33da8b5d8b277 100644 --- a/llvm/test/CodeGen/RISCV/rvv/insert-subvector.ll +++ b/llvm/test/CodeGen/RISCV/rvv/insert-subvector.ll @@ -367,6 +367,7 @@ define @insert_nxv32f16_nxv2f16_26( %ve define @insert_nxv32f16_undef_nxv1f16_0( %subvec) { ; CHECK-LABEL: insert_nxv32f16_undef_nxv1f16_0: ; CHECK: # %bb.0: +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8m8 ; CHECK-NEXT: ret %v = call @llvm.vector.insert.nxv1f16.nxv32f16( undef, %subvec, i64 0) ret %v @@ -380,7 +381,8 @@ define @insert_nxv32f16_undef_nxv1f16_26( @llvm.vector.insert.nxv1f16.nxv32f16( undef, %subvec, i64 26) ret %v diff --git a/llvm/test/CodeGen/RISCV/rvv/rv32-spill-zvlsseg.ll b/llvm/test/CodeGen/RISCV/rvv/rv32-spill-zvlsseg.ll index 62ac11fa68ef1..9ef58a3b92d6e 100644 --- a/llvm/test/CodeGen/RISCV/rvv/rv32-spill-zvlsseg.ll +++ b/llvm/test/CodeGen/RISCV/rvv/rv32-spill-zvlsseg.ll @@ -46,6 +46,7 @@ define @spill_zvlsseg_nxv1i32(i32* %base, i32 %vl) nounwind { ; SPILL-O2-NEXT: vl1r.v v7, (a0) # Unknown-size Folded Reload ; SPILL-O2-NEXT: add a0, a0, a1 ; SPILL-O2-NEXT: vl1r.v v8, (a0) # Unknown-size Folded Reload +; SPILL-O2-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; SPILL-O2-NEXT: csrr a0, vlenb ; SPILL-O2-NEXT: slli a0, a0, 1 ; SPILL-O2-NEXT: add sp, sp, a0 @@ -101,6 +102,7 @@ define @spill_zvlsseg_nxv2i32(i32* %base, i32 %vl) nounwind { ; SPILL-O2-NEXT: vl1r.v v7, (a0) # Unknown-size Folded Reload ; SPILL-O2-NEXT: add a0, a0, a1 ; SPILL-O2-NEXT: vl1r.v v8, (a0) # Unknown-size Folded Reload +; SPILL-O2-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; SPILL-O2-NEXT: csrr a0, vlenb ; SPILL-O2-NEXT: slli a0, a0, 1 ; SPILL-O2-NEXT: add sp, sp, a0 @@ -158,6 +160,7 @@ define @spill_zvlsseg_nxv4i32(i32* %base, i32 %vl) nounwind { ; SPILL-O2-NEXT: vl2r.v v6, (a0) # Unknown-size Folded Reload ; SPILL-O2-NEXT: add a0, a0, a1 ; SPILL-O2-NEXT: vl2r.v v8, (a0) # Unknown-size Folded Reload +; SPILL-O2-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; SPILL-O2-NEXT: csrr a0, vlenb ; SPILL-O2-NEXT: slli a0, a0, 2 ; SPILL-O2-NEXT: add sp, sp, a0 @@ -215,6 +218,7 @@ define @spill_zvlsseg_nxv8i32(i32* %base, i32 %vl) nounwind { ; SPILL-O2-NEXT: vl4r.v v4, (a0) # Unknown-size Folded Reload ; SPILL-O2-NEXT: add a0, a0, a1 ; SPILL-O2-NEXT: vl4r.v v8, (a0) # Unknown-size Folded Reload +; SPILL-O2-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; SPILL-O2-NEXT: csrr a0, vlenb ; SPILL-O2-NEXT: slli a0, a0, 3 ; SPILL-O2-NEXT: add sp, sp, a0 @@ -277,6 +281,7 @@ define @spill_zvlsseg3_nxv4i32(i32* %base, i32 %vl) nounwind ; SPILL-O2-NEXT: vl2r.v v8, (a0) # Unknown-size Folded Reload 
; SPILL-O2-NEXT: add a0, a0, a1 ; SPILL-O2-NEXT: vl2r.v v10, (a0) # Unknown-size Folded Reload +; SPILL-O2-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; SPILL-O2-NEXT: csrr a0, vlenb ; SPILL-O2-NEXT: li a1, 6 ; SPILL-O2-NEXT: mul a0, a0, a1 diff --git a/llvm/test/CodeGen/RISCV/rvv/rv64-spill-zvlsseg.ll b/llvm/test/CodeGen/RISCV/rvv/rv64-spill-zvlsseg.ll index daab4089aba0d..99e0dd97d8dca 100644 --- a/llvm/test/CodeGen/RISCV/rvv/rv64-spill-zvlsseg.ll +++ b/llvm/test/CodeGen/RISCV/rvv/rv64-spill-zvlsseg.ll @@ -46,6 +46,7 @@ define @spill_zvlsseg_nxv1i32(i32* %base, i64 %vl) nounwind { ; SPILL-O2-NEXT: vl1r.v v7, (a0) # Unknown-size Folded Reload ; SPILL-O2-NEXT: add a0, a0, a1 ; SPILL-O2-NEXT: vl1r.v v8, (a0) # Unknown-size Folded Reload +; SPILL-O2-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; SPILL-O2-NEXT: csrr a0, vlenb ; SPILL-O2-NEXT: slli a0, a0, 1 ; SPILL-O2-NEXT: add sp, sp, a0 @@ -101,6 +102,7 @@ define @spill_zvlsseg_nxv2i32(i32* %base, i64 %vl) nounwind { ; SPILL-O2-NEXT: vl1r.v v7, (a0) # Unknown-size Folded Reload ; SPILL-O2-NEXT: add a0, a0, a1 ; SPILL-O2-NEXT: vl1r.v v8, (a0) # Unknown-size Folded Reload +; SPILL-O2-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; SPILL-O2-NEXT: csrr a0, vlenb ; SPILL-O2-NEXT: slli a0, a0, 1 ; SPILL-O2-NEXT: add sp, sp, a0 @@ -158,6 +160,7 @@ define @spill_zvlsseg_nxv4i32(i32* %base, i64 %vl) nounwind { ; SPILL-O2-NEXT: vl2r.v v6, (a0) # Unknown-size Folded Reload ; SPILL-O2-NEXT: add a0, a0, a1 ; SPILL-O2-NEXT: vl2r.v v8, (a0) # Unknown-size Folded Reload +; SPILL-O2-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; SPILL-O2-NEXT: csrr a0, vlenb ; SPILL-O2-NEXT: slli a0, a0, 2 ; SPILL-O2-NEXT: add sp, sp, a0 @@ -215,6 +218,7 @@ define @spill_zvlsseg_nxv8i32(i32* %base, i64 %vl) nounwind { ; SPILL-O2-NEXT: vl4r.v v4, (a0) # Unknown-size Folded Reload ; SPILL-O2-NEXT: add a0, a0, a1 ; SPILL-O2-NEXT: vl4r.v v8, (a0) # Unknown-size Folded Reload +; SPILL-O2-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; SPILL-O2-NEXT: csrr a0, vlenb ; SPILL-O2-NEXT: slli a0, a0, 3 ; SPILL-O2-NEXT: add sp, sp, a0 @@ -277,6 +281,7 @@ define @spill_zvlsseg3_nxv4i32(i32* %base, i64 %vl) nounwind ; SPILL-O2-NEXT: vl2r.v v8, (a0) # Unknown-size Folded Reload ; SPILL-O2-NEXT: add a0, a0, a1 ; SPILL-O2-NEXT: vl2r.v v10, (a0) # Unknown-size Folded Reload +; SPILL-O2-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; SPILL-O2-NEXT: csrr a0, vlenb ; SPILL-O2-NEXT: li a1, 6 ; SPILL-O2-NEXT: mul a0, a0, a1 diff --git a/llvm/test/CodeGen/RISCV/rvv/vfptrunc-vp.ll b/llvm/test/CodeGen/RISCV/rvv/vfptrunc-vp.ll index 24ffabb00924e..0f4ea22a7822e 100644 --- a/llvm/test/CodeGen/RISCV/rvv/vfptrunc-vp.ll +++ b/llvm/test/CodeGen/RISCV/rvv/vfptrunc-vp.ll @@ -94,7 +94,14 @@ declare @llvm.vp.fptrunc.nxv16f64.nxv16f32( @vfptrunc_nxv16f32_nxv16f64( %a, %m, i32 zeroext %vl) { ; CHECK-LABEL: vfptrunc_nxv16f32_nxv16f64: ; CHECK: # %bb.0: +; CHECK-NEXT: addi sp, sp, -16 +; CHECK-NEXT: .cfi_def_cfa_offset 16 +; CHECK-NEXT: csrr a1, vlenb +; CHECK-NEXT: slli a1, a1, 3 +; CHECK-NEXT: sub sp, sp, a1 ; CHECK-NEXT: vmv1r.v v24, v0 +; CHECK-NEXT: addi a1, sp, 16 +; CHECK-NEXT: vs8r.v v8, (a1) # Unknown-size Folded Spill ; CHECK-NEXT: li a2, 0 ; CHECK-NEXT: csrr a1, vlenb ; CHECK-NEXT: srli a4, a1, 3 @@ -106,15 +113,20 @@ define @vfptrunc_nxv16f32_nxv16f64( ; CHECK-NEXT: mv a2, a3 ; CHECK-NEXT: .LBB7_2: ; CHECK-NEXT: vsetvli zero, a2, e32, m4, ta, mu -; CHECK-NEXT: vfncvt.f.f.w v28, v16, v0.t +; CHECK-NEXT: vfncvt.f.f.w v12, v16, v0.t ; CHECK-NEXT: bltu 
a0, a1, .LBB7_4 ; CHECK-NEXT: # %bb.3: ; CHECK-NEXT: mv a0, a1 ; CHECK-NEXT: .LBB7_4: ; CHECK-NEXT: vsetvli zero, a0, e32, m4, ta, mu ; CHECK-NEXT: vmv1r.v v0, v24 -; CHECK-NEXT: vfncvt.f.f.w v24, v8, v0.t -; CHECK-NEXT: vmv8r.v v8, v24 +; CHECK-NEXT: addi a0, sp, 16 +; CHECK-NEXT: vl8re8.v v16, (a0) # Unknown-size Folded Reload +; CHECK-NEXT: vfncvt.f.f.w v8, v16, v0.t +; CHECK-NEXT: csrr a0, vlenb +; CHECK-NEXT: slli a0, a0, 3 +; CHECK-NEXT: add sp, sp, a0 +; CHECK-NEXT: addi sp, sp, 16 ; CHECK-NEXT: ret %v = call @llvm.vp.fptrunc.nxv16f64.nxv16f32( %a, %m, i32 %vl) ret %v diff --git a/llvm/test/CodeGen/RISCV/rvv/vloxseg-rv32.ll b/llvm/test/CodeGen/RISCV/rvv/vloxseg-rv32.ll index 749ea71a4665f..f9fcc4a0906b1 100644 --- a/llvm/test/CodeGen/RISCV/rvv/vloxseg-rv32.ll +++ b/llvm/test/CodeGen/RISCV/rvv/vloxseg-rv32.ll @@ -24,6 +24,7 @@ define @test_vloxseg2_mask_nxv16i16_nxv16i16(,} @llvm.riscv.vloxseg2.mask.nxv16i16.nxv16i16( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -53,6 +54,7 @@ define @test_vloxseg2_mask_nxv16i16_nxv16i8(,} @llvm.riscv.vloxseg2.mask.nxv16i16.nxv16i8( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -82,6 +84,7 @@ define @test_vloxseg2_mask_nxv16i16_nxv16i32(,} @llvm.riscv.vloxseg2.mask.nxv16i16.nxv16i32( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -111,6 +114,7 @@ define @test_vloxseg2_mask_nxv1i8_nxv1i8( %va ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i8.nxv1i8( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -140,6 +144,7 @@ define @test_vloxseg2_mask_nxv1i8_nxv1i32( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i8.nxv1i32( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -169,6 +174,7 @@ define @test_vloxseg2_mask_nxv1i8_nxv1i16( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i8.nxv1i16( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -195,11 +201,12 @@ entry: define @test_vloxseg3_mask_nxv1i8_nxv1i8( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i8.nxv1i8( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -226,11 +233,12 @@ entry: define @test_vloxseg3_mask_nxv1i8_nxv1i32( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, 
v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i8.nxv1i32( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -257,11 +265,12 @@ entry: define @test_vloxseg3_mask_nxv1i8_nxv1i16( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i8.nxv1i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -289,9 +298,9 @@ define @test_vloxseg4_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vloxseg4_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -322,9 +331,9 @@ define @test_vloxseg4_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vloxseg4_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -355,9 +364,9 @@ define @test_vloxseg4_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vloxseg4_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -388,10 +397,10 @@ define @test_vloxseg5_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vloxseg5_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -422,10 +431,10 @@ define @test_vloxseg5_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg5ei32.v 
v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -456,10 +465,10 @@ define @test_vloxseg5_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -490,11 +499,11 @@ define @test_vloxseg6_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vloxseg6_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -525,11 +534,11 @@ define @test_vloxseg6_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -560,11 +569,11 @@ define @test_vloxseg6_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -595,12 +604,12 @@ define @test_vloxseg7_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vloxseg7_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -631,12 +640,12 @@ define @test_vloxseg7_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, 
v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -667,12 +676,12 @@ define @test_vloxseg7_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -703,13 +712,13 @@ define @test_vloxseg8_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vloxseg8_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -740,13 +749,13 @@ define @test_vloxseg8_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -777,13 +786,13 @@ define @test_vloxseg8_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -816,6 +825,7 @@ define @test_vloxseg2_mask_nxv16i8_nxv16i16(,} @llvm.riscv.vloxseg2.mask.nxv16i8.nxv16i16( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -845,6 +855,7 @@ define @test_vloxseg2_mask_nxv16i8_nxv16i8( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 
killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv16i8.nxv16i8( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -874,6 +885,7 @@ define @test_vloxseg2_mask_nxv16i8_nxv16i32(,} @llvm.riscv.vloxseg2.mask.nxv16i8.nxv16i32( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -901,9 +913,10 @@ define @test_vloxseg3_mask_nxv16i8_nxv16i16(,,} @llvm.riscv.vloxseg3.mask.nxv16i8.nxv16i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -930,11 +943,12 @@ entry: define @test_vloxseg3_mask_nxv16i8_nxv16i8( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv16i8.nxv16i8( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -962,9 +976,10 @@ define @test_vloxseg3_mask_nxv16i8_nxv16i32(,,} @llvm.riscv.vloxseg3.mask.nxv16i8.nxv16i32( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -991,12 +1006,13 @@ entry: define @test_vloxseg4_mask_nxv16i8_nxv16i16( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv16i8_nxv16i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu -; CHECK-NEXT: vloxseg4ei16.v v6, (a0), v16, v0.t +; CHECK-NEXT: vloxseg4ei16.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv2r.v v8, v18 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv16i8.nxv16i16( %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -1024,9 +1040,9 @@ define @test_vloxseg4_mask_nxv16i8_nxv16i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -1057,10 +1073,11 @@ define @test_vloxseg4_mask_nxv16i8_nxv16i32(,,,} @llvm.riscv.vloxseg4.mask.nxv16i8.nxv16i32( %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -1090,6 +1107,7 @@ define @test_vloxseg2_mask_nxv2i32_nxv2i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i32.nxv2i32( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -1119,6 +1137,7 @@ define @test_vloxseg2_mask_nxv2i32_nxv2i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} 
@llvm.riscv.vloxseg2.mask.nxv2i32.nxv2i8( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -1148,6 +1167,7 @@ define @test_vloxseg2_mask_nxv2i32_nxv2i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i32.nxv2i16( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -1174,11 +1194,12 @@ entry: define @test_vloxseg3_mask_nxv2i32_nxv2i32( %val, i32* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i32.nxv2i32( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -1205,11 +1226,12 @@ entry: define @test_vloxseg3_mask_nxv2i32_nxv2i8( %val, i32* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i32.nxv2i8( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -1236,11 +1258,12 @@ entry: define @test_vloxseg3_mask_nxv2i32_nxv2i16( %val, i32* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i32.nxv2i16( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -1268,9 +1291,9 @@ define @test_vloxseg4_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vloxseg4_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1301,9 +1324,9 @@ define @test_vloxseg4_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), 
v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1334,9 +1357,9 @@ define @test_vloxseg4_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vloxseg4_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1367,10 +1390,10 @@ define @test_vloxseg5_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vloxseg5_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1401,10 +1424,10 @@ define @test_vloxseg5_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vloxseg5_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1435,10 +1458,10 @@ define @test_vloxseg5_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vloxseg5_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1469,11 +1492,11 @@ define @test_vloxseg6_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vloxseg6_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1504,11 +1527,11 @@ define @test_vloxseg6_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vloxseg6_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1539,11 +1562,11 @@ define @test_vloxseg6_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vloxseg6_mask_nxv2i32_nxv2i16: ; CHECK: # 
%bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1574,12 +1597,12 @@ define @test_vloxseg7_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vloxseg7_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1610,12 +1633,12 @@ define @test_vloxseg7_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vloxseg7_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1646,12 +1669,12 @@ define @test_vloxseg7_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vloxseg7_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1682,13 +1705,13 @@ define @test_vloxseg8_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vloxseg8_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1719,13 +1742,13 @@ define @test_vloxseg8_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vloxseg8_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: 
vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1756,13 +1779,13 @@ define @test_vloxseg8_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vloxseg8_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1795,6 +1818,7 @@ define @test_vloxseg2_mask_nxv4i16_nxv4i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i16.nxv4i16( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -1824,6 +1848,7 @@ define @test_vloxseg2_mask_nxv4i16_nxv4i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i16.nxv4i8( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -1853,6 +1878,7 @@ define @test_vloxseg2_mask_nxv4i16_nxv4i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i16.nxv4i32( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -1879,11 +1905,12 @@ entry: define @test_vloxseg3_mask_nxv4i16_nxv4i16( %val, i16* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i16.nxv4i16( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -1910,11 +1937,12 @@ entry: define @test_vloxseg3_mask_nxv4i16_nxv4i8( %val, i16* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, 
v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i16.nxv4i8( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -1942,9 +1970,10 @@ define @test_vloxseg3_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vloxseg3_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i16.nxv4i32( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -1972,9 +2001,9 @@ define @test_vloxseg4_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vloxseg4_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2005,9 +2034,9 @@ define @test_vloxseg4_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2037,12 +2066,13 @@ entry: define @test_vloxseg4_mask_nxv4i16_nxv4i32( %val, i16* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: vloxseg4ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv4i16.nxv4i32( %val, %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -2070,10 +2100,10 @@ define @test_vloxseg5_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vloxseg5_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2104,10 +2134,10 @@ define @test_vloxseg5_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vloxseg5_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, 
ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2138,10 +2168,10 @@ define @test_vloxseg5_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vloxseg5_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -2172,11 +2202,11 @@ define @test_vloxseg6_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vloxseg6_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2207,11 +2237,11 @@ define @test_vloxseg6_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vloxseg6_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2242,11 +2272,11 @@ define @test_vloxseg6_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vloxseg6_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg6ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -2277,12 +2307,12 @@ define @test_vloxseg7_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vloxseg7_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2313,12 +2343,12 @@ define @test_vloxseg7_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vloxseg7_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: 
vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2349,12 +2379,12 @@ define @test_vloxseg7_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vloxseg7_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -2385,13 +2415,13 @@ define @test_vloxseg8_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vloxseg8_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2422,13 +2452,13 @@ define @test_vloxseg8_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vloxseg8_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2459,13 +2489,13 @@ define @test_vloxseg8_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vloxseg8_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -2498,6 +2528,7 @@ define @test_vloxseg2_mask_nxv1i32_nxv1i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i32.nxv1i8( %val, 
%val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -2527,6 +2558,7 @@ define @test_vloxseg2_mask_nxv1i32_nxv1i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i32.nxv1i32( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -2556,6 +2588,7 @@ define @test_vloxseg2_mask_nxv1i32_nxv1i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i32.nxv1i16( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -2582,11 +2615,12 @@ entry: define @test_vloxseg3_mask_nxv1i32_nxv1i8( %val, i32* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i32.nxv1i8( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -2613,11 +2647,12 @@ entry: define @test_vloxseg3_mask_nxv1i32_nxv1i32( %val, i32* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i32.nxv1i32( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -2644,11 +2679,12 @@ entry: define @test_vloxseg3_mask_nxv1i32_nxv1i16( %val, i32* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i32.nxv1i16( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -2676,9 +2712,9 @@ define @test_vloxseg4_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2709,9 +2745,9 @@ define @test_vloxseg4_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: 
test_vloxseg4_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2742,9 +2778,9 @@ define @test_vloxseg4_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: test_vloxseg4_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2775,10 +2811,10 @@ define @test_vloxseg5_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2809,10 +2845,10 @@ define @test_vloxseg5_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2843,10 +2879,10 @@ define @test_vloxseg5_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2877,11 +2913,11 @@ define @test_vloxseg6_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2912,11 +2948,11 @@ define @test_vloxseg6_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; 
CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2947,11 +2983,11 @@ define @test_vloxseg6_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2982,12 +3018,12 @@ define @test_vloxseg7_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3018,12 +3054,12 @@ define @test_vloxseg7_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3054,12 +3090,12 @@ define @test_vloxseg7_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3090,13 +3126,13 @@ define @test_vloxseg8_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: test_vloxseg8_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli 
zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3127,13 +3163,13 @@ define @test_vloxseg8_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vloxseg8_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3164,13 +3200,13 @@ define @test_vloxseg8_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: test_vloxseg8_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3203,6 +3239,7 @@ define @test_vloxseg2_mask_nxv8i16_nxv8i16( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i16.nxv8i16( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -3232,6 +3269,7 @@ define @test_vloxseg2_mask_nxv8i16_nxv8i8( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i16.nxv8i8( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -3261,6 +3299,7 @@ define @test_vloxseg2_mask_nxv8i16_nxv8i32( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i16.nxv8i32( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -3287,11 +3326,12 @@ entry: define @test_vloxseg3_mask_nxv8i16_nxv8i16( %val, i16* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i16.nxv8i16( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -3318,11 +3358,12 @@ entry: define 
@test_vloxseg3_mask_nxv8i16_nxv8i8( %val, i16* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i16.nxv8i8( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -3350,9 +3391,10 @@ define @test_vloxseg3_mask_nxv8i16_nxv8i32( ; CHECK-LABEL: test_vloxseg3_mask_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i16.nxv8i32( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -3380,9 +3422,9 @@ define @test_vloxseg4_mask_nxv8i16_nxv8i16( ; CHECK-LABEL: test_vloxseg4_mask_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -3413,9 +3455,9 @@ define @test_vloxseg4_mask_nxv8i16_nxv8i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -3445,12 +3487,13 @@ entry: define @test_vloxseg4_mask_nxv8i16_nxv8i32( %val, i16* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vloxseg4ei32.v v6, (a0), v16, v0.t +; CHECK-NEXT: vloxseg4ei32.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv2r.v v8, v18 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv8i16.nxv8i32( %val, %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -3480,6 +3523,7 @@ define @test_vloxseg2_mask_nxv8i8_nxv8i16( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i8.nxv8i16( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -3509,6 +3553,7 @@ define @test_vloxseg2_mask_nxv8i8_nxv8i8( %va ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, 
(a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i8.nxv8i8( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -3538,6 +3583,7 @@ define @test_vloxseg2_mask_nxv8i8_nxv8i32( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i8.nxv8i32( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -3565,9 +3611,10 @@ define @test_vloxseg3_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vloxseg3_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i8.nxv8i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -3594,11 +3641,12 @@ entry: define @test_vloxseg3_mask_nxv8i8_nxv8i8( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i8.nxv8i8( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -3626,9 +3674,10 @@ define @test_vloxseg3_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vloxseg3_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i8.nxv8i32( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -3655,12 +3704,13 @@ entry: define @test_vloxseg4_mask_nxv8i8_nxv8i16( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu -; CHECK-NEXT: vloxseg4ei16.v v7, (a0), v12, v0.t +; CHECK-NEXT: vloxseg4ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv8i8.nxv8i16( %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -3688,9 +3738,9 @@ define @test_vloxseg4_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vloxseg4_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, 
ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3721,10 +3771,11 @@ define @test_vloxseg4_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vloxseg4_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -3752,10 +3803,10 @@ define @test_vloxseg5_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -3786,10 +3837,10 @@ define @test_vloxseg5_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vloxseg5_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3820,11 +3871,12 @@ define @test_vloxseg5_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vloxseg5.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -3852,11 +3904,11 @@ define @test_vloxseg6_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -3887,11 +3939,11 @@ define @test_vloxseg6_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vloxseg6_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: 
vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3921,14 +3973,15 @@ entry: define @test_vloxseg6_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg6_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu -; CHECK-NEXT: vloxseg6ei32.v v7, (a0), v16, v0.t +; CHECK-NEXT: vloxseg6ei32.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv1r.v v8, v17 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vloxseg6.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -3956,12 +4009,12 @@ define @test_vloxseg7_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -3992,12 +4045,12 @@ define @test_vloxseg7_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vloxseg7_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4028,12 +4081,12 @@ define @test_vloxseg7_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -4064,13 +4117,13 @@ define @test_vloxseg8_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v 
v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -4101,13 +4154,13 @@ define @test_vloxseg8_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vloxseg8_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4138,13 +4191,13 @@ define @test_vloxseg8_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -4177,6 +4230,7 @@ define @test_vloxseg2_mask_nxv8i32_nxv8i16( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i32.nxv8i16( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -4206,6 +4260,7 @@ define @test_vloxseg2_mask_nxv8i32_nxv8i8( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i32.nxv8i8( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -4235,6 +4290,7 @@ define @test_vloxseg2_mask_nxv8i32_nxv8i32( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i32.nxv8i32( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -4264,6 +4320,7 @@ define @test_vloxseg2_mask_nxv4i8_nxv4i16( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i8.nxv4i16( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4293,6 +4350,7 @@ define @test_vloxseg2_mask_nxv4i8_nxv4i8( %va ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: 
vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i8.nxv4i8( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4322,6 +4380,7 @@ define @test_vloxseg2_mask_nxv4i8_nxv4i32( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i8.nxv4i32( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4348,11 +4407,12 @@ entry: define @test_vloxseg3_mask_nxv4i8_nxv4i16( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i8.nxv4i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4379,11 +4439,12 @@ entry: define @test_vloxseg3_mask_nxv4i8_nxv4i8( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i8.nxv4i8( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4411,9 +4472,10 @@ define @test_vloxseg3_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vloxseg3_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i8.nxv4i32( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4441,9 +4503,9 @@ define @test_vloxseg4_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vloxseg4_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4474,9 +4536,9 @@ define @test_vloxseg4_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vloxseg4_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), 
v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4506,12 +4568,13 @@ entry: define @test_vloxseg4_mask_nxv4i8_nxv4i32( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vloxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: vloxseg4ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv4i8.nxv4i32( %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4539,10 +4602,10 @@ define @test_vloxseg5_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4573,10 +4636,10 @@ define @test_vloxseg5_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vloxseg5_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4607,10 +4670,10 @@ define @test_vloxseg5_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -4641,11 +4704,11 @@ define @test_vloxseg6_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4676,11 +4739,11 @@ define @test_vloxseg6_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vloxseg6_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v 
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -4711,11 +4774,11 @@ define @test_vloxseg6_mask_nxv4i8_nxv4i32( %v
 ; CHECK-LABEL: test_vloxseg6_mask_nxv4i8_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vloxseg6ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v13
@@ -4746,12 +4809,12 @@ define @test_vloxseg7_mask_nxv4i8_nxv4i16( %v
 ; CHECK-LABEL: test_vloxseg7_mask_nxv4i8_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -4782,12 +4845,12 @@ define @test_vloxseg7_mask_nxv4i8_nxv4i8( %va
 ; CHECK-LABEL: test_vloxseg7_mask_nxv4i8_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -4818,12 +4881,12 @@ define @test_vloxseg7_mask_nxv4i8_nxv4i32( %v
 ; CHECK-LABEL: test_vloxseg7_mask_nxv4i8_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vloxseg7ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v13
@@ -4854,13 +4917,13 @@ define @test_vloxseg8_mask_nxv4i8_nxv4i16( %v
 ; CHECK-LABEL: test_vloxseg8_mask_nxv4i8_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -4891,13 +4954,13 @@ define @test_vloxseg8_mask_nxv4i8_nxv4i8( %va
 ; CHECK-LABEL: test_vloxseg8_mask_nxv4i8_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -4928,13 +4991,13 @@ define @test_vloxseg8_mask_nxv4i8_nxv4i32( %v
 ; CHECK-LABEL: test_vloxseg8_mask_nxv4i8_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vloxseg8ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v13
@@ -4967,6 +5030,7 @@ define @test_vloxseg2_mask_nxv1i16_nxv1i8(
 ; CHECK-NEXT: vmv1r.v v7, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i16.nxv1i8( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -4996,6 +5060,7 @@ define @test_vloxseg2_mask_nxv1i16_nxv1i32(
 ; CHECK-NEXT: vmv1r.v v7, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i16.nxv1i32( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -5025,6 +5090,7 @@ define @test_vloxseg2_mask_nxv1i16_nxv1i16(
 ; CHECK-NEXT: vmv1r.v v7, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i16.nxv1i16( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -5051,11 +5117,12 @@ entry:
 define @test_vloxseg3_mask_nxv1i16_nxv1i8( %val, i16* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
-; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i16.nxv1i8( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -5082,11 +5149,12 @@ entry:
 define @test_vloxseg3_mask_nxv1i16_nxv1i32( %val, i16* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
-; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i16.nxv1i32( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -5113,11 +5181,12 @@ entry:
 define @test_vloxseg3_mask_nxv1i16_nxv1i16( %val, i16* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
-; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i16.nxv1i16( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -5145,9 +5214,9 @@ define @test_vloxseg4_mask_nxv1i16_nxv1i8(
 ; CHECK-LABEL: test_vloxseg4_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5178,9 +5247,9 @@ define @test_vloxseg4_mask_nxv1i16_nxv1i32(
 ; CHECK-LABEL: test_vloxseg4_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg4ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5211,9 +5280,9 @@ define @test_vloxseg4_mask_nxv1i16_nxv1i16(
 ; CHECK-LABEL: test_vloxseg4_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5244,10 +5313,10 @@ define @test_vloxseg5_mask_nxv1i16_nxv1i8(
 ; CHECK-LABEL: test_vloxseg5_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5278,10 +5347,10 @@ define @test_vloxseg5_mask_nxv1i16_nxv1i32(
 ; CHECK-LABEL: test_vloxseg5_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg5ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5312,10 +5381,10 @@ define @test_vloxseg5_mask_nxv1i16_nxv1i16(
 ; CHECK-LABEL: test_vloxseg5_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5346,11 +5415,11 @@ define @test_vloxseg6_mask_nxv1i16_nxv1i8(
 ; CHECK-LABEL: test_vloxseg6_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5381,11 +5450,11 @@ define @test_vloxseg6_mask_nxv1i16_nxv1i32(
 ; CHECK-LABEL: test_vloxseg6_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg6ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5416,11 +5485,11 @@ define @test_vloxseg6_mask_nxv1i16_nxv1i16(
 ; CHECK-LABEL: test_vloxseg6_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5451,12 +5520,12 @@ define @test_vloxseg7_mask_nxv1i16_nxv1i8(
 ; CHECK-LABEL: test_vloxseg7_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5487,12 +5556,12 @@ define @test_vloxseg7_mask_nxv1i16_nxv1i32(
 ; CHECK-LABEL: test_vloxseg7_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg7ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5523,12 +5592,12 @@ define @test_vloxseg7_mask_nxv1i16_nxv1i16(
 ; CHECK-LABEL: test_vloxseg7_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5559,13 +5628,13 @@ define @test_vloxseg8_mask_nxv1i16_nxv1i8(
 ; CHECK-LABEL: test_vloxseg8_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5596,13 +5665,13 @@ define @test_vloxseg8_mask_nxv1i16_nxv1i32(
 ; CHECK-LABEL: test_vloxseg8_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg8ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5633,13 +5702,13 @@ define @test_vloxseg8_mask_nxv1i16_nxv1i16(
 ; CHECK-LABEL: test_vloxseg8_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5672,6 +5741,7 @@ define @test_vloxseg2_mask_nxv32i8_nxv32i16(
,} @llvm.riscv.vloxseg2.mask.nxv32i8.nxv32i16( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -5701,6 +5771,7 @@ define @test_vloxseg2_mask_nxv32i8_nxv32i8(
 ; CHECK-NEXT: vmv4r.v v4, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu
 ; CHECK-NEXT: vloxseg2ei8.v v4, (a0), v12, v0.t
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv32i8.nxv32i8( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -5730,6 +5801,7 @@ define @test_vloxseg2_mask_nxv2i8_nxv2i32( %v
 ; CHECK-NEXT: vmv1r.v v7, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i8.nxv2i32( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -5759,6 +5831,7 @@ define @test_vloxseg2_mask_nxv2i8_nxv2i8( %va
 ; CHECK-NEXT: vmv1r.v v7, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i8.nxv2i8( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -5788,6 +5861,7 @@ define @test_vloxseg2_mask_nxv2i8_nxv2i16( %v
 ; CHECK-NEXT: vmv1r.v v7, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i8.nxv2i16( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -5814,11 +5888,12 @@ entry:
 define @test_vloxseg3_mask_nxv2i8_nxv2i32( %val, i8* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv2i8_nxv2i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
-; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i8.nxv2i32( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -5845,11 +5920,12 @@ entry:
 define @test_vloxseg3_mask_nxv2i8_nxv2i8( %val, i8* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv2i8_nxv2i8:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
-; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i8.nxv2i8( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -5876,11 +5952,12 @@ entry:
 define @test_vloxseg3_mask_nxv2i8_nxv2i16( %val, i8* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv2i8_nxv2i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
-; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i8.nxv2i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -5908,9 +5985,9 @@ define @test_vloxseg4_mask_nxv2i8_nxv2i32( %v
 ; CHECK-LABEL: test_vloxseg4_mask_nxv2i8_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg4ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5941,9 +6018,9 @@ define @test_vloxseg4_mask_nxv2i8_nxv2i8( %va
 ; CHECK-LABEL: test_vloxseg4_mask_nxv2i8_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -5974,9 +6051,9 @@ define @test_vloxseg4_mask_nxv2i8_nxv2i16( %v
 ; CHECK-LABEL: test_vloxseg4_mask_nxv2i8_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6007,10 +6084,10 @@ define @test_vloxseg5_mask_nxv2i8_nxv2i32( %v
 ; CHECK-LABEL: test_vloxseg5_mask_nxv2i8_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg5ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6041,10 +6118,10 @@ define @test_vloxseg5_mask_nxv2i8_nxv2i8( %va
 ; CHECK-LABEL: test_vloxseg5_mask_nxv2i8_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6075,10 +6152,10 @@ define @test_vloxseg5_mask_nxv2i8_nxv2i16( %v
 ; CHECK-LABEL: test_vloxseg5_mask_nxv2i8_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6109,11 +6186,11 @@ define @test_vloxseg6_mask_nxv2i8_nxv2i32( %v
 ; CHECK-LABEL: test_vloxseg6_mask_nxv2i8_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg6ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6144,11 +6221,11 @@ define @test_vloxseg6_mask_nxv2i8_nxv2i8( %va
 ; CHECK-LABEL: test_vloxseg6_mask_nxv2i8_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6179,11 +6256,11 @@ define @test_vloxseg6_mask_nxv2i8_nxv2i16( %v
 ; CHECK-LABEL: test_vloxseg6_mask_nxv2i8_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6214,12 +6291,12 @@ define @test_vloxseg7_mask_nxv2i8_nxv2i32( %v
 ; CHECK-LABEL: test_vloxseg7_mask_nxv2i8_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg7ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6250,12 +6327,12 @@ define @test_vloxseg7_mask_nxv2i8_nxv2i8( %va
 ; CHECK-LABEL: test_vloxseg7_mask_nxv2i8_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6286,12 +6363,12 @@ define @test_vloxseg7_mask_nxv2i8_nxv2i16( %v
 ; CHECK-LABEL: test_vloxseg7_mask_nxv2i8_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6322,13 +6399,13 @@ define @test_vloxseg8_mask_nxv2i8_nxv2i32( %v
 ; CHECK-LABEL: test_vloxseg8_mask_nxv2i8_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg8ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6359,13 +6436,13 @@ define @test_vloxseg8_mask_nxv2i8_nxv2i8( %va
 ; CHECK-LABEL: test_vloxseg8_mask_nxv2i8_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6396,13 +6473,13 @@ define @test_vloxseg8_mask_nxv2i8_nxv2i16( %v
 ; CHECK-LABEL: test_vloxseg8_mask_nxv2i8_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6435,6 +6512,7 @@ define @test_vloxseg2_mask_nxv2i16_nxv2i32(
 ; CHECK-NEXT: vmv1r.v v7, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i16.nxv2i32( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -6464,6 +6542,7 @@ define @test_vloxseg2_mask_nxv2i16_nxv2i8(
 ; CHECK-NEXT: vmv1r.v v7, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i16.nxv2i8( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -6493,6 +6572,7 @@ define @test_vloxseg2_mask_nxv2i16_nxv2i16(
 ; CHECK-NEXT: vmv1r.v v7, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i16.nxv2i16( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -6519,11 +6599,12 @@ entry:
 define @test_vloxseg3_mask_nxv2i16_nxv2i32( %val, i16* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv2i16_nxv2i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
-; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i16.nxv2i32( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -6550,11 +6631,12 @@ entry:
 define @test_vloxseg3_mask_nxv2i16_nxv2i8( %val, i16* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv2i16_nxv2i8:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
-; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i16.nxv2i8( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -6581,11 +6663,12 @@ entry:
 define @test_vloxseg3_mask_nxv2i16_nxv2i16( %val, i16* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv2i16_nxv2i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
-; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i16.nxv2i16( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -6613,9 +6696,9 @@ define @test_vloxseg4_mask_nxv2i16_nxv2i32(
 ; CHECK-LABEL: test_vloxseg4_mask_nxv2i16_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg4ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6646,9 +6729,9 @@ define @test_vloxseg4_mask_nxv2i16_nxv2i8(
 ; CHECK-LABEL: test_vloxseg4_mask_nxv2i16_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6679,9 +6762,9 @@ define @test_vloxseg4_mask_nxv2i16_nxv2i16(
 ; CHECK-LABEL: test_vloxseg4_mask_nxv2i16_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6712,10 +6795,10 @@ define @test_vloxseg5_mask_nxv2i16_nxv2i32(
 ; CHECK-LABEL: test_vloxseg5_mask_nxv2i16_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg5ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6746,10 +6829,10 @@ define @test_vloxseg5_mask_nxv2i16_nxv2i8(
 ; CHECK-LABEL: test_vloxseg5_mask_nxv2i16_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6780,10 +6863,10 @@ define @test_vloxseg5_mask_nxv2i16_nxv2i16(
 ; CHECK-LABEL: test_vloxseg5_mask_nxv2i16_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6814,11 +6897,11 @@ define @test_vloxseg6_mask_nxv2i16_nxv2i32(
 ; CHECK-LABEL: test_vloxseg6_mask_nxv2i16_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg6ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6849,11 +6932,11 @@ define @test_vloxseg6_mask_nxv2i16_nxv2i8(
 ; CHECK-LABEL: test_vloxseg6_mask_nxv2i16_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6884,11 +6967,11 @@ define @test_vloxseg6_mask_nxv2i16_nxv2i16(
 ; CHECK-LABEL: test_vloxseg6_mask_nxv2i16_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6919,12 +7002,12 @@ define @test_vloxseg7_mask_nxv2i16_nxv2i32(
 ; CHECK-LABEL: test_vloxseg7_mask_nxv2i16_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg7ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6955,12 +7038,12 @@ define @test_vloxseg7_mask_nxv2i16_nxv2i8(
 ; CHECK-LABEL: test_vloxseg7_mask_nxv2i16_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -6991,12 +7074,12 @@ define @test_vloxseg7_mask_nxv2i16_nxv2i16(
 ; CHECK-LABEL: test_vloxseg7_mask_nxv2i16_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -7027,13 +7110,13 @@ define @test_vloxseg8_mask_nxv2i16_nxv2i32(
 ; CHECK-LABEL: test_vloxseg8_mask_nxv2i16_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg8ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -7064,13 +7147,13 @@ define @test_vloxseg8_mask_nxv2i16_nxv2i8(
 ; CHECK-LABEL: test_vloxseg8_mask_nxv2i16_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -7101,13 +7184,13 @@ define @test_vloxseg8_mask_nxv2i16_nxv2i16(
 ; CHECK-LABEL: test_vloxseg8_mask_nxv2i16_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: vmv1r.v v8, v11
@@ -7140,6 +7223,7 @@ define @test_vloxseg2_mask_nxv4i32_nxv4i16(
 ; CHECK-NEXT: vmv2r.v v6, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vloxseg2ei16.v v6, (a0), v10, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i32.nxv4i16( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -7169,6 +7253,7 @@ define @test_vloxseg2_mask_nxv4i32_nxv4i8(
 ; CHECK-NEXT: vmv2r.v v6, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vloxseg2ei8.v v6, (a0), v10, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i32.nxv4i8( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -7198,6 +7283,7 @@ define @test_vloxseg2_mask_nxv4i32_nxv4i32(
 ; CHECK-NEXT: vmv2r.v v6, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vloxseg2ei32.v v6, (a0), v10, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i32.nxv4i32( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -7224,11 +7310,12 @@ entry:
 define @test_vloxseg3_mask_nxv4i32_nxv4i16( %val, i32* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv4i32_nxv4i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv1r.v v12, v10
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT: vloxseg3ei16.v v6, (a0), v12, v0.t
+; CHECK-NEXT: vloxseg3ei16.v v12, (a0), v10, v0.t
+; CHECK-NEXT: vmv2r.v v8, v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i32.nxv4i16( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -7255,11 +7342,12 @@ entry:
 define @test_vloxseg3_mask_nxv4i32_nxv4i8( %val, i32* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv4i32_nxv4i8:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv1r.v v12, v10
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT: vloxseg3ei8.v v6, (a0), v12, v0.t
+; CHECK-NEXT: vloxseg3ei8.v v12, (a0), v10, v0.t
+; CHECK-NEXT: vmv2r.v v8, v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i32.nxv4i8( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -7286,11 +7374,12 @@ entry:
 define @test_vloxseg3_mask_nxv4i32_nxv4i32( %val, i32* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv4i32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT: vloxseg3ei32.v v6, (a0), v12, v0.t
+; CHECK-NEXT: vloxseg3ei32.v v12, (a0), v10, v0.t
+; CHECK-NEXT: vmv2r.v v8, v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i32.nxv4i32( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -7318,9 +7407,9 @@ define @test_vloxseg4_mask_nxv4i32_nxv4i16(
 ; CHECK-LABEL: test_vloxseg4_mask_nxv4i32_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vloxseg4ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: vmv2r.v v8, v14
@@ -7351,9 +7440,9 @@ define @test_vloxseg4_mask_nxv4i32_nxv4i8(
 ; CHECK-LABEL: test_vloxseg4_mask_nxv4i32_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vloxseg4ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: vmv2r.v v8, v14
@@ -7384,9 +7473,9 @@ define @test_vloxseg4_mask_nxv4i32_nxv4i32(
 ; CHECK-LABEL: test_vloxseg4_mask_nxv4i32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vloxseg4ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: vmv2r.v v8, v14
@@ -7419,6 +7508,7 @@ define @test_vloxseg2_mask_nxv16f16_nxv16i16(
,} @llvm.riscv.vloxseg2.mask.nxv16f16.nxv16i16( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1)
@@ -7448,6 +7538,7 @@ define @test_vloxseg2_mask_nxv16f16_nxv16i8(
,} @llvm.riscv.vloxseg2.mask.nxv16f16.nxv16i8( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1)
@@ -7477,6 +7568,7 @@ define @test_vloxseg2_mask_nxv16f16_nxv16i32(
,} @llvm.riscv.vloxseg2.mask.nxv16f16.nxv16i32( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1)
@@ -7506,6 +7598,7 @@ define @test_vloxseg2_mask_nxv4f64_nxv4i16(
,} @llvm.riscv.vloxseg2.mask.nxv4f64.nxv4i16( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1)
@@ -7535,6 +7628,7 @@ define @test_vloxseg2_mask_nxv4f64_nxv4i8(
,} @llvm.riscv.vloxseg2.mask.nxv4f64.nxv4i8( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1)
@@ -7564,6 +7658,7 @@ define @test_vloxseg2_mask_nxv4f64_nxv4i32(
,} @llvm.riscv.vloxseg2.mask.nxv4f64.nxv4i32( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1)
@@ -7593,6 +7688,7 @@ define @test_vloxseg2_mask_nxv1f64_nxv1i8(
,} @llvm.riscv.vloxseg2.mask.nxv1f64.nxv1i8( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1)
@@ -7622,6 +7718,7 @@ define @test_vloxseg2_mask_nxv1f64_nxv1i32(
,} @llvm.riscv.vloxseg2.mask.nxv1f64.nxv1i32( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1)
@@ -7651,6 +7748,7 @@ define @test_vloxseg2_mask_nxv1f64_nxv1i16(
,} @llvm.riscv.vloxseg2.mask.nxv1f64.nxv1i16( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1)
@@ -7677,11 +7775,12 @@ entry:
 define @test_vloxseg3_mask_nxv1f64_nxv1i8( %val, double* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv1f64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
-; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f64.nxv1i8( %val, %val, %val, double* %base, %index, %mask, i32 %vl, i32 1)
@@ -7708,11 +7807,12 @@ entry:
 define @test_vloxseg3_mask_nxv1f64_nxv1i32( %val, double* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv1f64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
-; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f64.nxv1i32( %val, %val, %val, double* %base, %index, %mask, i32 %vl, i32 1)
@@ -7739,11 +7839,12 @@ entry:
 define @test_vloxseg3_mask_nxv1f64_nxv1i16( %val, double* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv1f64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
-; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f64.nxv1i16( %val, %val, %val, double* %base, %index, %mask, i32 %vl, i32 1)
@@ -7771,9 +7872,9 @@ define @test_vloxseg4_mask_nxv1f64_nxv1i8( @test_vloxseg4_mask_nxv1f64_nxv1i32( @test_vloxseg4_mask_nxv1f64_nxv1i16( @test_vloxseg5_mask_nxv1f64_nxv1i8( @test_vloxseg5_mask_nxv1f64_nxv1i32( @test_vloxseg5_mask_nxv1f64_nxv1i16( @test_vloxseg6_mask_nxv1f64_nxv1i8( @test_vloxseg6_mask_nxv1f64_nxv1i32( @test_vloxseg6_mask_nxv1f64_nxv1i16( @test_vloxseg7_mask_nxv1f64_nxv1i8( @test_vloxseg7_mask_nxv1f64_nxv1i32( @test_vloxseg7_mask_nxv1f64_nxv1i16( @test_vloxseg8_mask_nxv1f64_nxv1i8( @test_vloxseg8_mask_nxv1f64_nxv1i32( @test_vloxseg8_mask_nxv1f64_nxv1i16( @test_vloxseg2_mask_nxv2f32_nxv2i32(
,} @llvm.riscv.vloxseg2.mask.nxv2f32.nxv2i32( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1)
@@ -8327,6 +8429,7 @@ define @test_vloxseg2_mask_nxv2f32_nxv2i8(
,} @llvm.riscv.vloxseg2.mask.nxv2f32.nxv2i8( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1)
@@ -8356,6 +8459,7 @@ define @test_vloxseg2_mask_nxv2f32_nxv2i16(
,} @llvm.riscv.vloxseg2.mask.nxv2f32.nxv2i16( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1)
@@ -8382,11 +8486,12 @@ entry:
 define @test_vloxseg3_mask_nxv2f32_nxv2i32( %val, float* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv2f32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
+; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f32.nxv2i32( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1)
@@ -8413,11 +8518,12 @@ entry:
 define @test_vloxseg3_mask_nxv2f32_nxv2i8( %val, float* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv2f32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f32.nxv2i8( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1)
@@ -8444,11 +8550,12 @@ entry:
 define @test_vloxseg3_mask_nxv2f32_nxv2i16( %val, float* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv2f32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f32.nxv2i16( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1)
@@ -8476,9 +8583,9 @@ define @test_vloxseg4_mask_nxv2f32_nxv2i32( @test_vloxseg4_mask_nxv2f32_nxv2i8( @test_vloxseg4_mask_nxv2f32_nxv2i16( @test_vloxseg5_mask_nxv2f32_nxv2i32( @test_vloxseg5_mask_nxv2f32_nxv2i8( @test_vloxseg5_mask_nxv2f32_nxv2i16( @test_vloxseg6_mask_nxv2f32_nxv2i32( @test_vloxseg6_mask_nxv2f32_nxv2i8( @test_vloxseg6_mask_nxv2f32_nxv2i16( @test_vloxseg7_mask_nxv2f32_nxv2i32( @test_vloxseg7_mask_nxv2f32_nxv2i8( @test_vloxseg7_mask_nxv2f32_nxv2i16( @test_vloxseg8_mask_nxv2f32_nxv2i32( @test_vloxseg8_mask_nxv2f32_nxv2i8( @test_vloxseg8_mask_nxv2f32_nxv2i16( @test_vloxseg2_mask_nxv1f16_nxv1i8(
,} @llvm.riscv.vloxseg2.mask.nxv1f16.nxv1i8( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1)
@@ -9032,6 +9140,7 @@ define @test_vloxseg2_mask_nxv1f16_nxv1i32(
,} @llvm.riscv.vloxseg2.mask.nxv1f16.nxv1i32( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1)
@@ -9061,6 +9170,7 @@ define @test_vloxseg2_mask_nxv1f16_nxv1i16(
,} @llvm.riscv.vloxseg2.mask.nxv1f16.nxv1i16( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1)
@@ -9087,11 +9197,12 @@ entry:
 define @test_vloxseg3_mask_nxv1f16_nxv1i8( %val, half* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv1f16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
-; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f16.nxv1i8( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1)
@@ -9118,11 +9229,12 @@ entry:
 define @test_vloxseg3_mask_nxv1f16_nxv1i32( %val, half* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv1f16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
-; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f16.nxv1i32( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1)
@@ -9149,11 +9261,12 @@ entry:
 define @test_vloxseg3_mask_nxv1f16_nxv1i16( %val, half* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv1f16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
-; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f16.nxv1i16( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1)
@@ -9181,9 +9294,9 @@ define @test_vloxseg4_mask_nxv1f16_nxv1i8( @test_vloxseg4_mask_nxv1f16_nxv1i32( @test_vloxseg4_mask_nxv1f16_nxv1i16( @test_vloxseg5_mask_nxv1f16_nxv1i8( @test_vloxseg5_mask_nxv1f16_nxv1i32( @test_vloxseg5_mask_nxv1f16_nxv1i16( @test_vloxseg6_mask_nxv1f16_nxv1i8( @test_vloxseg6_mask_nxv1f16_nxv1i32( @test_vloxseg6_mask_nxv1f16_nxv1i16( @test_vloxseg7_mask_nxv1f16_nxv1i8( @test_vloxseg7_mask_nxv1f16_nxv1i32( @test_vloxseg7_mask_nxv1f16_nxv1i16( @test_vloxseg8_mask_nxv1f16_nxv1i8( @test_vloxseg8_mask_nxv1f16_nxv1i32( @test_vloxseg8_mask_nxv1f16_nxv1i16( @test_vloxseg2_mask_nxv1f32_nxv1i8(
,} @llvm.riscv.vloxseg2.mask.nxv1f32.nxv1i8( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1)
@@ -9737,6 +9851,7 @@ define @test_vloxseg2_mask_nxv1f32_nxv1i32(
,} @llvm.riscv.vloxseg2.mask.nxv1f32.nxv1i32( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1)
@@ -9766,6 +9881,7 @@ define @test_vloxseg2_mask_nxv1f32_nxv1i16(
,} @llvm.riscv.vloxseg2.mask.nxv1f32.nxv1i16( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1)
@@ -9792,11 +9908,12 @@ entry:
 define @test_vloxseg3_mask_nxv1f32_nxv1i8( %val, float* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv1f32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
-; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f32.nxv1i8( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1)
@@ -9823,11 +9940,12 @@ entry:
 define @test_vloxseg3_mask_nxv1f32_nxv1i32( %val, float* %base, %index, i32 %vl, %mask) {
 ; CHECK-LABEL: test_vloxseg3_mask_nxv1f32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f32.nxv1i32( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -9854,11 +9972,12 @@ entry: define @test_vloxseg3_mask_nxv1f32_nxv1i16( %val, float* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f32.nxv1i16( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -9886,9 +10005,9 @@ define @test_vloxseg4_mask_nxv1f32_nxv1i8( @test_vloxseg4_mask_nxv1f32_nxv1i32( @test_vloxseg4_mask_nxv1f32_nxv1i16( @test_vloxseg5_mask_nxv1f32_nxv1i8( @test_vloxseg5_mask_nxv1f32_nxv1i32( @test_vloxseg5_mask_nxv1f32_nxv1i16( @test_vloxseg6_mask_nxv1f32_nxv1i8( @test_vloxseg6_mask_nxv1f32_nxv1i32( @test_vloxseg6_mask_nxv1f32_nxv1i16( @test_vloxseg7_mask_nxv1f32_nxv1i8( @test_vloxseg7_mask_nxv1f32_nxv1i32( @test_vloxseg7_mask_nxv1f32_nxv1i16( @test_vloxseg8_mask_nxv1f32_nxv1i8( @test_vloxseg8_mask_nxv1f32_nxv1i32( @test_vloxseg8_mask_nxv1f32_nxv1i16( @test_vloxseg2_mask_nxv8f16_nxv8i16(,} @llvm.riscv.vloxseg2.mask.nxv8f16.nxv8i16( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -10442,6 +10562,7 @@ define @test_vloxseg2_mask_nxv8f16_nxv8i8(,} @llvm.riscv.vloxseg2.mask.nxv8f16.nxv8i8( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -10471,6 +10592,7 @@ define @test_vloxseg2_mask_nxv8f16_nxv8i32(,} @llvm.riscv.vloxseg2.mask.nxv8f16.nxv8i32( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -10497,11 +10619,12 @@ entry: define @test_vloxseg3_mask_nxv8f16_nxv8i16( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8f16.nxv8i16( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -10528,11 +10651,12 @@ entry: define @test_vloxseg3_mask_nxv8f16_nxv8i8( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8f16.nxv8i8( %val, %val, %val, half* %base, %index, %mask, i32 %vl, 
i32 1) @@ -10560,9 +10684,10 @@ define @test_vloxseg3_mask_nxv8f16_nxv8i32(,,} @llvm.riscv.vloxseg3.mask.nxv8f16.nxv8i32( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -10590,9 +10715,9 @@ define @test_vloxseg4_mask_nxv8f16_nxv8i16( @test_vloxseg4_mask_nxv8f16_nxv8i8( @test_vloxseg4_mask_nxv8f16_nxv8i32( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vloxseg4ei32.v v6, (a0), v16, v0.t +; CHECK-NEXT: vloxseg4ei32.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv2r.v v8, v18 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv8f16.nxv8i32( %val, %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -10690,6 +10816,7 @@ define @test_vloxseg2_mask_nxv8f32_nxv8i16(,} @llvm.riscv.vloxseg2.mask.nxv8f32.nxv8i16( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -10719,6 +10846,7 @@ define @test_vloxseg2_mask_nxv8f32_nxv8i8(,} @llvm.riscv.vloxseg2.mask.nxv8f32.nxv8i8( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -10748,6 +10876,7 @@ define @test_vloxseg2_mask_nxv8f32_nxv8i32(,} @llvm.riscv.vloxseg2.mask.nxv8f32.nxv8i32( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -10777,6 +10906,7 @@ define @test_vloxseg2_mask_nxv2f64_nxv2i32(,} @llvm.riscv.vloxseg2.mask.nxv2f64.nxv2i32( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -10806,6 +10936,7 @@ define @test_vloxseg2_mask_nxv2f64_nxv2i8(,} @llvm.riscv.vloxseg2.mask.nxv2f64.nxv2i8( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -10835,6 +10966,7 @@ define @test_vloxseg2_mask_nxv2f64_nxv2i16(,} @llvm.riscv.vloxseg2.mask.nxv2f64.nxv2i16( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -10861,11 +10993,12 @@ entry: define @test_vloxseg3_mask_nxv2f64_nxv2i32( %val, double* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f64.nxv2i32( %val, %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -10892,11 +11025,12 @@ entry: define @test_vloxseg3_mask_nxv2f64_nxv2i8( %val, double* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f64.nxv2i8( %val, %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -10923,11 +11057,12 @@ entry: define 
@test_vloxseg3_mask_nxv2f64_nxv2i16( %val, double* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f64.nxv2i16( %val, %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -10955,9 +11090,9 @@ define @test_vloxseg4_mask_nxv2f64_nxv2i32( @test_vloxseg4_mask_nxv2f64_nxv2i8( @test_vloxseg4_mask_nxv2f64_nxv2i16( @test_vloxseg2_mask_nxv4f16_nxv4i16(,} @llvm.riscv.vloxseg2.mask.nxv4f16.nxv4i16( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11085,6 +11221,7 @@ define @test_vloxseg2_mask_nxv4f16_nxv4i8(,} @llvm.riscv.vloxseg2.mask.nxv4f16.nxv4i8( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11114,6 +11251,7 @@ define @test_vloxseg2_mask_nxv4f16_nxv4i32(,} @llvm.riscv.vloxseg2.mask.nxv4f16.nxv4i32( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11140,11 +11278,12 @@ entry: define @test_vloxseg3_mask_nxv4f16_nxv4i16( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4f16.nxv4i16( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11171,11 +11310,12 @@ entry: define @test_vloxseg3_mask_nxv4f16_nxv4i8( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4f16.nxv4i8( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11203,9 +11343,10 @@ define @test_vloxseg3_mask_nxv4f16_nxv4i32(,,} @llvm.riscv.vloxseg3.mask.nxv4f16.nxv4i32( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11233,9 +11374,9 @@ define @test_vloxseg4_mask_nxv4f16_nxv4i16( @test_vloxseg4_mask_nxv4f16_nxv4i8( @test_vloxseg4_mask_nxv4f16_nxv4i32( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: vloxseg4ei32.v v12, (a0), 
v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv4f16.nxv4i32( %val, %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11331,10 +11473,10 @@ define @test_vloxseg5_mask_nxv4f16_nxv4i16( @test_vloxseg5_mask_nxv4f16_nxv4i8( @test_vloxseg5_mask_nxv4f16_nxv4i32( @test_vloxseg6_mask_nxv4f16_nxv4i16( @test_vloxseg6_mask_nxv4f16_nxv4i8( @test_vloxseg6_mask_nxv4f16_nxv4i32( @test_vloxseg7_mask_nxv4f16_nxv4i16( @test_vloxseg7_mask_nxv4f16_nxv4i8( @test_vloxseg7_mask_nxv4f16_nxv4i32( @test_vloxseg8_mask_nxv4f16_nxv4i16( @test_vloxseg8_mask_nxv4f16_nxv4i8( @test_vloxseg8_mask_nxv4f16_nxv4i32( @test_vloxseg2_mask_nxv2f16_nxv2i32(,} @llvm.riscv.vloxseg2.mask.nxv2f16.nxv2i32( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11788,6 +11931,7 @@ define @test_vloxseg2_mask_nxv2f16_nxv2i8(,} @llvm.riscv.vloxseg2.mask.nxv2f16.nxv2i8( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11817,6 +11961,7 @@ define @test_vloxseg2_mask_nxv2f16_nxv2i16(,} @llvm.riscv.vloxseg2.mask.nxv2f16.nxv2i16( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11843,11 +11988,12 @@ entry: define @test_vloxseg3_mask_nxv2f16_nxv2i32( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f16.nxv2i32( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11874,11 +12020,12 @@ entry: define @test_vloxseg3_mask_nxv2f16_nxv2i8( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f16.nxv2i8( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11905,11 +12052,12 @@ entry: define @test_vloxseg3_mask_nxv2f16_nxv2i16( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f16.nxv2i16( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11937,9 +12085,9 @@ define @test_vloxseg4_mask_nxv2f16_nxv2i32( @test_vloxseg4_mask_nxv2f16_nxv2i8( @test_vloxseg4_mask_nxv2f16_nxv2i16( @test_vloxseg5_mask_nxv2f16_nxv2i32( @test_vloxseg5_mask_nxv2f16_nxv2i8( @test_vloxseg5_mask_nxv2f16_nxv2i16( 
@test_vloxseg6_mask_nxv2f16_nxv2i32( @test_vloxseg6_mask_nxv2f16_nxv2i8( @test_vloxseg6_mask_nxv2f16_nxv2i16( @test_vloxseg7_mask_nxv2f16_nxv2i32( @test_vloxseg7_mask_nxv2f16_nxv2i8( @test_vloxseg7_mask_nxv2f16_nxv2i16( @test_vloxseg8_mask_nxv2f16_nxv2i32( @test_vloxseg8_mask_nxv2f16_nxv2i8( @test_vloxseg8_mask_nxv2f16_nxv2i16( @test_vloxseg2_mask_nxv4f32_nxv4i16(,} @llvm.riscv.vloxseg2.mask.nxv4f32.nxv4i16( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -12493,6 +12642,7 @@ define @test_vloxseg2_mask_nxv4f32_nxv4i8(,} @llvm.riscv.vloxseg2.mask.nxv4f32.nxv4i8( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -12522,6 +12672,7 @@ define @test_vloxseg2_mask_nxv4f32_nxv4i32(,} @llvm.riscv.vloxseg2.mask.nxv4f32.nxv4i32( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -12548,11 +12699,12 @@ entry: define @test_vloxseg3_mask_nxv4f32_nxv4i16( %val, float* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4f32_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4f32.nxv4i16( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -12579,11 +12731,12 @@ entry: define @test_vloxseg3_mask_nxv4f32_nxv4i8( %val, float* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4f32_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4f32.nxv4i8( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -12610,11 +12763,12 @@ entry: define @test_vloxseg3_mask_nxv4f32_nxv4i32( %val, float* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4f32_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4f32.nxv4i32( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -12642,9 +12796,9 @@ define @test_vloxseg4_mask_nxv4f32_nxv4i16( @test_vloxseg4_mask_nxv4f32_nxv4i8( @test_vloxseg4_mask_nxv4f32_nxv4i32( @test_vloxseg2_mask_nxv16i16_nxv16i16(,} @llvm.riscv.vloxseg2.mask.nxv16i16.nxv16i16( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -53,6 +54,7 @@ define @test_vloxseg2_mask_nxv16i16_nxv16i8(,} @llvm.riscv.vloxseg2.mask.nxv16i16.nxv16i8( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -82,6 +84,7 @@ define @test_vloxseg2_mask_nxv16i16_nxv16i32(,} @llvm.riscv.vloxseg2.mask.nxv16i16.nxv16i32( %val, %val, i16* %base, %index, 
%mask, i64 %vl, i64 1) @@ -111,6 +114,7 @@ define @test_vloxseg2_mask_nxv4i32_nxv4i32( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i32.nxv4i32( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -140,6 +144,7 @@ define @test_vloxseg2_mask_nxv4i32_nxv4i8( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i32.nxv4i8( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -169,6 +174,7 @@ define @test_vloxseg2_mask_nxv4i32_nxv4i64( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v6, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i32.nxv4i64( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -198,6 +204,7 @@ define @test_vloxseg2_mask_nxv4i32_nxv4i16( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i32.nxv4i16( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -224,11 +231,12 @@ entry: define @test_vloxseg3_mask_nxv4i32_nxv4i32( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i32.nxv4i32( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -255,11 +263,12 @@ entry: define @test_vloxseg3_mask_nxv4i32_nxv4i8( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i32.nxv4i8( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -287,9 +296,10 @@ define @test_vloxseg3_mask_nxv4i32_nxv4i64( ; CHECK-LABEL: test_vloxseg3_mask_nxv4i32_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vloxseg3ei64.v v6, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i32.nxv4i64( %val, %val, %val, i32* %base, %index, 
%mask, i64 %vl, i64 1) @@ -316,11 +326,12 @@ entry: define @test_vloxseg3_mask_nxv4i32_nxv4i16( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i32.nxv4i16( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -348,9 +359,9 @@ define @test_vloxseg4_mask_nxv4i32_nxv4i32( ; CHECK-LABEL: test_vloxseg4_mask_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -381,9 +392,9 @@ define @test_vloxseg4_mask_nxv4i32_nxv4i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -413,12 +424,13 @@ entry: define @test_vloxseg4_mask_nxv4i32_nxv4i64( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv4i32_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vloxseg4ei64.v v6, (a0), v16, v0.t +; CHECK-NEXT: vloxseg4ei64.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv2r.v v8, v18 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv4i32.nxv4i64( %val, %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -446,9 +458,9 @@ define @test_vloxseg4_mask_nxv4i32_nxv4i16( ; CHECK-LABEL: test_vloxseg4_mask_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -481,6 +493,7 @@ define @test_vloxseg2_mask_nxv16i8_nxv16i16(,} @llvm.riscv.vloxseg2.mask.nxv16i8.nxv16i16( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -510,6 +523,7 @@ define @test_vloxseg2_mask_nxv16i8_nxv16i8( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv16i8.nxv16i8( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -539,6 
+553,7 @@ define @test_vloxseg2_mask_nxv16i8_nxv16i32(,} @llvm.riscv.vloxseg2.mask.nxv16i8.nxv16i32( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -566,9 +581,10 @@ define @test_vloxseg3_mask_nxv16i8_nxv16i16(,,} @llvm.riscv.vloxseg3.mask.nxv16i8.nxv16i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -595,11 +611,12 @@ entry: define @test_vloxseg3_mask_nxv16i8_nxv16i8( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv16i8.nxv16i8( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -627,9 +644,10 @@ define @test_vloxseg3_mask_nxv16i8_nxv16i32(,,} @llvm.riscv.vloxseg3.mask.nxv16i8.nxv16i32( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -656,12 +674,13 @@ entry: define @test_vloxseg4_mask_nxv16i8_nxv16i16( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv16i8_nxv16i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu -; CHECK-NEXT: vloxseg4ei16.v v6, (a0), v16, v0.t +; CHECK-NEXT: vloxseg4ei16.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv2r.v v8, v18 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv16i8.nxv16i16( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -689,9 +708,9 @@ define @test_vloxseg4_mask_nxv16i8_nxv16i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -722,10 +741,11 @@ define @test_vloxseg4_mask_nxv16i8_nxv16i32(,,,} @llvm.riscv.vloxseg4.mask.nxv16i8.nxv16i32( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -755,6 +775,7 @@ define @test_vloxseg2_mask_nxv1i64_nxv1i64( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i64.nxv1i64( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -784,6 +805,7 @@ define @test_vloxseg2_mask_nxv1i64_nxv1i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i64.nxv1i32( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -813,6 +835,7 @@ define @test_vloxseg2_mask_nxv1i64_nxv1i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, 
a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i64.nxv1i16( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -842,6 +865,7 @@ define @test_vloxseg2_mask_nxv1i64_nxv1i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i64.nxv1i8( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -868,11 +892,12 @@ entry: define @test_vloxseg3_mask_nxv1i64_nxv1i64( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vloxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i64.nxv1i64( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -899,11 +924,12 @@ entry: define @test_vloxseg3_mask_nxv1i64_nxv1i32( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i64.nxv1i32( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -930,11 +956,12 @@ entry: define @test_vloxseg3_mask_nxv1i64_nxv1i16( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i64.nxv1i16( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -961,11 +988,12 @@ entry: define @test_vloxseg3_mask_nxv1i64_nxv1i8( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i64.nxv1i8( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -993,9 +1021,9 @@ define @test_vloxseg4_mask_nxv1i64_nxv1i64( ; 
CHECK-LABEL: test_vloxseg4_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1026,9 +1054,9 @@ define @test_vloxseg4_mask_nxv1i64_nxv1i32( ; CHECK-LABEL: test_vloxseg4_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1059,9 +1087,9 @@ define @test_vloxseg4_mask_nxv1i64_nxv1i16( ; CHECK-LABEL: test_vloxseg4_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1092,9 +1120,9 @@ define @test_vloxseg4_mask_nxv1i64_nxv1i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1125,10 +1153,10 @@ define @test_vloxseg5_mask_nxv1i64_nxv1i64( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1159,10 +1187,10 @@ define @test_vloxseg5_mask_nxv1i64_nxv1i32( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1193,10 +1221,10 @@ define @test_vloxseg5_mask_nxv1i64_nxv1i16( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 
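Note on the pattern repeated through these vloxseg hunks: each masked segment-load intrinsic defines a register tuple (v10_v11_v12 for the seg3 cases), and the tests return a single segment of that tuple. With subregister liveness unavailable, the allocator treats the tuple as one unit, so it seeds every segment with a whole-register copy of the %val passthru and then copies the requested segment back into v8 after the load. The two-segment cases need no trailing copy and only gain a "# kill: def $v8 ..." line, which is the printed comment for a KILL pseudo-instruction and emits no machine code. Below is a minimal sketch of the shape these tests share, with the scalable-vector element types written out in full; the function name @sketch_vloxseg3_mask and the choice of nxv1i64 are illustrative only, and the extractvalue of element 1 is inferred from the final "vmv1r.v v8, v11" copy in the updated output:

  declare {<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>}
    @llvm.riscv.vloxseg3.mask.nxv1i64.nxv1i64(
      <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>,
      i64*, <vscale x 1 x i64>, <vscale x 1 x i1>, i64, i64)

  define <vscale x 1 x i64> @sketch_vloxseg3_mask(<vscale x 1 x i64> %val,
      i64* %base, <vscale x 1 x i64> %index, i64 %vl, <vscale x 1 x i1> %mask) {
  entry:
    ; The three %val passthru operands become the vmv1r.v copies into
    ; v10/v11/v12; the indexed load then defines the whole v10_v11_v12 tuple.
    %0 = tail call {<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>}
      @llvm.riscv.vloxseg3.mask.nxv1i64.nxv1i64(
        <vscale x 1 x i64> %val, <vscale x 1 x i64> %val, <vscale x 1 x i64> %val,
        i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask,
        i64 %vl, i64 1)
    ; Returning one segment of the tuple is what forces the trailing
    ; vmv1r.v v8, v11 in the generated code.
    %1 = extractvalue {<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>} %0, 1
    ret <vscale x 1 x i64> %1
  }

The CHECK churn itself looks mechanical, in the style produced by llvm/utils/update_llc_test_checks.py: for seg4 and larger, the copy chains already existed and only their source register changes (v8 becomes v10, and so on), while the seg2 and seg3 cases pick up one extra whole-register copy or a kill comment.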
@@ -1227,10 +1255,10 @@ define @test_vloxseg5_mask_nxv1i64_nxv1i8( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1261,11 +1289,11 @@ define @test_vloxseg6_mask_nxv1i64_nxv1i64( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1296,11 +1324,11 @@ define @test_vloxseg6_mask_nxv1i64_nxv1i32( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1331,11 +1359,11 @@ define @test_vloxseg6_mask_nxv1i64_nxv1i16( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1366,11 +1394,11 @@ define @test_vloxseg6_mask_nxv1i64_nxv1i8( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1401,12 +1429,12 @@ define @test_vloxseg7_mask_nxv1i64_nxv1i64( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; 
CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1437,12 +1465,12 @@ define @test_vloxseg7_mask_nxv1i64_nxv1i32( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1473,12 +1501,12 @@ define @test_vloxseg7_mask_nxv1i64_nxv1i16( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1509,12 +1537,12 @@ define @test_vloxseg7_mask_nxv1i64_nxv1i8( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1545,13 +1573,13 @@ define @test_vloxseg8_mask_nxv1i64_nxv1i64( ; CHECK-LABEL: test_vloxseg8_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1582,13 +1610,13 @@ define @test_vloxseg8_mask_nxv1i64_nxv1i32( ; CHECK-LABEL: test_vloxseg8_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, 
v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1619,13 +1647,13 @@ define @test_vloxseg8_mask_nxv1i64_nxv1i16( ; CHECK-LABEL: test_vloxseg8_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1656,13 +1684,13 @@ define @test_vloxseg8_mask_nxv1i64_nxv1i8( ; CHECK-LABEL: test_vloxseg8_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1695,6 +1723,7 @@ define @test_vloxseg2_mask_nxv1i32_nxv1i64( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i32.nxv1i64( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1724,6 +1753,7 @@ define @test_vloxseg2_mask_nxv1i32_nxv1i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i32.nxv1i32( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1753,6 +1783,7 @@ define @test_vloxseg2_mask_nxv1i32_nxv1i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i32.nxv1i16( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1782,6 +1813,7 @@ define @test_vloxseg2_mask_nxv1i32_nxv1i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i32.nxv1i8( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1808,11 +1840,12 @@ entry: define @test_vloxseg3_mask_nxv1i32_nxv1i64( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, 
v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i32.nxv1i64( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1839,11 +1872,12 @@ entry: define @test_vloxseg3_mask_nxv1i32_nxv1i32( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i32.nxv1i32( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1870,11 +1904,12 @@ entry: define @test_vloxseg3_mask_nxv1i32_nxv1i16( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i32.nxv1i16( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1901,11 +1936,12 @@ entry: define @test_vloxseg3_mask_nxv1i32_nxv1i8( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i32.nxv1i8( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1933,9 +1969,9 @@ define @test_vloxseg4_mask_nxv1i32_nxv1i64( ; CHECK-LABEL: test_vloxseg4_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1966,9 +2002,9 @@ define @test_vloxseg4_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vloxseg4_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1999,9 +2035,9 @@ define @test_vloxseg4_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: 
test_vloxseg4_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2032,9 +2068,9 @@ define @test_vloxseg4_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2065,10 +2101,10 @@ define @test_vloxseg5_mask_nxv1i32_nxv1i64( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2099,10 +2135,10 @@ define @test_vloxseg5_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2133,10 +2169,10 @@ define @test_vloxseg5_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2167,10 +2203,10 @@ define @test_vloxseg5_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2201,11 +2237,11 @@ define @test_vloxseg6_mask_nxv1i32_nxv1i64( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; 
CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2236,11 +2272,11 @@ define @test_vloxseg6_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2271,11 +2307,11 @@ define @test_vloxseg6_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2306,11 +2342,11 @@ define @test_vloxseg6_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2341,12 +2377,12 @@ define @test_vloxseg7_mask_nxv1i32_nxv1i64( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2377,12 +2413,12 @@ define @test_vloxseg7_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2413,12 +2449,12 @@ define @test_vloxseg7_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; 
CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2449,12 +2485,12 @@ define @test_vloxseg7_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2485,13 +2521,13 @@ define @test_vloxseg8_mask_nxv1i32_nxv1i64( ; CHECK-LABEL: test_vloxseg8_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2522,13 +2558,13 @@ define @test_vloxseg8_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vloxseg8_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2559,13 +2595,13 @@ define @test_vloxseg8_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: test_vloxseg8_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2596,13 +2632,13 @@ define @test_vloxseg8_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: 
test_vloxseg8_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2635,6 +2671,7 @@ define @test_vloxseg2_mask_nxv8i16_nxv8i16( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i16.nxv8i16( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2664,6 +2701,7 @@ define @test_vloxseg2_mask_nxv8i16_nxv8i8( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i16.nxv8i8( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2693,6 +2731,7 @@ define @test_vloxseg2_mask_nxv8i16_nxv8i64( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v6, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i16.nxv8i64( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2722,6 +2761,7 @@ define @test_vloxseg2_mask_nxv8i16_nxv8i32( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i16.nxv8i32( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2748,11 +2788,12 @@ entry: define @test_vloxseg3_mask_nxv8i16_nxv8i16( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i16.nxv8i16( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2779,11 +2820,12 @@ entry: define @test_vloxseg3_mask_nxv8i16_nxv8i8( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = 
tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i16.nxv8i8( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2811,9 +2853,10 @@ define @test_vloxseg3_mask_nxv8i16_nxv8i64( ; CHECK-LABEL: test_vloxseg3_mask_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg3ei64.v v6, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i16.nxv8i64( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2841,9 +2884,10 @@ define @test_vloxseg3_mask_nxv8i16_nxv8i32( ; CHECK-LABEL: test_vloxseg3_mask_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i16.nxv8i32( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2871,9 +2915,9 @@ define @test_vloxseg4_mask_nxv8i16_nxv8i16( ; CHECK-LABEL: test_vloxseg4_mask_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -2904,9 +2948,9 @@ define @test_vloxseg4_mask_nxv8i16_nxv8i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -2937,10 +2981,11 @@ define @test_vloxseg4_mask_nxv8i16_nxv8i64( ; CHECK-LABEL: test_vloxseg4_mask_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v10, v6 +; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vloxseg4ei64.v v6, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv8i16.nxv8i64( %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2967,12 +3012,13 @@ entry: define @test_vloxseg4_mask_nxv8i16_nxv8i32( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vloxseg4ei32.v v6, (a0), v16, v0.t +; CHECK-NEXT: vloxseg4ei32.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv2r.v v8, v18 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv8i16.nxv8i32( %val, 
%val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -3002,6 +3048,7 @@ define @test_vloxseg2_mask_nxv4i8_nxv4i32( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i8.nxv4i32( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3031,6 +3078,7 @@ define @test_vloxseg2_mask_nxv4i8_nxv4i8( %va ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i8.nxv4i8( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3060,6 +3108,7 @@ define @test_vloxseg2_mask_nxv4i8_nxv4i64( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i8.nxv4i64( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3089,6 +3138,7 @@ define @test_vloxseg2_mask_nxv4i8_nxv4i16( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i8.nxv4i16( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3116,9 +3166,10 @@ define @test_vloxseg3_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vloxseg3_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i8.nxv4i32( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3145,11 +3196,12 @@ entry: define @test_vloxseg3_mask_nxv4i8_nxv4i8( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i8.nxv4i8( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3177,9 +3229,10 @@ define @test_vloxseg3_mask_nxv4i8_nxv4i64( %v ; CHECK-LABEL: test_vloxseg3_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg3ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i8.nxv4i64( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3206,11 +3259,12 @@ entry: define @test_vloxseg3_mask_nxv4i8_nxv4i16( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: 
test_vloxseg3_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i8.nxv4i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3237,12 +3291,13 @@ entry: define @test_vloxseg4_mask_nxv4i8_nxv4i32( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vloxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: vloxseg4ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv4i8.nxv4i32( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3270,9 +3325,9 @@ define @test_vloxseg4_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vloxseg4_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3303,10 +3358,11 @@ define @test_vloxseg4_mask_nxv4i8_nxv4i64( %v ; CHECK-LABEL: test_vloxseg4_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv4i8.nxv4i64( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3334,9 +3390,9 @@ define @test_vloxseg4_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vloxseg4_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3367,10 +3423,10 @@ define @test_vloxseg5_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -3401,10 +3457,10 @@ define 
@test_vloxseg5_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vloxseg5_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3435,11 +3491,12 @@ define @test_vloxseg5_mask_nxv4i8_nxv4i64( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vloxseg5.mask.nxv4i8.nxv4i64( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3467,10 +3524,10 @@ define @test_vloxseg5_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3501,11 +3558,11 @@ define @test_vloxseg6_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -3536,11 +3593,11 @@ define @test_vloxseg6_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vloxseg6_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3570,14 +3627,15 @@ entry: define @test_vloxseg6_mask_nxv4i8_nxv4i64( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg6_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: 
vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vloxseg6ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: vloxseg6ei64.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv1r.v v8, v17 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vloxseg6.mask.nxv4i8.nxv4i64( %val, %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3605,11 +3663,11 @@ define @test_vloxseg6_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3640,12 +3698,12 @@ define @test_vloxseg7_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -3676,12 +3734,12 @@ define @test_vloxseg7_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vloxseg7_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3712,12 +3770,12 @@ define @test_vloxseg7_mask_nxv4i8_nxv4i64( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -3748,12 +3806,12 @@ define @test_vloxseg7_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 
+; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3784,13 +3842,13 @@ define @test_vloxseg8_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -3821,13 +3879,13 @@ define @test_vloxseg8_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vloxseg8_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3858,13 +3916,13 @@ define @test_vloxseg8_mask_nxv4i8_nxv4i64( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -3895,13 +3953,13 @@ define @test_vloxseg8_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3934,6 +3992,7 @@ define @test_vloxseg2_mask_nxv1i16_nxv1i64( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i16.nxv1i64( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -3963,6 +4022,7 @@ 
define @test_vloxseg2_mask_nxv1i16_nxv1i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i16.nxv1i32( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -3992,6 +4052,7 @@ define @test_vloxseg2_mask_nxv1i16_nxv1i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i16.nxv1i16( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -4021,6 +4082,7 @@ define @test_vloxseg2_mask_nxv1i16_nxv1i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i16.nxv1i8( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -4047,11 +4109,12 @@ entry: define @test_vloxseg3_mask_nxv1i16_nxv1i64( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vloxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i16.nxv1i64( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -4078,11 +4141,12 @@ entry: define @test_vloxseg3_mask_nxv1i16_nxv1i32( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i16.nxv1i32( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -4109,11 +4173,12 @@ entry: define @test_vloxseg3_mask_nxv1i16_nxv1i16( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i16.nxv1i16( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -4140,11 +4205,12 @@ entry: define @test_vloxseg3_mask_nxv1i16_nxv1i8( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: 
vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i16.nxv1i8( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -4172,9 +4238,9 @@ define @test_vloxseg4_mask_nxv1i16_nxv1i64( ; CHECK-LABEL: test_vloxseg4_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4205,9 +4271,9 @@ define @test_vloxseg4_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: test_vloxseg4_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4238,9 +4304,9 @@ define @test_vloxseg4_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vloxseg4_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4271,9 +4337,9 @@ define @test_vloxseg4_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4304,10 +4370,10 @@ define @test_vloxseg5_mask_nxv1i16_nxv1i64( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4338,10 +4404,10 @@ define @test_vloxseg5_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4372,10 +4438,10 @@ define 
@test_vloxseg5_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4406,10 +4472,10 @@ define @test_vloxseg5_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vloxseg5_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4440,11 +4506,11 @@ define @test_vloxseg6_mask_nxv1i16_nxv1i64( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4475,11 +4541,11 @@ define @test_vloxseg6_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4510,11 +4576,11 @@ define @test_vloxseg6_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4545,11 +4611,11 @@ define @test_vloxseg6_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vloxseg6_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ 
-4580,12 +4646,12 @@ define @test_vloxseg7_mask_nxv1i16_nxv1i64( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4616,12 +4682,12 @@ define @test_vloxseg7_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4652,12 +4718,12 @@ define @test_vloxseg7_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4688,12 +4754,12 @@ define @test_vloxseg7_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vloxseg7_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4724,13 +4790,13 @@ define @test_vloxseg8_mask_nxv1i16_nxv1i64( ; CHECK-LABEL: test_vloxseg8_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4761,13 +4827,13 @@ define @test_vloxseg8_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: 
test_vloxseg8_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4798,13 +4864,13 @@ define @test_vloxseg8_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vloxseg8_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4835,13 +4901,13 @@ define @test_vloxseg8_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vloxseg8_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4874,6 +4940,7 @@ define @test_vloxseg2_mask_nxv2i32_nxv2i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i32.nxv2i32( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -4903,6 +4970,7 @@ define @test_vloxseg2_mask_nxv2i32_nxv2i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i32.nxv2i8( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -4932,6 +5000,7 @@ define @test_vloxseg2_mask_nxv2i32_nxv2i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i32.nxv2i16( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -4961,6 +5030,7 @@ define @test_vloxseg2_mask_nxv2i32_nxv2i64( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed 
$v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i32.nxv2i64( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -4987,11 +5057,12 @@ entry: define @test_vloxseg3_mask_nxv2i32_nxv2i32( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i32.nxv2i32( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -5018,11 +5089,12 @@ entry: define @test_vloxseg3_mask_nxv2i32_nxv2i8( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i32.nxv2i8( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -5049,11 +5121,12 @@ entry: define @test_vloxseg3_mask_nxv2i32_nxv2i16( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i32.nxv2i16( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -5081,9 +5154,10 @@ define @test_vloxseg3_mask_nxv2i32_nxv2i64( ; CHECK-LABEL: test_vloxseg3_mask_nxv2i32_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i32.nxv2i64( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -5111,9 +5185,9 @@ define @test_vloxseg4_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vloxseg4_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5144,9 +5218,9 @@ define @test_vloxseg4_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: 
vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5177,9 +5251,9 @@ define @test_vloxseg4_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vloxseg4_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5209,12 +5283,13 @@ entry: define @test_vloxseg4_mask_nxv2i32_nxv2i64( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv2i32_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vloxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: vloxseg4ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv2i32.nxv2i64( %val, %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -5242,10 +5317,10 @@ define @test_vloxseg5_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vloxseg5_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5276,10 +5351,10 @@ define @test_vloxseg5_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vloxseg5_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5310,10 +5385,10 @@ define @test_vloxseg5_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vloxseg5_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5344,10 +5419,10 @@ define @test_vloxseg5_mask_nxv2i32_nxv2i64( ; CHECK-LABEL: test_vloxseg5_mask_nxv2i32_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: 
vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg5ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -5378,11 +5453,11 @@ define @test_vloxseg6_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vloxseg6_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5413,11 +5488,11 @@ define @test_vloxseg6_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vloxseg6_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5448,11 +5523,11 @@ define @test_vloxseg6_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vloxseg6_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5483,11 +5558,11 @@ define @test_vloxseg6_mask_nxv2i32_nxv2i64( ; CHECK-LABEL: test_vloxseg6_mask_nxv2i32_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg6ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -5518,12 +5593,12 @@ define @test_vloxseg7_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vloxseg7_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5554,12 +5629,12 @@ define @test_vloxseg7_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vloxseg7_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: 
vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5590,12 +5665,12 @@ define @test_vloxseg7_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vloxseg7_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5626,12 +5701,12 @@ define @test_vloxseg7_mask_nxv2i32_nxv2i64( ; CHECK-LABEL: test_vloxseg7_mask_nxv2i32_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg7ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -5662,13 +5737,13 @@ define @test_vloxseg8_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vloxseg8_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5699,13 +5774,13 @@ define @test_vloxseg8_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vloxseg8_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5736,13 +5811,13 @@ define @test_vloxseg8_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vloxseg8_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; 
CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5773,13 +5848,13 @@ define @test_vloxseg8_mask_nxv2i32_nxv2i64( ; CHECK-LABEL: test_vloxseg8_mask_nxv2i32_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vloxseg8ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -5812,6 +5887,7 @@ define @test_vloxseg2_mask_nxv8i8_nxv8i16( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i8.nxv8i16( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -5841,6 +5917,7 @@ define @test_vloxseg2_mask_nxv8i8_nxv8i8( %va ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i8.nxv8i8( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -5870,6 +5947,7 @@ define @test_vloxseg2_mask_nxv8i8_nxv8i64( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i8.nxv8i64( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -5899,6 +5977,7 @@ define @test_vloxseg2_mask_nxv8i8_nxv8i32( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i8.nxv8i32( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -5926,9 +6005,10 @@ define @test_vloxseg3_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vloxseg3_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i8.nxv8i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -5955,11 +6035,12 @@ entry: define @test_vloxseg3_mask_nxv8i8_nxv8i8( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v 
v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i8.nxv8i8( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -5987,9 +6068,10 @@ define @test_vloxseg3_mask_nxv8i8_nxv8i64( %v ; CHECK-LABEL: test_vloxseg3_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg3ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i8.nxv8i64( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6017,9 +6099,10 @@ define @test_vloxseg3_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vloxseg3_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8i8.nxv8i32( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6046,12 +6129,13 @@ entry: define @test_vloxseg4_mask_nxv8i8_nxv8i16( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu -; CHECK-NEXT: vloxseg4ei16.v v7, (a0), v12, v0.t +; CHECK-NEXT: vloxseg4ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv8i8.nxv8i16( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6079,9 +6163,9 @@ define @test_vloxseg4_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vloxseg4_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6112,10 +6196,11 @@ define @test_vloxseg4_mask_nxv8i8_nxv8i64( %v ; CHECK-LABEL: test_vloxseg4_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg4ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv8i8.nxv8i64( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6143,10 +6228,11 @@ define @test_vloxseg4_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vloxseg4_mask_nxv8i8_nxv8i32: 
; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6174,10 +6260,10 @@ define @test_vloxseg5_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -6208,10 +6294,10 @@ define @test_vloxseg5_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vloxseg5_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6242,11 +6328,12 @@ define @test_vloxseg5_mask_nxv8i8_nxv8i64( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg5ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vloxseg5.mask.nxv8i8.nxv8i64( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6274,11 +6361,12 @@ define @test_vloxseg5_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vloxseg5.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6306,11 +6394,11 @@ define @test_vloxseg6_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v12, (a0), v10, 
v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -6341,11 +6429,11 @@ define @test_vloxseg6_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vloxseg6_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6376,12 +6464,13 @@ define @test_vloxseg6_mask_nxv8i8_nxv8i64( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg6ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vloxseg6.mask.nxv8i8.nxv8i64( %val, %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6408,14 +6497,15 @@ entry: define @test_vloxseg6_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg6_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu -; CHECK-NEXT: vloxseg6ei32.v v7, (a0), v16, v0.t +; CHECK-NEXT: vloxseg6ei32.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv1r.v v8, v17 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vloxseg6.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6443,12 +6533,12 @@ define @test_vloxseg7_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -6479,12 +6569,12 @@ define @test_vloxseg7_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vloxseg7_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v 
v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6515,13 +6605,14 @@ define @test_vloxseg7_mask_nxv8i8_nxv8i64( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg7ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vloxseg7.mask.nxv8i8.nxv8i64( %val, %val, %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6549,12 +6640,12 @@ define @test_vloxseg7_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -6585,13 +6676,13 @@ define @test_vloxseg8_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -6622,13 +6713,13 @@ define @test_vloxseg8_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vloxseg8_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6659,14 +6750,15 @@ define @test_vloxseg8_mask_nxv8i8_nxv8i64( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 
+; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg8ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vloxseg8.mask.nxv8i8.nxv8i64( %val, %val, %val, %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6694,13 +6786,13 @@ define @test_vloxseg8_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -6733,6 +6825,7 @@ define @test_vloxseg2_mask_nxv4i64_nxv4i32( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i64.nxv4i32( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -6762,6 +6855,7 @@ define @test_vloxseg2_mask_nxv4i64_nxv4i8( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i64.nxv4i8( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -6791,6 +6885,7 @@ define @test_vloxseg2_mask_nxv4i64_nxv4i64( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i64.nxv4i64( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -6820,6 +6915,7 @@ define @test_vloxseg2_mask_nxv4i64_nxv4i16( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i64.nxv4i16( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -6849,6 +6945,7 @@ define @test_vloxseg2_mask_nxv4i16_nxv4i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i16.nxv4i32( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -6878,6 +6975,7 @@ define @test_vloxseg2_mask_nxv4i16_nxv4i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i16.nxv4i8( %val, %val, i16* 
%base, %index, %mask, i64 %vl, i64 1) @@ -6907,6 +7005,7 @@ define @test_vloxseg2_mask_nxv4i16_nxv4i64( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i16.nxv4i64( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -6936,6 +7035,7 @@ define @test_vloxseg2_mask_nxv4i16_nxv4i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv4i16.nxv4i16( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -6963,9 +7063,10 @@ define @test_vloxseg3_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vloxseg3_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i16.nxv4i32( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -6992,11 +7093,12 @@ entry: define @test_vloxseg3_mask_nxv4i16_nxv4i8( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i16.nxv4i8( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7024,9 +7126,10 @@ define @test_vloxseg3_mask_nxv4i16_nxv4i64( ; CHECK-LABEL: test_vloxseg3_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg3ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i16.nxv4i64( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7053,11 +7156,12 @@ entry: define @test_vloxseg3_mask_nxv4i16_nxv4i16( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4i16.nxv4i16( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7084,12 +7188,13 @@ entry: define @test_vloxseg4_mask_nxv4i16_nxv4i32( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: 
vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: vloxseg4ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv4i16.nxv4i32( %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7117,9 +7222,9 @@ define @test_vloxseg4_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7150,10 +7255,11 @@ define @test_vloxseg4_mask_nxv4i16_nxv4i64( ; CHECK-LABEL: test_vloxseg4_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv4i16.nxv4i64( %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7181,9 +7287,9 @@ define @test_vloxseg4_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vloxseg4_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7214,10 +7320,10 @@ define @test_vloxseg5_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vloxseg5_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -7248,10 +7354,10 @@ define @test_vloxseg5_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vloxseg5_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7282,11 +7388,12 @@ define @test_vloxseg5_mask_nxv4i16_nxv4i64( ; CHECK-LABEL: test_vloxseg5_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: 
vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg5ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vloxseg5.mask.nxv4i16.nxv4i64( %val, %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7314,10 +7421,10 @@ define @test_vloxseg5_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vloxseg5_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7348,11 +7455,11 @@ define @test_vloxseg6_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vloxseg6_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg6ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -7383,11 +7490,11 @@ define @test_vloxseg6_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vloxseg6_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7417,14 +7524,15 @@ entry: define @test_vloxseg6_mask_nxv4i16_nxv4i64( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg6_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg6ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: vloxseg6ei64.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv1r.v v8, v17 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vloxseg6.mask.nxv4i16.nxv4i64( %val, %val, %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7452,11 +7560,11 @@ define @test_vloxseg6_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vloxseg6_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; 
CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7487,12 +7595,12 @@ define @test_vloxseg7_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vloxseg7_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -7523,12 +7631,12 @@ define @test_vloxseg7_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vloxseg7_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7559,12 +7667,12 @@ define @test_vloxseg7_mask_nxv4i16_nxv4i64( ; CHECK-LABEL: test_vloxseg7_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg7ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -7595,12 +7703,12 @@ define @test_vloxseg7_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vloxseg7_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7631,13 +7739,13 @@ define @test_vloxseg8_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vloxseg8_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; 
CHECK-NEXT: vloxseg8ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -7668,13 +7776,13 @@ define @test_vloxseg8_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vloxseg8_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7705,13 +7813,13 @@ define @test_vloxseg8_mask_nxv4i16_nxv4i64( ; CHECK-LABEL: test_vloxseg8_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg8ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -7742,13 +7850,13 @@ define @test_vloxseg8_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vloxseg8_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7781,6 +7889,7 @@ define @test_vloxseg2_mask_nxv1i8_nxv1i64( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i8.nxv1i64( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7810,6 +7919,7 @@ define @test_vloxseg2_mask_nxv1i8_nxv1i32( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i8.nxv1i32( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7839,6 +7949,7 @@ define @test_vloxseg2_mask_nxv1i8_nxv1i16( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i8.nxv1i16( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7868,6 +7979,7 @@ define @test_vloxseg2_mask_nxv1i8_nxv1i8( %va ; 
CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv1i8.nxv1i8( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7894,11 +8006,12 @@ entry: define @test_vloxseg3_mask_nxv1i8_nxv1i64( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu -; CHECK-NEXT: vloxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i8.nxv1i64( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7925,11 +8038,12 @@ entry: define @test_vloxseg3_mask_nxv1i8_nxv1i32( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i8.nxv1i32( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7956,11 +8070,12 @@ entry: define @test_vloxseg3_mask_nxv1i8_nxv1i16( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i8.nxv1i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7987,11 +8102,12 @@ entry: define @test_vloxseg3_mask_nxv1i8_nxv1i8( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1i8.nxv1i8( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8019,9 +8135,9 @@ define @test_vloxseg4_mask_nxv1i8_nxv1i64( %v ; CHECK-LABEL: test_vloxseg4_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; 
CHECK-NEXT: vloxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8052,9 +8168,9 @@ define @test_vloxseg4_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vloxseg4_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8085,9 +8201,9 @@ define @test_vloxseg4_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vloxseg4_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8118,9 +8234,9 @@ define @test_vloxseg4_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vloxseg4_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8151,10 +8267,10 @@ define @test_vloxseg5_mask_nxv1i8_nxv1i64( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8185,10 +8301,10 @@ define @test_vloxseg5_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8219,10 +8335,10 @@ define @test_vloxseg5_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8253,10 +8369,10 @@ define @test_vloxseg5_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vloxseg5_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; 
CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8287,11 +8403,11 @@ define @test_vloxseg6_mask_nxv1i8_nxv1i64( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8322,11 +8438,11 @@ define @test_vloxseg6_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8357,11 +8473,11 @@ define @test_vloxseg6_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8392,11 +8508,11 @@ define @test_vloxseg6_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vloxseg6_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8427,12 +8543,12 @@ define @test_vloxseg7_mask_nxv1i8_nxv1i64( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8463,12 +8579,12 @@ define @test_vloxseg7_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; 
CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8499,12 +8615,12 @@ define @test_vloxseg7_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8535,12 +8651,12 @@ define @test_vloxseg7_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vloxseg7_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8571,13 +8687,13 @@ define @test_vloxseg8_mask_nxv1i8_nxv1i64( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8608,13 +8724,13 @@ define @test_vloxseg8_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8645,13 +8761,13 @@ define @test_vloxseg8_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v 
v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8682,13 +8798,13 @@ define @test_vloxseg8_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vloxseg8_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8721,6 +8837,7 @@ define @test_vloxseg2_mask_nxv2i8_nxv2i32( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i8.nxv2i32( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8750,6 +8867,7 @@ define @test_vloxseg2_mask_nxv2i8_nxv2i8( %va ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i8.nxv2i8( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8779,6 +8897,7 @@ define @test_vloxseg2_mask_nxv2i8_nxv2i16( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i8.nxv2i16( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8808,6 +8927,7 @@ define @test_vloxseg2_mask_nxv2i8_nxv2i64( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i8.nxv2i64( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8834,11 +8954,12 @@ entry: define @test_vloxseg3_mask_nxv2i8_nxv2i32( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i8.nxv2i32( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ 
-8865,11 +8986,12 @@ entry: define @test_vloxseg3_mask_nxv2i8_nxv2i8( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i8.nxv2i8( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8896,11 +9018,12 @@ entry: define @test_vloxseg3_mask_nxv2i8_nxv2i16( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i8.nxv2i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8928,9 +9051,10 @@ define @test_vloxseg3_mask_nxv2i8_nxv2i64( %v ; CHECK-LABEL: test_vloxseg3_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i8.nxv2i64( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8958,9 +9082,9 @@ define @test_vloxseg4_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vloxseg4_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8991,9 +9115,9 @@ define @test_vloxseg4_mask_nxv2i8_nxv2i8( %va ; CHECK-LABEL: test_vloxseg4_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9024,9 +9148,9 @@ define @test_vloxseg4_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vloxseg4_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9056,12 +9180,13 @@ entry: define @test_vloxseg4_mask_nxv2i8_nxv2i64( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: 
test_vloxseg4_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu -; CHECK-NEXT: vloxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: vloxseg4ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv2i8.nxv2i64( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -9089,10 +9214,10 @@ define @test_vloxseg5_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9123,10 +9248,10 @@ define @test_vloxseg5_mask_nxv2i8_nxv2i8( %va ; CHECK-LABEL: test_vloxseg5_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9157,10 +9282,10 @@ define @test_vloxseg5_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9191,10 +9316,10 @@ define @test_vloxseg5_mask_nxv2i8_nxv2i64( %v ; CHECK-LABEL: test_vloxseg5_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg5ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -9225,11 +9350,11 @@ define @test_vloxseg6_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9260,11 +9385,11 @@ define @test_vloxseg6_mask_nxv2i8_nxv2i8( 
%va ; CHECK-LABEL: test_vloxseg6_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9295,11 +9420,11 @@ define @test_vloxseg6_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9330,11 +9455,11 @@ define @test_vloxseg6_mask_nxv2i8_nxv2i64( %v ; CHECK-LABEL: test_vloxseg6_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg6ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -9365,12 +9490,12 @@ define @test_vloxseg7_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9401,12 +9526,12 @@ define @test_vloxseg7_mask_nxv2i8_nxv2i8( %va ; CHECK-LABEL: test_vloxseg7_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9437,12 +9562,12 @@ define @test_vloxseg7_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; 
CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9473,12 +9598,12 @@ define @test_vloxseg7_mask_nxv2i8_nxv2i64( %v ; CHECK-LABEL: test_vloxseg7_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg7ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -9509,13 +9634,13 @@ define @test_vloxseg8_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9546,13 +9671,13 @@ define @test_vloxseg8_mask_nxv2i8_nxv2i8( %va ; CHECK-LABEL: test_vloxseg8_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9583,13 +9708,13 @@ define @test_vloxseg8_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9620,13 +9745,13 @@ define @test_vloxseg8_mask_nxv2i8_nxv2i64( %v ; CHECK-LABEL: test_vloxseg8_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v 
v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vloxseg8ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -9659,6 +9784,7 @@ define @test_vloxseg2_mask_nxv8i32_nxv8i16( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i32.nxv8i16( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -9688,6 +9814,7 @@ define @test_vloxseg2_mask_nxv8i32_nxv8i8( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i32.nxv8i8( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -9717,6 +9844,7 @@ define @test_vloxseg2_mask_nxv8i32_nxv8i64( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v4, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i32.nxv8i64( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -9746,6 +9874,7 @@ define @test_vloxseg2_mask_nxv8i32_nxv8i32( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv8i32.nxv8i32( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -9775,6 +9904,7 @@ define @test_vloxseg2_mask_nxv32i8_nxv32i16(,} @llvm.riscv.vloxseg2.mask.nxv32i8.nxv32i16( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -9804,6 +9934,7 @@ define @test_vloxseg2_mask_nxv32i8_nxv32i8( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv32i8.nxv32i8( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -9833,6 +9964,7 @@ define @test_vloxseg2_mask_nxv2i16_nxv2i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i16.nxv2i32( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -9862,6 +9994,7 @@ define @test_vloxseg2_mask_nxv2i16_nxv2i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i16.nxv2i8( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -9891,6 +10024,7 @@ define @test_vloxseg2_mask_nxv2i16_nxv2i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 
killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i16.nxv2i16( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -9920,6 +10054,7 @@ define @test_vloxseg2_mask_nxv2i16_nxv2i64( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i16.nxv2i64( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -9946,11 +10081,12 @@ entry: define @test_vloxseg3_mask_nxv2i16_nxv2i32( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i16.nxv2i32( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -9977,11 +10113,12 @@ entry: define @test_vloxseg3_mask_nxv2i16_nxv2i8( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i16.nxv2i8( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -10008,11 +10145,12 @@ entry: define @test_vloxseg3_mask_nxv2i16_nxv2i16( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i16.nxv2i16( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -10040,9 +10178,10 @@ define @test_vloxseg3_mask_nxv2i16_nxv2i64( ; CHECK-LABEL: test_vloxseg3_mask_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i16.nxv2i64( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -10070,9 +10209,9 @@ define @test_vloxseg4_mask_nxv2i16_nxv2i32( ; CHECK-LABEL: test_vloxseg4_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, 
v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10103,9 +10242,9 @@ define @test_vloxseg4_mask_nxv2i16_nxv2i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10136,9 +10275,9 @@ define @test_vloxseg4_mask_nxv2i16_nxv2i16( ; CHECK-LABEL: test_vloxseg4_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10168,12 +10307,13 @@ entry: define @test_vloxseg4_mask_nxv2i16_nxv2i64( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vloxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: vloxseg4ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv2i16.nxv2i64( %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -10201,10 +10341,10 @@ define @test_vloxseg5_mask_nxv2i16_nxv2i32( ; CHECK-LABEL: test_vloxseg5_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10235,10 +10375,10 @@ define @test_vloxseg5_mask_nxv2i16_nxv2i8( ; CHECK-LABEL: test_vloxseg5_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10269,10 +10409,10 @@ define @test_vloxseg5_mask_nxv2i16_nxv2i16( ; CHECK-LABEL: test_vloxseg5_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei16.v 
v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10303,10 +10443,10 @@ define @test_vloxseg5_mask_nxv2i16_nxv2i64( ; CHECK-LABEL: test_vloxseg5_mask_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg5ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -10337,11 +10477,11 @@ define @test_vloxseg6_mask_nxv2i16_nxv2i32( ; CHECK-LABEL: test_vloxseg6_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10372,11 +10512,11 @@ define @test_vloxseg6_mask_nxv2i16_nxv2i8( ; CHECK-LABEL: test_vloxseg6_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10407,11 +10547,11 @@ define @test_vloxseg6_mask_nxv2i16_nxv2i16( ; CHECK-LABEL: test_vloxseg6_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10442,11 +10582,11 @@ define @test_vloxseg6_mask_nxv2i16_nxv2i64( ; CHECK-LABEL: test_vloxseg6_mask_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg6ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -10477,12 +10617,12 @@ define @test_vloxseg7_mask_nxv2i16_nxv2i32( ; CHECK-LABEL: test_vloxseg7_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; 
CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10513,12 +10653,12 @@ define @test_vloxseg7_mask_nxv2i16_nxv2i8( ; CHECK-LABEL: test_vloxseg7_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10549,12 +10689,12 @@ define @test_vloxseg7_mask_nxv2i16_nxv2i16( ; CHECK-LABEL: test_vloxseg7_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10585,12 +10725,12 @@ define @test_vloxseg7_mask_nxv2i16_nxv2i64( ; CHECK-LABEL: test_vloxseg7_mask_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg7ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -10621,13 +10761,13 @@ define @test_vloxseg8_mask_nxv2i16_nxv2i32( ; CHECK-LABEL: test_vloxseg8_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10658,13 +10798,13 @@ define @test_vloxseg8_mask_nxv2i16_nxv2i8( ; CHECK-LABEL: test_vloxseg8_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: 
vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10695,13 +10835,13 @@ define @test_vloxseg8_mask_nxv2i16_nxv2i16( ; CHECK-LABEL: test_vloxseg8_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10732,13 +10872,13 @@ define @test_vloxseg8_mask_nxv2i16_nxv2i64( ; CHECK-LABEL: test_vloxseg8_mask_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vloxseg8ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -10771,6 +10911,7 @@ define @test_vloxseg2_mask_nxv2i64_nxv2i32( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vloxseg2ei32.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i64.nxv2i32( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10800,6 +10941,7 @@ define @test_vloxseg2_mask_nxv2i64_nxv2i8( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vloxseg2ei8.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i64.nxv2i8( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10829,6 +10971,7 @@ define @test_vloxseg2_mask_nxv2i64_nxv2i16( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vloxseg2ei16.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i64.nxv2i16( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10858,6 +11001,7 @@ define @test_vloxseg2_mask_nxv2i64_nxv2i64( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vloxseg2ei64.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vloxseg2.mask.nxv2i64.nxv2i64( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10884,11 +11028,12 @@ entry: define @test_vloxseg3_mask_nxv2i64_nxv2i32( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i64_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v 
v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i64.nxv2i32( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10915,11 +11060,12 @@ entry: define @test_vloxseg3_mask_nxv2i64_nxv2i8( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i64_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i64.nxv2i8( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10946,11 +11092,12 @@ entry: define @test_vloxseg3_mask_nxv2i64_nxv2i16( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i64_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i64.nxv2i16( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10977,11 +11124,12 @@ entry: define @test_vloxseg3_mask_nxv2i64_nxv2i64( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2i64_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vloxseg3ei64.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2i64.nxv2i64( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -11009,9 +11157,9 @@ define @test_vloxseg4_mask_nxv2i64_nxv2i32( ; CHECK-LABEL: test_vloxseg4_mask_nxv2i64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vloxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -11042,9 +11190,9 @@ define @test_vloxseg4_mask_nxv2i64_nxv2i8( ; CHECK-LABEL: test_vloxseg4_mask_nxv2i64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vloxseg4ei8.v v12, (a0), v10, 
v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -11075,9 +11223,9 @@ define @test_vloxseg4_mask_nxv2i64_nxv2i16( ; CHECK-LABEL: test_vloxseg4_mask_nxv2i64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vloxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -11108,9 +11256,9 @@ define @test_vloxseg4_mask_nxv2i64_nxv2i64( ; CHECK-LABEL: test_vloxseg4_mask_nxv2i64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vloxseg4ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -11143,6 +11291,7 @@ define @test_vloxseg2_mask_nxv16f16_nxv16i16(,} @llvm.riscv.vloxseg2.mask.nxv16f16.nxv16i16( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -11172,6 +11321,7 @@ define @test_vloxseg2_mask_nxv16f16_nxv16i8(,} @llvm.riscv.vloxseg2.mask.nxv16f16.nxv16i8( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -11201,6 +11351,7 @@ define @test_vloxseg2_mask_nxv16f16_nxv16i32(,} @llvm.riscv.vloxseg2.mask.nxv16f16.nxv16i32( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -11230,6 +11381,7 @@ define @test_vloxseg2_mask_nxv4f64_nxv4i32(,} @llvm.riscv.vloxseg2.mask.nxv4f64.nxv4i32( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11259,6 +11411,7 @@ define @test_vloxseg2_mask_nxv4f64_nxv4i8(,} @llvm.riscv.vloxseg2.mask.nxv4f64.nxv4i8( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11288,6 +11441,7 @@ define @test_vloxseg2_mask_nxv4f64_nxv4i64(,} @llvm.riscv.vloxseg2.mask.nxv4f64.nxv4i64( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11317,6 +11471,7 @@ define @test_vloxseg2_mask_nxv4f64_nxv4i16(,} @llvm.riscv.vloxseg2.mask.nxv4f64.nxv4i16( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11346,6 +11501,7 @@ define @test_vloxseg2_mask_nxv1f64_nxv1i64(,} @llvm.riscv.vloxseg2.mask.nxv1f64.nxv1i64( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11375,6 +11531,7 @@ define @test_vloxseg2_mask_nxv1f64_nxv1i32(,} @llvm.riscv.vloxseg2.mask.nxv1f64.nxv1i32( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11404,6 +11561,7 @@ define @test_vloxseg2_mask_nxv1f64_nxv1i16(,} @llvm.riscv.vloxseg2.mask.nxv1f64.nxv1i16( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11433,6 +11591,7 @@ define @test_vloxseg2_mask_nxv1f64_nxv1i8(,} @llvm.riscv.vloxseg2.mask.nxv1f64.nxv1i8( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11459,11 +11618,12 @@ entry: define @test_vloxseg3_mask_nxv1f64_nxv1i64( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vloxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} 
@llvm.riscv.vloxseg3.mask.nxv1f64.nxv1i64( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11490,11 +11650,12 @@ entry: define @test_vloxseg3_mask_nxv1f64_nxv1i32( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f64.nxv1i32( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11521,11 +11682,12 @@ entry: define @test_vloxseg3_mask_nxv1f64_nxv1i16( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f64.nxv1i16( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11552,11 +11714,12 @@ entry: define @test_vloxseg3_mask_nxv1f64_nxv1i8( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f64.nxv1i8( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11584,9 +11747,9 @@ define @test_vloxseg4_mask_nxv1f64_nxv1i64( @test_vloxseg4_mask_nxv1f64_nxv1i32( @test_vloxseg4_mask_nxv1f64_nxv1i16( @test_vloxseg4_mask_nxv1f64_nxv1i8( @test_vloxseg5_mask_nxv1f64_nxv1i64( @test_vloxseg5_mask_nxv1f64_nxv1i32( @test_vloxseg5_mask_nxv1f64_nxv1i16( @test_vloxseg5_mask_nxv1f64_nxv1i8( @test_vloxseg6_mask_nxv1f64_nxv1i64( @test_vloxseg6_mask_nxv1f64_nxv1i32( @test_vloxseg6_mask_nxv1f64_nxv1i16( @test_vloxseg6_mask_nxv1f64_nxv1i8( @test_vloxseg7_mask_nxv1f64_nxv1i64( @test_vloxseg7_mask_nxv1f64_nxv1i32( @test_vloxseg7_mask_nxv1f64_nxv1i16( @test_vloxseg7_mask_nxv1f64_nxv1i8( @test_vloxseg8_mask_nxv1f64_nxv1i64( @test_vloxseg8_mask_nxv1f64_nxv1i32( @test_vloxseg8_mask_nxv1f64_nxv1i16( @test_vloxseg8_mask_nxv1f64_nxv1i8( @test_vloxseg2_mask_nxv2f32_nxv2i32(,} @llvm.riscv.vloxseg2.mask.nxv2f32.nxv2i32( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12315,6 +12479,7 @@ define @test_vloxseg2_mask_nxv2f32_nxv2i8(,} @llvm.riscv.vloxseg2.mask.nxv2f32.nxv2i8( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12344,6 +12509,7 @@ define @test_vloxseg2_mask_nxv2f32_nxv2i16(,} @llvm.riscv.vloxseg2.mask.nxv2f32.nxv2i16( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12373,6 +12539,7 @@ define 
@test_vloxseg2_mask_nxv2f32_nxv2i64(,} @llvm.riscv.vloxseg2.mask.nxv2f32.nxv2i64( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12399,11 +12566,12 @@ entry: define @test_vloxseg3_mask_nxv2f32_nxv2i32( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f32.nxv2i32( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12430,11 +12598,12 @@ entry: define @test_vloxseg3_mask_nxv2f32_nxv2i8( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f32.nxv2i8( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12461,11 +12630,12 @@ entry: define @test_vloxseg3_mask_nxv2f32_nxv2i16( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f32.nxv2i16( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12493,9 +12663,10 @@ define @test_vloxseg3_mask_nxv2f32_nxv2i64(,,} @llvm.riscv.vloxseg3.mask.nxv2f32.nxv2i64( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12523,9 +12694,9 @@ define @test_vloxseg4_mask_nxv2f32_nxv2i32( @test_vloxseg4_mask_nxv2f32_nxv2i8( @test_vloxseg4_mask_nxv2f32_nxv2i16( @test_vloxseg4_mask_nxv2f32_nxv2i64( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv2f32_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vloxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: vloxseg4ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv2f32.nxv2i64( %val, %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12654,10 +12826,10 @@ define @test_vloxseg5_mask_nxv2f32_nxv2i32( @test_vloxseg5_mask_nxv2f32_nxv2i8( @test_vloxseg5_mask_nxv2f32_nxv2i16( @test_vloxseg5_mask_nxv2f32_nxv2i64( 
@test_vloxseg6_mask_nxv2f32_nxv2i32( @test_vloxseg6_mask_nxv2f32_nxv2i8( @test_vloxseg6_mask_nxv2f32_nxv2i16( @test_vloxseg6_mask_nxv2f32_nxv2i64( @test_vloxseg7_mask_nxv2f32_nxv2i32( @test_vloxseg7_mask_nxv2f32_nxv2i8( @test_vloxseg7_mask_nxv2f32_nxv2i16( @test_vloxseg7_mask_nxv2f32_nxv2i64( @test_vloxseg8_mask_nxv2f32_nxv2i32( @test_vloxseg8_mask_nxv2f32_nxv2i8( @test_vloxseg8_mask_nxv2f32_nxv2i16( @test_vloxseg8_mask_nxv2f32_nxv2i64( @test_vloxseg2_mask_nxv1f16_nxv1i64(,} @llvm.riscv.vloxseg2.mask.nxv1f16.nxv1i64( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13253,6 +13426,7 @@ define @test_vloxseg2_mask_nxv1f16_nxv1i32(,} @llvm.riscv.vloxseg2.mask.nxv1f16.nxv1i32( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13282,6 +13456,7 @@ define @test_vloxseg2_mask_nxv1f16_nxv1i16(,} @llvm.riscv.vloxseg2.mask.nxv1f16.nxv1i16( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13311,6 +13486,7 @@ define @test_vloxseg2_mask_nxv1f16_nxv1i8(,} @llvm.riscv.vloxseg2.mask.nxv1f16.nxv1i8( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13337,11 +13513,12 @@ entry: define @test_vloxseg3_mask_nxv1f16_nxv1i64( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vloxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f16.nxv1i64( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13368,11 +13545,12 @@ entry: define @test_vloxseg3_mask_nxv1f16_nxv1i32( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f16.nxv1i32( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13399,11 +13577,12 @@ entry: define @test_vloxseg3_mask_nxv1f16_nxv1i16( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f16.nxv1i16( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13430,11 +13609,12 @@ entry: define @test_vloxseg3_mask_nxv1f16_nxv1i8( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; 
CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f16.nxv1i8( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13462,9 +13642,9 @@ define @test_vloxseg4_mask_nxv1f16_nxv1i64( @test_vloxseg4_mask_nxv1f16_nxv1i32( @test_vloxseg4_mask_nxv1f16_nxv1i16( @test_vloxseg4_mask_nxv1f16_nxv1i8( @test_vloxseg5_mask_nxv1f16_nxv1i64( @test_vloxseg5_mask_nxv1f16_nxv1i32( @test_vloxseg5_mask_nxv1f16_nxv1i16( @test_vloxseg5_mask_nxv1f16_nxv1i8( @test_vloxseg6_mask_nxv1f16_nxv1i64( @test_vloxseg6_mask_nxv1f16_nxv1i32( @test_vloxseg6_mask_nxv1f16_nxv1i16( @test_vloxseg6_mask_nxv1f16_nxv1i8( @test_vloxseg7_mask_nxv1f16_nxv1i64( @test_vloxseg7_mask_nxv1f16_nxv1i32( @test_vloxseg7_mask_nxv1f16_nxv1i16( @test_vloxseg7_mask_nxv1f16_nxv1i8( @test_vloxseg8_mask_nxv1f16_nxv1i64( @test_vloxseg8_mask_nxv1f16_nxv1i32( @test_vloxseg8_mask_nxv1f16_nxv1i16( @test_vloxseg8_mask_nxv1f16_nxv1i8( @test_vloxseg2_mask_nxv1f32_nxv1i64(,} @llvm.riscv.vloxseg2.mask.nxv1f32.nxv1i64( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14193,6 +14374,7 @@ define @test_vloxseg2_mask_nxv1f32_nxv1i32(,} @llvm.riscv.vloxseg2.mask.nxv1f32.nxv1i32( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14222,6 +14404,7 @@ define @test_vloxseg2_mask_nxv1f32_nxv1i16(,} @llvm.riscv.vloxseg2.mask.nxv1f32.nxv1i16( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14251,6 +14434,7 @@ define @test_vloxseg2_mask_nxv1f32_nxv1i8(,} @llvm.riscv.vloxseg2.mask.nxv1f32.nxv1i8( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14277,11 +14461,12 @@ entry: define @test_vloxseg3_mask_nxv1f32_nxv1i64( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f32.nxv1i64( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14308,11 +14493,12 @@ entry: define @test_vloxseg3_mask_nxv1f32_nxv1i32( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f32.nxv1i32( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14339,11 +14525,12 @@ entry: define @test_vloxseg3_mask_nxv1f32_nxv1i16( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v 
v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f32.nxv1i16( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14370,11 +14557,12 @@ entry: define @test_vloxseg3_mask_nxv1f32_nxv1i8( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv1f32.nxv1i8( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14402,9 +14590,9 @@ define @test_vloxseg4_mask_nxv1f32_nxv1i64( @test_vloxseg4_mask_nxv1f32_nxv1i32( @test_vloxseg4_mask_nxv1f32_nxv1i16( @test_vloxseg4_mask_nxv1f32_nxv1i8( @test_vloxseg5_mask_nxv1f32_nxv1i64( @test_vloxseg5_mask_nxv1f32_nxv1i32( @test_vloxseg5_mask_nxv1f32_nxv1i16( @test_vloxseg5_mask_nxv1f32_nxv1i8( @test_vloxseg6_mask_nxv1f32_nxv1i64( @test_vloxseg6_mask_nxv1f32_nxv1i32( @test_vloxseg6_mask_nxv1f32_nxv1i16( @test_vloxseg6_mask_nxv1f32_nxv1i8( @test_vloxseg7_mask_nxv1f32_nxv1i64( @test_vloxseg7_mask_nxv1f32_nxv1i32( @test_vloxseg7_mask_nxv1f32_nxv1i16( @test_vloxseg7_mask_nxv1f32_nxv1i8( @test_vloxseg8_mask_nxv1f32_nxv1i64( @test_vloxseg8_mask_nxv1f32_nxv1i32( @test_vloxseg8_mask_nxv1f32_nxv1i16( @test_vloxseg8_mask_nxv1f32_nxv1i8( @test_vloxseg2_mask_nxv8f16_nxv8i16(,} @llvm.riscv.vloxseg2.mask.nxv8f16.nxv8i16( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15133,6 +15322,7 @@ define @test_vloxseg2_mask_nxv8f16_nxv8i8(,} @llvm.riscv.vloxseg2.mask.nxv8f16.nxv8i8( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15162,6 +15352,7 @@ define @test_vloxseg2_mask_nxv8f16_nxv8i64(,} @llvm.riscv.vloxseg2.mask.nxv8f16.nxv8i64( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15191,6 +15382,7 @@ define @test_vloxseg2_mask_nxv8f16_nxv8i32(,} @llvm.riscv.vloxseg2.mask.nxv8f16.nxv8i32( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15217,11 +15409,12 @@ entry: define @test_vloxseg3_mask_nxv8f16_nxv8i16( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8f16.nxv8i16( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15248,11 +15441,12 @@ entry: define @test_vloxseg3_mask_nxv8f16_nxv8i8( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: 
vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv8f16.nxv8i8( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15280,9 +15474,10 @@ define @test_vloxseg3_mask_nxv8f16_nxv8i64(,,} @llvm.riscv.vloxseg3.mask.nxv8f16.nxv8i64( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15310,9 +15505,10 @@ define @test_vloxseg3_mask_nxv8f16_nxv8i32(,,} @llvm.riscv.vloxseg3.mask.nxv8f16.nxv8i32( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15340,9 +15536,9 @@ define @test_vloxseg4_mask_nxv8f16_nxv8i16( @test_vloxseg4_mask_nxv8f16_nxv8i8( @test_vloxseg4_mask_nxv8f16_nxv8i64(,,,} @llvm.riscv.vloxseg4.mask.nxv8f16.nxv8i64( %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15436,12 +15633,13 @@ entry: define @test_vloxseg4_mask_nxv8f16_nxv8i32( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vloxseg4ei32.v v6, (a0), v16, v0.t +; CHECK-NEXT: vloxseg4ei32.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv2r.v v8, v18 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv8f16.nxv8i32( %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15471,6 +15669,7 @@ define @test_vloxseg2_mask_nxv8f32_nxv8i16(,} @llvm.riscv.vloxseg2.mask.nxv8f32.nxv8i16( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -15500,6 +15699,7 @@ define @test_vloxseg2_mask_nxv8f32_nxv8i8(,} @llvm.riscv.vloxseg2.mask.nxv8f32.nxv8i8( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -15529,6 +15729,7 @@ define @test_vloxseg2_mask_nxv8f32_nxv8i64(,} @llvm.riscv.vloxseg2.mask.nxv8f32.nxv8i64( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -15558,6 +15759,7 @@ define @test_vloxseg2_mask_nxv8f32_nxv8i32(,} @llvm.riscv.vloxseg2.mask.nxv8f32.nxv8i32( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -15587,6 +15789,7 @@ define @test_vloxseg2_mask_nxv2f64_nxv2i32(,} @llvm.riscv.vloxseg2.mask.nxv2f64.nxv2i32( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15616,6 +15819,7 @@ define @test_vloxseg2_mask_nxv2f64_nxv2i8(,} @llvm.riscv.vloxseg2.mask.nxv2f64.nxv2i8( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15645,6 +15849,7 @@ define @test_vloxseg2_mask_nxv2f64_nxv2i16(,} @llvm.riscv.vloxseg2.mask.nxv2f64.nxv2i16( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15674,6 +15879,7 @@ define @test_vloxseg2_mask_nxv2f64_nxv2i64(,} @llvm.riscv.vloxseg2.mask.nxv2f64.nxv2i64( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15700,11 +15906,12 @@ entry: define @test_vloxseg3_mask_nxv2f64_nxv2i32( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f64.nxv2i32( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15731,11 +15938,12 @@ entry: define @test_vloxseg3_mask_nxv2f64_nxv2i8( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f64.nxv2i8( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15762,11 +15970,12 @@ entry: define @test_vloxseg3_mask_nxv2f64_nxv2i16( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f64.nxv2i16( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15793,11 +16002,12 @@ entry: define @test_vloxseg3_mask_nxv2f64_nxv2i64( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vloxseg3ei64.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f64.nxv2i64( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15825,9 +16035,9 @@ define @test_vloxseg4_mask_nxv2f64_nxv2i32( @test_vloxseg4_mask_nxv2f64_nxv2i8( @test_vloxseg4_mask_nxv2f64_nxv2i16( @test_vloxseg4_mask_nxv2f64_nxv2i64( @test_vloxseg2_mask_nxv4f16_nxv4i32(,} @llvm.riscv.vloxseg2.mask.nxv4f16.nxv4i32( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15988,6 +16199,7 @@ define @test_vloxseg2_mask_nxv4f16_nxv4i8(,} @llvm.riscv.vloxseg2.mask.nxv4f16.nxv4i8( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16017,6 +16229,7 @@ define @test_vloxseg2_mask_nxv4f16_nxv4i64(,} @llvm.riscv.vloxseg2.mask.nxv4f16.nxv4i64( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16046,6 +16259,7 @@ define @test_vloxseg2_mask_nxv4f16_nxv4i16(,} @llvm.riscv.vloxseg2.mask.nxv4f16.nxv4i16( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16073,9 +16287,10 @@ define @test_vloxseg3_mask_nxv4f16_nxv4i32(,,} @llvm.riscv.vloxseg3.mask.nxv4f16.nxv4i32( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16102,11 +16317,12 @@ entry: define 
@test_vloxseg3_mask_nxv4f16_nxv4i8( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4f16.nxv4i8( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16134,9 +16350,10 @@ define @test_vloxseg3_mask_nxv4f16_nxv4i64(,,} @llvm.riscv.vloxseg3.mask.nxv4f16.nxv4i64( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16163,11 +16380,12 @@ entry: define @test_vloxseg3_mask_nxv4f16_nxv4i16( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4f16.nxv4i16( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16194,12 +16412,13 @@ entry: define @test_vloxseg4_mask_nxv4f16_nxv4i32( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: vloxseg4ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv4f16.nxv4i32( %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16227,9 +16446,9 @@ define @test_vloxseg4_mask_nxv4f16_nxv4i8( @test_vloxseg4_mask_nxv4f16_nxv4i64(,,,} @llvm.riscv.vloxseg4.mask.nxv4f16.nxv4i64( %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16291,9 +16511,9 @@ define @test_vloxseg4_mask_nxv4f16_nxv4i16( @test_vloxseg5_mask_nxv4f16_nxv4i32( @test_vloxseg5_mask_nxv4f16_nxv4i8( @test_vloxseg5_mask_nxv4f16_nxv4i64(,,,,} @llvm.riscv.vloxseg5.mask.nxv4f16.nxv4i64( %val, %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16424,10 +16645,10 @@ define @test_vloxseg5_mask_nxv4f16_nxv4i16( @test_vloxseg6_mask_nxv4f16_nxv4i32( @test_vloxseg6_mask_nxv4f16_nxv4i8( @test_vloxseg6_mask_nxv4f16_nxv4i64( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg6_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; 
CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vloxseg6ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: vloxseg6ei64.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv1r.v v8, v17 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vloxseg6.mask.nxv4f16.nxv4i64( %val, %val, %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16562,11 +16784,11 @@ define @test_vloxseg6_mask_nxv4f16_nxv4i16( @test_vloxseg7_mask_nxv4f16_nxv4i32( @test_vloxseg7_mask_nxv4f16_nxv4i8( @test_vloxseg7_mask_nxv4f16_nxv4i64( @test_vloxseg7_mask_nxv4f16_nxv4i16( @test_vloxseg8_mask_nxv4f16_nxv4i32( @test_vloxseg8_mask_nxv4f16_nxv4i8( @test_vloxseg8_mask_nxv4f16_nxv4i64( @test_vloxseg8_mask_nxv4f16_nxv4i16( @test_vloxseg2_mask_nxv2f16_nxv2i32(,} @llvm.riscv.vloxseg2.mask.nxv2f16.nxv2i32( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16920,6 +17143,7 @@ define @test_vloxseg2_mask_nxv2f16_nxv2i8(,} @llvm.riscv.vloxseg2.mask.nxv2f16.nxv2i8( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16949,6 +17173,7 @@ define @test_vloxseg2_mask_nxv2f16_nxv2i16(,} @llvm.riscv.vloxseg2.mask.nxv2f16.nxv2i16( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16978,6 +17203,7 @@ define @test_vloxseg2_mask_nxv2f16_nxv2i64(,} @llvm.riscv.vloxseg2.mask.nxv2f16.nxv2i64( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -17004,11 +17230,12 @@ entry: define @test_vloxseg3_mask_nxv2f16_nxv2i32( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f16.nxv2i32( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -17035,11 +17262,12 @@ entry: define @test_vloxseg3_mask_nxv2f16_nxv2i8( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f16.nxv2i8( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -17066,11 +17294,12 @@ entry: define @test_vloxseg3_mask_nxv2f16_nxv2i16( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vloxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv2f16.nxv2i16( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -17098,9 +17327,10 @@ define 
@test_vloxseg3_mask_nxv2f16_nxv2i64(,,} @llvm.riscv.vloxseg3.mask.nxv2f16.nxv2i64( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -17128,9 +17358,9 @@ define @test_vloxseg4_mask_nxv2f16_nxv2i32( @test_vloxseg4_mask_nxv2f16_nxv2i8( @test_vloxseg4_mask_nxv2f16_nxv2i16( @test_vloxseg4_mask_nxv2f16_nxv2i64( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vloxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: vloxseg4ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv2f16.nxv2i64( %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -17259,10 +17490,10 @@ define @test_vloxseg5_mask_nxv2f16_nxv2i32( @test_vloxseg5_mask_nxv2f16_nxv2i8( @test_vloxseg5_mask_nxv2f16_nxv2i16( @test_vloxseg5_mask_nxv2f16_nxv2i64( @test_vloxseg6_mask_nxv2f16_nxv2i32( @test_vloxseg6_mask_nxv2f16_nxv2i8( @test_vloxseg6_mask_nxv2f16_nxv2i16( @test_vloxseg6_mask_nxv2f16_nxv2i64( @test_vloxseg7_mask_nxv2f16_nxv2i32( @test_vloxseg7_mask_nxv2f16_nxv2i8( @test_vloxseg7_mask_nxv2f16_nxv2i16( @test_vloxseg7_mask_nxv2f16_nxv2i64( @test_vloxseg8_mask_nxv2f16_nxv2i32( @test_vloxseg8_mask_nxv2f16_nxv2i8( @test_vloxseg8_mask_nxv2f16_nxv2i16( @test_vloxseg8_mask_nxv2f16_nxv2i64( @test_vloxseg2_mask_nxv4f32_nxv4i32(,} @llvm.riscv.vloxseg2.mask.nxv4f32.nxv4i32( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -17858,6 +18090,7 @@ define @test_vloxseg2_mask_nxv4f32_nxv4i8(,} @llvm.riscv.vloxseg2.mask.nxv4f32.nxv4i8( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -17887,6 +18120,7 @@ define @test_vloxseg2_mask_nxv4f32_nxv4i64(,} @llvm.riscv.vloxseg2.mask.nxv4f32.nxv4i64( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -17916,6 +18150,7 @@ define @test_vloxseg2_mask_nxv4f32_nxv4i16(,} @llvm.riscv.vloxseg2.mask.nxv4f32.nxv4i16( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -17942,11 +18177,12 @@ entry: define @test_vloxseg3_mask_nxv4f32_nxv4i32( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4f32_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vloxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4f32.nxv4i32( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -17973,11 +18209,12 @@ entry: define @test_vloxseg3_mask_nxv4f32_nxv4i8( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4f32_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vloxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei8.v v12, (a0), v10, 
v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4f32.nxv4i8( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -18005,9 +18242,10 @@ define @test_vloxseg3_mask_nxv4f32_nxv4i64(,,} @llvm.riscv.vloxseg3.mask.nxv4f32.nxv4i64( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -18034,11 +18272,12 @@ entry: define @test_vloxseg3_mask_nxv4f32_nxv4i16( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg3_mask_nxv4f32_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vloxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vloxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vloxseg3.mask.nxv4f32.nxv4i16( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -18066,9 +18305,9 @@ define @test_vloxseg4_mask_nxv4f32_nxv4i32( @test_vloxseg4_mask_nxv4f32_nxv4i8( @test_vloxseg4_mask_nxv4f32_nxv4i64( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vloxseg4_mask_nxv4f32_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vloxseg4ei64.v v6, (a0), v16, v0.t +; CHECK-NEXT: vloxseg4ei64.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv2r.v v8, v18 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vloxseg4.mask.nxv4f32.nxv4i64( %val, %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -18164,9 +18404,9 @@ define @test_vloxseg4_mask_nxv4f32_nxv4i16( @test_vlseg2_nxv16i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vlseg2e16.v v4, (a0) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv16i16( undef, undef, i16* %base, i32 %vl) @@ -24,6 +25,7 @@ define @test_vlseg2_mask_nxv16i16(i16* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv16i16( undef, undef, i16* %base, i32 %vl) @@ -41,6 +43,7 @@ define @test_vlseg2_nxv1i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vlseg2e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv1i8( undef, undef, i8* %base, i32 %vl) @@ -55,6 +58,7 @@ define @test_vlseg2_mask_nxv1i8(i8* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv1i8( undef, undef, i8* %base, i32 %vl) @@ -72,6 +76,7 @@ define @test_vlseg3_nxv1i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vlseg3e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1i8( undef, undef, undef, i8* %base, i32 %vl) @@ -87,6 +92,7 @@ define @test_vlseg3_mask_nxv1i8(i8* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv1i8( undef, undef, undef, i8* %base, i32 %vl) @@ -104,6 +110,7 @@ define @test_vlseg4_nxv1i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli 
zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg4e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1i8( undef, undef, undef, undef, i8* %base, i32 %vl)
@@ -120,6 +127,7 @@ define @test_vlseg4_mask_nxv1i8(i8* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv1i8( undef, undef, undef, undef, i8* %base, i32 %vl)
@@ -137,6 +145,7 @@ define @test_vlseg5_nxv1i8(i8* %base, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg5e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv1i8( undef, undef, undef, undef, undef, i8* %base, i32 %vl)
@@ -154,6 +163,7 @@ define @test_vlseg5_mask_nxv1i8(i8* %base, i32 %vl, ,,,,} @llvm.riscv.vlseg5.nxv1i8( undef, undef, undef, undef, undef, i8* %base, i32 %vl)
@@ -171,6 +181,7 @@ define @test_vlseg6_nxv1i8(i8* %base, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg6e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl)
@@ -189,6 +200,7 @@ define @test_vlseg6_mask_nxv1i8(i8* %base, i32 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv1i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl)
@@ -206,6 +218,7 @@ define @test_vlseg7_nxv1i8(i8* %base, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg7e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl)
@@ -225,6 +238,7 @@ define @test_vlseg7_mask_nxv1i8(i8* %base, i32 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv1i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl)
@@ -242,6 +256,7 @@ define @test_vlseg8_nxv1i8(i8* %base, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg8e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %vl)
@@ -262,6 +277,7 @@ define @test_vlseg8_mask_nxv1i8(i8* %base, i32 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv1i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %vl)
@@ -279,6 +295,7 @@ define @test_vlseg2_nxv16i8(i8* %base, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vlseg2e8.v v6, (a0)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlseg2.nxv16i8( undef, undef, i8* %base, i32 %vl)
@@ -293,6 +310,7 @@ define @test_vlseg2_mask_nxv16i8(i8* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv16i8( undef, undef, i8* %base, i32 %vl)
@@ -310,6 +328,7 @@ define @test_vlseg3_nxv16i8(i8* %base, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vlseg3e8.v v6, (a0)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlseg3.nxv16i8( undef, undef, undef, i8* %base, i32 %vl)
@@ -325,6 +344,7 @@ define
@test_vlseg3_mask_nxv16i8(i8* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv16i8( undef, undef, undef, i8* %base, i32 %vl) @@ -342,6 +362,7 @@ define @test_vlseg4_nxv16i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vlseg4e8.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv16i8( undef, undef, undef, undef, i8* %base, i32 %vl) @@ -358,6 +379,7 @@ define @test_vlseg4_mask_nxv16i8(i8* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv16i8( undef, undef, undef, undef, i8* %base, i32 %vl) @@ -375,6 +397,7 @@ define @test_vlseg2_nxv2i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg2e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2i32( undef, undef, i32* %base, i32 %vl) @@ -389,6 +412,7 @@ define @test_vlseg2_mask_nxv2i32(i32* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv2i32( undef, undef, i32* %base, i32 %vl) @@ -406,6 +430,7 @@ define @test_vlseg3_nxv2i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg3e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2i32( undef, undef, undef, i32* %base, i32 %vl) @@ -421,6 +446,7 @@ define @test_vlseg3_mask_nxv2i32(i32* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv2i32( undef, undef, undef, i32* %base, i32 %vl) @@ -438,6 +464,7 @@ define @test_vlseg4_nxv2i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg4e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2i32( undef, undef, undef, undef, i32* %base, i32 %vl) @@ -454,6 +481,7 @@ define @test_vlseg4_mask_nxv2i32(i32* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv2i32( undef, undef, undef, undef, i32* %base, i32 %vl) @@ -471,6 +499,7 @@ define @test_vlseg5_nxv2i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg5e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv2i32( undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -488,6 +517,7 @@ define @test_vlseg5_mask_nxv2i32(i32* %base, i32 %vl, ,,,,} @llvm.riscv.vlseg5.nxv2i32( undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -505,6 +535,7 @@ define @test_vlseg6_nxv2i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg6e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv2i32( undef, undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -523,6 +554,7 @@ define @test_vlseg6_mask_nxv2i32(i32* %base, i32 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv2i32( undef, undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -540,6 +572,7 @@ define @test_vlseg7_nxv2i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg7e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = 
tail call {,,,,,,} @llvm.riscv.vlseg7.nxv2i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -559,6 +592,7 @@ define @test_vlseg7_mask_nxv2i32(i32* %base, i32 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv2i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -576,6 +610,7 @@ define @test_vlseg8_nxv2i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg8e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv2i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i32 %vl) @@ -596,6 +631,7 @@ define @test_vlseg8_mask_nxv2i32(i32* %base, i32 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv2i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i32 %vl) @@ -613,6 +649,7 @@ define @test_vlseg2_nxv4i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg2e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4i16( undef, undef, i16* %base, i32 %vl) @@ -627,6 +664,7 @@ define @test_vlseg2_mask_nxv4i16(i16* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv4i16( undef, undef, i16* %base, i32 %vl) @@ -644,6 +682,7 @@ define @test_vlseg3_nxv4i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg3e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv4i16( undef, undef, undef, i16* %base, i32 %vl) @@ -659,6 +698,7 @@ define @test_vlseg3_mask_nxv4i16(i16* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv4i16( undef, undef, undef, i16* %base, i32 %vl) @@ -676,6 +716,7 @@ define @test_vlseg4_nxv4i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg4e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv4i16( undef, undef, undef, undef, i16* %base, i32 %vl) @@ -692,6 +733,7 @@ define @test_vlseg4_mask_nxv4i16(i16* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv4i16( undef, undef, undef, undef, i16* %base, i32 %vl) @@ -709,6 +751,7 @@ define @test_vlseg5_nxv4i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg5e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv4i16( undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -726,6 +769,7 @@ define @test_vlseg5_mask_nxv4i16(i16* %base, i32 %vl, ,,,,} @llvm.riscv.vlseg5.nxv4i16( undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -743,6 +787,7 @@ define @test_vlseg6_nxv4i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg6e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv4i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -761,6 +806,7 @@ define @test_vlseg6_mask_nxv4i16(i16* %base, i32 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv4i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -778,6 +824,7 @@ define 
@test_vlseg7_nxv4i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg7e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv4i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -797,6 +844,7 @@ define @test_vlseg7_mask_nxv4i16(i16* %base, i32 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv4i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -814,6 +862,7 @@ define @test_vlseg8_nxv4i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg8e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv4i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %vl) @@ -834,6 +883,7 @@ define @test_vlseg8_mask_nxv4i16(i16* %base, i32 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv4i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %vl) @@ -851,6 +901,7 @@ define @test_vlseg2_nxv1i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg2e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv1i32( undef, undef, i32* %base, i32 %vl) @@ -865,6 +916,7 @@ define @test_vlseg2_mask_nxv1i32(i32* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv1i32( undef, undef, i32* %base, i32 %vl) @@ -882,6 +934,7 @@ define @test_vlseg3_nxv1i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg3e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1i32( undef, undef, undef, i32* %base, i32 %vl) @@ -897,6 +950,7 @@ define @test_vlseg3_mask_nxv1i32(i32* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv1i32( undef, undef, undef, i32* %base, i32 %vl) @@ -914,6 +968,7 @@ define @test_vlseg4_nxv1i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg4e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1i32( undef, undef, undef, undef, i32* %base, i32 %vl) @@ -930,6 +985,7 @@ define @test_vlseg4_mask_nxv1i32(i32* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv1i32( undef, undef, undef, undef, i32* %base, i32 %vl) @@ -947,6 +1003,7 @@ define @test_vlseg5_nxv1i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg5e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv1i32( undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -964,6 +1021,7 @@ define @test_vlseg5_mask_nxv1i32(i32* %base, i32 %vl, ,,,,} @llvm.riscv.vlseg5.nxv1i32( undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -981,6 +1039,7 @@ define @test_vlseg6_nxv1i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg6e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1i32( 
undef, undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -999,6 +1058,7 @@ define @test_vlseg6_mask_nxv1i32(i32* %base, i32 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv1i32( undef, undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -1016,6 +1076,7 @@ define @test_vlseg7_nxv1i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg7e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -1035,6 +1096,7 @@ define @test_vlseg7_mask_nxv1i32(i32* %base, i32 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv1i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -1052,6 +1114,7 @@ define @test_vlseg8_nxv1i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg8e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i32 %vl) @@ -1072,6 +1135,7 @@ define @test_vlseg8_mask_nxv1i32(i32* %base, i32 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv1i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i32 %vl) @@ -1089,6 +1153,7 @@ define @test_vlseg2_nxv8i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg2e16.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv8i16( undef, undef, i16* %base, i32 %vl) @@ -1103,6 +1168,7 @@ define @test_vlseg2_mask_nxv8i16(i16* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv8i16( undef, undef, i16* %base, i32 %vl) @@ -1120,6 +1186,7 @@ define @test_vlseg3_nxv8i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg3e16.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv8i16( undef, undef, undef, i16* %base, i32 %vl) @@ -1135,6 +1202,7 @@ define @test_vlseg3_mask_nxv8i16(i16* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv8i16( undef, undef, undef, i16* %base, i32 %vl) @@ -1152,6 +1220,7 @@ define @test_vlseg4_nxv8i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg4e16.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv8i16( undef, undef, undef, undef, i16* %base, i32 %vl) @@ -1168,6 +1237,7 @@ define @test_vlseg4_mask_nxv8i16(i16* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv8i16( undef, undef, undef, undef, i16* %base, i32 %vl) @@ -1185,6 +1255,7 @@ define @test_vlseg2_nxv8i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg2e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv8i8( undef, undef, i8* %base, i32 %vl) @@ -1199,6 +1270,7 @@ define @test_vlseg2_mask_nxv8i8(i8* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv8i8( undef, undef, i8* %base, i32 %vl) @@ -1216,6 +1288,7 @@ define @test_vlseg3_nxv8i8(i8* %base, i32 %vl) { ; CHECK: # 
%bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg3e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv8i8( undef, undef, undef, i8* %base, i32 %vl) @@ -1231,6 +1304,7 @@ define @test_vlseg3_mask_nxv8i8(i8* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv8i8( undef, undef, undef, i8* %base, i32 %vl) @@ -1248,6 +1322,7 @@ define @test_vlseg4_nxv8i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg4e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv8i8( undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1264,6 +1339,7 @@ define @test_vlseg4_mask_nxv8i8(i8* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv8i8( undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1281,6 +1357,7 @@ define @test_vlseg5_nxv8i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg5e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv8i8( undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1298,6 +1375,7 @@ define @test_vlseg5_mask_nxv8i8(i8* %base, i32 %vl, ,,,,} @llvm.riscv.vlseg5.nxv8i8( undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1315,6 +1393,7 @@ define @test_vlseg6_nxv8i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg6e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv8i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1333,6 +1412,7 @@ define @test_vlseg6_mask_nxv8i8(i8* %base, i32 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv8i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1350,6 +1430,7 @@ define @test_vlseg7_nxv8i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg7e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv8i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1369,6 +1450,7 @@ define @test_vlseg7_mask_nxv8i8(i8* %base, i32 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv8i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1386,6 +1468,7 @@ define @test_vlseg8_nxv8i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg8e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv8i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %vl) @@ -1406,6 +1489,7 @@ define @test_vlseg8_mask_nxv8i8(i8* %base, i32 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv8i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %vl) @@ -1423,6 +1507,7 @@ define @test_vlseg2_nxv8i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vlseg2e32.v v4, (a0) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv8i32( undef, undef, i32* %base, i32 
%vl) @@ -1437,6 +1522,7 @@ define @test_vlseg2_mask_nxv8i32(i32* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv8i32( undef, undef, i32* %base, i32 %vl) @@ -1454,6 +1540,7 @@ define @test_vlseg2_nxv4i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg2e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4i8( undef, undef, i8* %base, i32 %vl) @@ -1468,6 +1555,7 @@ define @test_vlseg2_mask_nxv4i8(i8* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv4i8( undef, undef, i8* %base, i32 %vl) @@ -1485,6 +1573,7 @@ define @test_vlseg3_nxv4i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg3e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv4i8( undef, undef, undef, i8* %base, i32 %vl) @@ -1500,6 +1589,7 @@ define @test_vlseg3_mask_nxv4i8(i8* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv4i8( undef, undef, undef, i8* %base, i32 %vl) @@ -1517,6 +1607,7 @@ define @test_vlseg4_nxv4i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg4e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv4i8( undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1533,6 +1624,7 @@ define @test_vlseg4_mask_nxv4i8(i8* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv4i8( undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1550,6 +1642,7 @@ define @test_vlseg5_nxv4i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg5e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv4i8( undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1567,6 +1660,7 @@ define @test_vlseg5_mask_nxv4i8(i8* %base, i32 %vl, ,,,,} @llvm.riscv.vlseg5.nxv4i8( undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1584,6 +1678,7 @@ define @test_vlseg6_nxv4i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg6e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv4i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1602,6 +1697,7 @@ define @test_vlseg6_mask_nxv4i8(i8* %base, i32 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv4i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1619,6 +1715,7 @@ define @test_vlseg7_nxv4i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg7e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv4i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1638,6 +1735,7 @@ define @test_vlseg7_mask_nxv4i8(i8* %base, i32 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv4i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1655,6 +1753,7 @@ define @test_vlseg8_nxv4i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg8e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed 
$v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv4i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %vl) @@ -1675,6 +1774,7 @@ define @test_vlseg8_mask_nxv4i8(i8* %base, i32 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv4i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %vl) @@ -1692,6 +1792,7 @@ define @test_vlseg2_nxv1i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg2e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv1i16( undef, undef, i16* %base, i32 %vl) @@ -1706,6 +1807,7 @@ define @test_vlseg2_mask_nxv1i16(i16* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv1i16( undef, undef, i16* %base, i32 %vl) @@ -1723,6 +1825,7 @@ define @test_vlseg3_nxv1i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg3e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1i16( undef, undef, undef, i16* %base, i32 %vl) @@ -1738,6 +1841,7 @@ define @test_vlseg3_mask_nxv1i16(i16* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv1i16( undef, undef, undef, i16* %base, i32 %vl) @@ -1755,6 +1859,7 @@ define @test_vlseg4_nxv1i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg4e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1i16( undef, undef, undef, undef, i16* %base, i32 %vl) @@ -1771,6 +1876,7 @@ define @test_vlseg4_mask_nxv1i16(i16* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv1i16( undef, undef, undef, undef, i16* %base, i32 %vl) @@ -1788,6 +1894,7 @@ define @test_vlseg5_nxv1i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg5e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv1i16( undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -1805,6 +1912,7 @@ define @test_vlseg5_mask_nxv1i16(i16* %base, i32 %vl, ,,,,} @llvm.riscv.vlseg5.nxv1i16( undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -1822,6 +1930,7 @@ define @test_vlseg6_nxv1i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg6e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -1840,6 +1949,7 @@ define @test_vlseg6_mask_nxv1i16(i16* %base, i32 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv1i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -1857,6 +1967,7 @@ define @test_vlseg7_nxv1i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg7e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -1876,6 +1987,7 @@ define @test_vlseg7_mask_nxv1i16(i16* %base, i32 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv1i16( undef, undef, undef, 
undef, undef, undef, undef, i16* %base, i32 %vl) @@ -1893,6 +2005,7 @@ define @test_vlseg8_nxv1i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg8e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %vl) @@ -1913,6 +2026,7 @@ define @test_vlseg8_mask_nxv1i16(i16* %base, i32 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv1i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %vl) @@ -1930,6 +2044,7 @@ define @test_vlseg2_nxv32i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu ; CHECK-NEXT: vlseg2e8.v v4, (a0) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv32i8( undef, undef, i8* %base, i32 %vl) @@ -1944,6 +2059,7 @@ define @test_vlseg2_mask_nxv32i8(i8* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv32i8( undef, undef, i8* %base, i32 %vl) @@ -1961,6 +2077,7 @@ define @test_vlseg2_nxv2i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg2e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2i8( undef, undef, i8* %base, i32 %vl) @@ -1975,6 +2092,7 @@ define @test_vlseg2_mask_nxv2i8(i8* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv2i8( undef, undef, i8* %base, i32 %vl) @@ -1992,6 +2110,7 @@ define @test_vlseg3_nxv2i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg3e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2i8( undef, undef, undef, i8* %base, i32 %vl) @@ -2007,6 +2126,7 @@ define @test_vlseg3_mask_nxv2i8(i8* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv2i8( undef, undef, undef, i8* %base, i32 %vl) @@ -2024,6 +2144,7 @@ define @test_vlseg4_nxv2i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg4e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2i8( undef, undef, undef, undef, i8* %base, i32 %vl) @@ -2040,6 +2161,7 @@ define @test_vlseg4_mask_nxv2i8(i8* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv2i8( undef, undef, undef, undef, i8* %base, i32 %vl) @@ -2057,6 +2179,7 @@ define @test_vlseg5_nxv2i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg5e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv2i8( undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -2074,6 +2197,7 @@ define @test_vlseg5_mask_nxv2i8(i8* %base, i32 %vl, ,,,,} @llvm.riscv.vlseg5.nxv2i8( undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -2091,6 +2215,7 @@ define @test_vlseg6_nxv2i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg6e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv2i8( undef, undef, undef, undef, undef, undef, 
i8* %base, i32 %vl) @@ -2109,6 +2234,7 @@ define @test_vlseg6_mask_nxv2i8(i8* %base, i32 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv2i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -2126,6 +2252,7 @@ define @test_vlseg7_nxv2i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg7e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv2i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -2145,6 +2272,7 @@ define @test_vlseg7_mask_nxv2i8(i8* %base, i32 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv2i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -2162,6 +2290,7 @@ define @test_vlseg8_nxv2i8(i8* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg8e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv2i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %vl) @@ -2182,6 +2311,7 @@ define @test_vlseg8_mask_nxv2i8(i8* %base, i32 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv2i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %vl) @@ -2199,6 +2329,7 @@ define @test_vlseg2_nxv2i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg2e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2i16( undef, undef, i16* %base, i32 %vl) @@ -2213,6 +2344,7 @@ define @test_vlseg2_mask_nxv2i16(i16* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv2i16( undef, undef, i16* %base, i32 %vl) @@ -2230,6 +2362,7 @@ define @test_vlseg3_nxv2i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg3e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2i16( undef, undef, undef, i16* %base, i32 %vl) @@ -2245,6 +2378,7 @@ define @test_vlseg3_mask_nxv2i16(i16* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv2i16( undef, undef, undef, i16* %base, i32 %vl) @@ -2262,6 +2396,7 @@ define @test_vlseg4_nxv2i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg4e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2i16( undef, undef, undef, undef, i16* %base, i32 %vl) @@ -2278,6 +2413,7 @@ define @test_vlseg4_mask_nxv2i16(i16* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv2i16( undef, undef, undef, undef, i16* %base, i32 %vl) @@ -2295,6 +2431,7 @@ define @test_vlseg5_nxv2i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg5e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv2i16( undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -2312,6 +2449,7 @@ define @test_vlseg5_mask_nxv2i16(i16* %base, i32 %vl, ,,,,} @llvm.riscv.vlseg5.nxv2i16( undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -2329,6 +2467,7 @@ define @test_vlseg6_nxv2i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; 
CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg6e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv2i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -2347,6 +2486,7 @@ define @test_vlseg6_mask_nxv2i16(i16* %base, i32 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv2i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -2364,6 +2504,7 @@ define @test_vlseg7_nxv2i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg7e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv2i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -2383,6 +2524,7 @@ define @test_vlseg7_mask_nxv2i16(i16* %base, i32 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv2i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -2400,6 +2542,7 @@ define @test_vlseg8_nxv2i16(i16* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg8e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv2i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %vl) @@ -2420,6 +2563,7 @@ define @test_vlseg8_mask_nxv2i16(i16* %base, i32 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv2i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %vl) @@ -2437,6 +2581,7 @@ define @test_vlseg2_nxv4i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vlseg2e32.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4i32( undef, undef, i32* %base, i32 %vl) @@ -2451,6 +2596,7 @@ define @test_vlseg2_mask_nxv4i32(i32* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv4i32( undef, undef, i32* %base, i32 %vl) @@ -2468,6 +2614,7 @@ define @test_vlseg3_nxv4i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vlseg3e32.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv4i32( undef, undef, undef, i32* %base, i32 %vl) @@ -2483,6 +2630,7 @@ define @test_vlseg3_mask_nxv4i32(i32* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv4i32( undef, undef, undef, i32* %base, i32 %vl) @@ -2500,6 +2648,7 @@ define @test_vlseg4_nxv4i32(i32* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vlseg4e32.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv4i32( undef, undef, undef, undef, i32* %base, i32 %vl) @@ -2516,6 +2665,7 @@ define @test_vlseg4_mask_nxv4i32(i32* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv4i32( undef, undef, undef, undef, i32* %base, i32 %vl) @@ -2533,6 +2683,7 @@ define @test_vlseg2_nxv16f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vlseg2e16.v v4, (a0) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv16f16( undef, 
undef, half* %base, i32 %vl) @@ -2547,6 +2698,7 @@ define @test_vlseg2_mask_nxv16f16(half* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv16f16( undef, undef, half* %base, i32 %vl) @@ -2564,6 +2716,7 @@ define @test_vlseg2_nxv4f64(double* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vlseg2e64.v v4, (a0) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4f64( undef, undef, double* %base, i32 %vl) @@ -2578,6 +2731,7 @@ define @test_vlseg2_mask_nxv4f64(double* %base, i32 %vl, < ; CHECK-NEXT: vlseg2e64.v v4, (a0) ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vlseg2e64.v v4, (a0), v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4f64( undef, undef, double* %base, i32 %vl) @@ -2595,6 +2749,7 @@ define @test_vlseg2_nxv1f64(double* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg2e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv1f64( undef, undef, double* %base, i32 %vl) @@ -2609,6 +2764,7 @@ define @test_vlseg2_mask_nxv1f64(double* %base, i32 %vl, < ; CHECK-NEXT: vlseg2e64.v v7, (a0) ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlseg2e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv1f64( undef, undef, double* %base, i32 %vl) @@ -2626,6 +2782,7 @@ define @test_vlseg3_nxv1f64(double* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg3e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1f64( undef, undef, undef, double* %base, i32 %vl) @@ -2641,6 +2798,7 @@ define @test_vlseg3_mask_nxv1f64(double* %base, i32 %vl, < ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlseg3e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1f64( undef, undef, undef, double* %base, i32 %vl) @@ -2658,6 +2816,7 @@ define @test_vlseg4_nxv1f64(double* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg4e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1f64( undef, undef, undef, undef, double* %base, i32 %vl) @@ -2674,6 +2833,7 @@ define @test_vlseg4_mask_nxv1f64(double* %base, i32 %vl, < ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlseg4e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1f64( undef, undef, undef, undef, double* %base, i32 %vl) @@ -2691,6 +2851,7 @@ define @test_vlseg5_nxv1f64(double* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg5e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv1f64( undef, undef, undef, undef, undef, double* %base, i32 %vl) @@ -2708,6 +2869,7 @@ define @test_vlseg5_mask_nxv1f64(double* %base, i32 %vl, < ; CHECK-NEXT: vmv1r.v 
v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlseg5e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv1f64( undef, undef, undef, undef, undef, double* %base, i32 %vl) @@ -2725,6 +2887,7 @@ define @test_vlseg6_nxv1f64(double* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg6e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1f64( undef, undef, undef, undef, undef, undef, double* %base, i32 %vl) @@ -2743,6 +2906,7 @@ define @test_vlseg6_mask_nxv1f64(double* %base, i32 %vl, < ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlseg6e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1f64( undef, undef, undef, undef, undef, undef, double* %base, i32 %vl) @@ -2760,6 +2924,7 @@ define @test_vlseg7_nxv1f64(double* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg7e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1f64( undef, undef, undef, undef, undef, undef, undef, double* %base, i32 %vl) @@ -2779,6 +2944,7 @@ define @test_vlseg7_mask_nxv1f64(double* %base, i32 %vl, < ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlseg7e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1f64( undef, undef, undef, undef, undef, undef, undef, double* %base, i32 %vl) @@ -2796,6 +2962,7 @@ define @test_vlseg8_nxv1f64(double* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg8e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1f64( undef, undef , undef , undef, undef , undef, undef, undef, double* %base, i32 %vl) @@ -2816,6 +2983,7 @@ define @test_vlseg8_mask_nxv1f64(double* %base, i32 %vl, < ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlseg8e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1f64( undef, undef , undef , undef, undef , undef, undef, undef, double* %base, i32 %vl) @@ -2833,6 +3001,7 @@ define @test_vlseg2_nxv2f32(float* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg2e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2f32( undef, undef, float* %base, i32 %vl) @@ -2847,6 +3016,7 @@ define @test_vlseg2_mask_nxv2f32(float* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv2f32( undef, undef, float* %base, i32 %vl) @@ -2864,6 +3034,7 @@ define @test_vlseg3_nxv2f32(float* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg3e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2f32( 
@@ -2879,6 +3050,7 @@ define @test_vlseg3_mask_nxv2f32(float* %base, i32 %vl, <
; CHECK-NEXT: vmv1r.v v8, v7
; CHECK-NEXT: vmv1r.v v9, v7
; CHECK-NEXT: vlseg3e32.v v7, (a0), v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vlseg3.nxv2f32( undef, undef, undef, float* %base, i32 %vl)
@@ -2896,6 +3068,7 @@ define @test_vlseg4_nxv2f32(float* %base, i32 %vl) {
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vlseg4e32.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2f32( undef, undef, undef, undef, float* %base, i32 %vl)
@@ -2912,6 +3085,7 @@ define @test_vlseg4_mask_nxv2f32(float* %base, i32 %vl, <
; CHECK-NEXT: vmv1r.v v9, v7
; CHECK-NEXT: vmv1r.v v10, v7
; CHECK-NEXT: vlseg4e32.v v7, (a0), v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2f32( undef, undef, undef, undef, float* %base, i32 %vl)
@@ -2929,6 +3103,7 @@ define @test_vlseg5_nxv2f32(float* %base, i32 %vl) {
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vlseg5e32.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv2f32( undef, undef, undef, undef, undef, float* %base, i32 %vl)
@@ -2946,6 +3121,7 @@ define @test_vlseg5_mask_nxv2f32(float* %base, i32 %vl, <
; CHECK-NEXT: vmv1r.v v10, v7
; CHECK-NEXT: vmv1r.v v11, v7
; CHECK-NEXT: vlseg5e32.v v7, (a0), v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv2f32( undef, undef, undef, undef, undef, float* %base, i32 %vl)
@@ -2963,6 +3139,7 @@ define @test_vlseg6_nxv2f32(float* %base, i32 %vl) {
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vlseg6e32.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv2f32( undef, undef, undef, undef, undef, undef, float* %base, i32 %vl)
@@ -2981,6 +3158,7 @@ define @test_vlseg6_mask_nxv2f32(float* %base, i32 %vl, <
; CHECK-NEXT: vmv1r.v v11, v7
; CHECK-NEXT: vmv1r.v v12, v7
; CHECK-NEXT: vlseg6e32.v v7, (a0), v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv2f32( undef, undef, undef, undef, undef, undef, float* %base, i32 %vl)
@@ -2998,6 +3176,7 @@ define @test_vlseg7_nxv2f32(float* %base, i32 %vl) {
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vlseg7e32.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv2f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i32 %vl)
@@ -3017,6 +3196,7 @@ define @test_vlseg7_mask_nxv2f32(float* %base, i32 %vl, <
; CHECK-NEXT: vmv1r.v v12, v7
; CHECK-NEXT: vmv1r.v v13, v7
; CHECK-NEXT: vlseg7e32.v v7, (a0), v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv2f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i32 %vl)
@@ -3034,6 +3214,7 @@ define @test_vlseg8_nxv2f32(float* %base, i32 %vl) {
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vlseg8e32.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv2f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i32 %vl)
@@ -3054,6 +3235,7 @@ define @test_vlseg8_mask_nxv2f32(float* %base, i32 %vl, <
; CHECK-NEXT: vmv1r.v v13, v7
; CHECK-NEXT: vmv1r.v v14, v7
; CHECK-NEXT: vlseg8e32.v v7, (a0), v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv2f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i32 %vl)
@@ -3071,6 +3253,7 @@ define @test_vlseg2_nxv1f16(half* %base, i32 %vl) {
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vlseg2e16.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vlseg2.nxv1f16( undef, undef, half* %base, i32 %vl)
@@ -3085,6 +3268,7 @@ define @test_vlseg2_mask_nxv1f16(half* %base, i32 %vl, <
; CHECK-NEXT: vlseg2e16.v v7, (a0)
; CHECK-NEXT: vmv1r.v v8, v7
; CHECK-NEXT: vlseg2e16.v v7, (a0), v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vlseg2.nxv1f16( undef, undef,
half* %base, i32 %vl) @@ -3102,6 +3286,7 @@ define @test_vlseg3_nxv1f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg3e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1f16( undef, undef, undef, half* %base, i32 %vl) @@ -3117,6 +3302,7 @@ define @test_vlseg3_mask_nxv1f16(half* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv1f16( undef, undef, undef, half* %base, i32 %vl) @@ -3134,6 +3320,7 @@ define @test_vlseg4_nxv1f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg4e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1f16( undef, undef, undef, undef, half* %base, i32 %vl) @@ -3150,6 +3337,7 @@ define @test_vlseg4_mask_nxv1f16(half* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv1f16( undef, undef, undef, undef, half* %base, i32 %vl) @@ -3167,6 +3355,7 @@ define @test_vlseg5_nxv1f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg5e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv1f16( undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -3184,6 +3373,7 @@ define @test_vlseg5_mask_nxv1f16(half* %base, i32 %vl, ,,,,} @llvm.riscv.vlseg5.nxv1f16( undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -3201,6 +3391,7 @@ define @test_vlseg6_nxv1f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg6e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -3219,6 +3410,7 @@ define @test_vlseg6_mask_nxv1f16(half* %base, i32 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv1f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -3236,6 +3428,7 @@ define @test_vlseg7_nxv1f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg7e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -3255,6 +3448,7 @@ define @test_vlseg7_mask_nxv1f16(half* %base, i32 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv1f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -3272,6 +3466,7 @@ define @test_vlseg8_nxv1f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg8e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %vl) @@ -3292,6 +3487,7 @@ define @test_vlseg8_mask_nxv1f16(half* %base, i32 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv1f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %vl) @@ -3309,6 +3505,7 @@ define @test_vlseg2_nxv1f32(float* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; 
CHECK-NEXT: vlseg2e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv1f32( undef, undef, float* %base, i32 %vl) @@ -3323,6 +3520,7 @@ define @test_vlseg2_mask_nxv1f32(float* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv1f32( undef, undef, float* %base, i32 %vl) @@ -3340,6 +3538,7 @@ define @test_vlseg3_nxv1f32(float* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg3e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1f32( undef, undef, undef, float* %base, i32 %vl) @@ -3355,6 +3554,7 @@ define @test_vlseg3_mask_nxv1f32(float* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv1f32( undef, undef, undef, float* %base, i32 %vl) @@ -3372,6 +3572,7 @@ define @test_vlseg4_nxv1f32(float* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg4e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1f32( undef, undef, undef, undef, float* %base, i32 %vl) @@ -3388,6 +3589,7 @@ define @test_vlseg4_mask_nxv1f32(float* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv1f32( undef, undef, undef, undef, float* %base, i32 %vl) @@ -3405,6 +3607,7 @@ define @test_vlseg5_nxv1f32(float* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg5e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv1f32( undef, undef, undef, undef, undef, float* %base, i32 %vl) @@ -3422,6 +3625,7 @@ define @test_vlseg5_mask_nxv1f32(float* %base, i32 %vl, ,,,,} @llvm.riscv.vlseg5.nxv1f32( undef, undef, undef, undef, undef, float* %base, i32 %vl) @@ -3439,6 +3643,7 @@ define @test_vlseg6_nxv1f32(float* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg6e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1f32( undef, undef, undef, undef, undef, undef, float* %base, i32 %vl) @@ -3457,6 +3662,7 @@ define @test_vlseg6_mask_nxv1f32(float* %base, i32 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv1f32( undef, undef, undef, undef, undef, undef, float* %base, i32 %vl) @@ -3474,6 +3680,7 @@ define @test_vlseg7_nxv1f32(float* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg7e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i32 %vl) @@ -3493,6 +3700,7 @@ define @test_vlseg7_mask_nxv1f32(float* %base, i32 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv1f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i32 %vl) @@ -3510,6 +3718,7 @@ define @test_vlseg8_nxv1f32(float* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg8e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i32 %vl) @@ 
-3530,6 +3739,7 @@ define @test_vlseg8_mask_nxv1f32(float* %base, i32 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv1f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i32 %vl) @@ -3547,6 +3757,7 @@ define @test_vlseg2_nxv8f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg2e16.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv8f16( undef, undef, half* %base, i32 %vl) @@ -3561,6 +3772,7 @@ define @test_vlseg2_mask_nxv8f16(half* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv8f16( undef, undef, half* %base, i32 %vl) @@ -3578,6 +3790,7 @@ define @test_vlseg3_nxv8f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg3e16.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv8f16( undef, undef, undef, half* %base, i32 %vl) @@ -3593,6 +3806,7 @@ define @test_vlseg3_mask_nxv8f16(half* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv8f16( undef, undef, undef, half* %base, i32 %vl) @@ -3610,6 +3824,7 @@ define @test_vlseg4_nxv8f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg4e16.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv8f16( undef, undef, undef, undef, half* %base, i32 %vl) @@ -3626,6 +3841,7 @@ define @test_vlseg4_mask_nxv8f16(half* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv8f16( undef, undef, undef, undef, half* %base, i32 %vl) @@ -3643,6 +3859,7 @@ define @test_vlseg2_nxv8f32(float* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vlseg2e32.v v4, (a0) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv8f32( undef, undef, float* %base, i32 %vl) @@ -3657,6 +3874,7 @@ define @test_vlseg2_mask_nxv8f32(float* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv8f32( undef, undef, float* %base, i32 %vl) @@ -3674,6 +3892,7 @@ define @test_vlseg2_nxv2f64(double* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vlseg2e64.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2f64( undef, undef, double* %base, i32 %vl) @@ -3688,6 +3907,7 @@ define @test_vlseg2_mask_nxv2f64(double* %base, i32 %vl, < ; CHECK-NEXT: vlseg2e64.v v6, (a0) ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vlseg2e64.v v6, (a0), v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2f64( undef, undef, double* %base, i32 %vl) @@ -3705,6 +3925,7 @@ define @test_vlseg3_nxv2f64(double* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vlseg3e64.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2f64( undef, undef, undef, double* %base, i32 %vl) @@ -3720,6 +3941,7 @@ define @test_vlseg3_mask_nxv2f64(double* %base, i32 %vl, < ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vlseg3e64.v v6, (a0), v0.t +; CHECK-NEXT: 
# kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2f64( undef, undef, undef, double* %base, i32 %vl) @@ -3737,6 +3959,7 @@ define @test_vlseg4_nxv2f64(double* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vlseg4e64.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2f64( undef, undef, undef, undef, double* %base, i32 %vl) @@ -3753,6 +3976,7 @@ define @test_vlseg4_mask_nxv2f64(double* %base, i32 %vl, < ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vlseg4e64.v v6, (a0), v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2f64( undef, undef, undef, undef, double* %base, i32 %vl) @@ -3770,6 +3994,7 @@ define @test_vlseg2_nxv4f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg2e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4f16( undef, undef, half* %base, i32 %vl) @@ -3784,6 +4009,7 @@ define @test_vlseg2_mask_nxv4f16(half* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv4f16( undef, undef, half* %base, i32 %vl) @@ -3801,6 +4027,7 @@ define @test_vlseg3_nxv4f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg3e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv4f16( undef, undef, undef, half* %base, i32 %vl) @@ -3816,6 +4043,7 @@ define @test_vlseg3_mask_nxv4f16(half* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv4f16( undef, undef, undef, half* %base, i32 %vl) @@ -3833,6 +4061,7 @@ define @test_vlseg4_nxv4f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg4e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv4f16( undef, undef, undef, undef, half* %base, i32 %vl) @@ -3849,6 +4078,7 @@ define @test_vlseg4_mask_nxv4f16(half* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv4f16( undef, undef, undef, undef, half* %base, i32 %vl) @@ -3866,6 +4096,7 @@ define @test_vlseg5_nxv4f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg5e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv4f16( undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -3883,6 +4114,7 @@ define @test_vlseg5_mask_nxv4f16(half* %base, i32 %vl, ,,,,} @llvm.riscv.vlseg5.nxv4f16( undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -3900,6 +4132,7 @@ define @test_vlseg6_nxv4f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg6e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv4f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -3918,6 +4151,7 @@ define @test_vlseg6_mask_nxv4f16(half* %base, i32 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv4f16( undef, undef, 
undef, undef, undef, undef, half* %base, i32 %vl) @@ -3935,6 +4169,7 @@ define @test_vlseg7_nxv4f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg7e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv4f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -3954,6 +4189,7 @@ define @test_vlseg7_mask_nxv4f16(half* %base, i32 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv4f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -3971,6 +4207,7 @@ define @test_vlseg8_nxv4f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg8e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv4f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %vl) @@ -3991,6 +4228,7 @@ define @test_vlseg8_mask_nxv4f16(half* %base, i32 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv4f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %vl) @@ -4008,6 +4246,7 @@ define @test_vlseg2_nxv2f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg2e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2f16( undef, undef, half* %base, i32 %vl) @@ -4022,6 +4261,7 @@ define @test_vlseg2_mask_nxv2f16(half* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv2f16( undef, undef, half* %base, i32 %vl) @@ -4039,6 +4279,7 @@ define @test_vlseg3_nxv2f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg3e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2f16( undef, undef, undef, half* %base, i32 %vl) @@ -4054,6 +4295,7 @@ define @test_vlseg3_mask_nxv2f16(half* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv2f16( undef, undef, undef, half* %base, i32 %vl) @@ -4071,6 +4313,7 @@ define @test_vlseg4_nxv2f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg4e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2f16( undef, undef, undef, undef, half* %base, i32 %vl) @@ -4087,6 +4330,7 @@ define @test_vlseg4_mask_nxv2f16(half* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv2f16( undef, undef, undef, undef, half* %base, i32 %vl) @@ -4104,6 +4348,7 @@ define @test_vlseg5_nxv2f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg5e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv2f16( undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -4121,6 +4366,7 @@ define @test_vlseg5_mask_nxv2f16(half* %base, i32 %vl, ,,,,} @llvm.riscv.vlseg5.nxv2f16( undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -4138,6 +4384,7 @@ define @test_vlseg6_nxv2f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg6e16.v v7, (a0) +; CHECK-NEXT: # kill: 
def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv2f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -4156,6 +4403,7 @@ define @test_vlseg6_mask_nxv2f16(half* %base, i32 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv2f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -4173,6 +4421,7 @@ define @test_vlseg7_nxv2f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg7e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv2f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -4192,6 +4441,7 @@ define @test_vlseg7_mask_nxv2f16(half* %base, i32 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv2f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %vl) @@ -4209,6 +4459,7 @@ define @test_vlseg8_nxv2f16(half* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg8e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv2f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %vl) @@ -4229,6 +4480,7 @@ define @test_vlseg8_mask_nxv2f16(half* %base, i32 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv2f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %vl) @@ -4246,6 +4498,7 @@ define @test_vlseg2_nxv4f32(float* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vlseg2e32.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4f32( undef, undef, float* %base, i32 %vl) @@ -4260,6 +4513,7 @@ define @test_vlseg2_mask_nxv4f32(float* %base, i32 %vl, ,} @llvm.riscv.vlseg2.nxv4f32( undef, undef, float* %base, i32 %vl) @@ -4277,6 +4531,7 @@ define @test_vlseg3_nxv4f32(float* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vlseg3e32.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv4f32( undef, undef, undef, float* %base, i32 %vl) @@ -4292,6 +4547,7 @@ define @test_vlseg3_mask_nxv4f32(float* %base, i32 %vl, ,,} @llvm.riscv.vlseg3.nxv4f32( undef, undef, undef, float* %base, i32 %vl) @@ -4309,6 +4565,7 @@ define @test_vlseg4_nxv4f32(float* %base, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vlseg4e32.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv4f32( undef, undef, undef, undef, float* %base, i32 %vl) @@ -4325,6 +4582,7 @@ define @test_vlseg4_mask_nxv4f32(float* %base, i32 %vl, ,,,} @llvm.riscv.vlseg4.nxv4f32( undef, undef, undef, undef, float* %base, i32 %vl) diff --git a/llvm/test/CodeGen/RISCV/rvv/vlseg-rv64.ll b/llvm/test/CodeGen/RISCV/rvv/vlseg-rv64.ll index 28b50644aaead..a36940eb6c786 100644 --- a/llvm/test/CodeGen/RISCV/rvv/vlseg-rv64.ll +++ b/llvm/test/CodeGen/RISCV/rvv/vlseg-rv64.ll @@ -10,6 +10,7 @@ define @test_vlseg2_nxv16i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: 
vlseg2e16.v v4, (a0) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv16i16( undef, undef, i16* %base, i64 %vl) @@ -24,6 +25,7 @@ define @test_vlseg2_mask_nxv16i16(i16* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv16i16( undef, undef, i16* %base, i64 %vl) @@ -41,6 +43,7 @@ define @test_vlseg2_nxv4i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vlseg2e32.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4i32( undef, undef, i32* %base, i64 %vl) @@ -55,6 +58,7 @@ define @test_vlseg2_mask_nxv4i32(i32* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv4i32( undef, undef, i32* %base, i64 %vl) @@ -72,6 +76,7 @@ define @test_vlseg3_nxv4i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vlseg3e32.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv4i32( undef, undef, undef, i32* %base, i64 %vl) @@ -87,6 +92,7 @@ define @test_vlseg3_mask_nxv4i32(i32* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv4i32( undef, undef, undef, i32* %base, i64 %vl) @@ -104,6 +110,7 @@ define @test_vlseg4_nxv4i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vlseg4e32.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv4i32( undef, undef, undef, undef, i32* %base, i64 %vl) @@ -120,6 +127,7 @@ define @test_vlseg4_mask_nxv4i32(i32* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv4i32( undef, undef, undef, undef, i32* %base, i64 %vl) @@ -137,6 +145,7 @@ define @test_vlseg2_nxv16i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vlseg2e8.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv16i8( undef, undef, i8* %base, i64 %vl) @@ -151,6 +160,7 @@ define @test_vlseg2_mask_nxv16i8(i8* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv16i8( undef, undef, i8* %base, i64 %vl) @@ -168,6 +178,7 @@ define @test_vlseg3_nxv16i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vlseg3e8.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv16i8( undef, undef, undef, i8* %base, i64 %vl) @@ -183,6 +194,7 @@ define @test_vlseg3_mask_nxv16i8(i8* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv16i8( undef, undef, undef, i8* %base, i64 %vl) @@ -200,6 +212,7 @@ define @test_vlseg4_nxv16i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vlseg4e8.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv16i8( undef, undef, undef, undef, i8* %base, i64 %vl) @@ -216,6 +229,7 @@ define @test_vlseg4_mask_nxv16i8(i8* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv16i8( undef, undef, undef, undef, i8* %base, i64 %vl) @@ -233,6 +247,7 @@ define @test_vlseg2_nxv1i64(i64* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: 
vlseg2e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv1i64( undef, undef, i64* %base, i64 %vl) @@ -247,6 +262,7 @@ define @test_vlseg2_mask_nxv1i64(i64* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv1i64( undef, undef, i64* %base, i64 %vl) @@ -264,6 +280,7 @@ define @test_vlseg3_nxv1i64(i64* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg3e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1i64( undef, undef, undef, i64* %base, i64 %vl) @@ -279,6 +296,7 @@ define @test_vlseg3_mask_nxv1i64(i64* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv1i64( undef, undef, undef, i64* %base, i64 %vl) @@ -296,6 +314,7 @@ define @test_vlseg4_nxv1i64(i64* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg4e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1i64( undef, undef, undef, undef, i64* %base, i64 %vl) @@ -312,6 +331,7 @@ define @test_vlseg4_mask_nxv1i64(i64* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv1i64( undef, undef, undef, undef, i64* %base, i64 %vl) @@ -329,6 +349,7 @@ define @test_vlseg5_nxv1i64(i64* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg5e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv1i64( undef, undef, undef, undef, undef, i64* %base, i64 %vl) @@ -346,6 +367,7 @@ define @test_vlseg5_mask_nxv1i64(i64* %base, i64 %vl, ,,,,} @llvm.riscv.vlseg5.nxv1i64( undef, undef, undef, undef, undef, i64* %base, i64 %vl) @@ -363,6 +385,7 @@ define @test_vlseg6_nxv1i64(i64* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg6e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1i64( undef, undef, undef, undef, undef, undef, i64* %base, i64 %vl) @@ -381,6 +404,7 @@ define @test_vlseg6_mask_nxv1i64(i64* %base, i64 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv1i64( undef, undef, undef, undef, undef, undef, i64* %base, i64 %vl) @@ -398,6 +422,7 @@ define @test_vlseg7_nxv1i64(i64* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg7e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1i64( undef, undef, undef, undef, undef, undef, undef, i64* %base, i64 %vl) @@ -417,6 +442,7 @@ define @test_vlseg7_mask_nxv1i64(i64* %base, i64 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv1i64( undef, undef, undef, undef, undef, undef, undef, i64* %base, i64 %vl) @@ -434,6 +460,7 @@ define @test_vlseg8_nxv1i64(i64* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg8e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1i64( undef, undef , undef , undef, undef , undef, undef, undef, i64* %base, i64 %vl) @@ -454,6 +481,7 @@ define @test_vlseg8_mask_nxv1i64(i64* %base, i64 %vl, ,,,,,,,} 
@llvm.riscv.vlseg8.nxv1i64( undef, undef , undef , undef, undef , undef, undef, undef, i64* %base, i64 %vl) @@ -471,6 +499,7 @@ define @test_vlseg2_nxv1i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg2e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv1i32( undef, undef, i32* %base, i64 %vl) @@ -485,6 +514,7 @@ define @test_vlseg2_mask_nxv1i32(i32* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv1i32( undef, undef, i32* %base, i64 %vl) @@ -502,6 +532,7 @@ define @test_vlseg3_nxv1i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg3e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1i32( undef, undef, undef, i32* %base, i64 %vl) @@ -517,6 +548,7 @@ define @test_vlseg3_mask_nxv1i32(i32* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv1i32( undef, undef, undef, i32* %base, i64 %vl) @@ -534,6 +566,7 @@ define @test_vlseg4_nxv1i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg4e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1i32( undef, undef, undef, undef, i32* %base, i64 %vl) @@ -550,6 +583,7 @@ define @test_vlseg4_mask_nxv1i32(i32* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv1i32( undef, undef, undef, undef, i32* %base, i64 %vl) @@ -567,6 +601,7 @@ define @test_vlseg5_nxv1i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg5e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv1i32( undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -584,6 +619,7 @@ define @test_vlseg5_mask_nxv1i32(i32* %base, i64 %vl, ,,,,} @llvm.riscv.vlseg5.nxv1i32( undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -601,6 +637,7 @@ define @test_vlseg6_nxv1i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg6e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1i32( undef, undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -619,6 +656,7 @@ define @test_vlseg6_mask_nxv1i32(i32* %base, i64 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv1i32( undef, undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -636,6 +674,7 @@ define @test_vlseg7_nxv1i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg7e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -655,6 +694,7 @@ define @test_vlseg7_mask_nxv1i32(i32* %base, i64 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv1i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -672,6 +712,7 @@ define @test_vlseg8_nxv1i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg8e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed 
$v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i64 %vl) @@ -692,6 +733,7 @@ define @test_vlseg8_mask_nxv1i32(i32* %base, i64 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv1i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i64 %vl) @@ -709,6 +751,7 @@ define @test_vlseg2_nxv8i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg2e16.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv8i16( undef, undef, i16* %base, i64 %vl) @@ -723,6 +766,7 @@ define @test_vlseg2_mask_nxv8i16(i16* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv8i16( undef, undef, i16* %base, i64 %vl) @@ -740,6 +784,7 @@ define @test_vlseg3_nxv8i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg3e16.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv8i16( undef, undef, undef, i16* %base, i64 %vl) @@ -755,6 +800,7 @@ define @test_vlseg3_mask_nxv8i16(i16* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv8i16( undef, undef, undef, i16* %base, i64 %vl) @@ -772,6 +818,7 @@ define @test_vlseg4_nxv8i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg4e16.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv8i16( undef, undef, undef, undef, i16* %base, i64 %vl) @@ -788,6 +835,7 @@ define @test_vlseg4_mask_nxv8i16(i16* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv8i16( undef, undef, undef, undef, i16* %base, i64 %vl) @@ -805,6 +853,7 @@ define @test_vlseg2_nxv4i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg2e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4i8( undef, undef, i8* %base, i64 %vl) @@ -819,6 +868,7 @@ define @test_vlseg2_mask_nxv4i8(i8* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv4i8( undef, undef, i8* %base, i64 %vl) @@ -836,6 +886,7 @@ define @test_vlseg3_nxv4i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg3e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv4i8( undef, undef, undef, i8* %base, i64 %vl) @@ -851,6 +902,7 @@ define @test_vlseg3_mask_nxv4i8(i8* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv4i8( undef, undef, undef, i8* %base, i64 %vl) @@ -868,6 +920,7 @@ define @test_vlseg4_nxv4i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg4e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv4i8( undef, undef, undef, undef, i8* %base, i64 %vl) @@ -884,6 +937,7 @@ define @test_vlseg4_mask_nxv4i8(i8* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv4i8( undef, undef, undef, undef, i8* %base, i64 %vl) @@ -901,6 +955,7 @@ define @test_vlseg5_nxv4i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; 
CHECK-NEXT: vlseg5e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv4i8( undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -918,6 +973,7 @@ define @test_vlseg5_mask_nxv4i8(i8* %base, i64 %vl, ,,,,} @llvm.riscv.vlseg5.nxv4i8( undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -935,6 +991,7 @@ define @test_vlseg6_nxv4i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg6e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv4i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -953,6 +1010,7 @@ define @test_vlseg6_mask_nxv4i8(i8* %base, i64 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv4i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -970,6 +1028,7 @@ define @test_vlseg7_nxv4i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg7e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv4i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -989,6 +1048,7 @@ define @test_vlseg7_mask_nxv4i8(i8* %base, i64 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv4i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1006,6 +1066,7 @@ define @test_vlseg8_nxv4i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg8e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv4i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %vl) @@ -1026,6 +1087,7 @@ define @test_vlseg8_mask_nxv4i8(i8* %base, i64 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv4i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %vl) @@ -1043,6 +1105,7 @@ define @test_vlseg2_nxv1i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg2e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv1i16( undef, undef, i16* %base, i64 %vl) @@ -1057,6 +1120,7 @@ define @test_vlseg2_mask_nxv1i16(i16* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv1i16( undef, undef, i16* %base, i64 %vl) @@ -1074,6 +1138,7 @@ define @test_vlseg3_nxv1i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg3e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1i16( undef, undef, undef, i16* %base, i64 %vl) @@ -1089,6 +1154,7 @@ define @test_vlseg3_mask_nxv1i16(i16* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv1i16( undef, undef, undef, i16* %base, i64 %vl) @@ -1106,6 +1172,7 @@ define @test_vlseg4_nxv1i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg4e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1i16( undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1122,6 +1189,7 @@ define @test_vlseg4_mask_nxv1i16(i16* 
%base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv1i16( undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1139,6 +1207,7 @@ define @test_vlseg5_nxv1i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg5e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv1i16( undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1156,6 +1225,7 @@ define @test_vlseg5_mask_nxv1i16(i16* %base, i64 %vl, ,,,,} @llvm.riscv.vlseg5.nxv1i16( undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1173,6 +1243,7 @@ define @test_vlseg6_nxv1i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg6e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1191,6 +1262,7 @@ define @test_vlseg6_mask_nxv1i16(i16* %base, i64 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv1i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1208,6 +1280,7 @@ define @test_vlseg7_nxv1i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg7e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1227,6 +1300,7 @@ define @test_vlseg7_mask_nxv1i16(i16* %base, i64 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv1i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1244,6 +1318,7 @@ define @test_vlseg8_nxv1i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg8e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %vl) @@ -1264,6 +1339,7 @@ define @test_vlseg8_mask_nxv1i16(i16* %base, i64 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv1i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %vl) @@ -1281,6 +1357,7 @@ define @test_vlseg2_nxv2i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg2e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2i32( undef, undef, i32* %base, i64 %vl) @@ -1295,6 +1372,7 @@ define @test_vlseg2_mask_nxv2i32(i32* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv2i32( undef, undef, i32* %base, i64 %vl) @@ -1312,6 +1390,7 @@ define @test_vlseg3_nxv2i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg3e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2i32( undef, undef, undef, i32* %base, i64 %vl) @@ -1327,6 +1406,7 @@ define @test_vlseg3_mask_nxv2i32(i32* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv2i32( undef, undef, undef, i32* %base, i64 %vl) @@ -1344,6 +1424,7 @@ define @test_vlseg4_nxv2i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, 
ta, mu ; CHECK-NEXT: vlseg4e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2i32( undef, undef, undef, undef, i32* %base, i64 %vl) @@ -1360,6 +1441,7 @@ define @test_vlseg4_mask_nxv2i32(i32* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv2i32( undef, undef, undef, undef, i32* %base, i64 %vl) @@ -1377,6 +1459,7 @@ define @test_vlseg5_nxv2i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg5e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv2i32( undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -1394,6 +1477,7 @@ define @test_vlseg5_mask_nxv2i32(i32* %base, i64 %vl, ,,,,} @llvm.riscv.vlseg5.nxv2i32( undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -1411,6 +1495,7 @@ define @test_vlseg6_nxv2i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg6e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv2i32( undef, undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -1429,6 +1514,7 @@ define @test_vlseg6_mask_nxv2i32(i32* %base, i64 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv2i32( undef, undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -1446,6 +1532,7 @@ define @test_vlseg7_nxv2i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg7e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv2i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -1465,6 +1552,7 @@ define @test_vlseg7_mask_nxv2i32(i32* %base, i64 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv2i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -1482,6 +1570,7 @@ define @test_vlseg8_nxv2i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg8e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv2i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i64 %vl) @@ -1502,6 +1591,7 @@ define @test_vlseg8_mask_nxv2i32(i32* %base, i64 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv2i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i64 %vl) @@ -1519,6 +1609,7 @@ define @test_vlseg2_nxv8i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg2e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv8i8( undef, undef, i8* %base, i64 %vl) @@ -1533,6 +1624,7 @@ define @test_vlseg2_mask_nxv8i8(i8* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv8i8( undef, undef, i8* %base, i64 %vl) @@ -1550,6 +1642,7 @@ define @test_vlseg3_nxv8i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg3e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv8i8( undef, undef, undef, i8* %base, i64 %vl) @@ -1565,6 +1658,7 @@ 
define @test_vlseg3_mask_nxv8i8(i8* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv8i8( undef, undef, undef, i8* %base, i64 %vl) @@ -1582,6 +1676,7 @@ define @test_vlseg4_nxv8i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg4e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv8i8( undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1598,6 +1693,7 @@ define @test_vlseg4_mask_nxv8i8(i8* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv8i8( undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1615,6 +1711,7 @@ define @test_vlseg5_nxv8i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg5e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv8i8( undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1632,6 +1729,7 @@ define @test_vlseg5_mask_nxv8i8(i8* %base, i64 %vl, ,,,,} @llvm.riscv.vlseg5.nxv8i8( undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1649,6 +1747,7 @@ define @test_vlseg6_nxv8i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg6e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv8i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1667,6 +1766,7 @@ define @test_vlseg6_mask_nxv8i8(i8* %base, i64 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv8i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1684,6 +1784,7 @@ define @test_vlseg7_nxv8i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg7e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv8i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1703,6 +1804,7 @@ define @test_vlseg7_mask_nxv8i8(i8* %base, i64 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv8i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1720,6 +1822,7 @@ define @test_vlseg8_nxv8i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg8e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv8i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %vl) @@ -1740,6 +1843,7 @@ define @test_vlseg8_mask_nxv8i8(i8* %base, i64 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv8i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %vl) @@ -1757,6 +1861,7 @@ define @test_vlseg2_nxv4i64(i64* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vlseg2e64.v v4, (a0) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4i64( undef, undef, i64* %base, i64 %vl) @@ -1771,6 +1876,7 @@ define @test_vlseg2_mask_nxv4i64(i64* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv4i64( undef, undef, i64* %base, i64 %vl) @@ -1788,6 +1894,7 @@ define @test_vlseg2_nxv4i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, 
ta, mu ; CHECK-NEXT: vlseg2e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4i16( undef, undef, i16* %base, i64 %vl) @@ -1802,6 +1909,7 @@ define @test_vlseg2_mask_nxv4i16(i16* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv4i16( undef, undef, i16* %base, i64 %vl) @@ -1819,6 +1927,7 @@ define @test_vlseg3_nxv4i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg3e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv4i16( undef, undef, undef, i16* %base, i64 %vl) @@ -1834,6 +1943,7 @@ define @test_vlseg3_mask_nxv4i16(i16* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv4i16( undef, undef, undef, i16* %base, i64 %vl) @@ -1851,6 +1961,7 @@ define @test_vlseg4_nxv4i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg4e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv4i16( undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1867,6 +1978,7 @@ define @test_vlseg4_mask_nxv4i16(i16* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv4i16( undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1884,6 +1996,7 @@ define @test_vlseg5_nxv4i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg5e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv4i16( undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1901,6 +2014,7 @@ define @test_vlseg5_mask_nxv4i16(i16* %base, i64 %vl, ,,,,} @llvm.riscv.vlseg5.nxv4i16( undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1918,6 +2032,7 @@ define @test_vlseg6_nxv4i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg6e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv4i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1936,6 +2051,7 @@ define @test_vlseg6_mask_nxv4i16(i16* %base, i64 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv4i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1953,6 +2069,7 @@ define @test_vlseg7_nxv4i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg7e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv4i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1972,6 +2089,7 @@ define @test_vlseg7_mask_nxv4i16(i16* %base, i64 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv4i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1989,6 +2107,7 @@ define @test_vlseg8_nxv4i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg8e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv4i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %vl) @@ -2009,6 +2128,7 @@ define 
@test_vlseg8_mask_nxv4i16(i16* %base, i64 %vl,
   %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>} @llvm.riscv.vlseg8.nxv4i16(<vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, i16* %base, i64 %vl)
@@ -2026,6 +2146,7 @@ define <vscale x 1 x i8> @test_vlseg2_nxv1i8(i8* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg2e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg2.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2040,6 +2161,7 @@ define <vscale x 1 x i8> @test_vlseg2_mask_nxv1i8(i8* %base, i64 %vl,
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg2.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2057,6 +2179,7 @@ define <vscale x 1 x i8> @test_vlseg3_nxv1i8(i8* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg3e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg3.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2072,6 +2195,7 @@ define <vscale x 1 x i8> @test_vlseg3_mask_nxv1i8(i8* %base, i64 %vl,
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg3.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2089,6 +2213,7 @@ define <vscale x 1 x i8> @test_vlseg4_nxv1i8(i8* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg4e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg4.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2105,6 +2230,7 @@ define <vscale x 1 x i8> @test_vlseg4_mask_nxv1i8(i8* %base, i64 %vl,
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg4.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2122,6 +2248,7 @@ define <vscale x 1 x i8> @test_vlseg5_nxv1i8(i8* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg5e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg5.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2139,6 +2266,7 @@ define <vscale x 1 x i8> @test_vlseg5_mask_nxv1i8(i8* %base, i64 %vl,
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg5.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2156,6 +2284,7 @@ define <vscale x 1 x i8> @test_vlseg6_nxv1i8(i8* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg6e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg6.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2174,6 +2303,7 @@ define <vscale x 1 x i8> @test_vlseg6_mask_nxv1i8(i8* %base, i64 %vl,
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg6.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2191,6 +2321,7 @@ define <vscale x 1 x i8> @test_vlseg7_nxv1i8(i8* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg7e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg7.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2210,6 +2341,7 @@ define <vscale x 1 x i8> @test_vlseg7_mask_nxv1i8(i8* %base, i64 %vl,
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg7.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2227,6 +2359,7 @@ define <vscale x 1 x i8> @test_vlseg8_nxv1i8(i8* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg8e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg8.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2247,6 +2380,7 @@ define <vscale x 1 x i8> @test_vlseg8_mask_nxv1i8(i8* %base, i64 %vl,
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>} @llvm.riscv.vlseg8.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i64 %vl)
@@ -2264,6 +2398,7 @@ define <vscale x 2 x i8> @test_vlseg2_nxv2i8(i8* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vlseg2e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>} @llvm.riscv.vlseg2.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, i8* %base, i64 %vl)
@@ -2278,6 +2413,7 @@ define <vscale x 2 x i8> @test_vlseg2_mask_nxv2i8(i8* %base, i64 %vl,
   %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>} @llvm.riscv.vlseg2.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, i8* %base, i64 %vl)
@@ -2295,6 +2431,7 @@ define <vscale x 2 x i8> @test_vlseg3_nxv2i8(i8* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vlseg3e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>} @llvm.riscv.vlseg3.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, i8* %base, i64 %vl)
@@ -2310,6 +2447,7 @@ define <vscale x 2 x i8> @test_vlseg3_mask_nxv2i8(i8* %base, i64 %vl,
   %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>} @llvm.riscv.vlseg3.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, i8* %base, i64 %vl)
@@ -2327,6 +2465,7 @@ define <vscale x 2 x i8> @test_vlseg4_nxv2i8(i8* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vlseg4e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>} @llvm.riscv.vlseg4.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, i8* %base, i64 %vl)
@@ -2343,6 +2482,7 @@ define <vscale x 2 x i8> @test_vlseg4_mask_nxv2i8(i8* %base, i64 %vl,
   %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>} @llvm.riscv.vlseg4.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, i8* %base, i64 %vl)
@@ -2360,6 +2500,7 @@ define <vscale x 2 x i8> @test_vlseg5_nxv2i8(i8* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vlseg5e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>} @llvm.riscv.vlseg5.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, i8* %base, i64 %vl)
@@ -2377,6 +2518,7 @@ define <vscale x 2 x i8> @test_vlseg5_mask_nxv2i8(i8* %base, i64 %vl,
   %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>} @llvm.riscv.vlseg5.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, i8* %base, i64 %vl)
@@ -2394,6 +2536,7 @@ define <vscale x 2 x i8> @test_vlseg6_nxv2i8(i8* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vlseg6e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>} @llvm.riscv.vlseg6.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, i8* %base, i64 %vl)
@@ -2412,6 +2555,7 @@ define <vscale x 2 x i8> @test_vlseg6_mask_nxv2i8(i8* %base, i64 %vl,
   %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>} @llvm.riscv.vlseg6.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, i8* %base, i64 %vl)
@@ -2429,6 +2573,7 @@ define <vscale x 2 x i8> @test_vlseg7_nxv2i8(i8* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vlseg7e8.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>} @llvm.riscv.vlseg7.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, i8* %base, i64 %vl)
@@ -2448,6 +2593,7 @@ define <vscale x 2 x i8> @test_vlseg7_mask_nxv2i8(i8* %base, i64 %vl,
   %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>} @llvm.riscv.vlseg7.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, i8* %base, i64 %vl)
@@
-2465,6 +2611,7 @@ define @test_vlseg8_nxv2i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg8e8.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv2i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %vl) @@ -2485,6 +2632,7 @@ define @test_vlseg8_mask_nxv2i8(i8* %base, i64 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv2i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %vl) @@ -2502,6 +2650,7 @@ define @test_vlseg2_nxv8i32(i32* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vlseg2e32.v v4, (a0) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv8i32( undef, undef, i32* %base, i64 %vl) @@ -2516,6 +2665,7 @@ define @test_vlseg2_mask_nxv8i32(i32* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv8i32( undef, undef, i32* %base, i64 %vl) @@ -2533,6 +2683,7 @@ define @test_vlseg2_nxv32i8(i8* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu ; CHECK-NEXT: vlseg2e8.v v4, (a0) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv32i8( undef, undef, i8* %base, i64 %vl) @@ -2547,6 +2698,7 @@ define @test_vlseg2_mask_nxv32i8(i8* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv32i8( undef, undef, i8* %base, i64 %vl) @@ -2564,6 +2716,7 @@ define @test_vlseg2_nxv2i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg2e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2i16( undef, undef, i16* %base, i64 %vl) @@ -2578,6 +2731,7 @@ define @test_vlseg2_mask_nxv2i16(i16* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv2i16( undef, undef, i16* %base, i64 %vl) @@ -2595,6 +2749,7 @@ define @test_vlseg3_nxv2i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg3e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2i16( undef, undef, undef, i16* %base, i64 %vl) @@ -2610,6 +2765,7 @@ define @test_vlseg3_mask_nxv2i16(i16* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv2i16( undef, undef, undef, i16* %base, i64 %vl) @@ -2627,6 +2783,7 @@ define @test_vlseg4_nxv2i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg4e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2i16( undef, undef, undef, undef, i16* %base, i64 %vl) @@ -2643,6 +2800,7 @@ define @test_vlseg4_mask_nxv2i16(i16* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv2i16( undef, undef, undef, undef, i16* %base, i64 %vl) @@ -2660,6 +2818,7 @@ define @test_vlseg5_nxv2i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg5e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv2i16( undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -2677,6 +2836,7 @@ define @test_vlseg5_mask_nxv2i16(i16* 
%base, i64 %vl, ,,,,} @llvm.riscv.vlseg5.nxv2i16( undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -2694,6 +2854,7 @@ define @test_vlseg6_nxv2i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg6e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv2i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -2712,6 +2873,7 @@ define @test_vlseg6_mask_nxv2i16(i16* %base, i64 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv2i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -2729,6 +2891,7 @@ define @test_vlseg7_nxv2i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg7e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv2i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -2748,6 +2911,7 @@ define @test_vlseg7_mask_nxv2i16(i16* %base, i64 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv2i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -2765,6 +2929,7 @@ define @test_vlseg8_nxv2i16(i16* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg8e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv2i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %vl) @@ -2785,6 +2950,7 @@ define @test_vlseg8_mask_nxv2i16(i16* %base, i64 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv2i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %vl) @@ -2802,6 +2968,7 @@ define @test_vlseg2_nxv2i64(i64* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vlseg2e64.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2i64( undef, undef, i64* %base, i64 %vl) @@ -2816,6 +2983,7 @@ define @test_vlseg2_mask_nxv2i64(i64* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv2i64( undef, undef, i64* %base, i64 %vl) @@ -2833,6 +3001,7 @@ define @test_vlseg3_nxv2i64(i64* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vlseg3e64.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2i64( undef, undef, undef, i64* %base, i64 %vl) @@ -2848,6 +3017,7 @@ define @test_vlseg3_mask_nxv2i64(i64* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv2i64( undef, undef, undef, i64* %base, i64 %vl) @@ -2865,6 +3035,7 @@ define @test_vlseg4_nxv2i64(i64* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vlseg4e64.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2i64( undef, undef, undef, undef, i64* %base, i64 %vl) @@ -2881,6 +3052,7 @@ define @test_vlseg4_mask_nxv2i64(i64* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv2i64( undef, undef, undef, undef, i64* %base, i64 %vl) @@ -2898,6 +3070,7 @@ define @test_vlseg2_nxv16f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli 
zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vlseg2e16.v v4, (a0) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv16f16( undef, undef, half* %base, i64 %vl) @@ -2912,6 +3085,7 @@ define @test_vlseg2_mask_nxv16f16(half* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv16f16( undef, undef, half* %base, i64 %vl) @@ -2929,6 +3103,7 @@ define @test_vlseg2_nxv4f64(double* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vlseg2e64.v v4, (a0) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4f64( undef, undef, double* %base, i64 %vl) @@ -2943,6 +3118,7 @@ define @test_vlseg2_mask_nxv4f64(double* %base, i64 %vl, < ; CHECK-NEXT: vlseg2e64.v v4, (a0) ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vlseg2e64.v v4, (a0), v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4f64( undef, undef, double* %base, i64 %vl) @@ -2960,6 +3136,7 @@ define @test_vlseg2_nxv1f64(double* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg2e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv1f64( undef, undef, double* %base, i64 %vl) @@ -2974,6 +3151,7 @@ define @test_vlseg2_mask_nxv1f64(double* %base, i64 %vl, < ; CHECK-NEXT: vlseg2e64.v v7, (a0) ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlseg2e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv1f64( undef, undef, double* %base, i64 %vl) @@ -2991,6 +3169,7 @@ define @test_vlseg3_nxv1f64(double* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg3e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1f64( undef, undef, undef, double* %base, i64 %vl) @@ -3006,6 +3185,7 @@ define @test_vlseg3_mask_nxv1f64(double* %base, i64 %vl, < ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlseg3e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1f64( undef, undef, undef, double* %base, i64 %vl) @@ -3023,6 +3203,7 @@ define @test_vlseg4_nxv1f64(double* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg4e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1f64( undef, undef, undef, undef, double* %base, i64 %vl) @@ -3039,6 +3220,7 @@ define @test_vlseg4_mask_nxv1f64(double* %base, i64 %vl, < ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlseg4e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1f64( undef, undef, undef, undef, double* %base, i64 %vl) @@ -3056,6 +3238,7 @@ define @test_vlseg5_nxv1f64(double* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg5e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = 
tail call {,,,,} @llvm.riscv.vlseg5.nxv1f64( undef, undef, undef, undef, undef, double* %base, i64 %vl) @@ -3073,6 +3256,7 @@ define @test_vlseg5_mask_nxv1f64(double* %base, i64 %vl, < ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlseg5e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv1f64( undef, undef, undef, undef, undef, double* %base, i64 %vl) @@ -3090,6 +3274,7 @@ define @test_vlseg6_nxv1f64(double* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg6e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1f64( undef, undef, undef, undef, undef, undef, double* %base, i64 %vl) @@ -3108,6 +3293,7 @@ define @test_vlseg6_mask_nxv1f64(double* %base, i64 %vl, < ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlseg6e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1f64( undef, undef, undef, undef, undef, undef, double* %base, i64 %vl) @@ -3125,6 +3311,7 @@ define @test_vlseg7_nxv1f64(double* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg7e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1f64( undef, undef, undef, undef, undef, undef, undef, double* %base, i64 %vl) @@ -3144,6 +3331,7 @@ define @test_vlseg7_mask_nxv1f64(double* %base, i64 %vl, < ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlseg7e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1f64( undef, undef, undef, undef, undef, undef, undef, double* %base, i64 %vl) @@ -3161,6 +3349,7 @@ define @test_vlseg8_nxv1f64(double* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg8e64.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1f64( undef, undef , undef , undef, undef , undef, undef, undef, double* %base, i64 %vl) @@ -3181,6 +3370,7 @@ define @test_vlseg8_mask_nxv1f64(double* %base, i64 %vl, < ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlseg8e64.v v7, (a0), v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1f64( undef, undef , undef , undef, undef , undef, undef, undef, double* %base, i64 %vl) @@ -3198,6 +3388,7 @@ define @test_vlseg2_nxv2f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg2e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2f32( undef, undef, float* %base, i64 %vl) @@ -3212,6 +3403,7 @@ define @test_vlseg2_mask_nxv2f32(float* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv2f32( undef, undef, float* %base, i64 %vl) @@ -3229,6 +3421,7 @@ define @test_vlseg3_nxv2f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; 
CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg3e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2f32( undef, undef, undef, float* %base, i64 %vl) @@ -3244,6 +3437,7 @@ define @test_vlseg3_mask_nxv2f32(float* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv2f32( undef, undef, undef, float* %base, i64 %vl) @@ -3261,6 +3455,7 @@ define @test_vlseg4_nxv2f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg4e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2f32( undef, undef, undef, undef, float* %base, i64 %vl) @@ -3277,6 +3472,7 @@ define @test_vlseg4_mask_nxv2f32(float* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv2f32( undef, undef, undef, undef, float* %base, i64 %vl) @@ -3294,6 +3490,7 @@ define @test_vlseg5_nxv2f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg5e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv2f32( undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3311,6 +3508,7 @@ define @test_vlseg5_mask_nxv2f32(float* %base, i64 %vl, ,,,,} @llvm.riscv.vlseg5.nxv2f32( undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3328,6 +3526,7 @@ define @test_vlseg6_nxv2f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg6e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv2f32( undef, undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3346,6 +3545,7 @@ define @test_vlseg6_mask_nxv2f32(float* %base, i64 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv2f32( undef, undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3363,6 +3563,7 @@ define @test_vlseg7_nxv2f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg7e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv2f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3382,6 +3583,7 @@ define @test_vlseg7_mask_nxv2f32(float* %base, i64 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv2f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3399,6 +3601,7 @@ define @test_vlseg8_nxv2f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg8e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv2f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i64 %vl) @@ -3419,6 +3622,7 @@ define @test_vlseg8_mask_nxv2f32(float* %base, i64 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv2f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i64 %vl) @@ -3436,6 +3640,7 @@ define @test_vlseg2_nxv1f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg2e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret 
entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv1f16( undef, undef, half* %base, i64 %vl) @@ -3450,6 +3655,7 @@ define @test_vlseg2_mask_nxv1f16(half* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv1f16( undef, undef, half* %base, i64 %vl) @@ -3467,6 +3673,7 @@ define @test_vlseg3_nxv1f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg3e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1f16( undef, undef, undef, half* %base, i64 %vl) @@ -3482,6 +3689,7 @@ define @test_vlseg3_mask_nxv1f16(half* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv1f16( undef, undef, undef, half* %base, i64 %vl) @@ -3499,6 +3707,7 @@ define @test_vlseg4_nxv1f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg4e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1f16( undef, undef, undef, undef, half* %base, i64 %vl) @@ -3515,6 +3724,7 @@ define @test_vlseg4_mask_nxv1f16(half* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv1f16( undef, undef, undef, undef, half* %base, i64 %vl) @@ -3532,6 +3742,7 @@ define @test_vlseg5_nxv1f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg5e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv1f16( undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -3549,6 +3760,7 @@ define @test_vlseg5_mask_nxv1f16(half* %base, i64 %vl, ,,,,} @llvm.riscv.vlseg5.nxv1f16( undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -3566,6 +3778,7 @@ define @test_vlseg6_nxv1f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg6e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1f16( undef, undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -3584,6 +3797,7 @@ define @test_vlseg6_mask_nxv1f16(half* %base, i64 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv1f16( undef, undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -3601,6 +3815,7 @@ define @test_vlseg7_nxv1f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg7e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -3620,6 +3835,7 @@ define @test_vlseg7_mask_nxv1f16(half* %base, i64 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv1f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -3637,6 +3853,7 @@ define @test_vlseg8_nxv1f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg8e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i64 %vl) @@ -3657,6 +3874,7 @@ define @test_vlseg8_mask_nxv1f16(half* %base, i64 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv1f16( undef, undef , undef , 
undef, undef , undef, undef, undef, half* %base, i64 %vl) @@ -3674,6 +3892,7 @@ define @test_vlseg2_nxv1f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg2e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv1f32( undef, undef, float* %base, i64 %vl) @@ -3688,6 +3907,7 @@ define @test_vlseg2_mask_nxv1f32(float* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv1f32( undef, undef, float* %base, i64 %vl) @@ -3705,6 +3925,7 @@ define @test_vlseg3_nxv1f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg3e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv1f32( undef, undef, undef, float* %base, i64 %vl) @@ -3720,6 +3941,7 @@ define @test_vlseg3_mask_nxv1f32(float* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv1f32( undef, undef, undef, float* %base, i64 %vl) @@ -3737,6 +3959,7 @@ define @test_vlseg4_nxv1f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg4e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv1f32( undef, undef, undef, undef, float* %base, i64 %vl) @@ -3753,6 +3976,7 @@ define @test_vlseg4_mask_nxv1f32(float* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv1f32( undef, undef, undef, undef, float* %base, i64 %vl) @@ -3770,6 +3994,7 @@ define @test_vlseg5_nxv1f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg5e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv1f32( undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3787,6 +4012,7 @@ define @test_vlseg5_mask_nxv1f32(float* %base, i64 %vl, ,,,,} @llvm.riscv.vlseg5.nxv1f32( undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3804,6 +4030,7 @@ define @test_vlseg6_nxv1f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg6e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlseg6.nxv1f32( undef, undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3822,6 +4049,7 @@ define @test_vlseg6_mask_nxv1f32(float* %base, i64 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv1f32( undef, undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3839,6 +4067,7 @@ define @test_vlseg7_nxv1f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg7e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv1f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3858,6 +4087,7 @@ define @test_vlseg7_mask_nxv1f32(float* %base, i64 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv1f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3875,6 +4105,7 @@ define @test_vlseg8_nxv1f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg8e32.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 
killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv1f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i64 %vl) @@ -3895,6 +4126,7 @@ define @test_vlseg8_mask_nxv1f32(float* %base, i64 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv1f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i64 %vl) @@ -3912,6 +4144,7 @@ define @test_vlseg2_nxv8f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg2e16.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv8f16( undef, undef, half* %base, i64 %vl) @@ -3926,6 +4159,7 @@ define @test_vlseg2_mask_nxv8f16(half* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv8f16( undef, undef, half* %base, i64 %vl) @@ -3943,6 +4177,7 @@ define @test_vlseg3_nxv8f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg3e16.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv8f16( undef, undef, undef, half* %base, i64 %vl) @@ -3958,6 +4193,7 @@ define @test_vlseg3_mask_nxv8f16(half* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv8f16( undef, undef, undef, half* %base, i64 %vl) @@ -3975,6 +4211,7 @@ define @test_vlseg4_nxv8f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg4e16.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv8f16( undef, undef, undef, undef, half* %base, i64 %vl) @@ -3991,6 +4228,7 @@ define @test_vlseg4_mask_nxv8f16(half* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv8f16( undef, undef, undef, undef, half* %base, i64 %vl) @@ -4008,6 +4246,7 @@ define @test_vlseg2_nxv8f32(float* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vlseg2e32.v v4, (a0) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv8f32( undef, undef, float* %base, i64 %vl) @@ -4022,6 +4261,7 @@ define @test_vlseg2_mask_nxv8f32(float* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv8f32( undef, undef, float* %base, i64 %vl) @@ -4039,6 +4279,7 @@ define @test_vlseg2_nxv2f64(double* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vlseg2e64.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2f64( undef, undef, double* %base, i64 %vl) @@ -4053,6 +4294,7 @@ define @test_vlseg2_mask_nxv2f64(double* %base, i64 %vl, < ; CHECK-NEXT: vlseg2e64.v v6, (a0) ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vlseg2e64.v v6, (a0), v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2f64( undef, undef, double* %base, i64 %vl) @@ -4070,6 +4312,7 @@ define @test_vlseg3_nxv2f64(double* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vlseg3e64.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2f64( undef, undef, undef, double* 
%base, i64 %vl) @@ -4085,6 +4328,7 @@ define @test_vlseg3_mask_nxv2f64(double* %base, i64 %vl, < ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vlseg3e64.v v6, (a0), v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2f64( undef, undef, undef, double* %base, i64 %vl) @@ -4102,6 +4346,7 @@ define @test_vlseg4_nxv2f64(double* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vlseg4e64.v v6, (a0) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2f64( undef, undef, undef, undef, double* %base, i64 %vl) @@ -4118,6 +4363,7 @@ define @test_vlseg4_mask_nxv2f64(double* %base, i64 %vl, < ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vlseg4e64.v v6, (a0), v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2f64( undef, undef, undef, undef, double* %base, i64 %vl) @@ -4135,6 +4381,7 @@ define @test_vlseg2_nxv4f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg2e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv4f16( undef, undef, half* %base, i64 %vl) @@ -4149,6 +4396,7 @@ define @test_vlseg2_mask_nxv4f16(half* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv4f16( undef, undef, half* %base, i64 %vl) @@ -4166,6 +4414,7 @@ define @test_vlseg3_nxv4f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg3e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv4f16( undef, undef, undef, half* %base, i64 %vl) @@ -4181,6 +4430,7 @@ define @test_vlseg3_mask_nxv4f16(half* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv4f16( undef, undef, undef, half* %base, i64 %vl) @@ -4198,6 +4448,7 @@ define @test_vlseg4_nxv4f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg4e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv4f16( undef, undef, undef, undef, half* %base, i64 %vl) @@ -4214,6 +4465,7 @@ define @test_vlseg4_mask_nxv4f16(half* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv4f16( undef, undef, undef, undef, half* %base, i64 %vl) @@ -4231,6 +4483,7 @@ define @test_vlseg5_nxv4f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg5e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv4f16( undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -4248,6 +4501,7 @@ define @test_vlseg5_mask_nxv4f16(half* %base, i64 %vl, ,,,,} @llvm.riscv.vlseg5.nxv4f16( undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -4265,6 +4519,7 @@ define @test_vlseg6_nxv4f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg6e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} 
@llvm.riscv.vlseg6.nxv4f16( undef, undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -4283,6 +4538,7 @@ define @test_vlseg6_mask_nxv4f16(half* %base, i64 %vl, ,,,,,} @llvm.riscv.vlseg6.nxv4f16( undef, undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -4300,6 +4556,7 @@ define @test_vlseg7_nxv4f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg7e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlseg7.nxv4f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -4319,6 +4576,7 @@ define @test_vlseg7_mask_nxv4f16(half* %base, i64 %vl, ,,,,,,} @llvm.riscv.vlseg7.nxv4f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -4336,6 +4594,7 @@ define @test_vlseg8_nxv4f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg8e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlseg8.nxv4f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i64 %vl) @@ -4356,6 +4615,7 @@ define @test_vlseg8_mask_nxv4f16(half* %base, i64 %vl, ,,,,,,,} @llvm.riscv.vlseg8.nxv4f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i64 %vl) @@ -4373,6 +4633,7 @@ define @test_vlseg2_nxv2f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg2e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlseg2.nxv2f16( undef, undef, half* %base, i64 %vl) @@ -4387,6 +4648,7 @@ define @test_vlseg2_mask_nxv2f16(half* %base, i64 %vl, ,} @llvm.riscv.vlseg2.nxv2f16( undef, undef, half* %base, i64 %vl) @@ -4404,6 +4666,7 @@ define @test_vlseg3_nxv2f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg3e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlseg3.nxv2f16( undef, undef, undef, half* %base, i64 %vl) @@ -4419,6 +4682,7 @@ define @test_vlseg3_mask_nxv2f16(half* %base, i64 %vl, ,,} @llvm.riscv.vlseg3.nxv2f16( undef, undef, undef, half* %base, i64 %vl) @@ -4436,6 +4700,7 @@ define @test_vlseg4_nxv2f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg4e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlseg4.nxv2f16( undef, undef, undef, undef, half* %base, i64 %vl) @@ -4452,6 +4717,7 @@ define @test_vlseg4_mask_nxv2f16(half* %base, i64 %vl, ,,,} @llvm.riscv.vlseg4.nxv2f16( undef, undef, undef, undef, half* %base, i64 %vl) @@ -4469,6 +4735,7 @@ define @test_vlseg5_nxv2f16(half* %base, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg5e16.v v7, (a0) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlseg5.nxv2f16( undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -4486,6 +4753,7 @@ define @test_vlseg5_mask_nxv2f16(half* %base, i64 %vl, ,,,,} @llvm.riscv.vlseg5.nxv2f16( undef, undef, undef, undef, undef, half* 
%base, i64 %vl)
@@ -4503,6 +4771,7 @@ define <vscale x 2 x half> @test_vlseg6_nxv2f16(half* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg6e16.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlseg6.nxv2f16(<vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, half* %base, i64 %vl)
@@ -4521,6 +4790,7 @@ define <vscale x 2 x half> @test_vlseg6_mask_nxv2f16(half* %base, i64 %vl,
   %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlseg6.nxv2f16(<vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, half* %base, i64 %vl)
@@ -4538,6 +4808,7 @@ define <vscale x 2 x half> @test_vlseg7_nxv2f16(half* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg7e16.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlseg7.nxv2f16(<vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, half* %base, i64 %vl)
@@ -4557,6 +4828,7 @@ define <vscale x 2 x half> @test_vlseg7_mask_nxv2f16(half* %base, i64 %vl,
   %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlseg7.nxv2f16(<vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, half* %base, i64 %vl)
@@ -4574,6 +4846,7 @@ define <vscale x 2 x half> @test_vlseg8_nxv2f16(half* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg8e16.v v7, (a0)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlseg8.nxv2f16(<vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, half* %base, i64 %vl)
@@ -4594,6 +4867,7 @@ define <vscale x 2 x half> @test_vlseg8_mask_nxv2f16(half* %base, i64 %vl,
   %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlseg8.nxv2f16(<vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, half* %base, i64 %vl)
@@ -4611,6 +4885,7 @@ define <vscale x 4 x float> @test_vlseg2_nxv4f32(float* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vlseg2e32.v v6, (a0)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlseg2.nxv4f32(<vscale x 4 x float> undef, <vscale x 4 x float> undef, float* %base, i64 %vl)
@@ -4625,6 +4900,7 @@ define <vscale x 4 x float> @test_vlseg2_mask_nxv4f32(float* %base, i64 %vl,
   %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlseg2.nxv4f32(<vscale x 4 x float> undef, <vscale x 4 x float> undef, float* %base, i64 %vl)
@@ -4642,6 +4918,7 @@ define <vscale x 4 x float> @test_vlseg3_nxv4f32(float* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vlseg3e32.v v6, (a0)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlseg3.nxv4f32(<vscale x 4 x float> undef, <vscale x 4 x float> undef, <vscale x 4 x float> undef, float* %base, i64 %vl)
@@ -4657,6 +4934,7 @@ define <vscale x 4 x float> @test_vlseg3_mask_nxv4f32(float* %base, i64 %vl,
   %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlseg3.nxv4f32(<vscale x 4 x float> undef, <vscale x 4 x float> undef, <vscale x 4 x float> undef, float* %base, i64 %vl)
@@ -4674,6 +4952,7 @@ define <vscale x 4 x float> @test_vlseg4_nxv4f32(float* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vlseg4e32.v v6, (a0)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlseg4.nxv4f32(<vscale x 4 x float> undef, <vscale x 4 x float> undef, <vscale x 4 x float> undef, <vscale x 4 x float> undef, float* %base, i64 %vl)
@@ -4690,6 +4969,7 @@ define <vscale x 4 x float> @test_vlseg4_mask_nxv4f32(float* %base, i64 %vl,
   %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlseg4.nxv4f32(<vscale x 4 x float> undef, <vscale x 4 x float> undef, <vscale x 4 x float> undef, <vscale x 4 x float> undef, float* %base, i64 %vl)
diff --git a/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv32-dead.ll b/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv32-dead.ll
index 81bdab2b5563a..8269dbfc698d5 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv32-dead.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv32-dead.ll
@@ -23,6 +23,7 @@ entry:
 define void @test_vlseg2ff_mask_dead_value(<vscale x 16 x i16> %val, i16* %base, i32 %vl, <vscale x 16 x i1> %mask, i32* %outvl) {
 ; CHECK-LABEL: test_vlseg2ff_mask_dead_value:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT: vlseg2e16ff.v v8, (a0), v0.t
@@ -41,6 +42,7 @@ define <vscale x 16 x i16> @test_vlseg2ff_dead_vl(i16* %base, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0)
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i32} @llvm.riscv.vlseg2ff.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, i16* %base, i32 %vl)
@@ -54,6 +56,7 @@ define <vscale x 16 x i16> @test_vlseg2ff_mask_dead_vl(<vscale x 16 x i16> %val,
 ; CHECK-NEXT: vmv4r.v v4, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0), v0.t
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i32} @llvm.riscv.vlseg2ff.mask.nxv16i16(<vscale x 16 x i16> %val, <vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i1> %mask, i32 %vl, i32 1)
@@ -75,6 +78,7 @@ entry:
 define void @test_vlseg2ff_mask_dead_all(<vscale x 16 x i16> %val, i16* %base, i32 %vl, <vscale x 16 x i1> %mask) {
 ; CHECK-LABEL: test_vlseg2ff_mask_dead_all:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT: vlseg2e16ff.v v8, (a0), v0.t
diff --git a/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv32.ll b/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv32.ll
index 8b03ec5973f3e..15b7bbd3f1fa6 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv32.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv32.ll
@@ -12,6 +12,7 @@ define <vscale x 16 x i16> @test_vlseg2ff_nxv16i16(i16* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i32} @llvm.riscv.vlseg2ff.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, i16* %base, i32 %vl)
@@ -29,6 +30,7 @@ define <vscale x 16 x i16> @test_vlseg2ff_mask_nxv16i16(<vscale x 16 x i16> %val
 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i32} @llvm.riscv.vlseg2ff.mask.nxv16i16(<vscale x 16 x i16> %val, <vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i1> %mask, i32 %vl, i32 1)
@@ -48,6 +50,7 @@ define <vscale x 1 x i8> @test_vlseg2ff_nxv1i8(i8* %base, i32 %vl, i32* %outvl)
 ; CHECK-NEXT: vlseg2e8ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg2ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i32 %vl)
@@ -65,6 +68,7 @@ define <vscale x 1 x i8> @test_vlseg2ff_mask_nxv1i8(<vscale x 1 x i8> %val, i8*
 ; CHECK-NEXT: vlseg2e8ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg2ff.mask.nxv1i8(<vscale x 1 x i8> %val, <vscale x 1 x i8> %val, i8* %base, <vscale x 1 x i1> %mask, i32 %vl, i32 1)
@@ -84,6 +88,7 @@ define <vscale x 1 x i8> @test_vlseg3ff_nxv1i8(i8* %base, i32 %vl, i32* %outvl)
 ; CHECK-NEXT: vlseg3e8ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg3ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i32 %vl)
@@ -97,11 +102,12 @@ define <vscale x 1 x i8> @test_vlseg3ff_mask_nxv1i8(<vscale x 1 x i8> %val, i8*
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg3e8ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg3ff.mask.nxv1i8(<vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, i8* %base, <vscale x 1 x i1> %mask, i32 %vl, i32 1)
@@ -121,6 +127,7 @@ define <vscale x 1 x i8> @test_vlseg4ff_nxv1i8(i8* %base, i32 %vl, i32* %outvl)
 ; CHECK-NEXT: vlseg4e8ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg4ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i32 %vl)
@@ -134,12 +141,13 @@ define <vscale x 1 x i8> @test_vlseg4ff_mask_nxv1i8(<vscale x 1 x i8> %val, i8*
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg4e8ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg4ff.mask.nxv1i8(<vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, i8* %base, <vscale x 1 x i1> %mask, i32 %vl, i32 1)
@@ -159,6 +167,7 @@ define <vscale x 1 x i8> @test_vlseg5ff_nxv1i8(i8* %base, i32 %vl, i32* %outvl)
 ; CHECK-NEXT: vlseg5e8ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg5ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i32 %vl)
@@ -172,13 +181,14 @@ define <vscale x 1 x i8> @test_vlseg5ff_mask_nxv1i8(<vscale x 1 x i8> %val, i8*
 ; CHECK-LABEL: test_vlseg5ff_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg5e8ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg5ff.mask.nxv1i8(<vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, i8* %base, <vscale x 1 x i1> %mask, i32 %vl, i32 1)
@@ -198,6 +208,7 @@ define <vscale x 1 x i8> @test_vlseg6ff_nxv1i8(i8* %base, i32 %vl, i32* %outvl)
 ; CHECK-NEXT: vlseg6e8ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg6ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i32 %vl)
@@ -211,14 +222,15 @@ define <vscale x 1 x i8> @test_vlseg6ff_mask_nxv1i8(<vscale x 1 x i8> %val, i8*
 ; CHECK-LABEL: test_vlseg6ff_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg6e8ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg6ff.mask.nxv1i8(<vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, i8* %base, <vscale x 1 x i1> %mask, i32 %vl, i32 1)
@@ -238,6 +250,7 @@ define <vscale x 1 x i8> @test_vlseg7ff_nxv1i8(i8* %base, i32 %vl, i32* %outvl)
 ; CHECK-NEXT: vlseg7e8ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg7ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i32 %vl)
@@ -251,15 +264,16 @@ define <vscale x 1 x i8> @test_vlseg7ff_mask_nxv1i8(<vscale x 1 x i8> %val, i8*
 ; CHECK-LABEL: test_vlseg7ff_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg7e8ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg7ff.mask.nxv1i8(<vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, i8* %base, <vscale x 1 x i1> %mask, i32 %vl, i32 1)
@@ -279,6 +293,7 @@ define <vscale x 1 x i8> @test_vlseg8ff_nxv1i8(i8* %base, i32 %vl, i32* %outvl)
 ; CHECK-NEXT: vlseg8e8ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg8ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, i8* %base, i32 %vl)
@@ -292,16 +307,17 @@ define <vscale x 1 x i8> @test_vlseg8ff_mask_nxv1i8(<vscale x 1 x i8> %val, i8*
 ; CHECK-LABEL: test_vlseg8ff_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
+; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
 ; CHECK-NEXT: vlseg8e8ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i32} @llvm.riscv.vlseg8ff.mask.nxv1i8(<vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, <vscale x 1 x i8> %val, i8* %base, <vscale x 1 x i1> %mask, i32 %vl, i32 1)
@@ -321,6 +337,7 @@ define <vscale x 16 x i8> @test_vlseg2ff_nxv16i8(i8* %base, i32 %vl, i32* %outvl
 ; CHECK-NEXT: vlseg2e8ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 16 x i8>,<vscale x 16 x i8>, i32} @llvm.riscv.vlseg2ff.nxv16i8(<vscale x 16 x i8> undef, <vscale x 16 x i8> undef, i8* %base, i32 %vl)
@@ -338,6 +355,7 @@ define <vscale x 16 x i8> @test_vlseg2ff_mask_nxv16i8(<vscale x 16 x i8> %val, i
 ; CHECK-NEXT: vlseg2e8ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {<vscale x 16 x i8>,<vscale x 16 x i8>, i32} @llvm.riscv.vlseg2ff.mask.nxv16i8(<vscale x 16 x i8> %val, <vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i1> %mask, i32 %vl, i32 1)
@@
-357,6 +375,7 @@ define @test_vlseg3ff_nxv16i8(i8* %base, i32 %vl, i32* %outvl ; CHECK-NEXT: vlseg3e8ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv16i8( undef, undef, undef, i8* %base, i32 %vl) @@ -370,11 +389,12 @@ define @test_vlseg3ff_mask_nxv16i8( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vlseg3e8ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv16i8( %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -394,6 +414,7 @@ define @test_vlseg4ff_nxv16i8(i8* %base, i32 %vl, i32* %outvl ; CHECK-NEXT: vlseg4e8ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv16i8( undef, undef, undef, undef, i8* %base, i32 %vl) @@ -407,12 +428,13 @@ define @test_vlseg4ff_mask_nxv16i8( %val, i ; CHECK-LABEL: test_vlseg4ff_mask_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v10, v6 +; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vlseg4e8ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv16i8( %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -432,6 +454,7 @@ define @test_vlseg2ff_nxv2i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg2e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv2i32( undef, undef, i32* %base, i32 %vl) @@ -449,6 +472,7 @@ define @test_vlseg2ff_mask_nxv2i32( %val, i ; CHECK-NEXT: vlseg2e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv2i32( %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -468,6 +492,7 @@ define @test_vlseg3ff_nxv2i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg3e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv2i32( undef, undef, undef, i32* %base, i32 %vl) @@ -481,11 +506,12 @@ define @test_vlseg3ff_mask_nxv2i32( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg3e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv2i32( %val, %val, %val, i32* %base, %mask, 
i32 %vl, i32 1) @@ -505,6 +531,7 @@ define @test_vlseg4ff_nxv2i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg4e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv2i32( undef, undef, undef, undef, i32* %base, i32 %vl) @@ -518,12 +545,13 @@ define @test_vlseg4ff_mask_nxv2i32( %val, i ; CHECK-LABEL: test_vlseg4ff_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg4e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv2i32( %val, %val, %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -543,6 +571,7 @@ define @test_vlseg5ff_nxv2i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg5e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv2i32( undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -556,13 +585,14 @@ define @test_vlseg5ff_mask_nxv2i32( %val, i ; CHECK-LABEL: test_vlseg5ff_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg5e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv2i32( %val, %val, %val, %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -582,6 +612,7 @@ define @test_vlseg6ff_nxv2i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg6e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv2i32( undef, undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -595,14 +626,15 @@ define @test_vlseg6ff_mask_nxv2i32( %val, i ; CHECK-LABEL: test_vlseg6ff_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg6e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.mask.nxv2i32( %val, %val, %val, %val, %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -622,6 +654,7 @@ define @test_vlseg7ff_nxv2i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg7e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: 
%0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv2i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -635,15 +668,16 @@ define @test_vlseg7ff_mask_nxv2i32( %val, i ; CHECK-LABEL: test_vlseg7ff_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg7e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv2i32( %val, %val, %val, %val, %val, %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -663,6 +697,7 @@ define @test_vlseg8ff_nxv2i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg8e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv2i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i32 %vl) @@ -676,16 +711,17 @@ define @test_vlseg8ff_mask_nxv2i32( %val, i ; CHECK-LABEL: test_vlseg8ff_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg8e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv2i32( %val, %val, %val, %val, %val, %val, %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -705,6 +741,7 @@ define @test_vlseg2ff_nxv4i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg2e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv4i16( undef, undef, i16* %base, i32 %vl) @@ -722,6 +759,7 @@ define @test_vlseg2ff_mask_nxv4i16( %val, i ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv4i16( %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -741,6 +779,7 @@ define @test_vlseg3ff_nxv4i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg3e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv4i16( undef, undef, undef, i16* %base, i32 %vl) @@ -754,11 +793,12 @@ define @test_vlseg3ff_mask_nxv4i16( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; 
CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv4i16( %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -778,6 +818,7 @@ define @test_vlseg4ff_nxv4i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg4e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv4i16( undef, undef, undef, undef, i16* %base, i32 %vl) @@ -791,12 +832,13 @@ define @test_vlseg4ff_mask_nxv4i16( %val, i ; CHECK-LABEL: test_vlseg4ff_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv4i16( %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -816,6 +858,7 @@ define @test_vlseg5ff_nxv4i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg5e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv4i16( undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -829,13 +872,14 @@ define @test_vlseg5ff_mask_nxv4i16( %val, i ; CHECK-LABEL: test_vlseg5ff_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv4i16( %val, %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -855,6 +899,7 @@ define @test_vlseg6ff_nxv4i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg6e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv4i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -868,14 +913,15 @@ define @test_vlseg6ff_mask_nxv4i16( %val, i ; CHECK-LABEL: test_vlseg6ff_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, 
i32} @llvm.riscv.vlseg6ff.mask.nxv4i16( %val, %val, %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -895,6 +941,7 @@ define @test_vlseg7ff_nxv4i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg7e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv4i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -908,15 +955,16 @@ define @test_vlseg7ff_mask_nxv4i16( %val, i ; CHECK-LABEL: test_vlseg7ff_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv4i16( %val, %val, %val, %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -936,6 +984,7 @@ define @test_vlseg8ff_nxv4i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg8e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv4i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %vl) @@ -949,16 +998,17 @@ define @test_vlseg8ff_mask_nxv4i16( %val, i ; CHECK-LABEL: test_vlseg8ff_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv4i16( %val, %val, %val, %val, %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -978,6 +1028,7 @@ define @test_vlseg2ff_nxv1i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg2e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv1i32( undef, undef, i32* %base, i32 %vl) @@ -995,6 +1046,7 @@ define @test_vlseg2ff_mask_nxv1i32( %val, i ; CHECK-NEXT: vlseg2e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv1i32( %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -1014,6 +1066,7 @@ define @test_vlseg3ff_nxv1i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg3e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl 
; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv1i32( undef, undef, undef, i32* %base, i32 %vl) @@ -1027,11 +1080,12 @@ define @test_vlseg3ff_mask_nxv1i32( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg3e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv1i32( %val, %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -1051,6 +1105,7 @@ define @test_vlseg4ff_nxv1i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg4e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv1i32( undef, undef, undef, undef, i32* %base, i32 %vl) @@ -1064,12 +1119,13 @@ define @test_vlseg4ff_mask_nxv1i32( %val, i ; CHECK-LABEL: test_vlseg4ff_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg4e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv1i32( %val, %val, %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -1089,6 +1145,7 @@ define @test_vlseg5ff_nxv1i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg5e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv1i32( undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -1102,13 +1159,14 @@ define @test_vlseg5ff_mask_nxv1i32( %val, i ; CHECK-LABEL: test_vlseg5ff_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg5e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv1i32( %val, %val, %val, %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -1128,6 +1186,7 @@ define @test_vlseg6ff_nxv1i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg6e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv1i32( undef, undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -1141,14 +1200,15 @@ define @test_vlseg6ff_mask_nxv1i32( %val, i ; CHECK-LABEL: test_vlseg6ff_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v 
v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg6e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.mask.nxv1i32( %val, %val, %val, %val, %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -1168,6 +1228,7 @@ define @test_vlseg7ff_nxv1i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg7e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv1i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i32 %vl) @@ -1181,15 +1242,16 @@ define @test_vlseg7ff_mask_nxv1i32( %val, i ; CHECK-LABEL: test_vlseg7ff_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg7e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv1i32( %val, %val, %val, %val, %val, %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -1209,6 +1271,7 @@ define @test_vlseg8ff_nxv1i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg8e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv1i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i32 %vl) @@ -1222,16 +1285,17 @@ define @test_vlseg8ff_mask_nxv1i32( %val, i ; CHECK-LABEL: test_vlseg8ff_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg8e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv1i32( %val, %val, %val, %val, %val, %val, %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -1251,6 +1315,7 @@ define @test_vlseg2ff_nxv8i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg2e16ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv8i16( undef, undef, i16* %base, i32 %vl) @@ -1268,6 +1333,7 @@ define @test_vlseg2ff_mask_nxv8i16( %val, i ; CHECK-NEXT: 
vlseg2e16ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv8i16( %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -1287,6 +1353,7 @@ define @test_vlseg3ff_nxv8i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg3e16ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv8i16( undef, undef, undef, i16* %base, i32 %vl) @@ -1300,11 +1367,12 @@ define @test_vlseg3ff_mask_nxv8i16( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg3e16ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv8i16( %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -1324,6 +1392,7 @@ define @test_vlseg4ff_nxv8i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg4e16ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv8i16( undef, undef, undef, undef, i16* %base, i32 %vl) @@ -1337,12 +1406,13 @@ define @test_vlseg4ff_mask_nxv8i16( %val, i ; CHECK-LABEL: test_vlseg4ff_mask_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v10, v6 +; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg4e16ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv8i16( %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -1362,6 +1432,7 @@ define @test_vlseg2ff_nxv8i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg2e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv8i8( undef, undef, i8* %base, i32 %vl) @@ -1379,6 +1450,7 @@ define @test_vlseg2ff_mask_nxv8i8( %val, i8* ; CHECK-NEXT: vlseg2e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv8i8( %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -1398,6 +1470,7 @@ define @test_vlseg3ff_nxv8i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg3e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv8i8( undef, undef, undef, i8* %base, i32 %vl) @@ -1411,11 +1484,12 @@ define @test_vlseg3ff_mask_nxv8i8( %val, i8* ; CHECK-LABEL: test_vlseg3ff_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, 
a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg3e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv8i8( %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -1435,6 +1509,7 @@ define @test_vlseg4ff_nxv8i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg4e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv8i8( undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1448,12 +1523,13 @@ define @test_vlseg4ff_mask_nxv8i8( %val, i8* ; CHECK-LABEL: test_vlseg4ff_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg4e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv8i8( %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -1473,6 +1549,7 @@ define @test_vlseg5ff_nxv8i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg5e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv8i8( undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1486,13 +1563,14 @@ define @test_vlseg5ff_mask_nxv8i8( %val, i8* ; CHECK-LABEL: test_vlseg5ff_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg5e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv8i8( %val, %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -1512,6 +1590,7 @@ define @test_vlseg6ff_nxv8i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg6e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv8i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1525,14 +1604,15 @@ define @test_vlseg6ff_mask_nxv8i8( %val, i8* ; CHECK-LABEL: test_vlseg6ff_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg6e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.mask.nxv8i8( %val, %val, %val, %val, %val, %val, i8* %base, %mask, i32 
%vl, i32 1) @@ -1552,6 +1632,7 @@ define @test_vlseg7ff_nxv8i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg7e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv8i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1565,15 +1646,16 @@ define @test_vlseg7ff_mask_nxv8i8( %val, i8* ; CHECK-LABEL: test_vlseg7ff_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg7e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv8i8( %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -1593,6 +1675,7 @@ define @test_vlseg8ff_nxv8i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg8e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv8i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %vl) @@ -1606,16 +1689,17 @@ define @test_vlseg8ff_mask_nxv8i8( %val, i8* ; CHECK-LABEL: test_vlseg8ff_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg8e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv8i8( %val, %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -1635,6 +1719,7 @@ define @test_vlseg2ff_nxv8i32(i32* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg2e32ff.v v4, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv8i32( undef, undef, i32* %base, i32 %vl) @@ -1652,6 +1737,7 @@ define @test_vlseg2ff_mask_nxv8i32( %val, i ; CHECK-NEXT: vlseg2e32ff.v v4, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv8i32( %val, %val, i32* %base, %mask, i32 %vl, i32 1) @@ -1671,6 +1757,7 @@ define @test_vlseg2ff_nxv4i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg2e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; 
CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv4i8( undef, undef, i8* %base, i32 %vl) @@ -1688,6 +1775,7 @@ define @test_vlseg2ff_mask_nxv4i8( %val, i8* ; CHECK-NEXT: vlseg2e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv4i8( %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -1707,6 +1795,7 @@ define @test_vlseg3ff_nxv4i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg3e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv4i8( undef, undef, undef, i8* %base, i32 %vl) @@ -1720,11 +1809,12 @@ define @test_vlseg3ff_mask_nxv4i8( %val, i8* ; CHECK-LABEL: test_vlseg3ff_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg3e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv4i8( %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -1744,6 +1834,7 @@ define @test_vlseg4ff_nxv4i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg4e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv4i8( undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1757,12 +1848,13 @@ define @test_vlseg4ff_mask_nxv4i8( %val, i8* ; CHECK-LABEL: test_vlseg4ff_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg4e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv4i8( %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -1782,6 +1874,7 @@ define @test_vlseg5ff_nxv4i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg5e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv4i8( undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1795,13 +1888,14 @@ define @test_vlseg5ff_mask_nxv4i8( %val, i8* ; CHECK-LABEL: test_vlseg5ff_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg5e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv4i8( %val, %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -1821,6 +1915,7 @@ define @test_vlseg6ff_nxv4i8(i8* %base, i32 %vl, i32* %outvl) ; 
CHECK-NEXT: vlseg6e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv4i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1834,14 +1929,15 @@ define @test_vlseg6ff_mask_nxv4i8( %val, i8* ; CHECK-LABEL: test_vlseg6ff_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg6e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.mask.nxv4i8( %val, %val, %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -1861,6 +1957,7 @@ define @test_vlseg7ff_nxv4i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg7e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv4i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -1874,15 +1971,16 @@ define @test_vlseg7ff_mask_nxv4i8( %val, i8* ; CHECK-LABEL: test_vlseg7ff_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg7e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv4i8( %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -1902,6 +2000,7 @@ define @test_vlseg8ff_nxv4i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg8e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv4i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %vl) @@ -1915,16 +2014,17 @@ define @test_vlseg8ff_mask_nxv4i8( %val, i8* ; CHECK-LABEL: test_vlseg8ff_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg8e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail 
call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv4i8( %val, %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -1944,6 +2044,7 @@ define @test_vlseg2ff_nxv1i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg2e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv1i16( undef, undef, i16* %base, i32 %vl) @@ -1961,6 +2062,7 @@ define @test_vlseg2ff_mask_nxv1i16( %val, i ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv1i16( %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -1980,6 +2082,7 @@ define @test_vlseg3ff_nxv1i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg3e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv1i16( undef, undef, undef, i16* %base, i32 %vl) @@ -1993,11 +2096,12 @@ define @test_vlseg3ff_mask_nxv1i16( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv1i16( %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -2017,6 +2121,7 @@ define @test_vlseg4ff_nxv1i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg4e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv1i16( undef, undef, undef, undef, i16* %base, i32 %vl) @@ -2030,12 +2135,13 @@ define @test_vlseg4ff_mask_nxv1i16( %val, i ; CHECK-LABEL: test_vlseg4ff_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv1i16( %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -2055,6 +2161,7 @@ define @test_vlseg5ff_nxv1i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg5e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv1i16( undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -2068,13 +2175,14 @@ define @test_vlseg5ff_mask_nxv1i16( %val, i ; CHECK-LABEL: test_vlseg5ff_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: 
vlseg5e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv1i16( %val, %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -2094,6 +2202,7 @@ define @test_vlseg6ff_nxv1i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg6e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv1i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -2107,14 +2216,15 @@ define @test_vlseg6ff_mask_nxv1i16( %val, i ; CHECK-LABEL: test_vlseg6ff_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.mask.nxv1i16( %val, %val, %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -2134,6 +2244,7 @@ define @test_vlseg7ff_nxv1i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg7e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv1i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl) @@ -2147,15 +2258,16 @@ define @test_vlseg7ff_mask_nxv1i16( %val, i ; CHECK-LABEL: test_vlseg7ff_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv1i16( %val, %val, %val, %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -2175,6 +2287,7 @@ define @test_vlseg8ff_nxv1i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg8e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv1i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %vl) @@ -2188,16 +2301,17 @@ define @test_vlseg8ff_mask_nxv1i16( %val, i ; CHECK-LABEL: test_vlseg8ff_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; 
CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv1i16( %val, %val, %val, %val, %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -2217,6 +2331,7 @@ define @test_vlseg2ff_nxv32i8(i8* %base, i32 %vl, i32* %outvl ; CHECK-NEXT: vlseg2e8ff.v v4, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv32i8( undef, undef, i8* %base, i32 %vl) @@ -2234,6 +2349,7 @@ define @test_vlseg2ff_mask_nxv32i8( %val, i ; CHECK-NEXT: vlseg2e8ff.v v4, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv32i8( %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -2253,6 +2369,7 @@ define @test_vlseg2ff_nxv2i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg2e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv2i8( undef, undef, i8* %base, i32 %vl) @@ -2270,6 +2387,7 @@ define @test_vlseg2ff_mask_nxv2i8( %val, i8* ; CHECK-NEXT: vlseg2e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv2i8( %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -2289,6 +2407,7 @@ define @test_vlseg3ff_nxv2i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg3e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv2i8( undef, undef, undef, i8* %base, i32 %vl) @@ -2302,11 +2421,12 @@ define @test_vlseg3ff_mask_nxv2i8( %val, i8* ; CHECK-LABEL: test_vlseg3ff_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg3e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv2i8( %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -2326,6 +2446,7 @@ define @test_vlseg4ff_nxv2i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg4e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv2i8( undef, undef, undef, undef, i8* %base, i32 %vl) @@ -2339,12 +2460,13 @@ define @test_vlseg4ff_mask_nxv2i8( %val, i8* ; CHECK-LABEL: test_vlseg4ff_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, 
ta, mu ; CHECK-NEXT: vlseg4e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv2i8( %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -2364,6 +2486,7 @@ define @test_vlseg5ff_nxv2i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg5e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv2i8( undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -2377,13 +2500,14 @@ define @test_vlseg5ff_mask_nxv2i8( %val, i8* ; CHECK-LABEL: test_vlseg5ff_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg5e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv2i8( %val, %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -2403,6 +2527,7 @@ define @test_vlseg6ff_nxv2i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg6e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv2i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -2416,14 +2541,15 @@ define @test_vlseg6ff_mask_nxv2i8( %val, i8* ; CHECK-LABEL: test_vlseg6ff_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg6e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.mask.nxv2i8( %val, %val, %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -2443,6 +2569,7 @@ define @test_vlseg7ff_nxv2i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg7e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv2i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %vl) @@ -2456,15 +2583,16 @@ define @test_vlseg7ff_mask_nxv2i8( %val, i8* ; CHECK-LABEL: test_vlseg7ff_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg7e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, 
vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv2i8( %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -2484,6 +2612,7 @@ define @test_vlseg8ff_nxv2i8(i8* %base, i32 %vl, i32* %outvl) ; CHECK-NEXT: vlseg8e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv2i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %vl) @@ -2497,16 +2626,17 @@ define @test_vlseg8ff_mask_nxv2i8( %val, i8* ; CHECK-LABEL: test_vlseg8ff_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg8e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv2i8( %val, %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i32 %vl, i32 1) @@ -2526,6 +2656,7 @@ define @test_vlseg2ff_nxv2i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg2e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv2i16( undef, undef, i16* %base, i32 %vl) @@ -2543,6 +2674,7 @@ define @test_vlseg2ff_mask_nxv2i16( %val, i ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv2i16( %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -2562,6 +2694,7 @@ define @test_vlseg3ff_nxv2i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg3e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv2i16( undef, undef, undef, i16* %base, i32 %vl) @@ -2575,11 +2708,12 @@ define @test_vlseg3ff_mask_nxv2i16( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv2i16( %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1) @@ -2599,6 +2733,7 @@ define @test_vlseg4ff_nxv2i16(i16* %base, i32 %vl, i32* %outv ; CHECK-NEXT: vlseg4e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sw a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv2i16( 
undef, undef, undef, undef, i16* %base, i32 %vl)
@@ -2612,12 +2747,13 @@ define @test_vlseg4ff_mask_nxv2i16( %val, i
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv2i16( %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1)
@@ -2637,6 +2773,7 @@ define @test_vlseg5ff_nxv2i16(i16* %base, i32 %vl, i32* %outv
 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv2i16( undef, undef, undef, undef, undef, i16* %base, i32 %vl)
@@ -2650,13 +2787,14 @@ define @test_vlseg5ff_mask_nxv2i16( %val, i
 ; CHECK-LABEL: test_vlseg5ff_mask_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv2i16( %val, %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1)
@@ -2676,6 +2814,7 @@ define @test_vlseg6ff_nxv2i16(i16* %base, i32 %vl, i32* %outv
 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv2i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl)
@@ -2689,14 +2828,15 @@ define @test_vlseg6ff_mask_nxv2i16( %val, i
 ; CHECK-LABEL: test_vlseg6ff_mask_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.mask.nxv2i16( %val, %val, %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1)
@@ -2716,6 +2856,7 @@ define @test_vlseg7ff_nxv2i16(i16* %base, i32 %vl, i32* %outv
 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv2i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %vl)
@@ -2729,15 +2870,16 @@ define @test_vlseg7ff_mask_nxv2i16( %val, i
 ; CHECK-LABEL: test_vlseg7ff_mask_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv2i16( %val, %val, %val, %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1)
@@ -2757,6 +2899,7 @@ define @test_vlseg8ff_nxv2i16(i16* %base, i32 %vl, i32* %outv
 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv2i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %vl)
@@ -2770,16 +2913,17 @@ define @test_vlseg8ff_mask_nxv2i16( %val, i
 ; CHECK-LABEL: test_vlseg8ff_mask_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
+; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv2i16( %val, %val, %val, %val, %val, %val, %val, %val, i16* %base, %mask, i32 %vl, i32 1)
@@ -2799,6 +2943,7 @@ define @test_vlseg2ff_nxv4i32(i32* %base, i32 %vl, i32* %outv
 ; CHECK-NEXT: vlseg2e32ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv4i32( undef, undef, i32* %base, i32 %vl)
@@ -2816,6 +2961,7 @@ define @test_vlseg2ff_mask_nxv4i32( %val, i
 ; CHECK-NEXT: vlseg2e32ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv4i32( %val, %val, i32* %base, %mask, i32 %vl, i32 1)
@@ -2835,6 +2981,7 @@ define @test_vlseg3ff_nxv4i32(i32* %base, i32 %vl, i32* %outv
 ; CHECK-NEXT: vlseg3e32ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv4i32( undef, undef, undef, i32* %base, i32 %vl)
@@ -2848,11 +2995,12 @@ define @test_vlseg3ff_mask_nxv4i32( %val, i
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v10, v6
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vlseg3e32ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv4i32( %val, %val, %val, i32* %base, %mask, i32 %vl, i32 1)
@@ -2872,6 +3020,7 @@ define @test_vlseg4ff_nxv4i32(i32* %base, i32 %vl, i32* %outv
 ; CHECK-NEXT: vlseg4e32ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv4i32( undef, undef, undef, undef, i32* %base, i32 %vl)
@@ -2885,12 +3034,13 @@ define @test_vlseg4ff_mask_nxv4i32( %val, i
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v10, v6
+; CHECK-NEXT: vmv2r.v v12, v6
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vlseg4e32ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv4i32( %val, %val, %val, %val, i32* %base, %mask, i32 %vl, i32 1)
@@ -2910,6 +3060,7 @@ define @test_vlseg2ff_nxv16f16(half* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv16f16( undef, undef, half* %base, i32 %vl)
@@ -2927,6 +3078,7 @@ define @test_vlseg2ff_mask_nxv16f16( %v
 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv16f16( %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -2946,6 +3098,7 @@ define @test_vlseg2ff_nxv4f64(double* %base, i32 %vl, i32*
 ; CHECK-NEXT: vlseg2e64ff.v v4, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv4f64( undef, undef, double* %base, i32 %vl)
@@ -2963,6 +3116,7 @@ define @test_vlseg2ff_mask_nxv4f64( %
 ; CHECK-NEXT: vlseg2e64ff.v v4, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv4f64( %val, %val, double* %base, %mask, i32 %vl, i32 1)
@@ -2982,6 +3136,7 @@ define @test_vlseg2ff_nxv1f64(double* %base, i32 %vl, i32*
 ; CHECK-NEXT: vlseg2e64ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv1f64( undef, undef, double* %base, i32 %vl)
@@ -2999,6 +3154,7 @@ define @test_vlseg2ff_mask_nxv1f64( %
 ; CHECK-NEXT: vlseg2e64ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv1f64( %val, %val, double* %base, %mask, i32 %vl, i32 1)
@@ -3018,6 +3174,7 @@ define @test_vlseg3ff_nxv1f64(double* %base, i32 %vl, i32*
 ; CHECK-NEXT: vlseg3e64ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv1f64( undef, undef, undef, double* %base, i32 %vl)
@@ -3031,11 +3188,12 @@ define @test_vlseg3ff_mask_nxv1f64( %
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv1f64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vlseg3e64ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv1f64( %val, %val, %val, double* %base, %mask, i32 %vl, i32 1)
@@ -3055,6 +3213,7 @@ define @test_vlseg4ff_nxv1f64(double* %base, i32 %vl, i32*
 ; CHECK-NEXT: vlseg4e64ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv1f64( undef, undef, undef, undef, double* %base, i32 %vl)
@@ -3068,12 +3227,13 @@ define @test_vlseg4ff_mask_nxv1f64( %
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv1f64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vlseg4e64ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv1f64( %val, %val, %val, %val, double* %base, %mask, i32 %vl, i32 1)
@@ -3093,6 +3253,7 @@ define @test_vlseg5ff_nxv1f64(double* %base, i32 %vl, i32*
 ; CHECK-NEXT: vlseg5e64ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv1f64( undef, undef, undef, undef, undef, double* %base, i32 %vl)
@@ -3106,13 +3267,14 @@ define @test_vlseg5ff_mask_nxv1f64( %
 ; CHECK-LABEL: test_vlseg5ff_mask_nxv1f64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vlseg5e64ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv1f64( %val, %val, %val, %val, %val, double* %base, %mask, i32 %vl, i32 1)
@@ -3132,6 +3294,7 @@ define @test_vlseg6ff_nxv1f64(double* %base, i32 %vl, i32*
 ; CHECK-NEXT: vlseg6e64ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv1f64( undef, undef, undef, undef, undef, undef, double* %base, i32 %vl)
@@ -3145,14 +3308,15 @@ define @test_vlseg6ff_mask_nxv1f64( %
 ; CHECK-LABEL: test_vlseg6ff_mask_nxv1f64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vlseg6e64ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.mask.nxv1f64( %val, %val, %val, %val, %val, %val, double* %base, %mask, i32 %vl, i32 1)
@@ -3172,6 +3336,7 @@ define @test_vlseg7ff_nxv1f64(double* %base, i32 %vl, i32*
 ; CHECK-NEXT: vlseg7e64ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv1f64( undef, undef, undef, undef, undef, undef, undef, double* %base, i32 %vl)
@@ -3185,15 +3350,16 @@ define @test_vlseg7ff_mask_nxv1f64( %
 ; CHECK-LABEL: test_vlseg7ff_mask_nxv1f64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vlseg7e64ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv1f64( %val, %val, %val, %val, %val, %val, %val, double* %base, %mask, i32 %vl, i32 1)
@@ -3213,6 +3379,7 @@ define @test_vlseg8ff_nxv1f64(double* %base, i32 %vl, i32*
 ; CHECK-NEXT: vlseg8e64ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv1f64( undef, undef , undef , undef, undef , undef, undef, undef, double* %base, i32 %vl)
@@ -3226,16 +3393,17 @@ define @test_vlseg8ff_mask_nxv1f64( %
 ; CHECK-LABEL: test_vlseg8ff_mask_nxv1f64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
+; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vlseg8e64ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv1f64( %val, %val, %val, %val, %val, %val, %val, %val, double* %base, %mask, i32 %vl, i32 1)
@@ -3255,6 +3423,7 @@ define @test_vlseg2ff_nxv2f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg2e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv2f32( undef, undef, float* %base, i32 %vl)
@@ -3272,6 +3441,7 @@ define @test_vlseg2ff_mask_nxv2f32( %va
 ; CHECK-NEXT: vlseg2e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv2f32( %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -3291,6 +3461,7 @@ define @test_vlseg3ff_nxv2f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg3e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv2f32( undef, undef, undef, float* %base, i32 %vl)
@@ -3304,11 +3475,12 @@ define @test_vlseg3ff_mask_nxv2f32( %va
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv2f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vlseg3e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv2f32( %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -3328,6 +3500,7 @@ define @test_vlseg4ff_nxv2f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg4e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv2f32( undef, undef, undef, undef, float* %base, i32 %vl)
@@ -3341,12 +3514,13 @@ define @test_vlseg4ff_mask_nxv2f32( %va
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv2f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vlseg4e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv2f32( %val, %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -3366,6 +3540,7 @@ define @test_vlseg5ff_nxv2f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg5e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv2f32( undef, undef, undef, undef, undef, float* %base, i32 %vl)
@@ -3379,13 +3554,14 @@ define @test_vlseg5ff_mask_nxv2f32( %va
 ; CHECK-LABEL: test_vlseg5ff_mask_nxv2f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vlseg5e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv2f32( %val, %val, %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -3405,6 +3581,7 @@ define @test_vlseg6ff_nxv2f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg6e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv2f32( undef, undef, undef, undef, undef, undef, float* %base, i32 %vl)
@@ -3418,14 +3595,15 @@ define @test_vlseg6ff_mask_nxv2f32( %va
 ; CHECK-LABEL: test_vlseg6ff_mask_nxv2f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vlseg6e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.mask.nxv2f32( %val, %val, %val, %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -3445,6 +3623,7 @@ define @test_vlseg7ff_nxv2f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg7e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv2f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i32 %vl)
@@ -3458,15 +3637,16 @@ define @test_vlseg7ff_mask_nxv2f32( %va
 ; CHECK-LABEL: test_vlseg7ff_mask_nxv2f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vlseg7e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv2f32( %val, %val, %val, %val, %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -3486,6 +3666,7 @@ define @test_vlseg8ff_nxv2f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg8e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv2f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i32 %vl)
@@ -3499,16 +3680,17 @@ define @test_vlseg8ff_mask_nxv2f32( %va
 ; CHECK-LABEL: test_vlseg8ff_mask_nxv2f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
+; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vlseg8e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv2f32( %val, %val, %val, %val, %val, %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -3528,6 +3710,7 @@ define @test_vlseg2ff_nxv1f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv1f16( undef, undef, half* %base, i32 %vl)
@@ -3545,6 +3728,7 @@ define @test_vlseg2ff_mask_nxv1f16( %val,
 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv1f16( %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -3564,6 +3748,7 @@ define @test_vlseg3ff_nxv1f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv1f16( undef, undef, undef, half* %base, i32 %vl)
@@ -3577,11 +3762,12 @@ define @test_vlseg3ff_mask_nxv1f16( %val,
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv1f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv1f16( %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -3601,6 +3787,7 @@ define @test_vlseg4ff_nxv1f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv1f16( undef, undef, undef, undef, half* %base, i32 %vl)
@@ -3614,12 +3801,13 @@ define @test_vlseg4ff_mask_nxv1f16( %val,
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv1f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv1f16( %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -3639,6 +3827,7 @@ define @test_vlseg5ff_nxv1f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv1f16( undef, undef, undef, undef, undef, half* %base, i32 %vl)
@@ -3652,13 +3841,14 @@ define @test_vlseg5ff_mask_nxv1f16( %val,
 ; CHECK-LABEL: test_vlseg5ff_mask_nxv1f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv1f16( %val, %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -3678,6 +3868,7 @@ define @test_vlseg6ff_nxv1f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv1f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %vl)
@@ -3691,14 +3882,15 @@ define @test_vlseg6ff_mask_nxv1f16( %val,
 ; CHECK-LABEL: test_vlseg6ff_mask_nxv1f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.mask.nxv1f16( %val, %val, %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -3718,6 +3910,7 @@ define @test_vlseg7ff_nxv1f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv1f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %vl)
@@ -3731,15 +3924,16 @@ define @test_vlseg7ff_mask_nxv1f16( %val,
 ; CHECK-LABEL: test_vlseg7ff_mask_nxv1f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv1f16( %val, %val, %val, %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -3759,6 +3953,7 @@ define @test_vlseg8ff_nxv1f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv1f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %vl)
@@ -3772,16 +3967,17 @@ define @test_vlseg8ff_mask_nxv1f16( %val,
 ; CHECK-LABEL: test_vlseg8ff_mask_nxv1f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
+; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv1f16( %val, %val, %val, %val, %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -3801,6 +3997,7 @@ define @test_vlseg2ff_nxv1f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg2e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv1f32( undef, undef, float* %base, i32 %vl)
@@ -3818,6 +4015,7 @@ define @test_vlseg2ff_mask_nxv1f32( %va
 ; CHECK-NEXT: vlseg2e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv1f32( %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -3837,6 +4035,7 @@ define @test_vlseg3ff_nxv1f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg3e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv1f32( undef, undef, undef, float* %base, i32 %vl)
@@ -3850,11 +4049,12 @@ define @test_vlseg3ff_mask_nxv1f32( %va
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv1f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vlseg3e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv1f32( %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -3874,6 +4074,7 @@ define @test_vlseg4ff_nxv1f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg4e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv1f32( undef, undef, undef, undef, float* %base, i32 %vl)
@@ -3887,12 +4088,13 @@ define @test_vlseg4ff_mask_nxv1f32( %va
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv1f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vlseg4e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv1f32( %val, %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -3912,6 +4114,7 @@ define @test_vlseg5ff_nxv1f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg5e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv1f32( undef, undef, undef, undef, undef, float* %base, i32 %vl)
@@ -3925,13 +4128,14 @@ define @test_vlseg5ff_mask_nxv1f32( %va
 ; CHECK-LABEL: test_vlseg5ff_mask_nxv1f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vlseg5e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv1f32( %val, %val, %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -3951,6 +4155,7 @@ define @test_vlseg6ff_nxv1f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg6e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv1f32( undef, undef, undef, undef, undef, undef, float* %base, i32 %vl)
@@ -3964,14 +4169,15 @@ define @test_vlseg6ff_mask_nxv1f32( %va
 ; CHECK-LABEL: test_vlseg6ff_mask_nxv1f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vlseg6e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.mask.nxv1f32( %val, %val, %val, %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -3991,6 +4197,7 @@ define @test_vlseg7ff_nxv1f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg7e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv1f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i32 %vl)
@@ -4004,15 +4211,16 @@ define @test_vlseg7ff_mask_nxv1f32( %va
 ; CHECK-LABEL: test_vlseg7ff_mask_nxv1f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vlseg7e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv1f32( %val, %val, %val, %val, %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -4032,6 +4240,7 @@ define @test_vlseg8ff_nxv1f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg8e32ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv1f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i32 %vl)
@@ -4045,16 +4254,17 @@ define @test_vlseg8ff_mask_nxv1f32( %va
 ; CHECK-LABEL: test_vlseg8ff_mask_nxv1f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
+; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vlseg8e32ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv1f32( %val, %val, %val, %val, %val, %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -4074,6 +4284,7 @@ define @test_vlseg2ff_nxv8f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg2e16ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv8f16( undef, undef, half* %base, i32 %vl)
@@ -4091,6 +4302,7 @@ define @test_vlseg2ff_mask_nxv8f16( %val,
 ; CHECK-NEXT: vlseg2e16ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv8f16( %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4110,6 +4322,7 @@ define @test_vlseg3ff_nxv8f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg3e16ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv8f16( undef, undef, undef, half* %base, i32 %vl)
@@ -4123,11 +4336,12 @@ define @test_vlseg3ff_mask_nxv8f16( %val,
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv8f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v10, v6
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vlseg3e16ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv8f16( %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4147,6 +4361,7 @@ define @test_vlseg4ff_nxv8f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg4e16ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv8f16( undef, undef, undef, undef, half* %base, i32 %vl)
@@ -4160,12 +4375,13 @@ define @test_vlseg4ff_mask_nxv8f16( %val,
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv8f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v10, v6
+; CHECK-NEXT: vmv2r.v v12, v6
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vlseg4e16ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv8f16( %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4185,6 +4401,7 @@ define @test_vlseg2ff_nxv8f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg2e32ff.v v4, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv8f32( undef, undef, float* %base, i32 %vl)
@@ -4202,6 +4419,7 @@ define @test_vlseg2ff_mask_nxv8f32( %va
 ; CHECK-NEXT: vlseg2e32ff.v v4, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv8f32( %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -4221,6 +4439,7 @@ define @test_vlseg2ff_nxv2f64(double* %base, i32 %vl, i32*
 ; CHECK-NEXT: vlseg2e64ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv2f64( undef, undef, double* %base, i32 %vl)
@@ -4238,6 +4457,7 @@ define @test_vlseg2ff_mask_nxv2f64( %
 ; CHECK-NEXT: vlseg2e64ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv2f64( %val, %val, double* %base, %mask, i32 %vl, i32 1)
@@ -4257,6 +4477,7 @@ define @test_vlseg3ff_nxv2f64(double* %base, i32 %vl, i32*
 ; CHECK-NEXT: vlseg3e64ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv2f64( undef, undef, undef, double* %base, i32 %vl)
@@ -4270,11 +4491,12 @@ define @test_vlseg3ff_mask_nxv2f64( %
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv2f64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v10, v6
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
 ; CHECK-NEXT: vlseg3e64ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv2f64( %val, %val, %val, double* %base, %mask, i32 %vl, i32 1)
@@ -4294,6 +4516,7 @@ define @test_vlseg4ff_nxv2f64(double* %base, i32 %vl, i32*
 ; CHECK-NEXT: vlseg4e64ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv2f64( undef, undef, undef, undef, double* %base, i32 %vl)
@@ -4307,12 +4530,13 @@ define @test_vlseg4ff_mask_nxv2f64( %
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv2f64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v10, v6
+; CHECK-NEXT: vmv2r.v v12, v6
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
 ; CHECK-NEXT: vlseg4e64ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv2f64( %val, %val, %val, %val, double* %base, %mask, i32 %vl, i32 1)
@@ -4332,6 +4556,7 @@ define @test_vlseg2ff_nxv4f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv4f16( undef, undef, half* %base, i32 %vl)
@@ -4349,6 +4574,7 @@ define @test_vlseg2ff_mask_nxv4f16( %val,
 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv4f16( %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4368,6 +4594,7 @@ define @test_vlseg3ff_nxv4f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv4f16( undef, undef, undef, half* %base, i32 %vl)
@@ -4381,11 +4608,12 @@ define @test_vlseg3ff_mask_nxv4f16( %val,
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv4f16( %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4405,6 +4633,7 @@ define @test_vlseg4ff_nxv4f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv4f16( undef, undef, undef, undef, half* %base, i32 %vl)
@@ -4418,12 +4647,13 @@ define @test_vlseg4ff_mask_nxv4f16( %val,
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv4f16( %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4443,6 +4673,7 @@ define @test_vlseg5ff_nxv4f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv4f16( undef, undef, undef, undef, undef, half* %base, i32 %vl)
@@ -4456,13 +4687,14 @@ define @test_vlseg5ff_mask_nxv4f16( %val,
 ; CHECK-LABEL: test_vlseg5ff_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv4f16( %val, %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4482,6 +4714,7 @@ define @test_vlseg6ff_nxv4f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv4f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %vl)
@@ -4495,14 +4728,15 @@ define @test_vlseg6ff_mask_nxv4f16( %val,
 ; CHECK-LABEL: test_vlseg6ff_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.mask.nxv4f16( %val, %val, %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4522,6 +4756,7 @@ define @test_vlseg7ff_nxv4f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv4f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %vl)
@@ -4535,15 +4770,16 @@ define @test_vlseg7ff_mask_nxv4f16( %val,
 ; CHECK-LABEL: test_vlseg7ff_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv4f16( %val, %val, %val, %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4563,6 +4799,7 @@ define @test_vlseg8ff_nxv4f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv4f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %vl)
@@ -4576,16 +4813,17 @@ define @test_vlseg8ff_mask_nxv4f16( %val,
 ; CHECK-LABEL: test_vlseg8ff_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
+; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv4f16( %val, %val, %val, %val, %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4605,6 +4843,7 @@ define @test_vlseg2ff_nxv2f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv2f16( undef, undef, half* %base, i32 %vl)
@@ -4622,6 +4861,7 @@ define @test_vlseg2ff_mask_nxv2f16( %val,
 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv2f16( %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4641,6 +4881,7 @@ define @test_vlseg3ff_nxv2f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv2f16( undef, undef, undef, half* %base, i32 %vl)
@@ -4654,11 +4895,12 @@ define @test_vlseg3ff_mask_nxv2f16( %val,
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv2f16( %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4678,6 +4920,7 @@ define @test_vlseg4ff_nxv2f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv2f16( undef, undef, undef, undef, half* %base, i32 %vl)
@@ -4691,12 +4934,13 @@ define @test_vlseg4ff_mask_nxv2f16( %val,
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv2f16( %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4716,6 +4960,7 @@ define @test_vlseg5ff_nxv2f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.nxv2f16( undef, undef, undef, undef, undef, half* %base, i32 %vl)
@@ -4729,13 +4974,14 @@ define @test_vlseg5ff_mask_nxv2f16( %val,
 ; CHECK-LABEL: test_vlseg5ff_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i32} @llvm.riscv.vlseg5ff.mask.nxv2f16( %val, %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4755,6 +5001,7 @@ define @test_vlseg6ff_nxv2f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.nxv2f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %vl)
@@ -4768,14 +5015,15 @@ define @test_vlseg6ff_mask_nxv2f16( %val,
 ; CHECK-LABEL: test_vlseg6ff_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,, i32} @llvm.riscv.vlseg6ff.mask.nxv2f16( %val, %val, %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4795,6 +5043,7 @@ define @test_vlseg7ff_nxv2f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.nxv2f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %vl)
@@ -4808,15 +5057,16 @@ define @test_vlseg7ff_mask_nxv2f16( %val,
 ; CHECK-LABEL: test_vlseg7ff_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,, i32} @llvm.riscv.vlseg7ff.mask.nxv2f16( %val, %val, %val, %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4836,6 +5086,7 @@ define @test_vlseg8ff_nxv2f16(half* %base, i32 %vl, i32* %ou
 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.nxv2f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %vl)
@@ -4849,16 +5100,17 @@ define @test_vlseg8ff_mask_nxv2f16( %val,
 ; CHECK-LABEL: test_vlseg8ff_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
+; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,,,,, i32} @llvm.riscv.vlseg8ff.mask.nxv2f16( %val, %val, %val, %val, %val, %val, %val, %val, half* %base, %mask, i32 %vl, i32 1)
@@ -4878,6 +5130,7 @@ define @test_vlseg2ff_nxv4f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg2e32ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.nxv4f32( undef, undef, float* %base, i32 %vl)
@@ -4895,6 +5148,7 @@ define @test_vlseg2ff_mask_nxv4f32( %va
 ; CHECK-NEXT: vlseg2e32ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i32} @llvm.riscv.vlseg2ff.mask.nxv4f32( %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -4914,6 +5168,7 @@ define @test_vlseg3ff_nxv4f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg3e32ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.nxv4f32( undef, undef, undef, float* %base, i32 %vl)
@@ -4927,11 +5182,12 @@ define @test_vlseg3ff_mask_nxv4f32( %va
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv4f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v10, v6
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vlseg3e32ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i32} @llvm.riscv.vlseg3ff.mask.nxv4f32( %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
@@ -4951,6 +5207,7 @@ define @test_vlseg4ff_nxv4f32(float* %base, i32 %vl, i32* %
 ; CHECK-NEXT: vlseg4e32ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.nxv4f32( undef, undef, undef, undef, float* %base, i32 %vl)
@@ -4964,12 +5221,13 @@ define @test_vlseg4ff_mask_nxv4f32( %va
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv4f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v10, v6
+; CHECK-NEXT: vmv2r.v v12, v6
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vlseg4e32ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sw a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i32} @llvm.riscv.vlseg4ff.mask.nxv4f32( %val, %val, %val, %val, float* %base, %mask, i32 %vl, i32 1)
diff --git a/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv64-dead.ll b/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv64-dead.ll
index ddeadefadcac8..779123e1fc0b7 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv64-dead.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv64-dead.ll
@@ -23,6 +23,7 @@ entry:
 define void @test_vlseg2ff_mask_dead_value( %val, i16* %base, i64 %vl, %mask, i64* %outvl) {
 ; CHECK-LABEL: test_vlseg2ff_mask_dead_value:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT: vlseg2e16ff.v v8, (a0), v0.t
@@ -41,6 +42,7 @@ define @test_vlseg2ff_dead_vl(i16* %base, i64 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0)
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv16i16( undef, undef, i16* %base, i64 %vl)
@@ -54,6 +56,7 @@ define @test_vlseg2ff_mask_dead_vl( %val,
 ; CHECK-NEXT: vmv4r.v v4, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0), v0.t
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv16i16( %val, %val, i16* %base, %mask, i64 %vl, i64 1)
@@ -75,6 +78,7 @@ entry:
 define void @test_vlseg2ff_mask_dead_all( %val, i16* %base, i64 %vl, %mask) {
 ; CHECK-LABEL: test_vlseg2ff_mask_dead_all:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT: vlseg2e16ff.v v8, (a0), v0.t
diff --git a/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv64.ll b/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv64.ll
index 7c9e1b75365a4..c591f569797c7 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv64.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vlsegff-rv64.ll
@@ -12,6 +12,7 @@ define @test_vlseg2ff_nxv16i16(i16* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv16i16( undef, undef, i16* %base, i64 %vl)
@@ -29,6 +30,7 @@ define @test_vlseg2ff_mask_nxv16i16( %val
 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv16i16( %val, %val, i16* %base, %mask, i64 %vl, i64 1)
@@ -48,6 +50,7 @@ define @test_vlseg2ff_nxv4i32(i32* %base, i64 %vl, i64* %outv
 ; CHECK-NEXT: vlseg2e32ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv4i32( undef, undef, i32* %base, i64 %vl)
@@ -65,6 +68,7 @@ define @test_vlseg2ff_mask_nxv4i32( %val, i
 ; CHECK-NEXT: vlseg2e32ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv4i32( %val, %val, i32* %base, %mask, i64 %vl, i64 1)
@@ -84,6 +88,7 @@ define @test_vlseg3ff_nxv4i32(i32* %base, i64 %vl, i64* %outv
 ; CHECK-NEXT: vlseg3e32ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv4i32( undef, undef, undef, i32* %base, i64 %vl)
@@ -97,11 +102,12 @@ define @test_vlseg3ff_mask_nxv4i32( %val, i
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v10, v6
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vlseg3e32ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv4i32( %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1)
@@ -121,6 +127,7 @@ define @test_vlseg4ff_nxv4i32(i32* %base, i64 %vl, i64* %outv
 ; CHECK-NEXT: vlseg4e32ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv4i32( undef, undef, undef, undef, i32* %base, i64 %vl)
@@ -134,12 +141,13 @@ define @test_vlseg4ff_mask_nxv4i32( %val, i
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v10, v6
+; CHECK-NEXT: vmv2r.v v12, v6
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vlseg4e32ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv4i32( %val, %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1)
@@ -159,6 +167,7 @@ define @test_vlseg2ff_nxv16i8(i8* %base, i64 %vl, i64* %outvl
 ; CHECK-NEXT: vlseg2e8ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv16i8( undef, undef, i8* %base, i64 %vl)
@@ -176,6 +185,7 @@ define @test_vlseg2ff_mask_nxv16i8( %val, i
 ; CHECK-NEXT: vlseg2e8ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv16i8( %val, %val, i8* %base, %mask, i64 %vl, i64 1)
@@ -195,6 +205,7 @@ define @test_vlseg3ff_nxv16i8(i8* %base, i64 %vl, i64* %outvl
 ; CHECK-NEXT: vlseg3e8ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv16i8( undef, undef, undef, i8* %base, i64 %vl)
@@ -208,11 +219,12 @@ define @test_vlseg3ff_mask_nxv16i8( %val, i
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv16i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v10, v6
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vlseg3e8ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv16i8( %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1)
@@ -232,6 +244,7 @@ define @test_vlseg4ff_nxv16i8(i8* %base, i64 %vl, i64* %outvl
 ; CHECK-NEXT: vlseg4e8ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv16i8( undef, undef, undef, undef, i8* %base, i64 %vl)
@@ -245,12 +258,13 @@ define @test_vlseg4ff_mask_nxv16i8( %val, i
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv16i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v10, v6
+; CHECK-NEXT: vmv2r.v v12, v6
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vlseg4e8ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv16i8( %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1)
@@ -270,6 +284,7 @@ define @test_vlseg2ff_nxv1i64(i64* %base, i64 %vl, i64* %outv
 ; CHECK-NEXT: vlseg2e64ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv1i64( undef, undef, i64* %base, i64 %vl)
@@ -287,6 +302,7 @@ define @test_vlseg2ff_mask_nxv1i64( %val, i
 ; CHECK-NEXT: vlseg2e64ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv1i64( %val, %val, i64* %base, %mask, i64 %vl, i64 1)
@@ -306,6 +322,7 @@ define @test_vlseg3ff_nxv1i64(i64* %base, i64 %vl, i64* %outv
 ; CHECK-NEXT: vlseg3e64ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv1i64( undef, undef, undef, i64* %base, i64 %vl)
@@ -319,11 +336,12 @@ define @test_vlseg3ff_mask_nxv1i64( %val, i
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vlseg3e64ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv1i64( %val, %val, %val, i64* %base, %mask, i64 %vl, i64 1)
@@ -343,6 +361,7 @@ define @test_vlseg4ff_nxv1i64(i64* %base, i64 %vl, i64* %outv
 ; CHECK-NEXT: vlseg4e64ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv1i64( undef, undef, undef, undef, i64* %base, i64 %vl)
@@ -356,12 +375,13 @@ define @test_vlseg4ff_mask_nxv1i64( %val, i
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vlseg4e64ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv1i64( %val, %val, %val, %val, i64* %base, %mask, i64 %vl, i64 1)
@@ -381,6 +401,7 @@ define @test_vlseg5ff_nxv1i64(i64* %base, i64 %vl, i64* %outv
 ; CHECK-NEXT: vlseg5e64ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
 %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv1i64( undef, undef, undef, undef, undef, i64* %base, i64 %vl)
@@ -394,13 +415,14 @@ define @test_vlseg5ff_mask_nxv1i64( %val, i
 ; CHECK-LABEL: test_vlseg5ff_mask_nxv1i64:
 ;
CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg5e64ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv1i64( %val, %val, %val, %val, %val, i64* %base, %mask, i64 %vl, i64 1) @@ -420,6 +442,7 @@ define @test_vlseg6ff_nxv1i64(i64* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg6e64ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv1i64( undef, undef, undef, undef, undef, undef, i64* %base, i64 %vl) @@ -433,14 +456,15 @@ define @test_vlseg6ff_mask_nxv1i64( %val, i ; CHECK-LABEL: test_vlseg6ff_mask_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg6e64ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv1i64( %val, %val, %val, %val, %val, %val, i64* %base, %mask, i64 %vl, i64 1) @@ -460,6 +484,7 @@ define @test_vlseg7ff_nxv1i64(i64* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg7e64ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv1i64( undef, undef, undef, undef, undef, undef, undef, i64* %base, i64 %vl) @@ -473,15 +498,16 @@ define @test_vlseg7ff_mask_nxv1i64( %val, i ; CHECK-LABEL: test_vlseg7ff_mask_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg7e64ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv1i64( %val, %val, %val, %val, %val, %val, %val, i64* %base, %mask, i64 %vl, i64 1) @@ -501,6 +527,7 @@ define @test_vlseg8ff_nxv1i64(i64* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg8e64ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv1i64( undef, undef , undef , undef, undef , undef, undef, undef, i64* %base, i64 %vl) @@ -514,16 +541,17 @@ define @test_vlseg8ff_mask_nxv1i64( %val, i ; CHECK-LABEL: 
test_vlseg8ff_mask_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg8e64ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv1i64( %val, %val, %val, %val, %val, %val, %val, %val, i64* %base, %mask, i64 %vl, i64 1) @@ -543,6 +571,7 @@ define @test_vlseg2ff_nxv1i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg2e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv1i32( undef, undef, i32* %base, i64 %vl) @@ -560,6 +589,7 @@ define @test_vlseg2ff_mask_nxv1i32( %val, i ; CHECK-NEXT: vlseg2e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv1i32( %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -579,6 +609,7 @@ define @test_vlseg3ff_nxv1i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg3e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv1i32( undef, undef, undef, i32* %base, i64 %vl) @@ -592,11 +623,12 @@ define @test_vlseg3ff_mask_nxv1i32( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg3e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv1i32( %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -616,6 +648,7 @@ define @test_vlseg4ff_nxv1i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg4e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv1i32( undef, undef, undef, undef, i32* %base, i64 %vl) @@ -629,12 +662,13 @@ define @test_vlseg4ff_mask_nxv1i32( %val, i ; CHECK-LABEL: test_vlseg4ff_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg4e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv1i32( %val, %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -654,6 +688,7 @@ define @test_vlseg5ff_nxv1i32(i32* %base, i64 %vl, i64* %outv ; 
CHECK-NEXT: vlseg5e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv1i32( undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -667,13 +702,14 @@ define @test_vlseg5ff_mask_nxv1i32( %val, i ; CHECK-LABEL: test_vlseg5ff_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg5e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv1i32( %val, %val, %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -693,6 +729,7 @@ define @test_vlseg6ff_nxv1i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg6e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv1i32( undef, undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -706,14 +743,15 @@ define @test_vlseg6ff_mask_nxv1i32( %val, i ; CHECK-LABEL: test_vlseg6ff_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg6e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv1i32( %val, %val, %val, %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -733,6 +771,7 @@ define @test_vlseg7ff_nxv1i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg7e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv1i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -746,15 +785,16 @@ define @test_vlseg7ff_mask_nxv1i32( %val, i ; CHECK-LABEL: test_vlseg7ff_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg7e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv1i32( %val, %val, %val, %val, %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -774,6 +814,7 @@ define @test_vlseg8ff_nxv1i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg8e32ff.v v7, 
(a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv1i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i64 %vl) @@ -787,16 +828,17 @@ define @test_vlseg8ff_mask_nxv1i32( %val, i ; CHECK-LABEL: test_vlseg8ff_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg8e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv1i32( %val, %val, %val, %val, %val, %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -816,6 +858,7 @@ define @test_vlseg2ff_nxv8i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg2e16ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv8i16( undef, undef, i16* %base, i64 %vl) @@ -833,6 +876,7 @@ define @test_vlseg2ff_mask_nxv8i16( %val, i ; CHECK-NEXT: vlseg2e16ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv8i16( %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -852,6 +896,7 @@ define @test_vlseg3ff_nxv8i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg3e16ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv8i16( undef, undef, undef, i16* %base, i64 %vl) @@ -865,11 +910,12 @@ define @test_vlseg3ff_mask_nxv8i16( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg3e16ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv8i16( %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -889,6 +935,7 @@ define @test_vlseg4ff_nxv8i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg4e16ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv8i16( undef, undef, undef, undef, i16* %base, i64 %vl) @@ -902,12 +949,13 @@ define @test_vlseg4ff_mask_nxv8i16( %val, i ; CHECK-LABEL: test_vlseg4ff_mask_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v10, v6 +; CHECK-NEXT: vmv2r.v v12, v6 ; 
CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg4e16ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv8i16( %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -927,6 +975,7 @@ define @test_vlseg2ff_nxv4i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg2e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv4i8( undef, undef, i8* %base, i64 %vl) @@ -944,6 +993,7 @@ define @test_vlseg2ff_mask_nxv4i8( %val, i8* ; CHECK-NEXT: vlseg2e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv4i8( %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -963,6 +1013,7 @@ define @test_vlseg3ff_nxv4i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg3e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv4i8( undef, undef, undef, i8* %base, i64 %vl) @@ -976,11 +1027,12 @@ define @test_vlseg3ff_mask_nxv4i8( %val, i8* ; CHECK-LABEL: test_vlseg3ff_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg3e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv4i8( %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -1000,6 +1052,7 @@ define @test_vlseg4ff_nxv4i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg4e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv4i8( undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1013,12 +1066,13 @@ define @test_vlseg4ff_mask_nxv4i8( %val, i8* ; CHECK-LABEL: test_vlseg4ff_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg4e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv4i8( %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -1038,6 +1092,7 @@ define @test_vlseg5ff_nxv4i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg5e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv4i8( undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1051,13 +1106,14 @@ define @test_vlseg5ff_mask_nxv4i8( %val, i8* ; CHECK-LABEL: test_vlseg5ff_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: 
vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg5e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv4i8( %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -1077,6 +1133,7 @@ define @test_vlseg6ff_nxv4i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg6e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv4i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1090,14 +1147,15 @@ define @test_vlseg6ff_mask_nxv4i8( %val, i8* ; CHECK-LABEL: test_vlseg6ff_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg6e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv4i8( %val, %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -1117,6 +1175,7 @@ define @test_vlseg7ff_nxv4i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg7e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv4i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1130,15 +1189,16 @@ define @test_vlseg7ff_mask_nxv4i8( %val, i8* ; CHECK-LABEL: test_vlseg7ff_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg7e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv4i8( %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -1158,6 +1218,7 @@ define @test_vlseg8ff_nxv4i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg8e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv4i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %vl) @@ -1171,16 +1232,17 @@ define @test_vlseg8ff_mask_nxv4i8( %val, i8* ; CHECK-LABEL: test_vlseg8ff_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: 
vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vlseg8e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv4i8( %val, %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -1200,6 +1262,7 @@ define @test_vlseg2ff_nxv1i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg2e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv1i16( undef, undef, i16* %base, i64 %vl) @@ -1217,6 +1280,7 @@ define @test_vlseg2ff_mask_nxv1i16( %val, i ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv1i16( %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -1236,6 +1300,7 @@ define @test_vlseg3ff_nxv1i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg3e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv1i16( undef, undef, undef, i16* %base, i64 %vl) @@ -1249,11 +1314,12 @@ define @test_vlseg3ff_mask_nxv1i16( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv1i16( %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -1273,6 +1339,7 @@ define @test_vlseg4ff_nxv1i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg4e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv1i16( undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1286,12 +1353,13 @@ define @test_vlseg4ff_mask_nxv1i16( %val, i ; CHECK-LABEL: test_vlseg4ff_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv1i16( %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -1311,6 +1379,7 @@ define @test_vlseg5ff_nxv1i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg5e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 
killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv1i16( undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1324,13 +1393,14 @@ define @test_vlseg5ff_mask_nxv1i16( %val, i ; CHECK-LABEL: test_vlseg5ff_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv1i16( %val, %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -1350,6 +1420,7 @@ define @test_vlseg6ff_nxv1i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg6e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv1i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1363,14 +1434,15 @@ define @test_vlseg6ff_mask_nxv1i16( %val, i ; CHECK-LABEL: test_vlseg6ff_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv1i16( %val, %val, %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -1390,6 +1462,7 @@ define @test_vlseg7ff_nxv1i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg7e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv1i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -1403,15 +1476,16 @@ define @test_vlseg7ff_mask_nxv1i16( %val, i ; CHECK-LABEL: test_vlseg7ff_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv1i16( %val, %val, %val, %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -1431,6 +1505,7 @@ define @test_vlseg8ff_nxv1i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg8e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed 
$v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv1i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %vl) @@ -1444,16 +1519,17 @@ define @test_vlseg8ff_mask_nxv1i16( %val, i ; CHECK-LABEL: test_vlseg8ff_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv1i16( %val, %val, %val, %val, %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -1473,6 +1549,7 @@ define @test_vlseg2ff_nxv2i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg2e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv2i32( undef, undef, i32* %base, i64 %vl) @@ -1490,6 +1567,7 @@ define @test_vlseg2ff_mask_nxv2i32( %val, i ; CHECK-NEXT: vlseg2e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv2i32( %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -1509,6 +1587,7 @@ define @test_vlseg3ff_nxv2i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg3e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv2i32( undef, undef, undef, i32* %base, i64 %vl) @@ -1522,11 +1601,12 @@ define @test_vlseg3ff_mask_nxv2i32( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg3e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv2i32( %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -1546,6 +1626,7 @@ define @test_vlseg4ff_nxv2i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg4e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv2i32( undef, undef, undef, undef, i32* %base, i64 %vl) @@ -1559,12 +1640,13 @@ define @test_vlseg4ff_mask_nxv2i32( %val, i ; CHECK-LABEL: test_vlseg4ff_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg4e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; 
CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv2i32( %val, %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -1584,6 +1666,7 @@ define @test_vlseg5ff_nxv2i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg5e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv2i32( undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -1597,13 +1680,14 @@ define @test_vlseg5ff_mask_nxv2i32( %val, i ; CHECK-LABEL: test_vlseg5ff_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg5e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv2i32( %val, %val, %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -1623,6 +1707,7 @@ define @test_vlseg6ff_nxv2i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg6e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv2i32( undef, undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -1636,14 +1721,15 @@ define @test_vlseg6ff_mask_nxv2i32( %val, i ; CHECK-LABEL: test_vlseg6ff_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg6e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv2i32( %val, %val, %val, %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -1663,6 +1749,7 @@ define @test_vlseg7ff_nxv2i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg7e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv2i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i64 %vl) @@ -1676,15 +1763,16 @@ define @test_vlseg7ff_mask_nxv2i32( %val, i ; CHECK-LABEL: test_vlseg7ff_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg7e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed 
$v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv2i32( %val, %val, %val, %val, %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -1704,6 +1792,7 @@ define @test_vlseg8ff_nxv2i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg8e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv2i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i64 %vl) @@ -1717,16 +1806,17 @@ define @test_vlseg8ff_mask_nxv2i32( %val, i ; CHECK-LABEL: test_vlseg8ff_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg8e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv2i32( %val, %val, %val, %val, %val, %val, %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -1746,6 +1836,7 @@ define @test_vlseg2ff_nxv8i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg2e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv8i8( undef, undef, i8* %base, i64 %vl) @@ -1763,6 +1854,7 @@ define @test_vlseg2ff_mask_nxv8i8( %val, i8* ; CHECK-NEXT: vlseg2e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv8i8( %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -1782,6 +1874,7 @@ define @test_vlseg3ff_nxv8i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg3e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv8i8( undef, undef, undef, i8* %base, i64 %vl) @@ -1795,11 +1888,12 @@ define @test_vlseg3ff_mask_nxv8i8( %val, i8* ; CHECK-LABEL: test_vlseg3ff_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg3e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv8i8( %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -1819,6 +1913,7 @@ define @test_vlseg4ff_nxv8i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg4e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv8i8( undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1832,12 +1927,13 @@ define 
@test_vlseg4ff_mask_nxv8i8( %val, i8* ; CHECK-LABEL: test_vlseg4ff_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg4e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv8i8( %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -1857,6 +1953,7 @@ define @test_vlseg5ff_nxv8i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg5e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv8i8( undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1870,13 +1967,14 @@ define @test_vlseg5ff_mask_nxv8i8( %val, i8* ; CHECK-LABEL: test_vlseg5ff_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg5e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv8i8( %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -1896,6 +1994,7 @@ define @test_vlseg6ff_nxv8i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg6e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv8i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1909,14 +2008,15 @@ define @test_vlseg6ff_mask_nxv8i8( %val, i8* ; CHECK-LABEL: test_vlseg6ff_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg6e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv8i8( %val, %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -1936,6 +2036,7 @@ define @test_vlseg7ff_nxv8i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg7e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv8i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -1949,15 +2050,16 @@ define @test_vlseg7ff_mask_nxv8i8( %val, i8* ; CHECK-LABEL: test_vlseg7ff_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 
-; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg7e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv8i8( %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -1977,6 +2079,7 @@ define @test_vlseg8ff_nxv8i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg8e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv8i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %vl) @@ -1990,16 +2093,17 @@ define @test_vlseg8ff_mask_nxv8i8( %val, i8* ; CHECK-LABEL: test_vlseg8ff_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vlseg8e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv8i8( %val, %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2019,6 +2123,7 @@ define @test_vlseg2ff_nxv4i64(i64* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg2e64ff.v v4, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv4i64( undef, undef, i64* %base, i64 %vl) @@ -2036,6 +2141,7 @@ define @test_vlseg2ff_mask_nxv4i64( %val, i ; CHECK-NEXT: vlseg2e64ff.v v4, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv4i64( %val, %val, i64* %base, %mask, i64 %vl, i64 1) @@ -2055,6 +2161,7 @@ define @test_vlseg2ff_nxv4i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg2e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv4i16( undef, undef, i16* %base, i64 %vl) @@ -2072,6 +2179,7 @@ define @test_vlseg2ff_mask_nxv4i16( %val, i ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv4i16( %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -2091,6 +2199,7 @@ define @test_vlseg3ff_nxv4i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg3e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; 
CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv4i16( undef, undef, undef, i16* %base, i64 %vl) @@ -2104,11 +2213,12 @@ define @test_vlseg3ff_mask_nxv4i16( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv4i16( %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -2128,6 +2238,7 @@ define @test_vlseg4ff_nxv4i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg4e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv4i16( undef, undef, undef, undef, i16* %base, i64 %vl) @@ -2141,12 +2252,13 @@ define @test_vlseg4ff_mask_nxv4i16( %val, i ; CHECK-LABEL: test_vlseg4ff_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv4i16( %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -2166,6 +2278,7 @@ define @test_vlseg5ff_nxv4i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg5e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv4i16( undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -2179,13 +2292,14 @@ define @test_vlseg5ff_mask_nxv4i16( %val, i ; CHECK-LABEL: test_vlseg5ff_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv4i16( %val, %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -2205,6 +2319,7 @@ define @test_vlseg6ff_nxv4i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg6e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv4i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -2218,14 +2333,15 @@ define @test_vlseg6ff_mask_nxv4i16( %val, i ; CHECK-LABEL: test_vlseg6ff_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v 
v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv4i16( %val, %val, %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -2245,6 +2361,7 @@ define @test_vlseg7ff_nxv4i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg7e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv4i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -2258,15 +2375,16 @@ define @test_vlseg7ff_mask_nxv4i16( %val, i ; CHECK-LABEL: test_vlseg7ff_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv4i16( %val, %val, %val, %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -2286,6 +2404,7 @@ define @test_vlseg8ff_nxv4i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg8e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv4i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %vl) @@ -2299,16 +2418,17 @@ define @test_vlseg8ff_mask_nxv4i16( %val, i ; CHECK-LABEL: test_vlseg8ff_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv4i16( %val, %val, %val, %val, %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -2328,6 +2448,7 @@ define @test_vlseg2ff_nxv1i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg2e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv1i8( undef, undef, i8* %base, i64 %vl) @@ -2345,6 +2466,7 @@ define @test_vlseg2ff_mask_nxv1i8( %val, i8* ; CHECK-NEXT: vlseg2e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: 
def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv1i8( %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2364,6 +2486,7 @@ define @test_vlseg3ff_nxv1i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg3e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv1i8( undef, undef, undef, i8* %base, i64 %vl) @@ -2377,11 +2500,12 @@ define @test_vlseg3ff_mask_nxv1i8( %val, i8* ; CHECK-LABEL: test_vlseg3ff_mask_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vlseg3e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv1i8( %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2401,6 +2525,7 @@ define @test_vlseg4ff_nxv1i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg4e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv1i8( undef, undef, undef, undef, i8* %base, i64 %vl) @@ -2414,12 +2539,13 @@ define @test_vlseg4ff_mask_nxv1i8( %val, i8* ; CHECK-LABEL: test_vlseg4ff_mask_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vlseg4e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv1i8( %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2439,6 +2565,7 @@ define @test_vlseg5ff_nxv1i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg5e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv1i8( undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -2452,13 +2579,14 @@ define @test_vlseg5ff_mask_nxv1i8( %val, i8* ; CHECK-LABEL: test_vlseg5ff_mask_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vlseg5e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv1i8( %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2478,6 +2606,7 @@ define @test_vlseg6ff_nxv1i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg6e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv1i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 
%vl) @@ -2491,14 +2620,15 @@ define @test_vlseg6ff_mask_nxv1i8( %val, i8* ; CHECK-LABEL: test_vlseg6ff_mask_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vlseg6e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv1i8( %val, %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2518,6 +2648,7 @@ define @test_vlseg7ff_nxv1i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg7e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv1i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -2531,15 +2662,16 @@ define @test_vlseg7ff_mask_nxv1i8( %val, i8* ; CHECK-LABEL: test_vlseg7ff_mask_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vlseg7e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv1i8( %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2559,6 +2691,7 @@ define @test_vlseg8ff_nxv1i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg8e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv1i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %vl) @@ -2572,16 +2705,17 @@ define @test_vlseg8ff_mask_nxv1i8( %val, i8* ; CHECK-LABEL: test_vlseg8ff_mask_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vlseg8e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv1i8( %val, %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2601,6 +2735,7 @@ define @test_vlseg2ff_nxv2i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg2e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd 
a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv2i8( undef, undef, i8* %base, i64 %vl) @@ -2618,6 +2753,7 @@ define @test_vlseg2ff_mask_nxv2i8( %val, i8* ; CHECK-NEXT: vlseg2e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv2i8( %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2637,6 +2773,7 @@ define @test_vlseg3ff_nxv2i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg3e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv2i8( undef, undef, undef, i8* %base, i64 %vl) @@ -2650,11 +2787,12 @@ define @test_vlseg3ff_mask_nxv2i8( %val, i8* ; CHECK-LABEL: test_vlseg3ff_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg3e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv2i8( %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2674,6 +2812,7 @@ define @test_vlseg4ff_nxv2i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg4e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv2i8( undef, undef, undef, undef, i8* %base, i64 %vl) @@ -2687,12 +2826,13 @@ define @test_vlseg4ff_mask_nxv2i8( %val, i8* ; CHECK-LABEL: test_vlseg4ff_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg4e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv2i8( %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2712,6 +2852,7 @@ define @test_vlseg5ff_nxv2i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg5e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv2i8( undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -2725,13 +2866,14 @@ define @test_vlseg5ff_mask_nxv2i8( %val, i8* ; CHECK-LABEL: test_vlseg5ff_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg5e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv2i8( %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2751,6 +2893,7 @@ 
define @test_vlseg6ff_nxv2i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg6e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv2i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -2764,14 +2907,15 @@ define @test_vlseg6ff_mask_nxv2i8( %val, i8* ; CHECK-LABEL: test_vlseg6ff_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg6e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv2i8( %val, %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2791,6 +2935,7 @@ define @test_vlseg7ff_nxv2i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg7e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv2i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %vl) @@ -2804,15 +2949,16 @@ define @test_vlseg7ff_mask_nxv2i8( %val, i8* ; CHECK-LABEL: test_vlseg7ff_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg7e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv2i8( %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2832,6 +2978,7 @@ define @test_vlseg8ff_nxv2i8(i8* %base, i64 %vl, i64* %outvl) ; CHECK-NEXT: vlseg8e8ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv2i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %vl) @@ -2845,16 +2992,17 @@ define @test_vlseg8ff_mask_nxv2i8( %val, i8* ; CHECK-LABEL: test_vlseg8ff_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vlseg8e8ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed 
$v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv2i8( %val, %val, %val, %val, %val, %val, %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2874,6 +3022,7 @@ define @test_vlseg2ff_nxv8i32(i32* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg2e32ff.v v4, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv8i32( undef, undef, i32* %base, i64 %vl) @@ -2891,6 +3040,7 @@ define @test_vlseg2ff_mask_nxv8i32( %val, i ; CHECK-NEXT: vlseg2e32ff.v v4, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv8i32( %val, %val, i32* %base, %mask, i64 %vl, i64 1) @@ -2910,6 +3060,7 @@ define @test_vlseg2ff_nxv32i8(i8* %base, i64 %vl, i64* %outvl ; CHECK-NEXT: vlseg2e8ff.v v4, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv32i8( undef, undef, i8* %base, i64 %vl) @@ -2927,6 +3078,7 @@ define @test_vlseg2ff_mask_nxv32i8( %val, i ; CHECK-NEXT: vlseg2e8ff.v v4, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv32i8( %val, %val, i8* %base, %mask, i64 %vl, i64 1) @@ -2946,6 +3098,7 @@ define @test_vlseg2ff_nxv2i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg2e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv2i16( undef, undef, i16* %base, i64 %vl) @@ -2963,6 +3116,7 @@ define @test_vlseg2ff_mask_nxv2i16( %val, i ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv2i16( %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -2982,6 +3136,7 @@ define @test_vlseg3ff_nxv2i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg3e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv2i16( undef, undef, undef, i16* %base, i64 %vl) @@ -2995,11 +3150,12 @@ define @test_vlseg3ff_mask_nxv2i16( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv2i16( %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -3019,6 +3175,7 @@ define @test_vlseg4ff_nxv2i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg4e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv2i16( 
undef, undef, undef, undef, i16* %base, i64 %vl) @@ -3032,12 +3189,13 @@ define @test_vlseg4ff_mask_nxv2i16( %val, i ; CHECK-LABEL: test_vlseg4ff_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv2i16( %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -3057,6 +3215,7 @@ define @test_vlseg5ff_nxv2i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg5e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv2i16( undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -3070,13 +3229,14 @@ define @test_vlseg5ff_mask_nxv2i16( %val, i ; CHECK-LABEL: test_vlseg5ff_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv2i16( %val, %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -3096,6 +3256,7 @@ define @test_vlseg6ff_nxv2i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg6e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv2i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -3109,14 +3270,15 @@ define @test_vlseg6ff_mask_nxv2i16( %val, i ; CHECK-LABEL: test_vlseg6ff_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv2i16( %val, %val, %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -3136,6 +3298,7 @@ define @test_vlseg7ff_nxv2i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg7e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv2i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %vl) @@ -3149,15 +3312,16 @@ define @test_vlseg7ff_mask_nxv2i16( %val, i ; CHECK-LABEL: test_vlseg7ff_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: 
vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv2i16( %val, %val, %val, %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -3177,6 +3341,7 @@ define @test_vlseg8ff_nxv2i16(i16* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg8e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv2i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %vl) @@ -3190,16 +3355,17 @@ define @test_vlseg8ff_mask_nxv2i16( %val, i ; CHECK-LABEL: test_vlseg8ff_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv2i16( %val, %val, %val, %val, %val, %val, %val, %val, i16* %base, %mask, i64 %vl, i64 1) @@ -3219,6 +3385,7 @@ define @test_vlseg2ff_nxv2i64(i64* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg2e64ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv2i64( undef, undef, i64* %base, i64 %vl) @@ -3236,6 +3403,7 @@ define @test_vlseg2ff_mask_nxv2i64( %val, i ; CHECK-NEXT: vlseg2e64ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv2i64( %val, %val, i64* %base, %mask, i64 %vl, i64 1) @@ -3255,6 +3423,7 @@ define @test_vlseg3ff_nxv2i64(i64* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg3e64ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv2i64( undef, undef, undef, i64* %base, i64 %vl) @@ -3268,11 +3437,12 @@ define @test_vlseg3ff_mask_nxv2i64( %val, i ; CHECK-LABEL: test_vlseg3ff_mask_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vlseg3e64ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; 
CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv2i64( %val, %val, %val, i64* %base, %mask, i64 %vl, i64 1) @@ -3292,6 +3462,7 @@ define @test_vlseg4ff_nxv2i64(i64* %base, i64 %vl, i64* %outv ; CHECK-NEXT: vlseg4e64ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv2i64( undef, undef, undef, undef, i64* %base, i64 %vl) @@ -3305,12 +3476,13 @@ define @test_vlseg4ff_mask_nxv2i64( %val, i ; CHECK-LABEL: test_vlseg4ff_mask_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v10, v6 +; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vlseg4e64ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv2i64( %val, %val, %val, %val, i64* %base, %mask, i64 %vl, i64 1) @@ -3330,6 +3502,7 @@ define @test_vlseg2ff_nxv16f16(half* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg2e16ff.v v4, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv16f16( undef, undef, half* %base, i64 %vl) @@ -3347,6 +3520,7 @@ define @test_vlseg2ff_mask_nxv16f16( %v ; CHECK-NEXT: vlseg2e16ff.v v4, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv16f16( %val, %val, half* %base, %mask, i64 %vl, i64 1) @@ -3366,6 +3540,7 @@ define @test_vlseg2ff_nxv4f64(double* %base, i64 %vl, i64* ; CHECK-NEXT: vlseg2e64ff.v v4, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv4f64( undef, undef, double* %base, i64 %vl) @@ -3383,6 +3558,7 @@ define @test_vlseg2ff_mask_nxv4f64( % ; CHECK-NEXT: vlseg2e64ff.v v4, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv4f64( %val, %val, double* %base, %mask, i64 %vl, i64 1) @@ -3402,6 +3578,7 @@ define @test_vlseg2ff_nxv1f64(double* %base, i64 %vl, i64* ; CHECK-NEXT: vlseg2e64ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv1f64( undef, undef, double* %base, i64 %vl) @@ -3419,6 +3596,7 @@ define @test_vlseg2ff_mask_nxv1f64( % ; CHECK-NEXT: vlseg2e64ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv1f64( %val, %val, double* %base, %mask, i64 %vl, i64 1) @@ -3438,6 +3616,7 @@ define @test_vlseg3ff_nxv1f64(double* %base, i64 %vl, i64* ; CHECK-NEXT: vlseg3e64ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} 
@llvm.riscv.vlseg3ff.nxv1f64( undef, undef, undef, double* %base, i64 %vl) @@ -3451,11 +3630,12 @@ define @test_vlseg3ff_mask_nxv1f64( % ; CHECK-LABEL: test_vlseg3ff_mask_nxv1f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg3e64ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv1f64( %val, %val, %val, double* %base, %mask, i64 %vl, i64 1) @@ -3475,6 +3655,7 @@ define @test_vlseg4ff_nxv1f64(double* %base, i64 %vl, i64* ; CHECK-NEXT: vlseg4e64ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv1f64( undef, undef, undef, undef, double* %base, i64 %vl) @@ -3488,12 +3669,13 @@ define @test_vlseg4ff_mask_nxv1f64( % ; CHECK-LABEL: test_vlseg4ff_mask_nxv1f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg4e64ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv1f64( %val, %val, %val, %val, double* %base, %mask, i64 %vl, i64 1) @@ -3513,6 +3695,7 @@ define @test_vlseg5ff_nxv1f64(double* %base, i64 %vl, i64* ; CHECK-NEXT: vlseg5e64ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv1f64( undef, undef, undef, undef, undef, double* %base, i64 %vl) @@ -3526,13 +3709,14 @@ define @test_vlseg5ff_mask_nxv1f64( % ; CHECK-LABEL: test_vlseg5ff_mask_nxv1f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg5e64ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv1f64( %val, %val, %val, %val, %val, double* %base, %mask, i64 %vl, i64 1) @@ -3552,6 +3736,7 @@ define @test_vlseg6ff_nxv1f64(double* %base, i64 %vl, i64* ; CHECK-NEXT: vlseg6e64ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv1f64( undef, undef, undef, undef, undef, undef, double* %base, i64 %vl) @@ -3565,14 +3750,15 @@ define @test_vlseg6ff_mask_nxv1f64( % ; CHECK-LABEL: test_vlseg6ff_mask_nxv1f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli 
zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg6e64ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv1f64( %val, %val, %val, %val, %val, %val, double* %base, %mask, i64 %vl, i64 1) @@ -3592,6 +3778,7 @@ define @test_vlseg7ff_nxv1f64(double* %base, i64 %vl, i64* ; CHECK-NEXT: vlseg7e64ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv1f64( undef, undef, undef, undef, undef, undef, undef, double* %base, i64 %vl) @@ -3605,15 +3792,16 @@ define @test_vlseg7ff_mask_nxv1f64( % ; CHECK-LABEL: test_vlseg7ff_mask_nxv1f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg7e64ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv1f64( %val, %val, %val, %val, %val, %val, %val, double* %base, %mask, i64 %vl, i64 1) @@ -3633,6 +3821,7 @@ define @test_vlseg8ff_nxv1f64(double* %base, i64 %vl, i64* ; CHECK-NEXT: vlseg8e64ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv1f64( undef, undef , undef , undef, undef , undef, undef, undef, double* %base, i64 %vl) @@ -3646,16 +3835,17 @@ define @test_vlseg8ff_mask_nxv1f64( % ; CHECK-LABEL: test_vlseg8ff_mask_nxv1f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vlseg8e64ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv1f64( %val, %val, %val, %val, %val, %val, %val, %val, double* %base, %mask, i64 %vl, i64 1) @@ -3675,6 +3865,7 @@ define @test_vlseg2ff_nxv2f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg2e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv2f32( undef, undef, float* %base, i64 %vl) @@ -3692,6 +3883,7 @@ define @test_vlseg2ff_mask_nxv2f32( %va ; CHECK-NEXT: vlseg2e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = 
tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv2f32( %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -3711,6 +3903,7 @@ define @test_vlseg3ff_nxv2f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg3e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv2f32( undef, undef, undef, float* %base, i64 %vl) @@ -3724,11 +3917,12 @@ define @test_vlseg3ff_mask_nxv2f32( %va ; CHECK-LABEL: test_vlseg3ff_mask_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg3e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv2f32( %val, %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -3748,6 +3942,7 @@ define @test_vlseg4ff_nxv2f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg4e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv2f32( undef, undef, undef, undef, float* %base, i64 %vl) @@ -3761,12 +3956,13 @@ define @test_vlseg4ff_mask_nxv2f32( %va ; CHECK-LABEL: test_vlseg4ff_mask_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg4e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv2f32( %val, %val, %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -3786,6 +3982,7 @@ define @test_vlseg5ff_nxv2f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg5e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv2f32( undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3799,13 +3996,14 @@ define @test_vlseg5ff_mask_nxv2f32( %va ; CHECK-LABEL: test_vlseg5ff_mask_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg5e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv2f32( %val, %val, %val, %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -3825,6 +4023,7 @@ define @test_vlseg6ff_nxv2f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg6e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv2f32( undef, undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3838,14 +4037,15 @@ define 
@test_vlseg6ff_mask_nxv2f32( %va ; CHECK-LABEL: test_vlseg6ff_mask_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg6e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv2f32( %val, %val, %val, %val, %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -3865,6 +4065,7 @@ define @test_vlseg7ff_nxv2f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg7e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv2f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -3878,15 +4079,16 @@ define @test_vlseg7ff_mask_nxv2f32( %va ; CHECK-LABEL: test_vlseg7ff_mask_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg7e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv2f32( %val, %val, %val, %val, %val, %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -3906,6 +4108,7 @@ define @test_vlseg8ff_nxv2f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg8e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv2f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i64 %vl) @@ -3919,16 +4122,17 @@ define @test_vlseg8ff_mask_nxv2f32( %va ; CHECK-LABEL: test_vlseg8ff_mask_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vlseg8e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv2f32( %val, %val, %val, %val, %val, %val, %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -3948,6 +4152,7 @@ define @test_vlseg2ff_nxv1f16(half* %base, i64 %vl, i64* %ou ; CHECK-NEXT: vlseg2e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # 
kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv1f16( undef, undef, half* %base, i64 %vl) @@ -3965,6 +4170,7 @@ define @test_vlseg2ff_mask_nxv1f16( %val, ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv1f16( %val, %val, half* %base, %mask, i64 %vl, i64 1) @@ -3984,6 +4190,7 @@ define @test_vlseg3ff_nxv1f16(half* %base, i64 %vl, i64* %ou ; CHECK-NEXT: vlseg3e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv1f16( undef, undef, undef, half* %base, i64 %vl) @@ -3997,11 +4204,12 @@ define @test_vlseg3ff_mask_nxv1f16( %val, ; CHECK-LABEL: test_vlseg3ff_mask_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv1f16( %val, %val, %val, half* %base, %mask, i64 %vl, i64 1) @@ -4021,6 +4229,7 @@ define @test_vlseg4ff_nxv1f16(half* %base, i64 %vl, i64* %ou ; CHECK-NEXT: vlseg4e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv1f16( undef, undef, undef, undef, half* %base, i64 %vl) @@ -4034,12 +4243,13 @@ define @test_vlseg4ff_mask_nxv1f16( %val, ; CHECK-LABEL: test_vlseg4ff_mask_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv1f16( %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1) @@ -4059,6 +4269,7 @@ define @test_vlseg5ff_nxv1f16(half* %base, i64 %vl, i64* %ou ; CHECK-NEXT: vlseg5e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv1f16( undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -4072,13 +4283,14 @@ define @test_vlseg5ff_mask_nxv1f16( %val, ; CHECK-LABEL: test_vlseg5ff_mask_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv1f16( %val, %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1) @@ -4098,6 +4310,7 @@ define 
@test_vlseg6ff_nxv1f16(half* %base, i64 %vl, i64* %ou ; CHECK-NEXT: vlseg6e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv1f16( undef, undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -4111,14 +4324,15 @@ define @test_vlseg6ff_mask_nxv1f16( %val, ; CHECK-LABEL: test_vlseg6ff_mask_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv1f16( %val, %val, %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1) @@ -4138,6 +4352,7 @@ define @test_vlseg7ff_nxv1f16(half* %base, i64 %vl, i64* %ou ; CHECK-NEXT: vlseg7e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv1f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i64 %vl) @@ -4151,15 +4366,16 @@ define @test_vlseg7ff_mask_nxv1f16( %val, ; CHECK-LABEL: test_vlseg7ff_mask_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv1f16( %val, %val, %val, %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1) @@ -4179,6 +4395,7 @@ define @test_vlseg8ff_nxv1f16(half* %base, i64 %vl, i64* %ou ; CHECK-NEXT: vlseg8e16ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv1f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i64 %vl) @@ -4192,16 +4409,17 @@ define @test_vlseg8ff_mask_nxv1f16( %val, ; CHECK-LABEL: test_vlseg8ff_mask_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 
killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv1f16( %val, %val, %val, %val, %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1) @@ -4221,6 +4439,7 @@ define @test_vlseg2ff_nxv1f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg2e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv1f32( undef, undef, float* %base, i64 %vl) @@ -4238,6 +4457,7 @@ define @test_vlseg2ff_mask_nxv1f32( %va ; CHECK-NEXT: vlseg2e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv1f32( %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -4257,6 +4477,7 @@ define @test_vlseg3ff_nxv1f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg3e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv1f32( undef, undef, undef, float* %base, i64 %vl) @@ -4270,11 +4491,12 @@ define @test_vlseg3ff_mask_nxv1f32( %va ; CHECK-LABEL: test_vlseg3ff_mask_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg3e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv1f32( %val, %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -4294,6 +4516,7 @@ define @test_vlseg4ff_nxv1f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg4e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv1f32( undef, undef, undef, undef, float* %base, i64 %vl) @@ -4307,12 +4530,13 @@ define @test_vlseg4ff_mask_nxv1f32( %va ; CHECK-LABEL: test_vlseg4ff_mask_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg4e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv1f32( %val, %val, %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -4332,6 +4556,7 @@ define @test_vlseg5ff_nxv1f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg5e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv1f32( undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -4345,13 +4570,14 @@ define @test_vlseg5ff_mask_nxv1f32( %va ; CHECK-LABEL: test_vlseg5ff_mask_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, 
v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg5e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv1f32( %val, %val, %val, %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -4371,6 +4597,7 @@ define @test_vlseg6ff_nxv1f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg6e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv1f32( undef, undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -4384,14 +4611,15 @@ define @test_vlseg6ff_mask_nxv1f32( %va ; CHECK-LABEL: test_vlseg6ff_mask_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg6e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv1f32( %val, %val, %val, %val, %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -4411,6 +4639,7 @@ define @test_vlseg7ff_nxv1f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg7e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv1f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i64 %vl) @@ -4424,15 +4653,16 @@ define @test_vlseg7ff_mask_nxv1f32( %va ; CHECK-LABEL: test_vlseg7ff_mask_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg7e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv1f32( %val, %val, %val, %val, %val, %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -4452,6 +4682,7 @@ define @test_vlseg8ff_nxv1f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg8e32ff.v v7, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv1f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i64 %vl) @@ -4465,16 +4696,17 @@ define @test_vlseg8ff_mask_nxv1f32( %va ; CHECK-LABEL: test_vlseg8ff_mask_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; 
CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vlseg8e32ff.v v7, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv1f32( %val, %val, %val, %val, %val, %val, %val, %val, float* %base, %mask, i64 %vl, i64 1) @@ -4494,6 +4726,7 @@ define @test_vlseg2ff_nxv8f16(half* %base, i64 %vl, i64* %ou ; CHECK-NEXT: vlseg2e16ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv8f16( undef, undef, half* %base, i64 %vl) @@ -4511,6 +4744,7 @@ define @test_vlseg2ff_mask_nxv8f16( %val, ; CHECK-NEXT: vlseg2e16ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv8f16( %val, %val, half* %base, %mask, i64 %vl, i64 1) @@ -4530,6 +4764,7 @@ define @test_vlseg3ff_nxv8f16(half* %base, i64 %vl, i64* %ou ; CHECK-NEXT: vlseg3e16ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv8f16( undef, undef, undef, half* %base, i64 %vl) @@ -4543,11 +4778,12 @@ define @test_vlseg3ff_mask_nxv8f16( %val, ; CHECK-LABEL: test_vlseg3ff_mask_nxv8f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg3e16ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv8f16( %val, %val, %val, half* %base, %mask, i64 %vl, i64 1) @@ -4567,6 +4803,7 @@ define @test_vlseg4ff_nxv8f16(half* %base, i64 %vl, i64* %ou ; CHECK-NEXT: vlseg4e16ff.v v6, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv8f16( undef, undef, undef, undef, half* %base, i64 %vl) @@ -4580,12 +4817,13 @@ define @test_vlseg4ff_mask_nxv8f16( %val, ; CHECK-LABEL: test_vlseg4ff_mask_nxv8f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v10, v6 +; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vlseg4e16ff.v v6, (a0), v0.t ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv8f16( %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1) @@ -4605,6 +4843,7 @@ define @test_vlseg2ff_nxv8f32(float* %base, i64 %vl, i64* % ; CHECK-NEXT: vlseg2e32ff.v v4, (a0) ; CHECK-NEXT: csrr a0, vl ; CHECK-NEXT: sd a0, 0(a2) +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret 
 entry:
   %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv8f32( undef, undef, float* %base, i64 %vl)
@@ -4622,6 +4861,7 @@ define @test_vlseg2ff_mask_nxv8f32( %va
 ; CHECK-NEXT: vlseg2e32ff.v v4, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv8f32( %val, %val, float* %base, %mask, i64 %vl, i64 1)
@@ -4641,6 +4881,7 @@ define @test_vlseg2ff_nxv2f64(double* %base, i64 %vl, i64*
 ; CHECK-NEXT: vlseg2e64ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv2f64( undef, undef, double* %base, i64 %vl)
@@ -4658,6 +4899,7 @@ define @test_vlseg2ff_mask_nxv2f64( %
 ; CHECK-NEXT: vlseg2e64ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv2f64( %val, %val, double* %base, %mask, i64 %vl, i64 1)
@@ -4677,6 +4919,7 @@ define @test_vlseg3ff_nxv2f64(double* %base, i64 %vl, i64*
 ; CHECK-NEXT: vlseg3e64ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv2f64( undef, undef, undef, double* %base, i64 %vl)
@@ -4690,11 +4933,12 @@ define @test_vlseg3ff_mask_nxv2f64( %
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv2f64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v10, v6
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
 ; CHECK-NEXT: vlseg3e64ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv2f64( %val, %val, %val, double* %base, %mask, i64 %vl, i64 1)
@@ -4714,6 +4958,7 @@ define @test_vlseg4ff_nxv2f64(double* %base, i64 %vl, i64*
 ; CHECK-NEXT: vlseg4e64ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv2f64( undef, undef, undef, undef, double* %base, i64 %vl)
@@ -4727,12 +4972,13 @@ define @test_vlseg4ff_mask_nxv2f64( %
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv2f64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v10, v6
+; CHECK-NEXT: vmv2r.v v12, v6
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
 ; CHECK-NEXT: vlseg4e64ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv2f64( %val, %val, %val, %val, double* %base, %mask, i64 %vl, i64 1)
@@ -4752,6 +4998,7 @@ define @test_vlseg2ff_nxv4f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv4f16( undef, undef, half* %base, i64 %vl)
@@ -4769,6 +5016,7 @@ define @test_vlseg2ff_mask_nxv4f16( %val,
 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv4f16( %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -4788,6 +5036,7 @@ define @test_vlseg3ff_nxv4f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv4f16( undef, undef, undef, half* %base, i64 %vl)
@@ -4801,11 +5050,12 @@ define @test_vlseg3ff_mask_nxv4f16( %val,
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv4f16( %val, %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -4825,6 +5075,7 @@ define @test_vlseg4ff_nxv4f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv4f16( undef, undef, undef, undef, half* %base, i64 %vl)
@@ -4838,12 +5089,13 @@ define @test_vlseg4ff_mask_nxv4f16( %val,
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv4f16( %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -4863,6 +5115,7 @@ define @test_vlseg5ff_nxv4f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv4f16( undef, undef, undef, undef, undef, half* %base, i64 %vl)
@@ -4876,13 +5129,14 @@ define @test_vlseg5ff_mask_nxv4f16( %val,
 ; CHECK-LABEL: test_vlseg5ff_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv4f16( %val, %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -4902,6 +5156,7 @@ define @test_vlseg6ff_nxv4f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv4f16( undef, undef, undef, undef, undef, undef, half* %base, i64 %vl)
@@ -4915,14 +5170,15 @@ define @test_vlseg6ff_mask_nxv4f16( %val,
 ; CHECK-LABEL: test_vlseg6ff_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv4f16( %val, %val, %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -4942,6 +5198,7 @@ define @test_vlseg7ff_nxv4f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv4f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i64 %vl)
@@ -4955,15 +5212,16 @@ define @test_vlseg7ff_mask_nxv4f16( %val,
 ; CHECK-LABEL: test_vlseg7ff_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv4f16( %val, %val, %val, %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -4983,6 +5241,7 @@ define @test_vlseg8ff_nxv4f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv4f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i64 %vl)
@@ -4996,16 +5255,17 @@ define @test_vlseg8ff_mask_nxv4f16( %val,
 ; CHECK-LABEL: test_vlseg8ff_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
+; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv4f16( %val, %val, %val, %val, %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -5025,6 +5285,7 @@ define @test_vlseg2ff_nxv2f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv2f16( undef, undef, half* %base, i64 %vl)
@@ -5042,6 +5303,7 @@ define @test_vlseg2ff_mask_nxv2f16( %val,
 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv2f16( %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -5061,6 +5323,7 @@ define @test_vlseg3ff_nxv2f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv2f16( undef, undef, undef, half* %base, i64 %vl)
@@ -5074,11 +5337,12 @@ define @test_vlseg3ff_mask_nxv2f16( %val,
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv2f16( %val, %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -5098,6 +5362,7 @@ define @test_vlseg4ff_nxv2f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv2f16( undef, undef, undef, undef, half* %base, i64 %vl)
@@ -5111,12 +5376,13 @@ define @test_vlseg4ff_mask_nxv2f16( %val,
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv2f16( %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -5136,6 +5402,7 @@ define @test_vlseg5ff_nxv2f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.nxv2f16( undef, undef, undef, undef, undef, half* %base, i64 %vl)
@@ -5149,13 +5416,14 @@ define @test_vlseg5ff_mask_nxv2f16( %val,
 ; CHECK-LABEL: test_vlseg5ff_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,, i64} @llvm.riscv.vlseg5ff.mask.nxv2f16( %val, %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -5175,6 +5443,7 @@ define @test_vlseg6ff_nxv2f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.nxv2f16( undef, undef, undef, undef, undef, undef, half* %base, i64 %vl)
@@ -5188,14 +5457,15 @@ define @test_vlseg6ff_mask_nxv2f16( %val,
 ; CHECK-LABEL: test_vlseg6ff_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,, i64} @llvm.riscv.vlseg6ff.mask.nxv2f16( %val, %val, %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -5215,6 +5485,7 @@ define @test_vlseg7ff_nxv2f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.nxv2f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i64 %vl)
@@ -5228,15 +5499,16 @@ define @test_vlseg7ff_mask_nxv2f16( %val,
 ; CHECK-LABEL: test_vlseg7ff_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,, i64} @llvm.riscv.vlseg7ff.mask.nxv2f16( %val, %val, %val, %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -5256,6 +5528,7 @@ define @test_vlseg8ff_nxv2f16(half* %base, i64 %vl, i64* %ou
 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.nxv2f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i64 %vl)
@@ -5269,16 +5542,17 @@ define @test_vlseg8ff_mask_nxv2f16( %val,
 ; CHECK-LABEL: test_vlseg8ff_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
+; CHECK-NEXT: vmv1r.v v12, v7
+; CHECK-NEXT: vmv1r.v v13, v7
+; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,, i64} @llvm.riscv.vlseg8ff.mask.nxv2f16( %val, %val, %val, %val, %val, %val, %val, %val, half* %base, %mask, i64 %vl, i64 1)
@@ -5298,6 +5572,7 @@ define @test_vlseg2ff_nxv4f32(float* %base, i64 %vl, i64* %
 ; CHECK-NEXT: vlseg2e32ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.nxv4f32( undef, undef, float* %base, i64 %vl)
@@ -5315,6 +5590,7 @@ define @test_vlseg2ff_mask_nxv4f32( %va
 ; CHECK-NEXT: vlseg2e32ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,, i64} @llvm.riscv.vlseg2ff.mask.nxv4f32( %val, %val, float* %base, %mask, i64 %vl, i64 1)
@@ -5334,6 +5610,7 @@ define @test_vlseg3ff_nxv4f32(float* %base, i64 %vl, i64* %
 ; CHECK-NEXT: vlseg3e32ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.nxv4f32( undef, undef, undef, float* %base, i64 %vl)
@@ -5347,11 +5624,12 @@ define @test_vlseg3ff_mask_nxv4f32( %va
 ; CHECK-LABEL: test_vlseg3ff_mask_nxv4f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v10, v6
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vlseg3e32ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,, i64} @llvm.riscv.vlseg3ff.mask.nxv4f32( %val, %val, %val, float* %base, %mask, i64 %vl, i64 1)
@@ -5371,6 +5649,7 @@ define @test_vlseg4ff_nxv4f32(float* %base, i64 %vl, i64* %
 ; CHECK-NEXT: vlseg4e32ff.v v6, (a0)
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.nxv4f32( undef, undef, undef, undef, float* %base, i64 %vl)
@@ -5384,12 +5663,13 @@ define @test_vlseg4ff_mask_nxv4f32( %va
 ; CHECK-LABEL: test_vlseg4ff_mask_nxv4f32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v10, v6
+; CHECK-NEXT: vmv2r.v v12, v6
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vlseg4e32ff.v v6, (a0), v0.t
 ; CHECK-NEXT: csrr a0, vl
 ; CHECK-NEXT: sd a0, 0(a2)
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,, i64} @llvm.riscv.vlseg4ff.mask.nxv4f32( %val, %val, %val, %val, float* %base, %mask, i64 %vl, i64 1)
diff --git a/llvm/test/CodeGen/RISCV/rvv/vlsseg-rv32.ll b/llvm/test/CodeGen/RISCV/rvv/vlsseg-rv32.ll
index 3c2728e42871d..9881928df95fc 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vlsseg-rv32.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vlsseg-rv32.ll
@@ -10,6 +10,7 @@ define @test_vlsseg2_nxv16i16(i16* %base, i32 %offset, i32 %
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e16, m4, ta, mu
 ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16i16( undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -24,6 +25,7 @@ define @test_vlsseg2_mask_nxv16i16(i16* %base, i32 %offset,
 ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1
 ; CHECK-NEXT: vmv4r.v v8, v4
 ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16i16( undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -41,6 +43,7 @@ define @test_vlsseg2_nxv1i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu
 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i8( undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -55,6 +58,7 @@ define @test_vlsseg2_mask_nxv1i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1
 ; CHECK-NEXT: vmv1r.v v8, v7
 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i8( undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -72,6 +76,7 @@ define @test_vlsseg3_nxv1i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu
 ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i8( undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -87,6 +92,7 @@ define @test_vlsseg3_mask_nxv1i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v8, v7
 ; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i8( undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -104,6 +110,7 @@ define @test_vlsseg4_nxv1i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu
 ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i8( undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -120,6 +127,7 @@ define @test_vlsseg4_mask_nxv1i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i8( undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -137,6 +145,7 @@ define @test_vlsseg5_nxv1i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu
 ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i8( undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -154,6 +163,7 @@ define @test_vlsseg5_mask_nxv1i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i8( undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -171,6 +181,7 @@ define @test_vlsseg6_nxv1i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu
 ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -189,6 +200,7 @@ define @test_vlsseg6_mask_nxv1i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -206,6 +218,7 @@ define @test_vlsseg7_nxv1i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu
 ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -225,6 +238,7 @@ define @test_vlsseg7_mask_nxv1i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -242,6 +256,7 @@ define @test_vlsseg8_nxv1i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu
 ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -262,6 +277,7 @@ define @test_vlsseg8_mask_nxv1i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -279,6 +295,7 @@ define @test_vlsseg2_nxv16i8(i8* %base, i32 %offset, i32 %vl)
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu
 ; CHECK-NEXT: vlsseg2e8.v v6, (a0), a1
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16i8( undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -293,6 +310,7 @@ define @test_vlsseg2_mask_nxv16i8(i8* %base, i32 %offset, i32
 ; CHECK-NEXT: vlsseg2e8.v v6, (a0), a1
 ; CHECK-NEXT: vmv2r.v v8, v6
 ; CHECK-NEXT: vlsseg2e8.v v6, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16i8( undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -310,6 +328,7 @@ define @test_vlsseg3_nxv16i8(i8* %base, i32 %offset, i32 %vl)
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu
 ; CHECK-NEXT: vlsseg3e8.v v6, (a0), a1
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv16i8( undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -325,6 +344,7 @@ define @test_vlsseg3_mask_nxv16i8(i8* %base, i32 %offset, i32
 ; CHECK-NEXT: vmv2r.v v8, v6
 ; CHECK-NEXT: vmv2r.v v10, v6
 ; CHECK-NEXT: vlsseg3e8.v v6, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv16i8( undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -342,6 +362,7 @@ define @test_vlsseg4_nxv16i8(i8* %base, i32 %offset, i32 %vl)
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu
 ; CHECK-NEXT: vlsseg4e8.v v6, (a0), a1
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv16i8( undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -358,6 +379,7 @@ define @test_vlsseg4_mask_nxv16i8(i8* %base, i32 %offset, i32
 ; CHECK-NEXT: vmv2r.v v10, v6
 ; CHECK-NEXT: vmv2r.v v12, v6
 ; CHECK-NEXT: vlsseg4e8.v v6, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv16i8( undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -375,6 +397,7 @@ define @test_vlsseg2_nxv2i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu
 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i32( undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -389,6 +412,7 @@ define @test_vlsseg2_mask_nxv2i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1
 ; CHECK-NEXT: vmv1r.v v8, v7
 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i32( undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -406,6 +430,7 @@ define @test_vlsseg3_nxv2i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu
 ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i32( undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -421,6 +446,7 @@ define @test_vlsseg3_mask_nxv2i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v8, v7
 ; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i32( undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -438,6 +464,7 @@ define @test_vlsseg4_nxv2i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu
 ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i32( undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -454,6 +481,7 @@ define @test_vlsseg4_mask_nxv2i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i32( undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -471,6 +499,7 @@ define @test_vlsseg5_nxv2i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu
 ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i32( undef, undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -488,6 +517,7 @@ define @test_vlsseg5_mask_nxv2i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i32( undef, undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -505,6 +535,7 @@ define @test_vlsseg6_nxv2i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu
 ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i32( undef, undef, undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -523,6 +554,7 @@ define @test_vlsseg6_mask_nxv2i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i32( undef, undef, undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -540,6 +572,7 @@ define @test_vlsseg7_nxv2i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu
 ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -559,6 +592,7 @@ define @test_vlsseg7_mask_nxv2i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -576,6 +610,7 @@ define @test_vlsseg8_nxv2i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu
 ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -596,6 +631,7 @@ define @test_vlsseg8_mask_nxv2i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -613,6 +649,7 @@ define @test_vlsseg2_nxv4i16(i16* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu
 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i16( undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -627,6 +664,7 @@ define @test_vlsseg2_mask_nxv4i16(i16* %base, i32 %offset, i3
 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1
 ; CHECK-NEXT: vmv1r.v v8, v7
 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i16( undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -644,6 +682,7 @@ define @test_vlsseg3_nxv4i16(i16* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu
 ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i16( undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -659,6 +698,7 @@ define @test_vlsseg3_mask_nxv4i16(i16* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v8, v7
 ; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i16( undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -676,6 +716,7 @@ define @test_vlsseg4_nxv4i16(i16* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu
 ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i16( undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -692,6 +733,7 @@ define @test_vlsseg4_mask_nxv4i16(i16* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i16( undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -709,6 +751,7 @@ define @test_vlsseg5_nxv4i16(i16* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu
 ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4i16( undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -726,6 +769,7 @@ define @test_vlsseg5_mask_nxv4i16(i16* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4i16( undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -743,6 +787,7 @@ define @test_vlsseg6_nxv4i16(i16* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu
 ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -761,6 +806,7 @@ define @test_vlsseg6_mask_nxv4i16(i16* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -778,6 +824,7 @@ define @test_vlsseg7_nxv4i16(i16* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu
 ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -797,6 +844,7 @@ define @test_vlsseg7_mask_nxv4i16(i16* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -814,6 +862,7 @@ define @test_vlsseg8_nxv4i16(i16* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu
 ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -834,6 +883,7 @@ define @test_vlsseg8_mask_nxv4i16(i16* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -851,6 +901,7 @@ define @test_vlsseg2_nxv1i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu
 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i32( undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -865,6 +916,7 @@ define @test_vlsseg2_mask_nxv1i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1
 ; CHECK-NEXT: vmv1r.v v8, v7
 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i32( undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -882,6 +934,7 @@ define @test_vlsseg3_nxv1i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu
 ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i32( undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -897,6 +950,7 @@ define @test_vlsseg3_mask_nxv1i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v8, v7
 ; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i32( undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -914,6 +968,7 @@ define @test_vlsseg4_nxv1i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu
 ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i32( undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -930,6 +985,7 @@ define @test_vlsseg4_mask_nxv1i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i32( undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -947,6 +1003,7 @@ define @test_vlsseg5_nxv1i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu
 ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i32( undef, undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -964,6 +1021,7 @@ define @test_vlsseg5_mask_nxv1i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i32( undef, undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -981,6 +1039,7 @@ define @test_vlsseg6_nxv1i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu
 ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i32( undef, undef, undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -999,6 +1058,7 @@ define @test_vlsseg6_mask_nxv1i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i32( undef, undef, undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -1016,6 +1076,7 @@ define @test_vlsseg7_nxv1i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu
 ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -1035,6 +1096,7 @@ define @test_vlsseg7_mask_nxv1i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -1052,6 +1114,7 @@ define @test_vlsseg8_nxv1i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu
 ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -1072,6 +1135,7 @@ define @test_vlsseg8_mask_nxv1i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -1089,6 +1153,7 @@ define @test_vlsseg2_nxv8i16(i16* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu
 ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i16( undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -1103,6 +1168,7 @@ define @test_vlsseg2_mask_nxv8i16(i16* %base, i32 %offset, i3
 ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1
 ; CHECK-NEXT: vmv2r.v v8, v6
 ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i16( undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -1120,6 +1186,7 @@ define @test_vlsseg3_nxv8i16(i16* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu
 ; CHECK-NEXT: vlsseg3e16.v v6, (a0), a1
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8i16( undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -1135,6 +1202,7 @@ define @test_vlsseg3_mask_nxv8i16(i16* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv2r.v v8, v6
 ; CHECK-NEXT: vmv2r.v v10, v6
 ; CHECK-NEXT: vlsseg3e16.v v6, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8i16( undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -1152,6 +1220,7 @@ define @test_vlsseg4_nxv8i16(i16* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu
 ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8i16( undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -1168,6 +1237,7 @@ define @test_vlsseg4_mask_nxv8i16(i16* %base, i32 %offset, i3
 ; CHECK-NEXT: vmv2r.v v10, v6
 ; CHECK-NEXT: vmv2r.v v12, v6
 ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8i16( undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl)
@@ -1185,6 +1255,7 @@ define @test_vlsseg2_nxv8i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu
 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i8( undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1199,6 +1270,7 @@ define @test_vlsseg2_mask_nxv8i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1
 ; CHECK-NEXT: vmv1r.v v8, v7
 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i8( undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1216,6 +1288,7 @@ define @test_vlsseg3_nxv8i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu
 ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8i8( undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1231,6 +1304,7 @@ define @test_vlsseg3_mask_nxv8i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v8, v7
 ; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8i8( undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1248,6 +1322,7 @@ define @test_vlsseg4_nxv8i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu
 ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8i8( undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1264,6 +1339,7 @@ define @test_vlsseg4_mask_nxv8i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8i8( undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1281,6 +1357,7 @@ define @test_vlsseg5_nxv8i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu
 ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv8i8( undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1298,6 +1375,7 @@ define @test_vlsseg5_mask_nxv8i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv8i8( undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1315,6 +1393,7 @@ define @test_vlsseg6_nxv8i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu
 ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv8i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1333,6 +1412,7 @@ define @test_vlsseg6_mask_nxv8i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv8i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1350,6 +1430,7 @@ define @test_vlsseg7_nxv8i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu
 ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv8i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1369,6 +1450,7 @@ define @test_vlsseg7_mask_nxv8i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv8i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1386,6 +1468,7 @@ define @test_vlsseg8_nxv8i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu
 ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv8i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1406,6 +1489,7 @@ define @test_vlsseg8_mask_nxv8i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vmv1r.v v14, v7
 ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv8i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1423,6 +1507,7 @@ define @test_vlsseg2_nxv8i32(i32* %base, i32 %offset, i32 %vl
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e32, m4, ta, mu
 ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i32( undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -1437,6 +1522,7 @@ define @test_vlsseg2_mask_nxv8i32(i32* %base, i32 %offset, i3
 ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1
 ; CHECK-NEXT: vmv4r.v v8, v4
 ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i32( undef, undef, i32* %base, i32 %offset, i32 %vl)
@@ -1454,6 +1540,7 @@ define @test_vlsseg2_nxv4i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu
 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i8( undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1468,6 +1555,7 @@ define @test_vlsseg2_mask_nxv4i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1
 ; CHECK-NEXT: vmv1r.v v8, v7
 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i8( undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1485,6 +1573,7 @@ define @test_vlsseg3_nxv4i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu
 ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i8( undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1500,6 +1589,7 @@ define @test_vlsseg3_mask_nxv4i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v8, v7
 ; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i8( undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1517,6 +1607,7 @@ define @test_vlsseg4_nxv4i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu
 ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i8( undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1533,6 +1624,7 @@ define @test_vlsseg4_mask_nxv4i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v9, v7
 ; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i8( undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1550,6 +1642,7 @@ define @test_vlsseg5_nxv4i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu
 ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4i8( undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1567,6 +1660,7 @@ define @test_vlsseg5_mask_nxv4i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v10, v7
 ; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4i8( undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1584,6 +1678,7 @@ define @test_vlsseg6_nxv4i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu
 ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1602,6 +1697,7 @@ define @test_vlsseg6_mask_nxv4i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v11, v7
 ; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1619,6 +1715,7 @@ define @test_vlsseg7_nxv4i8(i8* %base, i32 %offset, i32 %vl) {
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu
 ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
   %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl)
@@ -1638,6 +1735,7 @@ define @test_vlsseg7_mask_nxv4i8(i8* %base, i32 %offset, i32 %
 ; CHECK-NEXT: vmv1r.v v12, v7
 ; CHECK-NEXT: vmv1r.v v13, v7
 ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: ret
 entry:
@llvm.riscv.vlsseg7.nxv4i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -1655,6 +1753,7 @@ define @test_vlsseg8_nxv4i8(i8* %base, i32 %offset, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -1675,6 +1774,7 @@ define @test_vlsseg8_mask_nxv4i8(i8* %base, i32 %offset, i32 % ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -1692,6 +1792,7 @@ define @test_vlsseg2_nxv1i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i16( undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1706,6 +1807,7 @@ define @test_vlsseg2_mask_nxv1i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i16( undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1723,6 +1825,7 @@ define @test_vlsseg3_nxv1i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i16( undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1738,6 +1841,7 @@ define @test_vlsseg3_mask_nxv1i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i16( undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1755,6 +1859,7 @@ define @test_vlsseg4_nxv1i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i16( undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1771,6 +1876,7 @@ define @test_vlsseg4_mask_nxv1i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i16( undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1788,6 +1894,7 @@ define @test_vlsseg5_nxv1i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed 
$v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i16( undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1805,6 +1912,7 @@ define @test_vlsseg5_mask_nxv1i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i16( undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1822,6 +1930,7 @@ define @test_vlsseg6_nxv1i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1840,6 +1949,7 @@ define @test_vlsseg6_mask_nxv1i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1857,6 +1967,7 @@ define @test_vlsseg7_nxv1i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1876,6 +1987,7 @@ define @test_vlsseg7_mask_nxv1i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1893,6 +2005,7 @@ define @test_vlsseg8_nxv1i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1913,6 +2026,7 @@ define @test_vlsseg8_mask_nxv1i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -1930,6 +2044,7 @@ define @test_vlsseg2_nxv32i8(i8* %base, i32 %offset, i32 %vl) ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, m4, ta, mu ; CHECK-NEXT: vlsseg2e8.v v4, (a0), a1 +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv32i8( undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -1944,6 
+2059,7 @@ define @test_vlsseg2_mask_nxv32i8(i8* %base, i32 %offset, i32 ; CHECK-NEXT: vlsseg2e8.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vlsseg2e8.v v4, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv32i8( undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -1961,6 +2077,7 @@ define @test_vlsseg2_nxv2i8(i8* %base, i32 %offset, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i8( undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -1975,6 +2092,7 @@ define @test_vlsseg2_mask_nxv2i8(i8* %base, i32 %offset, i32 % ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i8( undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -1992,6 +2110,7 @@ define @test_vlsseg3_nxv2i8(i8* %base, i32 %offset, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i8( undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -2007,6 +2126,7 @@ define @test_vlsseg3_mask_nxv2i8(i8* %base, i32 %offset, i32 % ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i8( undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -2024,6 +2144,7 @@ define @test_vlsseg4_nxv2i8(i8* %base, i32 %offset, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i8( undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -2040,6 +2161,7 @@ define @test_vlsseg4_mask_nxv2i8(i8* %base, i32 %offset, i32 % ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i8( undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -2057,6 +2179,7 @@ define @test_vlsseg5_nxv2i8(i8* %base, i32 %offset, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i8( undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -2074,6 +2197,7 @@ define @test_vlsseg5_mask_nxv2i8(i8* %base, i32 %offset, i32 % ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i8( undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -2091,6 +2215,7 @@ define @test_vlsseg6_nxv2i8(i8* %base, i32 %offset, i32 %vl) { 
; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -2109,6 +2234,7 @@ define @test_vlsseg6_mask_nxv2i8(i8* %base, i32 %offset, i32 % ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i8( undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -2126,6 +2252,7 @@ define @test_vlsseg7_nxv2i8(i8* %base, i32 %offset, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -2145,6 +2272,7 @@ define @test_vlsseg7_mask_nxv2i8(i8* %base, i32 %offset, i32 % ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -2162,6 +2290,7 @@ define @test_vlsseg8_nxv2i8(i8* %base, i32 %offset, i32 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -2182,6 +2311,7 @@ define @test_vlsseg8_mask_nxv2i8(i8* %base, i32 %offset, i32 % ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i32 %offset, i32 %vl) @@ -2199,6 +2329,7 @@ define @test_vlsseg2_nxv2i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i16( undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2213,6 +2344,7 @@ define @test_vlsseg2_mask_nxv2i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i16( undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2230,6 +2362,7 @@ define @test_vlsseg3_nxv2i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} 
@llvm.riscv.vlsseg3.nxv2i16( undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2245,6 +2378,7 @@ define @test_vlsseg3_mask_nxv2i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i16( undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2262,6 +2396,7 @@ define @test_vlsseg4_nxv2i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i16( undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2278,6 +2413,7 @@ define @test_vlsseg4_mask_nxv2i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i16( undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2295,6 +2431,7 @@ define @test_vlsseg5_nxv2i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i16( undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2312,6 +2449,7 @@ define @test_vlsseg5_mask_nxv2i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i16( undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2329,6 +2467,7 @@ define @test_vlsseg6_nxv2i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2347,6 +2486,7 @@ define @test_vlsseg6_mask_nxv2i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i16( undef, undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2364,6 +2504,7 @@ define @test_vlsseg7_nxv2i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2383,6 +2524,7 @@ define @test_vlsseg7_mask_nxv2i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def 
$v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2400,6 +2542,7 @@ define @test_vlsseg8_nxv2i16(i16* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2420,6 +2563,7 @@ define @test_vlsseg8_mask_nxv2i16(i16* %base, i32 %offset, i3 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i32 %offset, i32 %vl) @@ -2437,6 +2581,7 @@ define @test_vlsseg2_nxv4i32(i32* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vlsseg2e32.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i32( undef, undef, i32* %base, i32 %offset, i32 %vl) @@ -2451,6 +2596,7 @@ define @test_vlsseg2_mask_nxv4i32(i32* %base, i32 %offset, i3 ; CHECK-NEXT: vlsseg2e32.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vlsseg2e32.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i32( undef, undef, i32* %base, i32 %offset, i32 %vl) @@ -2468,6 +2614,7 @@ define @test_vlsseg3_nxv4i32(i32* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vlsseg3e32.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i32( undef, undef, undef, i32* %base, i32 %offset, i32 %vl) @@ -2483,6 +2630,7 @@ define @test_vlsseg3_mask_nxv4i32(i32* %base, i32 %offset, i3 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vlsseg3e32.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i32( undef, undef, undef, i32* %base, i32 %offset, i32 %vl) @@ -2500,6 +2648,7 @@ define @test_vlsseg4_nxv4i32(i32* %base, i32 %offset, i32 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vlsseg4e32.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i32( undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl) @@ -2516,6 +2665,7 @@ define @test_vlsseg4_mask_nxv4i32(i32* %base, i32 %offset, i3 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vlsseg4e32.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i32( undef, undef, undef, undef, i32* %base, i32 %offset, i32 %vl) @@ -2533,6 +2683,7 @@ define @test_vlsseg2_nxv16f16(half* %base, i32 %offset, i32 ; 
CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m4, ta, mu ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1 +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16f16( undef, undef, half* %base, i32 %offset, i32 %vl) @@ -2547,6 +2698,7 @@ define @test_vlsseg2_mask_nxv16f16(half* %base, i32 %offset ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16f16( undef, undef, half* %base, i32 %offset, i32 %vl) @@ -2564,6 +2716,7 @@ define @test_vlsseg2_nxv4f64(double* %base, i32 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m4, ta, mu ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1 +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f64( undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2578,6 +2731,7 @@ define @test_vlsseg2_mask_nxv4f64(double* %base, i32 %offs ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f64( undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2595,6 +2749,7 @@ define @test_vlsseg2_nxv1f64(double* %base, i32 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg2e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f64( undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2609,6 +2764,7 @@ define @test_vlsseg2_mask_nxv1f64(double* %base, i32 %offs ; CHECK-NEXT: vlsseg2e64.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f64( undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2626,6 +2782,7 @@ define @test_vlsseg3_nxv1f64(double* %base, i32 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg3e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f64( undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2641,6 +2798,7 @@ define @test_vlsseg3_mask_nxv1f64(double* %base, i32 %offs ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f64( undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2658,6 +2816,7 @@ define @test_vlsseg4_nxv1f64(double* %base, i32 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg4e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f64( undef, undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2674,6 +2833,7 @@ define @test_vlsseg4_mask_nxv1f64(double* %base, i32 %offs ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e64.v v7, (a0), a1, v0.t +; 
CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f64( undef, undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2691,6 +2851,7 @@ define @test_vlsseg5_nxv1f64(double* %base, i32 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg5e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f64( undef, undef, undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2708,6 +2869,7 @@ define @test_vlsseg5_mask_nxv1f64(double* %base, i32 %offs ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f64( undef, undef, undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2725,6 +2887,7 @@ define @test_vlsseg6_nxv1f64(double* %base, i32 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg6e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f64( undef, undef, undef, undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2743,6 +2906,7 @@ define @test_vlsseg6_mask_nxv1f64(double* %base, i32 %offs ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f64( undef, undef, undef, undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2760,6 +2924,7 @@ define @test_vlsseg7_nxv1f64(double* %base, i32 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg7e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f64( undef, undef, undef, undef, undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2779,6 +2944,7 @@ define @test_vlsseg7_mask_nxv1f64(double* %base, i32 %offs ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f64( undef, undef, undef, undef, undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2796,6 +2962,7 @@ define @test_vlsseg8_nxv1f64(double* %base, i32 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg8e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f64( undef, undef , undef , undef, undef , undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2816,6 +2983,7 @@ define @test_vlsseg8_mask_nxv1f64(double* %base, i32 %offs ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f64( undef, undef , undef , undef, undef , undef, 
undef, undef, double* %base, i32 %offset, i32 %vl) @@ -2833,6 +3001,7 @@ define @test_vlsseg2_nxv2f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f32( undef, undef, float* %base, i32 %offset, i32 %vl) @@ -2847,6 +3016,7 @@ define @test_vlsseg2_mask_nxv2f32(float* %base, i32 %offset ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f32( undef, undef, float* %base, i32 %offset, i32 %vl) @@ -2864,6 +3034,7 @@ define @test_vlsseg3_nxv2f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f32( undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -2879,6 +3050,7 @@ define @test_vlsseg3_mask_nxv2f32(float* %base, i32 %offset ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f32( undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -2896,6 +3068,7 @@ define @test_vlsseg4_nxv2f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f32( undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -2912,6 +3085,7 @@ define @test_vlsseg4_mask_nxv2f32(float* %base, i32 %offset ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f32( undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -2929,6 +3103,7 @@ define @test_vlsseg5_nxv2f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2f32( undef, undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -2946,6 +3121,7 @@ define @test_vlsseg5_mask_nxv2f32(float* %base, i32 %offset ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2f32( undef, undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -2963,6 +3139,7 @@ define @test_vlsseg6_nxv2f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2f32( undef, undef, undef, undef, undef, undef, 
float* %base, i32 %offset, i32 %vl) @@ -2981,6 +3158,7 @@ define @test_vlsseg6_mask_nxv2f32(float* %base, i32 %offset ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2f32( undef, undef, undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -2998,6 +3176,7 @@ define @test_vlsseg7_nxv2f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3017,6 +3196,7 @@ define @test_vlsseg7_mask_nxv2f32(float* %base, i32 %offset ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3034,6 +3214,7 @@ define @test_vlsseg8_nxv2f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3054,6 +3235,7 @@ define @test_vlsseg8_mask_nxv2f32(float* %base, i32 %offset ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3071,6 +3253,7 @@ define @test_vlsseg2_nxv1f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f16( undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3085,6 +3268,7 @@ define @test_vlsseg2_mask_nxv1f16(half* %base, i32 %offset, ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f16( undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3102,6 +3286,7 @@ define @test_vlsseg3_nxv1f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f16( undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3117,6 +3302,7 @@ define @test_vlsseg3_mask_nxv1f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def 
$v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f16( undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3134,6 +3320,7 @@ define @test_vlsseg4_nxv1f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f16( undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3150,6 +3337,7 @@ define @test_vlsseg4_mask_nxv1f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f16( undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3167,6 +3355,7 @@ define @test_vlsseg5_nxv1f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f16( undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3184,6 +3373,7 @@ define @test_vlsseg5_mask_nxv1f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f16( undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3201,6 +3391,7 @@ define @test_vlsseg6_nxv1f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3219,6 +3410,7 @@ define @test_vlsseg6_mask_nxv1f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3236,6 +3428,7 @@ define @test_vlsseg7_nxv1f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3255,6 +3448,7 @@ define @test_vlsseg7_mask_nxv1f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3272,6 +3466,7 @@ define @test_vlsseg8_nxv1f16(half* %base, i32 %offset, i32 % ; CHECK: # 
%bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3292,6 +3487,7 @@ define @test_vlsseg8_mask_nxv1f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3309,6 +3505,7 @@ define @test_vlsseg2_nxv1f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f32( undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3323,6 +3520,7 @@ define @test_vlsseg2_mask_nxv1f32(float* %base, i32 %offset ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f32( undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3340,6 +3538,7 @@ define @test_vlsseg3_nxv1f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f32( undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3355,6 +3554,7 @@ define @test_vlsseg3_mask_nxv1f32(float* %base, i32 %offset ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f32( undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3372,6 +3572,7 @@ define @test_vlsseg4_nxv1f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f32( undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3388,6 +3589,7 @@ define @test_vlsseg4_mask_nxv1f32(float* %base, i32 %offset ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f32( undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3405,6 +3607,7 @@ define @test_vlsseg5_nxv1f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f32( undef, undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3422,6 +3625,7 @@ define 
@test_vlsseg5_mask_nxv1f32(float* %base, i32 %offset ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f32( undef, undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3439,6 +3643,7 @@ define @test_vlsseg6_nxv1f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f32( undef, undef, undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3457,6 +3662,7 @@ define @test_vlsseg6_mask_nxv1f32(float* %base, i32 %offset ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f32( undef, undef, undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3474,6 +3680,7 @@ define @test_vlsseg7_nxv1f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3493,6 +3700,7 @@ define @test_vlsseg7_mask_nxv1f32(float* %base, i32 %offset ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3510,6 +3718,7 @@ define @test_vlsseg8_nxv1f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3530,6 +3739,7 @@ define @test_vlsseg8_mask_nxv1f32(float* %base, i32 %offset ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3547,6 +3757,7 @@ define @test_vlsseg2_nxv8f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8f16( undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3561,6 +3772,7 @@ define @test_vlsseg2_mask_nxv8f16(half* %base, i32 %offset, ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1, v0.t +; 
CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8f16( undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3578,6 +3790,7 @@ define @test_vlsseg3_nxv8f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vlsseg3e16.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8f16( undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3593,6 +3806,7 @@ define @test_vlsseg3_mask_nxv8f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vlsseg3e16.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8f16( undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3610,6 +3824,7 @@ define @test_vlsseg4_nxv8f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8f16( undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3626,6 +3841,7 @@ define @test_vlsseg4_mask_nxv8f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8f16( undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3643,6 +3859,7 @@ define @test_vlsseg2_nxv8f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m4, ta, mu ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1 +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8f32( undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3657,6 +3874,7 @@ define @test_vlsseg2_mask_nxv8f32(float* %base, i32 %offset ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8f32( undef, undef, float* %base, i32 %offset, i32 %vl) @@ -3674,6 +3892,7 @@ define @test_vlsseg2_nxv2f64(double* %base, i32 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f64( undef, undef, double* %base, i32 %offset, i32 %vl) @@ -3688,6 +3907,7 @@ define @test_vlsseg2_mask_nxv2f64(double* %base, i32 %offs ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f64( undef, undef, double* %base, i32 %offset, i32 %vl) @@ -3705,6 +3925,7 @@ define @test_vlsseg3_nxv2f64(double* %base, i32 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vlsseg3e64.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed 
$v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f64( undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -3720,6 +3941,7 @@ define @test_vlsseg3_mask_nxv2f64(double* %base, i32 %offs ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vlsseg3e64.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f64( undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -3737,6 +3959,7 @@ define @test_vlsseg4_nxv2f64(double* %base, i32 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vlsseg4e64.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f64( undef, undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -3753,6 +3976,7 @@ define @test_vlsseg4_mask_nxv2f64(double* %base, i32 %offs ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vlsseg4e64.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f64( undef, undef, undef, undef, double* %base, i32 %offset, i32 %vl) @@ -3770,6 +3994,7 @@ define @test_vlsseg2_nxv4f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f16( undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3784,6 +4009,7 @@ define @test_vlsseg2_mask_nxv4f16(half* %base, i32 %offset, ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f16( undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3801,6 +4027,7 @@ define @test_vlsseg3_nxv4f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4f16( undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3816,6 +4043,7 @@ define @test_vlsseg3_mask_nxv4f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4f16( undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3833,6 +4061,7 @@ define @test_vlsseg4_nxv4f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4f16( undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3849,6 +4078,7 @@ define @test_vlsseg4_mask_nxv4f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 
= tail call {,,,} @llvm.riscv.vlsseg4.nxv4f16( undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3866,6 +4096,7 @@ define @test_vlsseg5_nxv4f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4f16( undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3883,6 +4114,7 @@ define @test_vlsseg5_mask_nxv4f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4f16( undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3900,6 +4132,7 @@ define @test_vlsseg6_nxv4f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3918,6 +4151,7 @@ define @test_vlsseg6_mask_nxv4f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3935,6 +4169,7 @@ define @test_vlsseg7_nxv4f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3954,6 +4189,7 @@ define @test_vlsseg7_mask_nxv4f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3971,6 +4207,7 @@ define @test_vlsseg8_nxv4f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -3991,6 +4228,7 @@ define @test_vlsseg8_mask_nxv4f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4008,6 +4246,7 @@ define 
@test_vlsseg2_nxv2f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f16( undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4022,6 +4261,7 @@ define @test_vlsseg2_mask_nxv2f16(half* %base, i32 %offset, ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f16( undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4039,6 +4279,7 @@ define @test_vlsseg3_nxv2f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f16( undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4054,6 +4295,7 @@ define @test_vlsseg3_mask_nxv2f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f16( undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4071,6 +4313,7 @@ define @test_vlsseg4_nxv2f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f16( undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4087,6 +4330,7 @@ define @test_vlsseg4_mask_nxv2f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f16( undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4104,6 +4348,7 @@ define @test_vlsseg5_nxv2f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2f16( undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4121,6 +4366,7 @@ define @test_vlsseg5_mask_nxv2f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2f16( undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4138,6 +4384,7 @@ define @test_vlsseg6_nxv2f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4156,6 +4403,7 @@ define 
@test_vlsseg6_mask_nxv2f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2f16( undef, undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4173,6 +4421,7 @@ define @test_vlsseg7_nxv2f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4192,6 +4441,7 @@ define @test_vlsseg7_mask_nxv2f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4209,6 +4459,7 @@ define @test_vlsseg8_nxv2f16(half* %base, i32 %offset, i32 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4229,6 +4480,7 @@ define @test_vlsseg8_mask_nxv2f16(half* %base, i32 %offset, ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i32 %offset, i32 %vl) @@ -4246,6 +4498,7 @@ define @test_vlsseg2_nxv4f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vlsseg2e32.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f32( undef, undef, float* %base, i32 %offset, i32 %vl) @@ -4260,6 +4513,7 @@ define @test_vlsseg2_mask_nxv4f32(float* %base, i32 %offset ; CHECK-NEXT: vlsseg2e32.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vlsseg2e32.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f32( undef, undef, float* %base, i32 %offset, i32 %vl) @@ -4277,6 +4531,7 @@ define @test_vlsseg3_nxv4f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vlsseg3e32.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4f32( undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -4292,6 +4547,7 @@ define @test_vlsseg3_mask_nxv4f32(float* %base, i32 %offset ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vlsseg3e32.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed 
$v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4f32( undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -4309,6 +4565,7 @@ define @test_vlsseg4_nxv4f32(float* %base, i32 %offset, i32 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vlsseg4e32.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4f32( undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) @@ -4325,6 +4582,7 @@ define @test_vlsseg4_mask_nxv4f32(float* %base, i32 %offset ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vlsseg4e32.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4f32( undef, undef, undef, undef, float* %base, i32 %offset, i32 %vl) diff --git a/llvm/test/CodeGen/RISCV/rvv/vlsseg-rv64.ll b/llvm/test/CodeGen/RISCV/rvv/vlsseg-rv64.ll index 222fc4c4c57d5..030d2bdb76eb9 100644 --- a/llvm/test/CodeGen/RISCV/rvv/vlsseg-rv64.ll +++ b/llvm/test/CodeGen/RISCV/rvv/vlsseg-rv64.ll @@ -10,6 +10,7 @@ define @test_vlsseg2_nxv16i16(i16* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m4, ta, mu ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1 +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16i16( undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -24,6 +25,7 @@ define @test_vlsseg2_mask_nxv16i16(i16* %base, i64 %offset, ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16i16( undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -41,6 +43,7 @@ define @test_vlsseg2_nxv4i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vlsseg2e32.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i32( undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -55,6 +58,7 @@ define @test_vlsseg2_mask_nxv4i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vlsseg2e32.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vlsseg2e32.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i32( undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -72,6 +76,7 @@ define @test_vlsseg3_nxv4i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vlsseg3e32.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i32( undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -87,6 +92,7 @@ define @test_vlsseg3_mask_nxv4i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vlsseg3e32.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i32( undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -104,6 +110,7 @@ define @test_vlsseg4_nxv4i32(i32* 
%base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vlsseg4e32.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i32( undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -120,6 +127,7 @@ define @test_vlsseg4_mask_nxv4i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vlsseg4e32.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i32( undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -137,6 +145,7 @@ define @test_vlsseg2_nxv16i8(i8* %base, i64 %offset, i64 %vl) ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu ; CHECK-NEXT: vlsseg2e8.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16i8( undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -151,6 +160,7 @@ define @test_vlsseg2_mask_nxv16i8(i8* %base, i64 %offset, i64 ; CHECK-NEXT: vlsseg2e8.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vlsseg2e8.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16i8( undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -168,6 +178,7 @@ define @test_vlsseg3_nxv16i8(i8* %base, i64 %offset, i64 %vl) ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu ; CHECK-NEXT: vlsseg3e8.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv16i8( undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -183,6 +194,7 @@ define @test_vlsseg3_mask_nxv16i8(i8* %base, i64 %offset, i64 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vlsseg3e8.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv16i8( undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -200,6 +212,7 @@ define @test_vlsseg4_nxv16i8(i8* %base, i64 %offset, i64 %vl) ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu ; CHECK-NEXT: vlsseg4e8.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv16i8( undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -216,6 +229,7 @@ define @test_vlsseg4_mask_nxv16i8(i8* %base, i64 %offset, i64 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vlsseg4e8.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv16i8( undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -233,6 +247,7 @@ define @test_vlsseg2_nxv1i64(i64* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg2e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i64( undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -247,6 +262,7 @@ define @test_vlsseg2_mask_nxv1i64(i64* %base, i64 %offset, i6 
; CHECK-NEXT: vlsseg2e64.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i64( undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -264,6 +280,7 @@ define @test_vlsseg3_nxv1i64(i64* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg3e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i64( undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -279,6 +296,7 @@ define @test_vlsseg3_mask_nxv1i64(i64* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i64( undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -296,6 +314,7 @@ define @test_vlsseg4_nxv1i64(i64* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg4e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i64( undef, undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -312,6 +331,7 @@ define @test_vlsseg4_mask_nxv1i64(i64* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i64( undef, undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -329,6 +349,7 @@ define @test_vlsseg5_nxv1i64(i64* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg5e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i64( undef, undef, undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -346,6 +367,7 @@ define @test_vlsseg5_mask_nxv1i64(i64* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i64( undef, undef, undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -363,6 +385,7 @@ define @test_vlsseg6_nxv1i64(i64* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg6e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i64( undef, undef, undef, undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -381,6 +404,7 @@ define @test_vlsseg6_mask_nxv1i64(i64* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i64( undef, undef, undef, undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -398,6 +422,7 @@ define @test_vlsseg7_nxv1i64(i64* %base, i64 %offset, i64 
%vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg7e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i64( undef, undef, undef, undef, undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -417,6 +442,7 @@ define @test_vlsseg7_mask_nxv1i64(i64* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i64( undef, undef, undef, undef, undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -434,6 +460,7 @@ define @test_vlsseg8_nxv1i64(i64* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg8e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i64( undef, undef , undef , undef, undef , undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -454,6 +481,7 @@ define @test_vlsseg8_mask_nxv1i64(i64* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i64( undef, undef , undef , undef, undef , undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -471,6 +499,7 @@ define @test_vlsseg2_nxv1i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i32( undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -485,6 +514,7 @@ define @test_vlsseg2_mask_nxv1i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i32( undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -502,6 +532,7 @@ define @test_vlsseg3_nxv1i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i32( undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -517,6 +548,7 @@ define @test_vlsseg3_mask_nxv1i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i32( undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -534,6 +566,7 @@ define @test_vlsseg4_nxv1i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i32( undef, undef, undef, undef, i32* %base, i64 
%offset, i64 %vl) @@ -550,6 +583,7 @@ define @test_vlsseg4_mask_nxv1i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i32( undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -567,6 +601,7 @@ define @test_vlsseg5_nxv1i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i32( undef, undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -584,6 +619,7 @@ define @test_vlsseg5_mask_nxv1i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i32( undef, undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -601,6 +637,7 @@ define @test_vlsseg6_nxv1i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i32( undef, undef, undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -619,6 +656,7 @@ define @test_vlsseg6_mask_nxv1i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i32( undef, undef, undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -636,6 +674,7 @@ define @test_vlsseg7_nxv1i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -655,6 +694,7 @@ define @test_vlsseg7_mask_nxv1i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -672,6 +712,7 @@ define @test_vlsseg8_nxv1i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -692,6 +733,7 @@ define @test_vlsseg8_mask_nxv1i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1, v0.t +; 
CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -709,6 +751,7 @@ define @test_vlsseg2_nxv8i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i16( undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -723,6 +766,7 @@ define @test_vlsseg2_mask_nxv8i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i16( undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -740,6 +784,7 @@ define @test_vlsseg3_nxv8i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vlsseg3e16.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8i16( undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -755,6 +800,7 @@ define @test_vlsseg3_mask_nxv8i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vlsseg3e16.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8i16( undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -772,6 +818,7 @@ define @test_vlsseg4_nxv8i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8i16( undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -788,6 +835,7 @@ define @test_vlsseg4_mask_nxv8i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8i16( undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -805,6 +853,7 @@ define @test_vlsseg2_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i8( undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -819,6 +868,7 @@ define @test_vlsseg2_mask_nxv4i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i8( undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -836,6 +886,7 @@ define @test_vlsseg3_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1 +; 
CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i8( undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -851,6 +902,7 @@ define @test_vlsseg3_mask_nxv4i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i8( undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -868,6 +920,7 @@ define @test_vlsseg4_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i8( undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -884,6 +937,7 @@ define @test_vlsseg4_mask_nxv4i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i8( undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -901,6 +955,7 @@ define @test_vlsseg5_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4i8( undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -918,6 +973,7 @@ define @test_vlsseg5_mask_nxv4i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4i8( undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -935,6 +991,7 @@ define @test_vlsseg6_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -953,6 +1010,7 @@ define @test_vlsseg6_mask_nxv4i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -970,6 +1028,7 @@ define @test_vlsseg7_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -989,6 +1048,7 @@ define @test_vlsseg7_mask_nxv4i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; 
CHECK-NEXT: vlsseg7e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1006,6 +1066,7 @@ define @test_vlsseg8_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1026,6 +1087,7 @@ define @test_vlsseg8_mask_nxv4i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1043,6 +1105,7 @@ define @test_vlsseg2_nxv1i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i16( undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1057,6 +1120,7 @@ define @test_vlsseg2_mask_nxv1i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i16( undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1074,6 +1138,7 @@ define @test_vlsseg3_nxv1i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i16( undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1089,6 +1154,7 @@ define @test_vlsseg3_mask_nxv1i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i16( undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1106,6 +1172,7 @@ define @test_vlsseg4_nxv1i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i16( undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1122,6 +1189,7 @@ define @test_vlsseg4_mask_nxv1i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i16( undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1139,6 +1207,7 @@ define @test_vlsseg5_nxv1i16(i16* %base, i64 %offset, i64 %vl 
; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i16( undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1156,6 +1225,7 @@ define @test_vlsseg5_mask_nxv1i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i16( undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1173,6 +1243,7 @@ define @test_vlsseg6_nxv1i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1191,6 +1262,7 @@ define @test_vlsseg6_mask_nxv1i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1208,6 +1280,7 @@ define @test_vlsseg7_nxv1i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1227,6 +1300,7 @@ define @test_vlsseg7_mask_nxv1i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1244,6 +1318,7 @@ define @test_vlsseg8_nxv1i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1264,6 +1339,7 @@ define @test_vlsseg8_mask_nxv1i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1281,6 +1357,7 @@ define @test_vlsseg2_nxv2i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 
killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i32( undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1295,6 +1372,7 @@ define @test_vlsseg2_mask_nxv2i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i32( undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1312,6 +1390,7 @@ define @test_vlsseg3_nxv2i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i32( undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1327,6 +1406,7 @@ define @test_vlsseg3_mask_nxv2i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i32( undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1344,6 +1424,7 @@ define @test_vlsseg4_nxv2i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i32( undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1360,6 +1441,7 @@ define @test_vlsseg4_mask_nxv2i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i32( undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1377,6 +1459,7 @@ define @test_vlsseg5_nxv2i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i32( undef, undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1394,6 +1477,7 @@ define @test_vlsseg5_mask_nxv2i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i32( undef, undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1411,6 +1495,7 @@ define @test_vlsseg6_nxv2i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i32( undef, undef, undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1429,6 +1514,7 @@ define @test_vlsseg6_mask_nxv2i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed 
$v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i32( undef, undef, undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1446,6 +1532,7 @@ define @test_vlsseg7_nxv2i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1465,6 +1552,7 @@ define @test_vlsseg7_mask_nxv2i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i32( undef, undef, undef, undef, undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1482,6 +1570,7 @@ define @test_vlsseg8_nxv2i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1502,6 +1591,7 @@ define @test_vlsseg8_mask_nxv2i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i32( undef, undef , undef , undef, undef , undef, undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -1519,6 +1609,7 @@ define @test_vlsseg2_nxv8i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i8( undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1533,6 +1624,7 @@ define @test_vlsseg2_mask_nxv8i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i8( undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1550,6 +1642,7 @@ define @test_vlsseg3_nxv8i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8i8( undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1565,6 +1658,7 @@ define @test_vlsseg3_mask_nxv8i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8i8( undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1582,6 +1676,7 @@ define @test_vlsseg4_nxv8i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; 
CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8i8( undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1598,6 +1693,7 @@ define @test_vlsseg4_mask_nxv8i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8i8( undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1615,6 +1711,7 @@ define @test_vlsseg5_nxv8i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv8i8( undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1632,6 +1729,7 @@ define @test_vlsseg5_mask_nxv8i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv8i8( undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1649,6 +1747,7 @@ define @test_vlsseg6_nxv8i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv8i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1667,6 +1766,7 @@ define @test_vlsseg6_mask_nxv8i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv8i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1684,6 +1784,7 @@ define @test_vlsseg7_nxv8i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv8i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1703,6 +1804,7 @@ define @test_vlsseg7_mask_nxv8i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv8i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1720,6 +1822,7 @@ define @test_vlsseg8_nxv8i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv8i8( undef, undef , undef , undef, 
undef , undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1740,6 +1843,7 @@ define @test_vlsseg8_mask_nxv8i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv8i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -1757,6 +1861,7 @@ define @test_vlsseg2_nxv4i64(i64* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m4, ta, mu ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1 +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i64( undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -1771,6 +1876,7 @@ define @test_vlsseg2_mask_nxv4i64(i64* %base, i64 %offset, i6 ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i64( undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -1788,6 +1894,7 @@ define @test_vlsseg2_nxv4i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i16( undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1802,6 +1909,7 @@ define @test_vlsseg2_mask_nxv4i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i16( undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1819,6 +1927,7 @@ define @test_vlsseg3_nxv4i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i16( undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1834,6 +1943,7 @@ define @test_vlsseg3_mask_nxv4i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i16( undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1851,6 +1961,7 @@ define @test_vlsseg4_nxv4i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i16( undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1867,6 +1978,7 @@ define @test_vlsseg4_mask_nxv4i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i16( undef, undef, undef, undef, i16* %base, i64 
%offset, i64 %vl) @@ -1884,6 +1996,7 @@ define @test_vlsseg5_nxv4i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4i16( undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1901,6 +2014,7 @@ define @test_vlsseg5_mask_nxv4i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4i16( undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1918,6 +2032,7 @@ define @test_vlsseg6_nxv4i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1936,6 +2051,7 @@ define @test_vlsseg6_mask_nxv4i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1953,6 +2069,7 @@ define @test_vlsseg7_nxv4i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1972,6 +2089,7 @@ define @test_vlsseg7_mask_nxv4i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -1989,6 +2107,7 @@ define @test_vlsseg8_nxv4i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2009,6 +2128,7 @@ define @test_vlsseg8_mask_nxv4i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2026,6 +2146,7 @@ define @test_vlsseg2_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli 
zero, a2, e8, mf8, ta, mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i8( undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2040,6 +2161,7 @@ define @test_vlsseg2_mask_nxv1i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i8( undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2057,6 +2179,7 @@ define @test_vlsseg3_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i8( undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2072,6 +2195,7 @@ define @test_vlsseg3_mask_nxv1i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i8( undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2089,6 +2213,7 @@ define @test_vlsseg4_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i8( undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2105,6 +2230,7 @@ define @test_vlsseg4_mask_nxv1i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i8( undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2122,6 +2248,7 @@ define @test_vlsseg5_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i8( undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2139,6 +2266,7 @@ define @test_vlsseg5_mask_nxv1i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i8( undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2156,6 +2284,7 @@ define @test_vlsseg6_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2174,6 +2303,7 @@ define @test_vlsseg6_mask_nxv1i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: 
vlsseg6e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2191,6 +2321,7 @@ define @test_vlsseg7_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2210,6 +2341,7 @@ define @test_vlsseg7_mask_nxv1i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2227,6 +2359,7 @@ define @test_vlsseg8_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2247,6 +2380,7 @@ define @test_vlsseg8_mask_nxv1i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2264,6 +2398,7 @@ define @test_vlsseg2_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i8( undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2278,6 +2413,7 @@ define @test_vlsseg2_mask_nxv2i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i8( undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2295,6 +2431,7 @@ define @test_vlsseg3_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i8( undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2310,6 +2447,7 @@ define @test_vlsseg3_mask_nxv2i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i8( undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2327,6 +2465,7 @@ define 
@test_vlsseg4_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i8( undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2343,6 +2482,7 @@ define @test_vlsseg4_mask_nxv2i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i8( undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2360,6 +2500,7 @@ define @test_vlsseg5_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i8( undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2377,6 +2518,7 @@ define @test_vlsseg5_mask_nxv2i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i8( undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2394,6 +2536,7 @@ define @test_vlsseg6_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2412,6 +2555,7 @@ define @test_vlsseg6_mask_nxv2i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i8( undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2429,6 +2573,7 @@ define @test_vlsseg7_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2448,6 +2593,7 @@ define @test_vlsseg7_mask_nxv2i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i8( undef, undef, undef, undef, undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2465,6 +2611,7 @@ define @test_vlsseg8_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret 
entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2485,6 +2632,7 @@ define @test_vlsseg8_mask_nxv2i8(i8* %base, i64 %offset, i64 % ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i8( undef, undef , undef , undef, undef , undef, undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2502,6 +2650,7 @@ define @test_vlsseg2_nxv8i32(i32* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m4, ta, mu ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1 +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i32( undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -2516,6 +2665,7 @@ define @test_vlsseg2_mask_nxv8i32(i32* %base, i64 %offset, i6 ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i32( undef, undef, i32* %base, i64 %offset, i64 %vl) @@ -2533,6 +2683,7 @@ define @test_vlsseg2_nxv32i8(i8* %base, i64 %offset, i64 %vl) ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e8, m4, ta, mu ; CHECK-NEXT: vlsseg2e8.v v4, (a0), a1 +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv32i8( undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2547,6 +2698,7 @@ define @test_vlsseg2_mask_nxv32i8(i8* %base, i64 %offset, i64 ; CHECK-NEXT: vlsseg2e8.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vlsseg2e8.v v4, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv32i8( undef, undef, i8* %base, i64 %offset, i64 %vl) @@ -2564,6 +2716,7 @@ define @test_vlsseg2_nxv2i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i16( undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2578,6 +2731,7 @@ define @test_vlsseg2_mask_nxv2i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i16( undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2595,6 +2749,7 @@ define @test_vlsseg3_nxv2i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i16( undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2610,6 +2765,7 @@ define @test_vlsseg3_mask_nxv2i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} 
@llvm.riscv.vlsseg3.nxv2i16( undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2627,6 +2783,7 @@ define @test_vlsseg4_nxv2i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i16( undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2643,6 +2800,7 @@ define @test_vlsseg4_mask_nxv2i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i16( undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2660,6 +2818,7 @@ define @test_vlsseg5_nxv2i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i16( undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2677,6 +2836,7 @@ define @test_vlsseg5_mask_nxv2i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i16( undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2694,6 +2854,7 @@ define @test_vlsseg6_nxv2i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2712,6 +2873,7 @@ define @test_vlsseg6_mask_nxv2i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i16( undef, undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2729,6 +2891,7 @@ define @test_vlsseg7_nxv2i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2748,6 +2911,7 @@ define @test_vlsseg7_mask_nxv2i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i16( undef, undef, undef, undef, undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2765,6 +2929,7 @@ define @test_vlsseg8_nxv2i16(i16* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; 
CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2785,6 +2950,7 @@ define @test_vlsseg8_mask_nxv2i16(i16* %base, i64 %offset, i6 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i16( undef, undef , undef , undef, undef , undef, undef, undef, i16* %base, i64 %offset, i64 %vl) @@ -2802,6 +2968,7 @@ define @test_vlsseg2_nxv2i64(i64* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i64( undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -2816,6 +2983,7 @@ define @test_vlsseg2_mask_nxv2i64(i64* %base, i64 %offset, i6 ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i64( undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -2833,6 +3001,7 @@ define @test_vlsseg3_nxv2i64(i64* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vlsseg3e64.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i64( undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -2848,6 +3017,7 @@ define @test_vlsseg3_mask_nxv2i64(i64* %base, i64 %offset, i6 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vlsseg3e64.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i64( undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -2865,6 +3035,7 @@ define @test_vlsseg4_nxv2i64(i64* %base, i64 %offset, i64 %vl ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vlsseg4e64.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i64( undef, undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -2881,6 +3052,7 @@ define @test_vlsseg4_mask_nxv2i64(i64* %base, i64 %offset, i6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vlsseg4e64.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i64( undef, undef, undef, undef, i64* %base, i64 %offset, i64 %vl) @@ -2898,6 +3070,7 @@ define @test_vlsseg2_nxv16f16(half* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m4, ta, mu ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1 +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16f16( undef, undef, half* %base, i64 %offset, i64 %vl) @@ -2912,6 +3085,7 @@ define @test_vlsseg2_mask_nxv16f16(half* %base, 
i64 %offset ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16f16( undef, undef, half* %base, i64 %offset, i64 %vl) @@ -2929,6 +3103,7 @@ define @test_vlsseg2_nxv4f64(double* %base, i64 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m4, ta, mu ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1 +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f64( undef, undef, double* %base, i64 %offset, i64 %vl) @@ -2943,6 +3118,7 @@ define @test_vlsseg2_mask_nxv4f64(double* %base, i64 %offs ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f64( undef, undef, double* %base, i64 %offset, i64 %vl) @@ -2960,6 +3136,7 @@ define @test_vlsseg2_nxv1f64(double* %base, i64 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg2e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f64( undef, undef, double* %base, i64 %offset, i64 %vl) @@ -2974,6 +3151,7 @@ define @test_vlsseg2_mask_nxv1f64(double* %base, i64 %offs ; CHECK-NEXT: vlsseg2e64.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f64( undef, undef, double* %base, i64 %offset, i64 %vl) @@ -2991,6 +3169,7 @@ define @test_vlsseg3_nxv1f64(double* %base, i64 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg3e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f64( undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -3006,6 +3185,7 @@ define @test_vlsseg3_mask_nxv1f64(double* %base, i64 %offs ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f64( undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -3023,6 +3203,7 @@ define @test_vlsseg4_nxv1f64(double* %base, i64 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg4e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f64( undef, undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -3039,6 +3220,7 @@ define @test_vlsseg4_mask_nxv1f64(double* %base, i64 %offs ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f64( undef, undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -3056,6 +3238,7 @@ define @test_vlsseg5_nxv1f64(double* %base, i64 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: 
vlsseg5e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f64( undef, undef, undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -3073,6 +3256,7 @@ define @test_vlsseg5_mask_nxv1f64(double* %base, i64 %offs ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f64( undef, undef, undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -3090,6 +3274,7 @@ define @test_vlsseg6_nxv1f64(double* %base, i64 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg6e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f64( undef, undef, undef, undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -3108,6 +3293,7 @@ define @test_vlsseg6_mask_nxv1f64(double* %base, i64 %offs ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f64( undef, undef, undef, undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -3125,6 +3311,7 @@ define @test_vlsseg7_nxv1f64(double* %base, i64 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg7e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f64( undef, undef, undef, undef, undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -3144,6 +3331,7 @@ define @test_vlsseg7_mask_nxv1f64(double* %base, i64 %offs ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f64( undef, undef, undef, undef, undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -3161,6 +3349,7 @@ define @test_vlsseg8_nxv1f64(double* %base, i64 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vlsseg8e64.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f64( undef, undef , undef , undef, undef , undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -3181,6 +3370,7 @@ define @test_vlsseg8_mask_nxv1f64(double* %base, i64 %offs ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e64.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f64( undef, undef , undef , undef, undef , undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -3198,6 +3388,7 @@ define @test_vlsseg2_nxv2f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} 
@llvm.riscv.vlsseg2.nxv2f32( undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3212,6 +3403,7 @@ define @test_vlsseg2_mask_nxv2f32(float* %base, i64 %offset ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f32( undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3229,6 +3421,7 @@ define @test_vlsseg3_nxv2f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f32( undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3244,6 +3437,7 @@ define @test_vlsseg3_mask_nxv2f32(float* %base, i64 %offset ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f32( undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3261,6 +3455,7 @@ define @test_vlsseg4_nxv2f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f32( undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3277,6 +3472,7 @@ define @test_vlsseg4_mask_nxv2f32(float* %base, i64 %offset ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f32( undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3294,6 +3490,7 @@ define @test_vlsseg5_nxv2f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2f32( undef, undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3311,6 +3508,7 @@ define @test_vlsseg5_mask_nxv2f32(float* %base, i64 %offset ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2f32( undef, undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3328,6 +3526,7 @@ define @test_vlsseg6_nxv2f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2f32( undef, undef, undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3346,6 +3545,7 @@ define @test_vlsseg6_mask_nxv2f32(float* %base, i64 %offset ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} 
@llvm.riscv.vlsseg6.nxv2f32( undef, undef, undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3363,6 +3563,7 @@ define @test_vlsseg7_nxv2f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3382,6 +3583,7 @@ define @test_vlsseg7_mask_nxv2f32(float* %base, i64 %offset ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3399,6 +3601,7 @@ define @test_vlsseg8_nxv2f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3419,6 +3622,7 @@ define @test_vlsseg8_mask_nxv2f32(float* %base, i64 %offset ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3436,6 +3640,7 @@ define @test_vlsseg2_nxv1f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f16( undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3450,6 +3655,7 @@ define @test_vlsseg2_mask_nxv1f16(half* %base, i64 %offset, ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f16( undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3467,6 +3673,7 @@ define @test_vlsseg3_nxv1f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f16( undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3482,6 +3689,7 @@ define @test_vlsseg3_mask_nxv1f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f16( undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3499,6 +3707,7 @@ define @test_vlsseg4_nxv1f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: 
vlsseg4e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f16( undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3515,6 +3724,7 @@ define @test_vlsseg4_mask_nxv1f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f16( undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3532,6 +3742,7 @@ define @test_vlsseg5_nxv1f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f16( undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3549,6 +3760,7 @@ define @test_vlsseg5_mask_nxv1f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f16( undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3566,6 +3778,7 @@ define @test_vlsseg6_nxv1f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f16( undef, undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3584,6 +3797,7 @@ define @test_vlsseg6_mask_nxv1f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f16( undef, undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3601,6 +3815,7 @@ define @test_vlsseg7_nxv1f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3620,6 +3835,7 @@ define @test_vlsseg7_mask_nxv1f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3637,6 +3853,7 @@ define @test_vlsseg8_nxv1f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, 
i64 %offset, i64 %vl) @@ -3657,6 +3874,7 @@ define @test_vlsseg8_mask_nxv1f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3674,6 +3892,7 @@ define @test_vlsseg2_nxv1f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f32( undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3688,6 +3907,7 @@ define @test_vlsseg2_mask_nxv1f32(float* %base, i64 %offset ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f32( undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3705,6 +3925,7 @@ define @test_vlsseg3_nxv1f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f32( undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3720,6 +3941,7 @@ define @test_vlsseg3_mask_nxv1f32(float* %base, i64 %offset ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f32( undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3737,6 +3959,7 @@ define @test_vlsseg4_nxv1f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f32( undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3753,6 +3976,7 @@ define @test_vlsseg4_mask_nxv1f32(float* %base, i64 %offset ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f32( undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3770,6 +3994,7 @@ define @test_vlsseg5_nxv1f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f32( undef, undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3787,6 +4012,7 @@ define @test_vlsseg5_mask_nxv1f32(float* %base, i64 %offset ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f32( undef, undef, undef, undef, undef, float* 
%base, i64 %offset, i64 %vl) @@ -3804,6 +4030,7 @@ define @test_vlsseg6_nxv1f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f32( undef, undef, undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3822,6 +4049,7 @@ define @test_vlsseg6_mask_nxv1f32(float* %base, i64 %offset ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f32( undef, undef, undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3839,6 +4067,7 @@ define @test_vlsseg7_nxv1f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3858,6 +4087,7 @@ define @test_vlsseg7_mask_nxv1f32(float* %base, i64 %offset ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f32( undef, undef, undef, undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3875,6 +4105,7 @@ define @test_vlsseg8_nxv1f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3895,6 +4126,7 @@ define @test_vlsseg8_mask_nxv1f32(float* %base, i64 %offset ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f32( undef, undef , undef , undef, undef , undef, undef, undef, float* %base, i64 %offset, i64 %vl) @@ -3912,6 +4144,7 @@ define @test_vlsseg2_nxv8f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8f16( undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3926,6 +4159,7 @@ define @test_vlsseg2_mask_nxv8f16(half* %base, i64 %offset, ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8f16( undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3943,6 +4177,7 @@ define @test_vlsseg3_nxv8f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; 
CHECK-NEXT: vlsseg3e16.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8f16( undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3958,6 +4193,7 @@ define @test_vlsseg3_mask_nxv8f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vlsseg3e16.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8f16( undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3975,6 +4211,7 @@ define @test_vlsseg4_nxv8f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8f16( undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -3991,6 +4228,7 @@ define @test_vlsseg4_mask_nxv8f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8f16( undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4008,6 +4246,7 @@ define @test_vlsseg2_nxv8f32(float* %base, i64 %offset, i64 ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e32, m4, ta, mu ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1 +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8f32( undef, undef, float* %base, i64 %offset, i64 %vl) @@ -4022,6 +4261,7 @@ define @test_vlsseg2_mask_nxv8f32(float* %base, i64 %offset ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8f32( undef, undef, float* %base, i64 %offset, i64 %vl) @@ -4039,6 +4279,7 @@ define @test_vlsseg2_nxv2f64(double* %base, i64 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f64( undef, undef, double* %base, i64 %offset, i64 %vl) @@ -4053,6 +4294,7 @@ define @test_vlsseg2_mask_nxv2f64(double* %base, i64 %offs ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f64( undef, undef, double* %base, i64 %offset, i64 %vl) @@ -4070,6 +4312,7 @@ define @test_vlsseg3_nxv2f64(double* %base, i64 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vlsseg3e64.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f64( undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -4085,6 +4328,7 @@ define @test_vlsseg3_mask_nxv2f64(double* %base, i64 %offs ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vlsseg3e64.v v6, (a0), a1, 
v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f64( undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -4102,6 +4346,7 @@ define @test_vlsseg4_nxv2f64(double* %base, i64 %offset, i ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vlsseg4e64.v v6, (a0), a1 +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f64( undef, undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -4118,6 +4363,7 @@ define @test_vlsseg4_mask_nxv2f64(double* %base, i64 %offs ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vlsseg4e64.v v6, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f64( undef, undef, undef, undef, double* %base, i64 %offset, i64 %vl) @@ -4135,6 +4381,7 @@ define @test_vlsseg2_nxv4f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f16( undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4149,6 +4396,7 @@ define @test_vlsseg2_mask_nxv4f16(half* %base, i64 %offset, ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f16( undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4166,6 +4414,7 @@ define @test_vlsseg3_nxv4f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4f16( undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4181,6 +4430,7 @@ define @test_vlsseg3_mask_nxv4f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4f16( undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4198,6 +4448,7 @@ define @test_vlsseg4_nxv4f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4f16( undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4214,6 +4465,7 @@ define @test_vlsseg4_mask_nxv4f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4f16( undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4231,6 +4483,7 @@ define @test_vlsseg5_nxv4f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 
killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4f16( undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4248,6 +4501,7 @@ define @test_vlsseg5_mask_nxv4f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4f16( undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4265,6 +4519,7 @@ define @test_vlsseg6_nxv4f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4f16( undef, undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4283,6 +4538,7 @@ define @test_vlsseg6_mask_nxv4f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4f16( undef, undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4300,6 +4556,7 @@ define @test_vlsseg7_nxv4f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4319,6 +4576,7 @@ define @test_vlsseg7_mask_nxv4f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4336,6 +4594,7 @@ define @test_vlsseg8_nxv4f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4356,6 +4615,7 @@ define @test_vlsseg8_mask_nxv4f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4373,6 +4633,7 @@ define @test_vlsseg2_nxv2f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f16( undef, undef, half* %base, i64 %offset, i64 %vl) @@ 
-4387,6 +4648,7 @@ define @test_vlsseg2_mask_nxv2f16(half* %base, i64 %offset, ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f16( undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4404,6 +4666,7 @@ define @test_vlsseg3_nxv2f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f16( undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4419,6 +4682,7 @@ define @test_vlsseg3_mask_nxv2f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f16( undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4436,6 +4700,7 @@ define @test_vlsseg4_nxv2f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f16( undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4452,6 +4717,7 @@ define @test_vlsseg4_mask_nxv2f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f16( undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4469,6 +4735,7 @@ define @test_vlsseg5_nxv2f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2f16( undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4486,6 +4753,7 @@ define @test_vlsseg5_mask_nxv2f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2f16( undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4503,6 +4771,7 @@ define @test_vlsseg6_nxv2f16(half* %base, i64 %offset, i64 % ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2f16( undef, undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl) @@ -4521,6 +4790,7 @@ define @test_vlsseg6_mask_nxv2f16(half* %base, i64 %offset, ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2f16( undef, undef, undef, undef, undef, undef, half* %base, i64 
%offset, i64 %vl)
@@ -4538,6 +4808,7 @@ define @test_vlsseg7_nxv2f16(half* %base, i64 %offset, i64 %
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu
; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl)
@@ -4557,6 +4828,7 @@ define @test_vlsseg7_mask_nxv2f16(half* %base, i64 %offset,
; CHECK-NEXT: vmv1r.v v12, v7
; CHECK-NEXT: vmv1r.v v13, v7
; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2f16( undef, undef, undef, undef, undef, undef, undef, half* %base, i64 %offset, i64 %vl)
@@ -4574,6 +4846,7 @@ define @test_vlsseg8_nxv2f16(half* %base, i64 %offset, i64 %
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu
; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i64 %offset, i64 %vl)
@@ -4594,6 +4867,7 @@ define @test_vlsseg8_mask_nxv2f16(half* %base, i64 %offset,
; CHECK-NEXT: vmv1r.v v13, v7
; CHECK-NEXT: vmv1r.v v14, v7
; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2f16( undef, undef , undef , undef, undef , undef, undef, undef, half* %base, i64 %offset, i64 %vl)
@@ -4611,6 +4885,7 @@ define @test_vlsseg2_nxv4f32(float* %base, i64 %offset, i64
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu
; CHECK-NEXT: vlsseg2e32.v v6, (a0), a1
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f32( undef, undef, float* %base, i64 %offset, i64 %vl)
@@ -4625,6 +4900,7 @@ define @test_vlsseg2_mask_nxv4f32(float* %base, i64 %offset
; CHECK-NEXT: vlsseg2e32.v v6, (a0), a1
; CHECK-NEXT: vmv2r.v v8, v6
; CHECK-NEXT: vlsseg2e32.v v6, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f32( undef, undef, float* %base, i64 %offset, i64 %vl)
@@ -4642,6 +4918,7 @@ define @test_vlsseg3_nxv4f32(float* %base, i64 %offset, i64
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu
; CHECK-NEXT: vlsseg3e32.v v6, (a0), a1
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4f32( undef, undef, undef, float* %base, i64 %offset, i64 %vl)
@@ -4657,6 +4934,7 @@ define @test_vlsseg3_mask_nxv4f32(float* %base, i64 %offset
; CHECK-NEXT: vmv2r.v v8, v6
; CHECK-NEXT: vmv2r.v v10, v6
; CHECK-NEXT: vlsseg3e32.v v6, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4f32( undef, undef, undef, float* %base, i64 %offset, i64 %vl)
@@ -4674,6 +4952,7 @@ define @test_vlsseg4_nxv4f32(float* %base, i64 %offset, i64
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu
; CHECK-NEXT: vlsseg4e32.v v6, (a0), a1
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4f32( undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl)
@@ -4690,6 +4969,7 @@ define @test_vlsseg4_mask_nxv4f32(float* %base, i64 %offset
; CHECK-NEXT: vmv2r.v v10, v6
; CHECK-NEXT: vmv2r.v v12, v6
; CHECK-NEXT: vlsseg4e32.v v6, (a0), a1, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4f32( undef, undef, undef, undef, float* %base, i64 %offset, i64 %vl)
diff --git a/llvm/test/CodeGen/RISCV/rvv/vluxseg-rv32.ll b/llvm/test/CodeGen/RISCV/rvv/vluxseg-rv32.ll
index 4bc088b4c15e4..3d3ddfa6d4b18 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vluxseg-rv32.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vluxseg-rv32.ll
@@ -24,6 +24,7 @@ define @test_vluxseg2_mask_nxv16i16_nxv16i16(,} @llvm.riscv.vluxseg2.mask.nxv16i16.nxv16i16( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -53,6 +54,7 @@ define @test_vluxseg2_mask_nxv16i16_nxv16i8(,} @llvm.riscv.vluxseg2.mask.nxv16i16.nxv16i8( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -82,6 +84,7 @@ define @test_vluxseg2_mask_nxv16i16_nxv16i32(,} @llvm.riscv.vluxseg2.mask.nxv16i16.nxv16i32( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -111,6 +114,7 @@ define @test_vluxseg2_mask_nxv1i8_nxv1i8( %va
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i8.nxv1i8( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -140,6 +144,7 @@ define @test_vluxseg2_mask_nxv1i8_nxv1i32( %v
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i8.nxv1i32( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -169,6 +174,7 @@ define @test_vluxseg2_mask_nxv1i8_nxv1i16( %v
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i8.nxv1i16( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -195,11 +201,12 @@ entry:
define @test_vluxseg3_mask_nxv1i8_nxv1i8( %val, i8* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv1i8_nxv1i8:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
-; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i8.nxv1i8( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -226,11 +233,12 @@ entry:
define @test_vluxseg3_mask_nxv1i8_nxv1i32( %val, i8* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv1i8_nxv1i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
-; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i8.nxv1i32( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -257,11 +265,12 @@ entry:
define @test_vluxseg3_mask_nxv1i8_nxv1i16( %val, i8* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv1i8_nxv1i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
-; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i8.nxv1i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -289,9 +298,9 @@ define @test_vluxseg4_mask_nxv1i8_nxv1i8( %va
; CHECK-LABEL: test_vluxseg4_mask_nxv1i8_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -322,9 +331,9 @@ define @test_vluxseg4_mask_nxv1i8_nxv1i32( %v
; CHECK-LABEL: test_vluxseg4_mask_nxv1i8_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -355,9 +364,9 @@ define @test_vluxseg4_mask_nxv1i8_nxv1i16( %v
; CHECK-LABEL: test_vluxseg4_mask_nxv1i8_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -388,10 +397,10 @@ define @test_vluxseg5_mask_nxv1i8_nxv1i8( %va
; CHECK-LABEL: test_vluxseg5_mask_nxv1i8_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -422,10 +431,10 @@ define @test_vluxseg5_mask_nxv1i8_nxv1i32( %v
; CHECK-LABEL: test_vluxseg5_mask_nxv1i8_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -456,10 +465,10 @@ define @test_vluxseg5_mask_nxv1i8_nxv1i16( %v
; CHECK-LABEL: test_vluxseg5_mask_nxv1i8_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -490,11 +499,11 @@ define @test_vluxseg6_mask_nxv1i8_nxv1i8( %va
; CHECK-LABEL: test_vluxseg6_mask_nxv1i8_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -525,11 +534,11 @@ define @test_vluxseg6_mask_nxv1i8_nxv1i32( %v
; CHECK-LABEL: test_vluxseg6_mask_nxv1i8_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -560,11 +569,11 @@ define @test_vluxseg6_mask_nxv1i8_nxv1i16( %v
; CHECK-LABEL: test_vluxseg6_mask_nxv1i8_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -595,12 +604,12 @@ define @test_vluxseg7_mask_nxv1i8_nxv1i8( %va
; CHECK-LABEL: test_vluxseg7_mask_nxv1i8_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -631,12 +640,12 @@ define @test_vluxseg7_mask_nxv1i8_nxv1i32( %v
; CHECK-LABEL: test_vluxseg7_mask_nxv1i8_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -667,12 +676,12 @@ define @test_vluxseg7_mask_nxv1i8_nxv1i16( %v
; CHECK-LABEL: test_vluxseg7_mask_nxv1i8_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -703,13 +712,13 @@ define @test_vluxseg8_mask_nxv1i8_nxv1i8( %va
; CHECK-LABEL: test_vluxseg8_mask_nxv1i8_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -740,13 +749,13 @@ define @test_vluxseg8_mask_nxv1i8_nxv1i32( %v
; CHECK-LABEL: test_vluxseg8_mask_nxv1i8_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -777,13 +786,13 @@ define @test_vluxseg8_mask_nxv1i8_nxv1i16( %v
; CHECK-LABEL: test_vluxseg8_mask_nxv1i8_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -816,6 +825,7 @@ define @test_vluxseg2_mask_nxv16i8_nxv16i16(,} @llvm.riscv.vluxseg2.mask.nxv16i8.nxv16i16( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -845,6 +855,7 @@ define @test_vluxseg2_mask_nxv16i8_nxv16i8(
; CHECK-NEXT: vmv2r.v v6, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vluxseg2ei8.v v6, (a0), v10, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv16i8.nxv16i8( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -874,6 +885,7 @@ define @test_vluxseg2_mask_nxv16i8_nxv16i32(,} @llvm.riscv.vluxseg2.mask.nxv16i8.nxv16i32( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -901,9 +913,10 @@ define @test_vluxseg3_mask_nxv16i8_nxv16i16(,,} @llvm.riscv.vluxseg3.mask.nxv16i8.nxv16i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -930,11 +943,12 @@ entry:
define @test_vluxseg3_mask_nxv16i8_nxv16i8( %val, i8* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
-; CHECK-NEXT: vluxseg3ei8.v v6, (a0), v12, v0.t
+; CHECK-NEXT: vluxseg3ei8.v v12, (a0), v10, v0.t
+; CHECK-NEXT: vmv2r.v v8, v14
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv16i8.nxv16i8( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -962,9 +976,10 @@ define @test_vluxseg3_mask_nxv16i8_nxv16i32(,,} @llvm.riscv.vluxseg3.mask.nxv16i8.nxv16i32( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -991,12 +1006,13 @@ entry:
define @test_vluxseg4_mask_nxv16i8_nxv16i16( %val, i8* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg4_mask_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
-; CHECK-NEXT: vluxseg4ei16.v v6, (a0), v16, v0.t
+; CHECK-NEXT: vluxseg4ei16.v v16, (a0), v12, v0.t
+; CHECK-NEXT: vmv2r.v v8, v18
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv16i8.nxv16i16( %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -1024,9 +1040,9 @@ define @test_vluxseg4_mask_nxv16i8_nxv16i8(
; CHECK-LABEL: test_vluxseg4_mask_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vluxseg4ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: vmv2r.v v8, v14
@@ -1057,10 +1073,11 @@ define @test_vluxseg4_mask_nxv16i8_nxv16i32(,,,} @llvm.riscv.vluxseg4.mask.nxv16i8.nxv16i32( %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -1090,6 +1107,7 @@ define @test_vluxseg2_mask_nxv2i32_nxv2i32(
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i32.nxv2i32( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -1119,6 +1137,7 @@ define @test_vluxseg2_mask_nxv2i32_nxv2i8(
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i32.nxv2i8( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -1148,6 +1167,7 @@ define @test_vluxseg2_mask_nxv2i32_nxv2i16(
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i32.nxv2i16( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -1174,11 +1194,12 @@ entry:
define @test_vluxseg3_mask_nxv2i32_nxv2i32( %val, i32* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i32.nxv2i32( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -1205,11 +1226,12 @@ entry:
define @test_vluxseg3_mask_nxv2i32_nxv2i8( %val, i32* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i32.nxv2i8( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -1236,11 +1258,12 @@ entry:
define @test_vluxseg3_mask_nxv2i32_nxv2i16( %val, i32* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i32.nxv2i16( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -1268,9 +1291,9 @@ define @test_vluxseg4_mask_nxv2i32_nxv2i32(
; CHECK-LABEL: test_vluxseg4_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1301,9 +1324,9 @@ define @test_vluxseg4_mask_nxv2i32_nxv2i8(
; CHECK-LABEL: test_vluxseg4_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1334,9 +1357,9 @@ define @test_vluxseg4_mask_nxv2i32_nxv2i16(
; CHECK-LABEL: test_vluxseg4_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1367,10 +1390,10 @@ define @test_vluxseg5_mask_nxv2i32_nxv2i32(
; CHECK-LABEL: test_vluxseg5_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1401,10 +1424,10 @@ define @test_vluxseg5_mask_nxv2i32_nxv2i8(
; CHECK-LABEL: test_vluxseg5_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1435,10 +1458,10 @@ define @test_vluxseg5_mask_nxv2i32_nxv2i16(
; CHECK-LABEL: test_vluxseg5_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1469,11 +1492,11 @@ define @test_vluxseg6_mask_nxv2i32_nxv2i32(
; CHECK-LABEL: test_vluxseg6_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1504,11 +1527,11 @@ define @test_vluxseg6_mask_nxv2i32_nxv2i8(
; CHECK-LABEL: test_vluxseg6_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1539,11 +1562,11 @@ define @test_vluxseg6_mask_nxv2i32_nxv2i16(
; CHECK-LABEL: test_vluxseg6_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1574,12 +1597,12 @@ define @test_vluxseg7_mask_nxv2i32_nxv2i32(
; CHECK-LABEL: test_vluxseg7_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1610,12 +1633,12 @@ define @test_vluxseg7_mask_nxv2i32_nxv2i8(
; CHECK-LABEL: test_vluxseg7_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1646,12 +1669,12 @@ define @test_vluxseg7_mask_nxv2i32_nxv2i16(
; CHECK-LABEL: test_vluxseg7_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1682,13 +1705,13 @@ define @test_vluxseg8_mask_nxv2i32_nxv2i32(
; CHECK-LABEL: test_vluxseg8_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1719,13 +1742,13 @@ define @test_vluxseg8_mask_nxv2i32_nxv2i8(
; CHECK-LABEL: test_vluxseg8_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1756,13 +1779,13 @@ define @test_vluxseg8_mask_nxv2i32_nxv2i16(
; CHECK-LABEL: test_vluxseg8_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -1795,6 +1818,7 @@ define @test_vluxseg2_mask_nxv4i16_nxv4i16(
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i16.nxv4i16( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -1824,6 +1848,7 @@ define @test_vluxseg2_mask_nxv4i16_nxv4i8(
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i16.nxv4i8( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -1853,6 +1878,7 @@ define @test_vluxseg2_mask_nxv4i16_nxv4i32(
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v10, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i16.nxv4i32( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -1879,11 +1905,12 @@ entry:
define @test_vluxseg3_mask_nxv4i16_nxv4i16( %val, i16* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
-; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i16.nxv4i16( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -1910,11 +1937,12 @@ entry:
define @test_vluxseg3_mask_nxv4i16_nxv4i8( %val, i16* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
-; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i16.nxv4i8( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -1942,9 +1970,10 @@ define @test_vluxseg3_mask_nxv4i16_nxv4i32(
; CHECK-LABEL: test_vluxseg3_mask_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v9, v7
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i16.nxv4i32( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -1972,9 +2001,9 @@ define @test_vluxseg4_mask_nxv4i16_nxv4i16(
; CHECK-LABEL: test_vluxseg4_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2005,9 +2034,9 @@ define @test_vluxseg4_mask_nxv4i16_nxv4i8(
; CHECK-LABEL: test_vluxseg4_mask_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2037,12 +2066,13 @@ entry:
define @test_vluxseg4_mask_nxv4i16_nxv4i32( %val, i16* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg4_mask_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
-; CHECK-NEXT: vluxseg4ei32.v v7, (a0), v12, v0.t
+; CHECK-NEXT: vluxseg4ei32.v v12, (a0), v10, v0.t
+; CHECK-NEXT: vmv1r.v v8, v13
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv4i16.nxv4i32( %val, %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -2070,10 +2100,10 @@ define @test_vluxseg5_mask_nxv4i16_nxv4i16(
; CHECK-LABEL: test_vluxseg5_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2104,10 +2134,10 @@ define @test_vluxseg5_mask_nxv4i16_nxv4i8(
; CHECK-LABEL: test_vluxseg5_mask_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2138,10 +2168,10 @@ define @test_vluxseg5_mask_nxv4i16_nxv4i32(
; CHECK-LABEL: test_vluxseg5_mask_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg5ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: vmv1r.v v8, v13
@@ -2172,11 +2202,11 @@ define @test_vluxseg6_mask_nxv4i16_nxv4i16(
; CHECK-LABEL: test_vluxseg6_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2207,11 +2237,11 @@ define @test_vluxseg6_mask_nxv4i16_nxv4i8(
; CHECK-LABEL: test_vluxseg6_mask_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2242,11 +2272,11 @@ define @test_vluxseg6_mask_nxv4i16_nxv4i32(
; CHECK-LABEL: test_vluxseg6_mask_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg6ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: vmv1r.v v8, v13
@@ -2277,12 +2307,12 @@ define @test_vluxseg7_mask_nxv4i16_nxv4i16(
; CHECK-LABEL: test_vluxseg7_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2313,12 +2343,12 @@ define @test_vluxseg7_mask_nxv4i16_nxv4i8(
; CHECK-LABEL: test_vluxseg7_mask_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2349,12 +2379,12 @@ define @test_vluxseg7_mask_nxv4i16_nxv4i32(
; CHECK-LABEL: test_vluxseg7_mask_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg7ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: vmv1r.v v8, v13
@@ -2385,13 +2415,13 @@ define @test_vluxseg8_mask_nxv4i16_nxv4i16(
; CHECK-LABEL: test_vluxseg8_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2422,13 +2452,13 @@ define @test_vluxseg8_mask_nxv4i16_nxv4i8(
; CHECK-LABEL: test_vluxseg8_mask_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2459,13 +2489,13 @@ define @test_vluxseg8_mask_nxv4i16_nxv4i32(
; CHECK-LABEL: test_vluxseg8_mask_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vluxseg8ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: vmv1r.v v8, v13
@@ -2498,6 +2528,7 @@ define @test_vluxseg2_mask_nxv1i32_nxv1i8(
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i32.nxv1i8( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -2527,6 +2558,7 @@ define @test_vluxseg2_mask_nxv1i32_nxv1i32(
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i32.nxv1i32( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -2556,6 +2588,7 @@ define @test_vluxseg2_mask_nxv1i32_nxv1i16(
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i32.nxv1i16( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -2582,11 +2615,12 @@ entry:
define @test_vluxseg3_mask_nxv1i32_nxv1i8( %val, i32* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
-; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i32.nxv1i8( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -2613,11 +2647,12 @@ entry:
define @test_vluxseg3_mask_nxv1i32_nxv1i32( %val, i32* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
-; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i32.nxv1i32( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -2644,11 +2679,12 @@ entry:
define @test_vluxseg3_mask_nxv1i32_nxv1i16( %val, i32* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
-; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i32.nxv1i16( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -2676,9 +2712,9 @@ define @test_vluxseg4_mask_nxv1i32_nxv1i8(
; CHECK-LABEL: test_vluxseg4_mask_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2709,9 +2745,9 @@ define @test_vluxseg4_mask_nxv1i32_nxv1i32(
; CHECK-LABEL: test_vluxseg4_mask_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2742,9 +2778,9 @@ define @test_vluxseg4_mask_nxv1i32_nxv1i16(
; CHECK-LABEL: test_vluxseg4_mask_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2775,10 +2811,10 @@ define @test_vluxseg5_mask_nxv1i32_nxv1i8(
; CHECK-LABEL: test_vluxseg5_mask_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2809,10 +2845,10 @@ define @test_vluxseg5_mask_nxv1i32_nxv1i32(
; CHECK-LABEL: test_vluxseg5_mask_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2843,10 +2879,10 @@ define @test_vluxseg5_mask_nxv1i32_nxv1i16(
; CHECK-LABEL: test_vluxseg5_mask_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2877,11 +2913,11 @@ define @test_vluxseg6_mask_nxv1i32_nxv1i8(
; CHECK-LABEL: test_vluxseg6_mask_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2912,11 +2948,11 @@ define @test_vluxseg6_mask_nxv1i32_nxv1i32(
; CHECK-LABEL: test_vluxseg6_mask_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2947,11 +2983,11 @@ define @test_vluxseg6_mask_nxv1i32_nxv1i16(
; CHECK-LABEL: test_vluxseg6_mask_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -2982,12 +3018,12 @@ define @test_vluxseg7_mask_nxv1i32_nxv1i8(
; CHECK-LABEL: test_vluxseg7_mask_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -3018,12 +3054,12 @@ define @test_vluxseg7_mask_nxv1i32_nxv1i32(
; CHECK-LABEL: test_vluxseg7_mask_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -3054,12 +3090,12 @@ define @test_vluxseg7_mask_nxv1i32_nxv1i16(
; CHECK-LABEL: test_vluxseg7_mask_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -3090,13 +3126,13 @@ define @test_vluxseg8_mask_nxv1i32_nxv1i8(
; CHECK-LABEL: test_vluxseg8_mask_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -3127,13 +3163,13 @@ define @test_vluxseg8_mask_nxv1i32_nxv1i32(
; CHECK-LABEL: test_vluxseg8_mask_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -3164,13 +3200,13 @@ define @test_vluxseg8_mask_nxv1i32_nxv1i16(
; CHECK-LABEL: test_vluxseg8_mask_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -3203,6 +3239,7 @@ define @test_vluxseg2_mask_nxv8i16_nxv8i16(
; CHECK-NEXT: vmv2r.v v6, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vluxseg2ei16.v v6, (a0), v10, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i16.nxv8i16( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -3232,6 +3269,7 @@ define @test_vluxseg2_mask_nxv8i16_nxv8i8(
; CHECK-NEXT: vmv2r.v v6, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vluxseg2ei8.v v6, (a0), v10, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i16.nxv8i8( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -3261,6 +3299,7 @@ define @test_vluxseg2_mask_nxv8i16_nxv8i32(
; CHECK-NEXT: vmv2r.v v6, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vluxseg2ei32.v v6, (a0), v12, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i16.nxv8i32( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -3287,11 +3326,12 @@ entry:
define @test_vluxseg3_mask_nxv8i16_nxv8i16( %val, i16* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv8i16_nxv8i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
-; CHECK-NEXT: vluxseg3ei16.v v6, (a0), v12, v0.t
+; CHECK-NEXT: vluxseg3ei16.v v12, (a0), v10, v0.t
+; CHECK-NEXT: vmv2r.v v8, v14
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i16.nxv8i16( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -3318,11 +3358,12 @@ entry:
define @test_vluxseg3_mask_nxv8i16_nxv8i8( %val, i16* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv8i16_nxv8i8:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv1r.v v12, v10
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
-; CHECK-NEXT: vluxseg3ei8.v v6, (a0), v12, v0.t
+; CHECK-NEXT: vluxseg3ei8.v v12, (a0), v10, v0.t
+; CHECK-NEXT: vmv2r.v v8, v14
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i16.nxv8i8( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -3350,9 +3391,10 @@ define @test_vluxseg3_mask_nxv8i16_nxv8i32(
; CHECK-LABEL: test_vluxseg3_mask_nxv8i16_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v10, v6
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vluxseg3ei32.v v6, (a0), v12, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i16.nxv8i32( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -3380,9 +3422,9 @@ define @test_vluxseg4_mask_nxv8i16_nxv8i16(
; CHECK-LABEL: test_vluxseg4_mask_nxv8i16_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vluxseg4ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: vmv2r.v v8, v14
@@ -3413,9 +3455,9 @@ define @test_vluxseg4_mask_nxv8i16_nxv8i8(
; CHECK-LABEL: test_vluxseg4_mask_nxv8i16_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vluxseg4ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: vmv2r.v v8, v14
@@ -3445,12 +3487,13 @@ entry:
define @test_vluxseg4_mask_nxv8i16_nxv8i32( %val, i16* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg4_mask_nxv8i16_nxv8i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
-; CHECK-NEXT: vluxseg4ei32.v v6, (a0), v16, v0.t
+; CHECK-NEXT: vluxseg4ei32.v v16, (a0), v12, v0.t
+; CHECK-NEXT: vmv2r.v v8, v18
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv8i16.nxv8i32( %val, %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1)
@@ -3480,6 +3523,7 @@ define @test_vluxseg2_mask_nxv8i8_nxv8i16( %v
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v10, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i8.nxv8i16( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -3509,6 +3553,7 @@ define @test_vluxseg2_mask_nxv8i8_nxv8i8( %va
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i8.nxv8i8( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -3538,6 +3583,7 @@ define @test_vluxseg2_mask_nxv8i8_nxv8i32( %v
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v12, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i8.nxv8i32( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -3565,9 +3611,10 @@ define @test_vluxseg3_mask_nxv8i8_nxv8i16( %v
; CHECK-LABEL: test_vluxseg3_mask_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v9, v7
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i8.nxv8i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -3594,11 +3641,12 @@ entry:
define @test_vluxseg3_mask_nxv8i8_nxv8i8( %val, i8* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv8i8_nxv8i8:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v10, v9
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t
+; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t
+; CHECK-NEXT: vmv1r.v v8, v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i8.nxv8i8( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -3626,9 +3674,10 @@ define @test_vluxseg3_mask_nxv8i8_nxv8i32( %v
; CHECK-LABEL: test_vluxseg3_mask_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
+; CHECK-NEXT: vmv1r.v v9, v7
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v12, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i8.nxv8i32( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -3655,12 +3704,13 @@ entry:
define @test_vluxseg4_mask_nxv8i8_nxv8i16( %val, i8* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg4_mask_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vluxseg4ei16.v v7, (a0), v12, v0.t
+; CHECK-NEXT: vluxseg4ei16.v v12, (a0), v10, v0.t
+; CHECK-NEXT: vmv1r.v v8, v13
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv8i8.nxv8i16( %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -3688,9 +3738,9 @@ define @test_vluxseg4_mask_nxv8i8_nxv8i8( %va
; CHECK-LABEL: test_vluxseg4_mask_nxv8i8_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -3721,10 +3771,11 @@ define @test_vluxseg4_mask_nxv8i8_nxv8i32( %v
; CHECK-LABEL: test_vluxseg4_mask_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg4ei32.v v7, (a0), v12, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -3752,10 +3803,10 @@ define @test_vluxseg5_mask_nxv8i8_nxv8i16( %v
; CHECK-LABEL: test_vluxseg5_mask_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg5ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: vmv1r.v v8, v13
@@ -3786,10 +3837,10 @@ define @test_vluxseg5_mask_nxv8i8_nxv8i8( %va
; CHECK-LABEL: test_vluxseg5_mask_nxv8i8_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -3820,11 +3871,12 @@ define @test_vluxseg5_mask_nxv8i8_nxv8i32( %v
; CHECK-LABEL: test_vluxseg5_mask_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
+; CHECK-NEXT: vmv1r.v v9, v7
+; CHECK-NEXT: vmv1r.v v10, v7
+; CHECK-NEXT: vmv1r.v v11, v7
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg5ei32.v v7, (a0), v12, v0.t
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,} @llvm.riscv.vluxseg5.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -3852,11 +3904,11 @@ define @test_vluxseg6_mask_nxv8i8_nxv8i16( %v
; CHECK-LABEL: test_vluxseg6_mask_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg6ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: vmv1r.v v8, v13
@@ -3887,11 +3939,11 @@ define @test_vluxseg6_mask_nxv8i8_nxv8i8( %va
; CHECK-LABEL: test_vluxseg6_mask_nxv8i8_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -3921,14 +3973,15 @@ entry:
define @test_vluxseg6_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg6_mask_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v7, v8
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vluxseg6ei32.v v7, (a0), v16, v0.t
+; CHECK-NEXT: vluxseg6ei32.v v16, (a0), v12, v0.t
+; CHECK-NEXT: vmv1r.v v8, v17
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,,,} @llvm.riscv.vluxseg6.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1)
@@ -3956,12 +4009,12 @@ define @test_vluxseg7_mask_nxv8i8_nxv8i16( %v
; CHECK-LABEL: test_vluxseg7_mask_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg7ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: vmv1r.v v8, v13
@@ -3992,12 +4045,12 @@ define @test_vluxseg7_mask_nxv8i8_nxv8i8( %va
; CHECK-LABEL: test_vluxseg7_mask_nxv8i8_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -4028,12 +4081,12 @@ define @test_vluxseg7_mask_nxv8i8_nxv8i32( %v
; CHECK-LABEL: test_vluxseg7_mask_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
-; CHECK-NEXT: vmv1r.v v22, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
+; CHECK-NEXT: vmv1r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vluxseg7ei32.v v16, (a0), v12, v0.t
; CHECK-NEXT: vmv1r.v v8, v17
@@ -4064,13 +4117,13 @@ define @test_vluxseg8_mask_nxv8i8_nxv8i16( %v
; CHECK-LABEL: test_vluxseg8_mask_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v
v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -4101,13 +4154,13 @@ define @test_vluxseg8_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vluxseg8_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4138,13 +4191,13 @@ define @test_vluxseg8_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg8ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -4177,6 +4230,7 @@ define @test_vluxseg2_mask_nxv8i32_nxv8i16( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i32.nxv8i16( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -4206,6 +4260,7 @@ define @test_vluxseg2_mask_nxv8i32_nxv8i8( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i32.nxv8i8( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -4235,6 +4290,7 @@ define @test_vluxseg2_mask_nxv8i32_nxv8i32( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i32.nxv8i32( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -4264,6 +4320,7 @@ define @test_vluxseg2_mask_nxv4i8_nxv4i16( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i8.nxv4i16( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4293,6 +4350,7 @@ define @test_vluxseg2_mask_nxv4i8_nxv4i8( %va ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: 
vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i8.nxv4i8( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4322,6 +4380,7 @@ define @test_vluxseg2_mask_nxv4i8_nxv4i32( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i8.nxv4i32( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4348,11 +4407,12 @@ entry: define @test_vluxseg3_mask_nxv4i8_nxv4i16( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i8.nxv4i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4379,11 +4439,12 @@ entry: define @test_vluxseg3_mask_nxv4i8_nxv4i8( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i8.nxv4i8( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4411,9 +4472,10 @@ define @test_vluxseg3_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vluxseg3_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i8.nxv4i32( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4441,9 +4503,9 @@ define @test_vluxseg4_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vluxseg4_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4474,9 +4536,9 @@ define @test_vluxseg4_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vluxseg4_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), 
v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4506,12 +4568,13 @@ entry: define @test_vluxseg4_mask_nxv4i8_nxv4i32( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vluxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: vluxseg4ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv4i8.nxv4i32( %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -4539,10 +4602,10 @@ define @test_vluxseg5_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4573,10 +4636,10 @@ define @test_vluxseg5_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vluxseg5_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4607,10 +4670,10 @@ define @test_vluxseg5_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -4641,11 +4704,11 @@ define @test_vluxseg6_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4676,11 +4739,11 @@ define @test_vluxseg6_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vluxseg6_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v 
v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4711,11 +4774,11 @@ define @test_vluxseg6_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -4746,12 +4809,12 @@ define @test_vluxseg7_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4782,12 +4845,12 @@ define @test_vluxseg7_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vluxseg7_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4818,12 +4881,12 @@ define @test_vluxseg7_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -4854,13 +4917,13 @@ define @test_vluxseg8_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t ; 
CHECK-NEXT: vmv1r.v v8, v11 @@ -4891,13 +4954,13 @@ define @test_vluxseg8_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vluxseg8_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4928,13 +4991,13 @@ define @test_vluxseg8_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -4967,6 +5030,7 @@ define @test_vluxseg2_mask_nxv1i16_nxv1i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i16.nxv1i8( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -4996,6 +5060,7 @@ define @test_vluxseg2_mask_nxv1i16_nxv1i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i16.nxv1i32( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -5025,6 +5090,7 @@ define @test_vluxseg2_mask_nxv1i16_nxv1i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i16.nxv1i16( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -5051,11 +5117,12 @@ entry: define @test_vluxseg3_mask_nxv1i16_nxv1i8( %val, i16* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i16.nxv1i8( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -5082,11 +5149,12 @@ entry: define @test_vluxseg3_mask_nxv1i16_nxv1i32( %val, i16* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: 
test_vluxseg3_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i16.nxv1i32( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -5113,11 +5181,12 @@ entry: define @test_vluxseg3_mask_nxv1i16_nxv1i16( %val, i16* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i16.nxv1i16( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -5145,9 +5214,9 @@ define @test_vluxseg4_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vluxseg4_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5178,9 +5247,9 @@ define @test_vluxseg4_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: test_vluxseg4_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5211,9 +5280,9 @@ define @test_vluxseg4_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vluxseg4_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5244,10 +5313,10 @@ define @test_vluxseg5_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5278,10 +5347,10 @@ define @test_vluxseg5_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; 
CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5312,10 +5381,10 @@ define @test_vluxseg5_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5346,11 +5415,11 @@ define @test_vluxseg6_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5381,11 +5450,11 @@ define @test_vluxseg6_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5416,11 +5485,11 @@ define @test_vluxseg6_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5451,12 +5520,12 @@ define @test_vluxseg7_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5487,12 +5556,12 @@ define @test_vluxseg7_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; 
CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5523,12 +5592,12 @@ define @test_vluxseg7_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5559,13 +5628,13 @@ define @test_vluxseg8_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vluxseg8_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5596,13 +5665,13 @@ define @test_vluxseg8_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: test_vluxseg8_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5633,13 +5702,13 @@ define @test_vluxseg8_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vluxseg8_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5672,6 +5741,7 @@ define @test_vluxseg2_mask_nxv32i8_nxv32i16(,} 
@llvm.riscv.vluxseg2.mask.nxv32i8.nxv32i16( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -5701,6 +5771,7 @@ define @test_vluxseg2_mask_nxv32i8_nxv32i8( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv32i8.nxv32i8( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -5730,6 +5801,7 @@ define @test_vluxseg2_mask_nxv2i8_nxv2i32( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i8.nxv2i32( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -5759,6 +5831,7 @@ define @test_vluxseg2_mask_nxv2i8_nxv2i8( %va ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i8.nxv2i8( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -5788,6 +5861,7 @@ define @test_vluxseg2_mask_nxv2i8_nxv2i16( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i8.nxv2i16( %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -5814,11 +5888,12 @@ entry: define @test_vluxseg3_mask_nxv2i8_nxv2i32( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i8.nxv2i32( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -5845,11 +5920,12 @@ entry: define @test_vluxseg3_mask_nxv2i8_nxv2i8( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i8.nxv2i8( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -5876,11 +5952,12 @@ entry: define @test_vluxseg3_mask_nxv2i8_nxv2i16( %val, i8* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; 
CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i8.nxv2i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl, i32 1) @@ -5908,9 +5985,9 @@ define @test_vluxseg4_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vluxseg4_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5941,9 +6018,9 @@ define @test_vluxseg4_mask_nxv2i8_nxv2i8( %va ; CHECK-LABEL: test_vluxseg4_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5974,9 +6051,9 @@ define @test_vluxseg4_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vluxseg4_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6007,10 +6084,10 @@ define @test_vluxseg5_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6041,10 +6118,10 @@ define @test_vluxseg5_mask_nxv2i8_nxv2i8( %va ; CHECK-LABEL: test_vluxseg5_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6075,10 +6152,10 @@ define @test_vluxseg5_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6109,11 +6186,11 @@ define @test_vluxseg6_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: 
vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6144,11 +6221,11 @@ define @test_vluxseg6_mask_nxv2i8_nxv2i8( %va ; CHECK-LABEL: test_vluxseg6_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6179,11 +6256,11 @@ define @test_vluxseg6_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6214,12 +6291,12 @@ define @test_vluxseg7_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6250,12 +6327,12 @@ define @test_vluxseg7_mask_nxv2i8_nxv2i8( %va ; CHECK-LABEL: test_vluxseg7_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6286,12 +6363,12 @@ define @test_vluxseg7_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; 
CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6322,13 +6399,13 @@ define @test_vluxseg8_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6359,13 +6436,13 @@ define @test_vluxseg8_mask_nxv2i8_nxv2i8( %va ; CHECK-LABEL: test_vluxseg8_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6396,13 +6473,13 @@ define @test_vluxseg8_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6435,6 +6512,7 @@ define @test_vluxseg2_mask_nxv2i16_nxv2i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i16.nxv2i32( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -6464,6 +6542,7 @@ define @test_vluxseg2_mask_nxv2i16_nxv2i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i16.nxv2i8( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -6493,6 +6572,7 @@ define @test_vluxseg2_mask_nxv2i16_nxv2i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i16.nxv2i16( %val, %val, i16* %base, %index, %mask, i32 %vl, i32 
1) @@ -6519,11 +6599,12 @@ entry: define @test_vluxseg3_mask_nxv2i16_nxv2i32( %val, i16* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i16.nxv2i32( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -6550,11 +6631,12 @@ entry: define @test_vluxseg3_mask_nxv2i16_nxv2i8( %val, i16* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i16.nxv2i8( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -6581,11 +6663,12 @@ entry: define @test_vluxseg3_mask_nxv2i16_nxv2i16( %val, i16* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i16.nxv2i16( %val, %val, %val, i16* %base, %index, %mask, i32 %vl, i32 1) @@ -6613,9 +6696,9 @@ define @test_vluxseg4_mask_nxv2i16_nxv2i32( ; CHECK-LABEL: test_vluxseg4_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6646,9 +6729,9 @@ define @test_vluxseg4_mask_nxv2i16_nxv2i8( ; CHECK-LABEL: test_vluxseg4_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6679,9 +6762,9 @@ define @test_vluxseg4_mask_nxv2i16_nxv2i16( ; CHECK-LABEL: test_vluxseg4_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei16.v 
v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -6712,10 +6795,10 @@ define @test_vluxseg5_mask_nxv2i16_nxv2i32(
; CHECK-LABEL: test_vluxseg5_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vluxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -6746,10 +6829,10 @@ define @test_vluxseg5_mask_nxv2i16_nxv2i8(
; CHECK-LABEL: test_vluxseg5_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -6780,10 +6863,10 @@ define @test_vluxseg5_mask_nxv2i16_nxv2i16(
; CHECK-LABEL: test_vluxseg5_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -6814,11 +6897,11 @@ define @test_vluxseg6_mask_nxv2i16_nxv2i32(
; CHECK-LABEL: test_vluxseg6_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vluxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -6849,11 +6932,11 @@ define @test_vluxseg6_mask_nxv2i16_nxv2i8(
; CHECK-LABEL: test_vluxseg6_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -6884,11 +6967,11 @@ define @test_vluxseg6_mask_nxv2i16_nxv2i16(
; CHECK-LABEL: test_vluxseg6_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -6919,12 +7002,12 @@ define @test_vluxseg7_mask_nxv2i16_nxv2i32(
; CHECK-LABEL: test_vluxseg7_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vluxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -6955,12 +7038,12 @@ define @test_vluxseg7_mask_nxv2i16_nxv2i8(
; CHECK-LABEL: test_vluxseg7_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -6991,12 +7074,12 @@ define @test_vluxseg7_mask_nxv2i16_nxv2i16(
; CHECK-LABEL: test_vluxseg7_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -7027,13 +7110,13 @@ define @test_vluxseg8_mask_nxv2i16_nxv2i32(
; CHECK-LABEL: test_vluxseg8_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vluxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -7064,13 +7147,13 @@ define @test_vluxseg8_mask_nxv2i16_nxv2i8(
; CHECK-LABEL: test_vluxseg8_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -7101,13 +7184,13 @@ define @test_vluxseg8_mask_nxv2i16_nxv2i16(
; CHECK-LABEL: test_vluxseg8_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: vmv1r.v v8, v11
@@ -7140,6 +7223,7 @@ define @test_vluxseg2_mask_nxv4i32_nxv4i16(
; CHECK-NEXT: vmv2r.v v6, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vluxseg2ei16.v v6, (a0), v10, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i32.nxv4i16( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -7169,6 +7253,7 @@ define @test_vluxseg2_mask_nxv4i32_nxv4i8(
; CHECK-NEXT: vmv2r.v v6, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vluxseg2ei8.v v6, (a0), v10, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i32.nxv4i8( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -7198,6 +7283,7 @@ define @test_vluxseg2_mask_nxv4i32_nxv4i32(
; CHECK-NEXT: vmv2r.v v6, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vluxseg2ei32.v v6, (a0), v10, v0.t
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT: ret
entry:
%0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i32.nxv4i32( %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -7224,11 +7310,12 @@ entry:
define @test_vluxseg3_mask_nxv4i32_nxv4i16( %val, i32* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv4i32_nxv4i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv1r.v v12, v10
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT: vluxseg3ei16.v v6, (a0), v12, v0.t
+; CHECK-NEXT: vluxseg3ei16.v v12, (a0), v10, v0.t
+; CHECK-NEXT: vmv2r.v v8, v14
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i32.nxv4i16( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -7255,11 +7342,12 @@ entry:
define @test_vluxseg3_mask_nxv4i32_nxv4i8( %val, i32* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv4i32_nxv4i8:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv1r.v v12, v10
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT: vluxseg3ei8.v v6, (a0), v12, v0.t
+; CHECK-NEXT: vluxseg3ei8.v v12, (a0), v10, v0.t
+; CHECK-NEXT: vmv2r.v v8, v14
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i32.nxv4i8( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1)
@@ -7286,11 +7374,12 @@ entry:
define @test_vluxseg3_mask_nxv4i32_nxv4i32( %val, i32* %base, %index, i32 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv4i32_nxv4i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i32.nxv4i32( %val, %val, %val, i32* %base, %index, %mask, i32 %vl, i32 1) @@ -7318,9 +7407,9 @@ define @test_vluxseg4_mask_nxv4i32_nxv4i16( ; CHECK-LABEL: test_vluxseg4_mask_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -7351,9 +7440,9 @@ define @test_vluxseg4_mask_nxv4i32_nxv4i8( ; CHECK-LABEL: test_vluxseg4_mask_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -7384,9 +7473,9 @@ define @test_vluxseg4_mask_nxv4i32_nxv4i32( ; CHECK-LABEL: test_vluxseg4_mask_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -7419,6 +7508,7 @@ define @test_vluxseg2_mask_nxv16f16_nxv16i16(,} @llvm.riscv.vluxseg2.mask.nxv16f16.nxv16i16( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -7448,6 +7538,7 @@ define @test_vluxseg2_mask_nxv16f16_nxv16i8(,} @llvm.riscv.vluxseg2.mask.nxv16f16.nxv16i8( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -7477,6 +7568,7 @@ define @test_vluxseg2_mask_nxv16f16_nxv16i32(,} @llvm.riscv.vluxseg2.mask.nxv16f16.nxv16i32( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -7506,6 +7598,7 @@ define @test_vluxseg2_mask_nxv4f64_nxv4i16(,} @llvm.riscv.vluxseg2.mask.nxv4f64.nxv4i16( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -7535,6 +7628,7 @@ define @test_vluxseg2_mask_nxv4f64_nxv4i8(,} @llvm.riscv.vluxseg2.mask.nxv4f64.nxv4i8( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -7564,6 +7658,7 @@ define @test_vluxseg2_mask_nxv4f64_nxv4i32(,} @llvm.riscv.vluxseg2.mask.nxv4f64.nxv4i32( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -7593,6 +7688,7 @@ define @test_vluxseg2_mask_nxv1f64_nxv1i8(,} @llvm.riscv.vluxseg2.mask.nxv1f64.nxv1i8( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -7622,6 +7718,7 @@ define @test_vluxseg2_mask_nxv1f64_nxv1i32(,} @llvm.riscv.vluxseg2.mask.nxv1f64.nxv1i32( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -7651,6 +7748,7 @@ define @test_vluxseg2_mask_nxv1f64_nxv1i16(,} @llvm.riscv.vluxseg2.mask.nxv1f64.nxv1i16( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -7677,11 +7775,12 @@ entry: define 
@test_vluxseg3_mask_nxv1f64_nxv1i8( %val, double* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f64.nxv1i8( %val, %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -7708,11 +7807,12 @@ entry: define @test_vluxseg3_mask_nxv1f64_nxv1i32( %val, double* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f64.nxv1i32( %val, %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -7739,11 +7839,12 @@ entry: define @test_vluxseg3_mask_nxv1f64_nxv1i16( %val, double* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f64.nxv1i16( %val, %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -7771,9 +7872,9 @@ define @test_vluxseg4_mask_nxv1f64_nxv1i8( @test_vluxseg4_mask_nxv1f64_nxv1i32( @test_vluxseg4_mask_nxv1f64_nxv1i16( @test_vluxseg5_mask_nxv1f64_nxv1i8( @test_vluxseg5_mask_nxv1f64_nxv1i32( @test_vluxseg5_mask_nxv1f64_nxv1i16( @test_vluxseg6_mask_nxv1f64_nxv1i8( @test_vluxseg6_mask_nxv1f64_nxv1i32( @test_vluxseg6_mask_nxv1f64_nxv1i16( @test_vluxseg7_mask_nxv1f64_nxv1i8( @test_vluxseg7_mask_nxv1f64_nxv1i32( @test_vluxseg7_mask_nxv1f64_nxv1i16( @test_vluxseg8_mask_nxv1f64_nxv1i8( @test_vluxseg8_mask_nxv1f64_nxv1i32( @test_vluxseg8_mask_nxv1f64_nxv1i16( @test_vluxseg2_mask_nxv2f32_nxv2i32(,} @llvm.riscv.vluxseg2.mask.nxv2f32.nxv2i32( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -8327,6 +8429,7 @@ define @test_vluxseg2_mask_nxv2f32_nxv2i8(,} @llvm.riscv.vluxseg2.mask.nxv2f32.nxv2i8( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -8356,6 +8459,7 @@ define @test_vluxseg2_mask_nxv2f32_nxv2i16(,} @llvm.riscv.vluxseg2.mask.nxv2f32.nxv2i16( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -8382,11 +8486,12 @@ entry: define @test_vluxseg3_mask_nxv2f32_nxv2i32( %val, float* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; 
CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f32.nxv2i32( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -8413,11 +8518,12 @@ entry: define @test_vluxseg3_mask_nxv2f32_nxv2i8( %val, float* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f32.nxv2i8( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -8444,11 +8550,12 @@ entry: define @test_vluxseg3_mask_nxv2f32_nxv2i16( %val, float* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f32.nxv2i16( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -8476,9 +8583,9 @@ define @test_vluxseg4_mask_nxv2f32_nxv2i32( @test_vluxseg4_mask_nxv2f32_nxv2i8( @test_vluxseg4_mask_nxv2f32_nxv2i16( @test_vluxseg5_mask_nxv2f32_nxv2i32( @test_vluxseg5_mask_nxv2f32_nxv2i8( @test_vluxseg5_mask_nxv2f32_nxv2i16( @test_vluxseg6_mask_nxv2f32_nxv2i32( @test_vluxseg6_mask_nxv2f32_nxv2i8( @test_vluxseg6_mask_nxv2f32_nxv2i16( @test_vluxseg7_mask_nxv2f32_nxv2i32( @test_vluxseg7_mask_nxv2f32_nxv2i8( @test_vluxseg7_mask_nxv2f32_nxv2i16( @test_vluxseg8_mask_nxv2f32_nxv2i32( @test_vluxseg8_mask_nxv2f32_nxv2i8( @test_vluxseg8_mask_nxv2f32_nxv2i16( @test_vluxseg2_mask_nxv1f16_nxv1i8(,} @llvm.riscv.vluxseg2.mask.nxv1f16.nxv1i8( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -9032,6 +9140,7 @@ define @test_vluxseg2_mask_nxv1f16_nxv1i32(,} @llvm.riscv.vluxseg2.mask.nxv1f16.nxv1i32( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -9061,6 +9170,7 @@ define @test_vluxseg2_mask_nxv1f16_nxv1i16(,} @llvm.riscv.vluxseg2.mask.nxv1f16.nxv1i16( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -9087,11 +9197,12 @@ entry: define @test_vluxseg3_mask_nxv1f16_nxv1i8( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f16.nxv1i8( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 
1) @@ -9118,11 +9229,12 @@ entry: define @test_vluxseg3_mask_nxv1f16_nxv1i32( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f16.nxv1i32( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -9149,11 +9261,12 @@ entry: define @test_vluxseg3_mask_nxv1f16_nxv1i16( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f16.nxv1i16( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -9181,9 +9294,9 @@ define @test_vluxseg4_mask_nxv1f16_nxv1i8( @test_vluxseg4_mask_nxv1f16_nxv1i32( @test_vluxseg4_mask_nxv1f16_nxv1i16( @test_vluxseg5_mask_nxv1f16_nxv1i8( @test_vluxseg5_mask_nxv1f16_nxv1i32( @test_vluxseg5_mask_nxv1f16_nxv1i16( @test_vluxseg6_mask_nxv1f16_nxv1i8( @test_vluxseg6_mask_nxv1f16_nxv1i32( @test_vluxseg6_mask_nxv1f16_nxv1i16( @test_vluxseg7_mask_nxv1f16_nxv1i8( @test_vluxseg7_mask_nxv1f16_nxv1i32( @test_vluxseg7_mask_nxv1f16_nxv1i16( @test_vluxseg8_mask_nxv1f16_nxv1i8( @test_vluxseg8_mask_nxv1f16_nxv1i32( @test_vluxseg8_mask_nxv1f16_nxv1i16( @test_vluxseg2_mask_nxv1f32_nxv1i8(,} @llvm.riscv.vluxseg2.mask.nxv1f32.nxv1i8( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -9737,6 +9851,7 @@ define @test_vluxseg2_mask_nxv1f32_nxv1i32(,} @llvm.riscv.vluxseg2.mask.nxv1f32.nxv1i32( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -9766,6 +9881,7 @@ define @test_vluxseg2_mask_nxv1f32_nxv1i16(,} @llvm.riscv.vluxseg2.mask.nxv1f32.nxv1i16( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -9792,11 +9908,12 @@ entry: define @test_vluxseg3_mask_nxv1f32_nxv1i8( %val, float* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f32.nxv1i8( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -9823,11 +9940,12 @@ entry: define @test_vluxseg3_mask_nxv1f32_nxv1i32( %val, float* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; 
CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f32.nxv1i32( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -9854,11 +9972,12 @@ entry: define @test_vluxseg3_mask_nxv1f32_nxv1i16( %val, float* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f32.nxv1i16( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -9886,9 +10005,9 @@ define @test_vluxseg4_mask_nxv1f32_nxv1i8( @test_vluxseg4_mask_nxv1f32_nxv1i32( @test_vluxseg4_mask_nxv1f32_nxv1i16( @test_vluxseg5_mask_nxv1f32_nxv1i8( @test_vluxseg5_mask_nxv1f32_nxv1i32( @test_vluxseg5_mask_nxv1f32_nxv1i16( @test_vluxseg6_mask_nxv1f32_nxv1i8( @test_vluxseg6_mask_nxv1f32_nxv1i32( @test_vluxseg6_mask_nxv1f32_nxv1i16( @test_vluxseg7_mask_nxv1f32_nxv1i8( @test_vluxseg7_mask_nxv1f32_nxv1i32( @test_vluxseg7_mask_nxv1f32_nxv1i16( @test_vluxseg8_mask_nxv1f32_nxv1i8( @test_vluxseg8_mask_nxv1f32_nxv1i32( @test_vluxseg8_mask_nxv1f32_nxv1i16( @test_vluxseg2_mask_nxv8f16_nxv8i16(,} @llvm.riscv.vluxseg2.mask.nxv8f16.nxv8i16( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -10442,6 +10562,7 @@ define @test_vluxseg2_mask_nxv8f16_nxv8i8(,} @llvm.riscv.vluxseg2.mask.nxv8f16.nxv8i8( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -10471,6 +10592,7 @@ define @test_vluxseg2_mask_nxv8f16_nxv8i32(,} @llvm.riscv.vluxseg2.mask.nxv8f16.nxv8i32( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -10497,11 +10619,12 @@ entry: define @test_vluxseg3_mask_nxv8f16_nxv8i16( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8f16.nxv8i16( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -10528,11 +10651,12 @@ entry: define @test_vluxseg3_mask_nxv8f16_nxv8i8( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8f16.nxv8i8( %val, %val, %val, half* %base, %index, %mask, i32 %vl, 
i32 1) @@ -10560,9 +10684,10 @@ define @test_vluxseg3_mask_nxv8f16_nxv8i32(,,} @llvm.riscv.vluxseg3.mask.nxv8f16.nxv8i32( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -10590,9 +10715,9 @@ define @test_vluxseg4_mask_nxv8f16_nxv8i16( @test_vluxseg4_mask_nxv8f16_nxv8i8( @test_vluxseg4_mask_nxv8f16_nxv8i32( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vluxseg4ei32.v v6, (a0), v16, v0.t +; CHECK-NEXT: vluxseg4ei32.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv2r.v v8, v18 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv8f16.nxv8i32( %val, %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -10690,6 +10816,7 @@ define @test_vluxseg2_mask_nxv8f32_nxv8i16(,} @llvm.riscv.vluxseg2.mask.nxv8f32.nxv8i16( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -10719,6 +10846,7 @@ define @test_vluxseg2_mask_nxv8f32_nxv8i8(,} @llvm.riscv.vluxseg2.mask.nxv8f32.nxv8i8( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -10748,6 +10876,7 @@ define @test_vluxseg2_mask_nxv8f32_nxv8i32(,} @llvm.riscv.vluxseg2.mask.nxv8f32.nxv8i32( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -10777,6 +10906,7 @@ define @test_vluxseg2_mask_nxv2f64_nxv2i32(,} @llvm.riscv.vluxseg2.mask.nxv2f64.nxv2i32( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -10806,6 +10936,7 @@ define @test_vluxseg2_mask_nxv2f64_nxv2i8(,} @llvm.riscv.vluxseg2.mask.nxv2f64.nxv2i8( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -10835,6 +10966,7 @@ define @test_vluxseg2_mask_nxv2f64_nxv2i16(,} @llvm.riscv.vluxseg2.mask.nxv2f64.nxv2i16( %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -10861,11 +10993,12 @@ entry: define @test_vluxseg3_mask_nxv2f64_nxv2i32( %val, double* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f64.nxv2i32( %val, %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -10892,11 +11025,12 @@ entry: define @test_vluxseg3_mask_nxv2f64_nxv2i8( %val, double* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f64.nxv2i8( %val, %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -10923,11 +11057,12 @@ entry: define 
@test_vluxseg3_mask_nxv2f64_nxv2i16( %val, double* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f64.nxv2i16( %val, %val, %val, double* %base, %index, %mask, i32 %vl, i32 1) @@ -10955,9 +11090,9 @@ define @test_vluxseg4_mask_nxv2f64_nxv2i32( @test_vluxseg4_mask_nxv2f64_nxv2i8( @test_vluxseg4_mask_nxv2f64_nxv2i16( @test_vluxseg2_mask_nxv4f16_nxv4i16(,} @llvm.riscv.vluxseg2.mask.nxv4f16.nxv4i16( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11085,6 +11221,7 @@ define @test_vluxseg2_mask_nxv4f16_nxv4i8(,} @llvm.riscv.vluxseg2.mask.nxv4f16.nxv4i8( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11114,6 +11251,7 @@ define @test_vluxseg2_mask_nxv4f16_nxv4i32(,} @llvm.riscv.vluxseg2.mask.nxv4f16.nxv4i32( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11140,11 +11278,12 @@ entry: define @test_vluxseg3_mask_nxv4f16_nxv4i16( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4f16.nxv4i16( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11171,11 +11310,12 @@ entry: define @test_vluxseg3_mask_nxv4f16_nxv4i8( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4f16.nxv4i8( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11203,9 +11343,10 @@ define @test_vluxseg3_mask_nxv4f16_nxv4i32(,,} @llvm.riscv.vluxseg3.mask.nxv4f16.nxv4i32( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11233,9 +11374,9 @@ define @test_vluxseg4_mask_nxv4f16_nxv4i16( @test_vluxseg4_mask_nxv4f16_nxv4i8( @test_vluxseg4_mask_nxv4f16_nxv4i32( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vluxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: vluxseg4ei32.v v12, (a0), 
v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv4f16.nxv4i32( %val, %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11331,10 +11473,10 @@ define @test_vluxseg5_mask_nxv4f16_nxv4i16( @test_vluxseg5_mask_nxv4f16_nxv4i8( @test_vluxseg5_mask_nxv4f16_nxv4i32( @test_vluxseg6_mask_nxv4f16_nxv4i16( @test_vluxseg6_mask_nxv4f16_nxv4i8( @test_vluxseg6_mask_nxv4f16_nxv4i32( @test_vluxseg7_mask_nxv4f16_nxv4i16( @test_vluxseg7_mask_nxv4f16_nxv4i8( @test_vluxseg7_mask_nxv4f16_nxv4i32( @test_vluxseg8_mask_nxv4f16_nxv4i16( @test_vluxseg8_mask_nxv4f16_nxv4i8( @test_vluxseg8_mask_nxv4f16_nxv4i32( @test_vluxseg2_mask_nxv2f16_nxv2i32(,} @llvm.riscv.vluxseg2.mask.nxv2f16.nxv2i32( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11788,6 +11931,7 @@ define @test_vluxseg2_mask_nxv2f16_nxv2i8(,} @llvm.riscv.vluxseg2.mask.nxv2f16.nxv2i8( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11817,6 +11961,7 @@ define @test_vluxseg2_mask_nxv2f16_nxv2i16(,} @llvm.riscv.vluxseg2.mask.nxv2f16.nxv2i16( %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11843,11 +11988,12 @@ entry: define @test_vluxseg3_mask_nxv2f16_nxv2i32( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f16.nxv2i32( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11874,11 +12020,12 @@ entry: define @test_vluxseg3_mask_nxv2f16_nxv2i8( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f16.nxv2i8( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11905,11 +12052,12 @@ entry: define @test_vluxseg3_mask_nxv2f16_nxv2i16( %val, half* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f16.nxv2i16( %val, %val, %val, half* %base, %index, %mask, i32 %vl, i32 1) @@ -11937,9 +12085,9 @@ define @test_vluxseg4_mask_nxv2f16_nxv2i32( @test_vluxseg4_mask_nxv2f16_nxv2i8( @test_vluxseg4_mask_nxv2f16_nxv2i16( @test_vluxseg5_mask_nxv2f16_nxv2i32( @test_vluxseg5_mask_nxv2f16_nxv2i8( @test_vluxseg5_mask_nxv2f16_nxv2i16( 
@test_vluxseg6_mask_nxv2f16_nxv2i32( @test_vluxseg6_mask_nxv2f16_nxv2i8( @test_vluxseg6_mask_nxv2f16_nxv2i16( @test_vluxseg7_mask_nxv2f16_nxv2i32( @test_vluxseg7_mask_nxv2f16_nxv2i8( @test_vluxseg7_mask_nxv2f16_nxv2i16( @test_vluxseg8_mask_nxv2f16_nxv2i32( @test_vluxseg8_mask_nxv2f16_nxv2i8( @test_vluxseg8_mask_nxv2f16_nxv2i16( @test_vluxseg2_mask_nxv4f32_nxv4i16(,} @llvm.riscv.vluxseg2.mask.nxv4f32.nxv4i16( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -12493,6 +12642,7 @@ define @test_vluxseg2_mask_nxv4f32_nxv4i8(,} @llvm.riscv.vluxseg2.mask.nxv4f32.nxv4i8( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -12522,6 +12672,7 @@ define @test_vluxseg2_mask_nxv4f32_nxv4i32(,} @llvm.riscv.vluxseg2.mask.nxv4f32.nxv4i32( %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -12548,11 +12699,12 @@ entry: define @test_vluxseg3_mask_nxv4f32_nxv4i16( %val, float* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4f32_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4f32.nxv4i16( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -12579,11 +12731,12 @@ entry: define @test_vluxseg3_mask_nxv4f32_nxv4i8( %val, float* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4f32_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4f32.nxv4i8( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -12610,11 +12763,12 @@ entry: define @test_vluxseg3_mask_nxv4f32_nxv4i32( %val, float* %base, %index, i32 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4f32_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4f32.nxv4i32( %val, %val, %val, float* %base, %index, %mask, i32 %vl, i32 1) @@ -12642,9 +12796,9 @@ define @test_vluxseg4_mask_nxv4f32_nxv4i16( @test_vluxseg4_mask_nxv4f32_nxv4i8( @test_vluxseg4_mask_nxv4f32_nxv4i32( @test_vluxseg2_mask_nxv16i16_nxv16i16(,} @llvm.riscv.vluxseg2.mask.nxv16i16.nxv16i16( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -53,6 +54,7 @@ define @test_vluxseg2_mask_nxv16i16_nxv16i8(,} @llvm.riscv.vluxseg2.mask.nxv16i16.nxv16i8( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -82,6 +84,7 @@ define @test_vluxseg2_mask_nxv16i16_nxv16i32(,} @llvm.riscv.vluxseg2.mask.nxv16i16.nxv16i32( %val, %val, i16* %base, %index, 
%mask, i64 %vl, i64 1) @@ -111,6 +114,7 @@ define @test_vluxseg2_mask_nxv4i32_nxv4i32( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i32.nxv4i32( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -140,6 +144,7 @@ define @test_vluxseg2_mask_nxv4i32_nxv4i8( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i32.nxv4i8( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -169,6 +174,7 @@ define @test_vluxseg2_mask_nxv4i32_nxv4i64( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v6, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i32.nxv4i64( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -198,6 +204,7 @@ define @test_vluxseg2_mask_nxv4i32_nxv4i16( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i32.nxv4i16( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -224,11 +231,12 @@ entry: define @test_vluxseg3_mask_nxv4i32_nxv4i32( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i32.nxv4i32( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -255,11 +263,12 @@ entry: define @test_vluxseg3_mask_nxv4i32_nxv4i8( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i32.nxv4i8( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -287,9 +296,10 @@ define @test_vluxseg3_mask_nxv4i32_nxv4i64( ; CHECK-LABEL: test_vluxseg3_mask_nxv4i32_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vluxseg3ei64.v v6, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i32.nxv4i64( %val, %val, %val, i32* %base, %index, 
%mask, i64 %vl, i64 1) @@ -316,11 +326,12 @@ entry: define @test_vluxseg3_mask_nxv4i32_nxv4i16( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i32.nxv4i16( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -348,9 +359,9 @@ define @test_vluxseg4_mask_nxv4i32_nxv4i32( ; CHECK-LABEL: test_vluxseg4_mask_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -381,9 +392,9 @@ define @test_vluxseg4_mask_nxv4i32_nxv4i8( ; CHECK-LABEL: test_vluxseg4_mask_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -413,12 +424,13 @@ entry: define @test_vluxseg4_mask_nxv4i32_nxv4i64( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv4i32_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vluxseg4ei64.v v6, (a0), v16, v0.t +; CHECK-NEXT: vluxseg4ei64.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv2r.v v8, v18 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv4i32.nxv4i64( %val, %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -446,9 +458,9 @@ define @test_vluxseg4_mask_nxv4i32_nxv4i16( ; CHECK-LABEL: test_vluxseg4_mask_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -481,6 +493,7 @@ define @test_vluxseg2_mask_nxv16i8_nxv16i16(,} @llvm.riscv.vluxseg2.mask.nxv16i8.nxv16i16( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -510,6 +523,7 @@ define @test_vluxseg2_mask_nxv16i8_nxv16i8( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv16i8.nxv16i8( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -539,6 
+553,7 @@ define @test_vluxseg2_mask_nxv16i8_nxv16i32(,} @llvm.riscv.vluxseg2.mask.nxv16i8.nxv16i32( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -566,9 +581,10 @@ define @test_vluxseg3_mask_nxv16i8_nxv16i16(,,} @llvm.riscv.vluxseg3.mask.nxv16i8.nxv16i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -595,11 +611,12 @@ entry: define @test_vluxseg3_mask_nxv16i8_nxv16i8( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv16i8.nxv16i8( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -627,9 +644,10 @@ define @test_vluxseg3_mask_nxv16i8_nxv16i32(,,} @llvm.riscv.vluxseg3.mask.nxv16i8.nxv16i32( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -656,12 +674,13 @@ entry: define @test_vluxseg4_mask_nxv16i8_nxv16i16( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv16i8_nxv16i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu -; CHECK-NEXT: vluxseg4ei16.v v6, (a0), v16, v0.t +; CHECK-NEXT: vluxseg4ei16.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv2r.v v8, v18 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv16i8.nxv16i16( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -689,9 +708,9 @@ define @test_vluxseg4_mask_nxv16i8_nxv16i8( ; CHECK-LABEL: test_vluxseg4_mask_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -722,10 +741,11 @@ define @test_vluxseg4_mask_nxv16i8_nxv16i32(,,,} @llvm.riscv.vluxseg4.mask.nxv16i8.nxv16i32( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -755,6 +775,7 @@ define @test_vluxseg2_mask_nxv1i64_nxv1i64( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i64.nxv1i64( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -784,6 +805,7 @@ define @test_vluxseg2_mask_nxv1i64_nxv1i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i64.nxv1i32( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -813,6 +835,7 @@ define @test_vluxseg2_mask_nxv1i64_nxv1i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, 
a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i64.nxv1i16( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -842,6 +865,7 @@ define @test_vluxseg2_mask_nxv1i64_nxv1i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i64.nxv1i8( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -868,11 +892,12 @@ entry: define @test_vluxseg3_mask_nxv1i64_nxv1i64( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vluxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i64.nxv1i64( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -899,11 +924,12 @@ entry: define @test_vluxseg3_mask_nxv1i64_nxv1i32( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i64.nxv1i32( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -930,11 +956,12 @@ entry: define @test_vluxseg3_mask_nxv1i64_nxv1i16( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i64.nxv1i16( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -961,11 +988,12 @@ entry: define @test_vluxseg3_mask_nxv1i64_nxv1i8( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i64.nxv1i8( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -993,9 +1021,9 @@ define @test_vluxseg4_mask_nxv1i64_nxv1i64( ; 
CHECK-LABEL: test_vluxseg4_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1026,9 +1054,9 @@ define @test_vluxseg4_mask_nxv1i64_nxv1i32( ; CHECK-LABEL: test_vluxseg4_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1059,9 +1087,9 @@ define @test_vluxseg4_mask_nxv1i64_nxv1i16( ; CHECK-LABEL: test_vluxseg4_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1092,9 +1120,9 @@ define @test_vluxseg4_mask_nxv1i64_nxv1i8( ; CHECK-LABEL: test_vluxseg4_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1125,10 +1153,10 @@ define @test_vluxseg5_mask_nxv1i64_nxv1i64( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1159,10 +1187,10 @@ define @test_vluxseg5_mask_nxv1i64_nxv1i32( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1193,10 +1221,10 @@ define @test_vluxseg5_mask_nxv1i64_nxv1i16( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 
@@ -1227,10 +1255,10 @@ define @test_vluxseg5_mask_nxv1i64_nxv1i8( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1261,11 +1289,11 @@ define @test_vluxseg6_mask_nxv1i64_nxv1i64( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1296,11 +1324,11 @@ define @test_vluxseg6_mask_nxv1i64_nxv1i32( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1331,11 +1359,11 @@ define @test_vluxseg6_mask_nxv1i64_nxv1i16( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1366,11 +1394,11 @@ define @test_vluxseg6_mask_nxv1i64_nxv1i8( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1401,12 +1429,12 @@ define @test_vluxseg7_mask_nxv1i64_nxv1i64( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; 
CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1437,12 +1465,12 @@ define @test_vluxseg7_mask_nxv1i64_nxv1i32( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1473,12 +1501,12 @@ define @test_vluxseg7_mask_nxv1i64_nxv1i16( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1509,12 +1537,12 @@ define @test_vluxseg7_mask_nxv1i64_nxv1i8( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1545,13 +1573,13 @@ define @test_vluxseg8_mask_nxv1i64_nxv1i64( ; CHECK-LABEL: test_vluxseg8_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1582,13 +1610,13 @@ define @test_vluxseg8_mask_nxv1i64_nxv1i32( ; CHECK-LABEL: test_vluxseg8_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, 
v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1619,13 +1647,13 @@ define @test_vluxseg8_mask_nxv1i64_nxv1i16( ; CHECK-LABEL: test_vluxseg8_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1656,13 +1684,13 @@ define @test_vluxseg8_mask_nxv1i64_nxv1i8( ; CHECK-LABEL: test_vluxseg8_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1695,6 +1723,7 @@ define @test_vluxseg2_mask_nxv1i32_nxv1i64( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i32.nxv1i64( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1724,6 +1753,7 @@ define @test_vluxseg2_mask_nxv1i32_nxv1i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i32.nxv1i32( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1753,6 +1783,7 @@ define @test_vluxseg2_mask_nxv1i32_nxv1i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i32.nxv1i16( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1782,6 +1813,7 @@ define @test_vluxseg2_mask_nxv1i32_nxv1i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i32.nxv1i8( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1808,11 +1840,12 @@ entry: define @test_vluxseg3_mask_nxv1i32_nxv1i64( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, 
v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i32.nxv1i64( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1839,11 +1872,12 @@ entry: define @test_vluxseg3_mask_nxv1i32_nxv1i32( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i32.nxv1i32( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1870,11 +1904,12 @@ entry: define @test_vluxseg3_mask_nxv1i32_nxv1i16( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i32.nxv1i16( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1901,11 +1936,12 @@ entry: define @test_vluxseg3_mask_nxv1i32_nxv1i8( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i32.nxv1i8( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -1933,9 +1969,9 @@ define @test_vluxseg4_mask_nxv1i32_nxv1i64( ; CHECK-LABEL: test_vluxseg4_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1966,9 +2002,9 @@ define @test_vluxseg4_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vluxseg4_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -1999,9 +2035,9 @@ define @test_vluxseg4_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: 
test_vluxseg4_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2032,9 +2068,9 @@ define @test_vluxseg4_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: test_vluxseg4_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2065,10 +2101,10 @@ define @test_vluxseg5_mask_nxv1i32_nxv1i64( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2099,10 +2135,10 @@ define @test_vluxseg5_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2133,10 +2169,10 @@ define @test_vluxseg5_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2167,10 +2203,10 @@ define @test_vluxseg5_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2201,11 +2237,11 @@ define @test_vluxseg6_mask_nxv1i32_nxv1i64( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; 
CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2236,11 +2272,11 @@ define @test_vluxseg6_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2271,11 +2307,11 @@ define @test_vluxseg6_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2306,11 +2342,11 @@ define @test_vluxseg6_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2341,12 +2377,12 @@ define @test_vluxseg7_mask_nxv1i32_nxv1i64( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2377,12 +2413,12 @@ define @test_vluxseg7_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2413,12 +2449,12 @@ define @test_vluxseg7_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; 
CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2449,12 +2485,12 @@ define @test_vluxseg7_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2485,13 +2521,13 @@ define @test_vluxseg8_mask_nxv1i32_nxv1i64( ; CHECK-LABEL: test_vluxseg8_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2522,13 +2558,13 @@ define @test_vluxseg8_mask_nxv1i32_nxv1i32( ; CHECK-LABEL: test_vluxseg8_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2559,13 +2595,13 @@ define @test_vluxseg8_mask_nxv1i32_nxv1i16( ; CHECK-LABEL: test_vluxseg8_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2596,13 +2632,13 @@ define @test_vluxseg8_mask_nxv1i32_nxv1i8( ; CHECK-LABEL: 
test_vluxseg8_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -2635,6 +2671,7 @@ define @test_vluxseg2_mask_nxv8i16_nxv8i16( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i16.nxv8i16( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2664,6 +2701,7 @@ define @test_vluxseg2_mask_nxv8i16_nxv8i8( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i16.nxv8i8( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2693,6 +2731,7 @@ define @test_vluxseg2_mask_nxv8i16_nxv8i64( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v6, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i16.nxv8i64( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2722,6 +2761,7 @@ define @test_vluxseg2_mask_nxv8i16_nxv8i32( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i16.nxv8i32( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2748,11 +2788,12 @@ entry: define @test_vluxseg3_mask_nxv8i16_nxv8i16( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i16.nxv8i16( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2779,11 +2820,12 @@ entry: define @test_vluxseg3_mask_nxv8i16_nxv8i8( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = 
tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i16.nxv8i8( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2811,9 +2853,10 @@ define @test_vluxseg3_mask_nxv8i16_nxv8i64( ; CHECK-LABEL: test_vluxseg3_mask_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vluxseg3ei64.v v6, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i16.nxv8i64( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2841,9 +2884,10 @@ define @test_vluxseg3_mask_nxv8i16_nxv8i32( ; CHECK-LABEL: test_vluxseg3_mask_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vluxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i16.nxv8i32( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2871,9 +2915,9 @@ define @test_vluxseg4_mask_nxv8i16_nxv8i16( ; CHECK-LABEL: test_vluxseg4_mask_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -2904,9 +2948,9 @@ define @test_vluxseg4_mask_nxv8i16_nxv8i8( ; CHECK-LABEL: test_vluxseg4_mask_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -2937,10 +2981,11 @@ define @test_vluxseg4_mask_nxv8i16_nxv8i64( ; CHECK-LABEL: test_vluxseg4_mask_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v10, v6 +; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vluxseg4ei64.v v6, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv8i16.nxv8i64( %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -2967,12 +3012,13 @@ entry: define @test_vluxseg4_mask_nxv8i16_nxv8i32( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vluxseg4ei32.v v6, (a0), v16, v0.t +; CHECK-NEXT: vluxseg4ei32.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv2r.v v8, v18 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv8i16.nxv8i32( %val, 
%val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -3002,6 +3048,7 @@ define @test_vluxseg2_mask_nxv4i8_nxv4i32( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i8.nxv4i32( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3031,6 +3078,7 @@ define @test_vluxseg2_mask_nxv4i8_nxv4i8( %va ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i8.nxv4i8( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3060,6 +3108,7 @@ define @test_vluxseg2_mask_nxv4i8_nxv4i64( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i8.nxv4i64( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3089,6 +3138,7 @@ define @test_vluxseg2_mask_nxv4i8_nxv4i16( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i8.nxv4i16( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3116,9 +3166,10 @@ define @test_vluxseg3_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vluxseg3_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i8.nxv4i32( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3145,11 +3196,12 @@ entry: define @test_vluxseg3_mask_nxv4i8_nxv4i8( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i8.nxv4i8( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3177,9 +3229,10 @@ define @test_vluxseg3_mask_nxv4i8_nxv4i64( %v ; CHECK-LABEL: test_vluxseg3_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg3ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i8.nxv4i64( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3206,11 +3259,12 @@ entry: define @test_vluxseg3_mask_nxv4i8_nxv4i16( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: 
test_vluxseg3_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i8.nxv4i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3237,12 +3291,13 @@ entry: define @test_vluxseg4_mask_nxv4i8_nxv4i32( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vluxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: vluxseg4ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv4i8.nxv4i32( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3270,9 +3325,9 @@ define @test_vluxseg4_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vluxseg4_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3303,10 +3358,11 @@ define @test_vluxseg4_mask_nxv4i8_nxv4i64( %v ; CHECK-LABEL: test_vluxseg4_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv4i8.nxv4i64( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3334,9 +3390,9 @@ define @test_vluxseg4_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vluxseg4_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3367,10 +3423,10 @@ define @test_vluxseg5_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -3401,10 +3457,10 @@ define 
@test_vluxseg5_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vluxseg5_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3435,11 +3491,12 @@ define @test_vluxseg5_mask_nxv4i8_nxv4i64( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vluxseg5.mask.nxv4i8.nxv4i64( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3467,10 +3524,10 @@ define @test_vluxseg5_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3501,11 +3558,11 @@ define @test_vluxseg6_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -3536,11 +3593,11 @@ define @test_vluxseg6_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vluxseg6_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3570,14 +3627,15 @@ entry: define @test_vluxseg6_mask_nxv4i8_nxv4i64( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg6_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: 
vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vluxseg6ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: vluxseg6ei64.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv1r.v v8, v17 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vluxseg6.mask.nxv4i8.nxv4i64( %val, %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -3605,11 +3663,11 @@ define @test_vluxseg6_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3640,12 +3698,12 @@ define @test_vluxseg7_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -3676,12 +3734,12 @@ define @test_vluxseg7_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vluxseg7_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3712,12 +3770,12 @@ define @test_vluxseg7_mask_nxv4i8_nxv4i64( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -3748,12 +3806,12 @@ define @test_vluxseg7_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 
+; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3784,13 +3842,13 @@ define @test_vluxseg8_mask_nxv4i8_nxv4i32( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -3821,13 +3879,13 @@ define @test_vluxseg8_mask_nxv4i8_nxv4i8( %va ; CHECK-LABEL: test_vluxseg8_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3858,13 +3916,13 @@ define @test_vluxseg8_mask_nxv4i8_nxv4i64( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -3895,13 +3953,13 @@ define @test_vluxseg8_mask_nxv4i8_nxv4i16( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -3934,6 +3992,7 @@ define @test_vluxseg2_mask_nxv1i16_nxv1i64( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i16.nxv1i64( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -3963,6 +4022,7 @@ 
define @test_vluxseg2_mask_nxv1i16_nxv1i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i16.nxv1i32( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -3992,6 +4052,7 @@ define @test_vluxseg2_mask_nxv1i16_nxv1i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i16.nxv1i16( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -4021,6 +4082,7 @@ define @test_vluxseg2_mask_nxv1i16_nxv1i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i16.nxv1i8( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -4047,11 +4109,12 @@ entry: define @test_vluxseg3_mask_nxv1i16_nxv1i64( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i16.nxv1i64( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -4078,11 +4141,12 @@ entry: define @test_vluxseg3_mask_nxv1i16_nxv1i32( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i16.nxv1i32( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -4109,11 +4173,12 @@ entry: define @test_vluxseg3_mask_nxv1i16_nxv1i16( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i16.nxv1i16( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -4140,11 +4205,12 @@ entry: define @test_vluxseg3_mask_nxv1i16_nxv1i8( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: 
vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i16.nxv1i8( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -4172,9 +4238,9 @@ define @test_vluxseg4_mask_nxv1i16_nxv1i64( ; CHECK-LABEL: test_vluxseg4_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4205,9 +4271,9 @@ define @test_vluxseg4_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: test_vluxseg4_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4238,9 +4304,9 @@ define @test_vluxseg4_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vluxseg4_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4271,9 +4337,9 @@ define @test_vluxseg4_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vluxseg4_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4304,10 +4370,10 @@ define @test_vluxseg5_mask_nxv1i16_nxv1i64( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4338,10 +4404,10 @@ define @test_vluxseg5_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4372,10 +4438,10 @@ define 
@test_vluxseg5_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4406,10 +4472,10 @@ define @test_vluxseg5_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vluxseg5_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4440,11 +4506,11 @@ define @test_vluxseg6_mask_nxv1i16_nxv1i64( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4475,11 +4541,11 @@ define @test_vluxseg6_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4510,11 +4576,11 @@ define @test_vluxseg6_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4545,11 +4611,11 @@ define @test_vluxseg6_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vluxseg6_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ 
-4580,12 +4646,12 @@ define @test_vluxseg7_mask_nxv1i16_nxv1i64( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4616,12 +4682,12 @@ define @test_vluxseg7_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4652,12 +4718,12 @@ define @test_vluxseg7_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4688,12 +4754,12 @@ define @test_vluxseg7_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vluxseg7_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4724,13 +4790,13 @@ define @test_vluxseg8_mask_nxv1i16_nxv1i64( ; CHECK-LABEL: test_vluxseg8_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4761,13 +4827,13 @@ define @test_vluxseg8_mask_nxv1i16_nxv1i32( ; CHECK-LABEL: 
test_vluxseg8_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4798,13 +4864,13 @@ define @test_vluxseg8_mask_nxv1i16_nxv1i16( ; CHECK-LABEL: test_vluxseg8_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4835,13 +4901,13 @@ define @test_vluxseg8_mask_nxv1i16_nxv1i8( ; CHECK-LABEL: test_vluxseg8_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -4874,6 +4940,7 @@ define @test_vluxseg2_mask_nxv2i32_nxv2i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i32.nxv2i32( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -4903,6 +4970,7 @@ define @test_vluxseg2_mask_nxv2i32_nxv2i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i32.nxv2i8( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -4932,6 +5000,7 @@ define @test_vluxseg2_mask_nxv2i32_nxv2i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i32.nxv2i16( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -4961,6 +5030,7 @@ define @test_vluxseg2_mask_nxv2i32_nxv2i64( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed 
$v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i32.nxv2i64( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -4987,11 +5057,12 @@ entry: define @test_vluxseg3_mask_nxv2i32_nxv2i32( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i32.nxv2i32( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -5018,11 +5089,12 @@ entry: define @test_vluxseg3_mask_nxv2i32_nxv2i8( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i32.nxv2i8( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -5049,11 +5121,12 @@ entry: define @test_vluxseg3_mask_nxv2i32_nxv2i16( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i32.nxv2i16( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -5081,9 +5154,10 @@ define @test_vluxseg3_mask_nxv2i32_nxv2i64( ; CHECK-LABEL: test_vluxseg3_mask_nxv2i32_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i32.nxv2i64( %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -5111,9 +5185,9 @@ define @test_vluxseg4_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vluxseg4_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5144,9 +5218,9 @@ define @test_vluxseg4_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vluxseg4_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: 
vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5177,9 +5251,9 @@ define @test_vluxseg4_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vluxseg4_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5209,12 +5283,13 @@ entry: define @test_vluxseg4_mask_nxv2i32_nxv2i64( %val, i32* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv2i32_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vluxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: vluxseg4ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv2i32.nxv2i64( %val, %val, %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -5242,10 +5317,10 @@ define @test_vluxseg5_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vluxseg5_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5276,10 +5351,10 @@ define @test_vluxseg5_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vluxseg5_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5310,10 +5385,10 @@ define @test_vluxseg5_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vluxseg5_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5344,10 +5419,10 @@ define @test_vluxseg5_mask_nxv2i32_nxv2i64( ; CHECK-LABEL: test_vluxseg5_mask_nxv2i32_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: 
vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg5ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -5378,11 +5453,11 @@ define @test_vluxseg6_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vluxseg6_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5413,11 +5488,11 @@ define @test_vluxseg6_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vluxseg6_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5448,11 +5523,11 @@ define @test_vluxseg6_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vluxseg6_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5483,11 +5558,11 @@ define @test_vluxseg6_mask_nxv2i32_nxv2i64( ; CHECK-LABEL: test_vluxseg6_mask_nxv2i32_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg6ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -5518,12 +5593,12 @@ define @test_vluxseg7_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vluxseg7_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5554,12 +5629,12 @@ define @test_vluxseg7_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vluxseg7_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: 
vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5590,12 +5665,12 @@ define @test_vluxseg7_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vluxseg7_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5626,12 +5701,12 @@ define @test_vluxseg7_mask_nxv2i32_nxv2i64( ; CHECK-LABEL: test_vluxseg7_mask_nxv2i32_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg7ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -5662,13 +5737,13 @@ define @test_vluxseg8_mask_nxv2i32_nxv2i32( ; CHECK-LABEL: test_vluxseg8_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5699,13 +5774,13 @@ define @test_vluxseg8_mask_nxv2i32_nxv2i8( ; CHECK-LABEL: test_vluxseg8_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5736,13 +5811,13 @@ define @test_vluxseg8_mask_nxv2i32_nxv2i16( ; CHECK-LABEL: test_vluxseg8_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; 
CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -5773,13 +5848,13 @@ define @test_vluxseg8_mask_nxv2i32_nxv2i64( ; CHECK-LABEL: test_vluxseg8_mask_nxv2i32_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vluxseg8ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -5812,6 +5887,7 @@ define @test_vluxseg2_mask_nxv8i8_nxv8i16( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i8.nxv8i16( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -5841,6 +5917,7 @@ define @test_vluxseg2_mask_nxv8i8_nxv8i8( %va ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i8.nxv8i8( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -5870,6 +5947,7 @@ define @test_vluxseg2_mask_nxv8i8_nxv8i64( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i8.nxv8i64( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -5899,6 +5977,7 @@ define @test_vluxseg2_mask_nxv8i8_nxv8i32( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i8.nxv8i32( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -5926,9 +6005,10 @@ define @test_vluxseg3_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vluxseg3_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i8.nxv8i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -5955,11 +6035,12 @@ entry: define @test_vluxseg3_mask_nxv8i8_nxv8i8( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v 
v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i8.nxv8i8( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -5987,9 +6068,10 @@ define @test_vluxseg3_mask_nxv8i8_nxv8i64( %v ; CHECK-LABEL: test_vluxseg3_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg3ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i8.nxv8i64( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6017,9 +6099,10 @@ define @test_vluxseg3_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vluxseg3_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8i8.nxv8i32( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6046,12 +6129,13 @@ entry: define @test_vluxseg4_mask_nxv8i8_nxv8i16( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu -; CHECK-NEXT: vluxseg4ei16.v v7, (a0), v12, v0.t +; CHECK-NEXT: vluxseg4ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv8i8.nxv8i16( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6079,9 +6163,9 @@ define @test_vluxseg4_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vluxseg4_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6112,10 +6196,11 @@ define @test_vluxseg4_mask_nxv8i8_nxv8i64( %v ; CHECK-LABEL: test_vluxseg4_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg4ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv8i8.nxv8i64( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6143,10 +6228,11 @@ define @test_vluxseg4_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vluxseg4_mask_nxv8i8_nxv8i32: 
; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6174,10 +6260,10 @@ define @test_vluxseg5_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg5ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -6208,10 +6294,10 @@ define @test_vluxseg5_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vluxseg5_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6242,11 +6328,12 @@ define @test_vluxseg5_mask_nxv8i8_nxv8i64( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg5ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vluxseg5.mask.nxv8i8.nxv8i64( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6274,11 +6361,12 @@ define @test_vluxseg5_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg5ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vluxseg5.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6306,11 +6394,11 @@ define @test_vluxseg6_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg6ei16.v v12, (a0), v10, 
v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -6341,11 +6429,11 @@ define @test_vluxseg6_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vluxseg6_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6376,12 +6464,13 @@ define @test_vluxseg6_mask_nxv8i8_nxv8i64( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg6ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vluxseg6.mask.nxv8i8.nxv8i64( %val, %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6408,14 +6497,15 @@ entry: define @test_vluxseg6_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg6_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu -; CHECK-NEXT: vluxseg6ei32.v v7, (a0), v16, v0.t +; CHECK-NEXT: vluxseg6ei32.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv1r.v v8, v17 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vluxseg6.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6443,12 +6533,12 @@ define @test_vluxseg7_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg7ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -6479,12 +6569,12 @@ define @test_vluxseg7_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vluxseg7_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v 
v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6515,13 +6605,14 @@ define @test_vluxseg7_mask_nxv8i8_nxv8i64( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg7ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vluxseg7.mask.nxv8i8.nxv8i64( %val, %val, %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6549,12 +6640,12 @@ define @test_vluxseg7_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg7ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -6585,13 +6676,13 @@ define @test_vluxseg8_mask_nxv8i8_nxv8i16( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -6622,13 +6713,13 @@ define @test_vluxseg8_mask_nxv8i8_nxv8i8( %va ; CHECK-LABEL: test_vluxseg8_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -6659,14 +6750,15 @@ define @test_vluxseg8_mask_nxv8i8_nxv8i64( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 
+; CHECK-NEXT: vmv1r.v v11, v7 +; CHECK-NEXT: vmv1r.v v12, v7 +; CHECK-NEXT: vmv1r.v v13, v7 +; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg8ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vluxseg8.mask.nxv8i8.nxv8i64( %val, %val, %val, %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -6694,13 +6786,13 @@ define @test_vluxseg8_mask_nxv8i8_nxv8i32( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vluxseg8ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -6733,6 +6825,7 @@ define @test_vluxseg2_mask_nxv4i64_nxv4i32( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i64.nxv4i32( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -6762,6 +6855,7 @@ define @test_vluxseg2_mask_nxv4i64_nxv4i8( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i64.nxv4i8( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -6791,6 +6885,7 @@ define @test_vluxseg2_mask_nxv4i64_nxv4i64( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i64.nxv4i64( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -6820,6 +6915,7 @@ define @test_vluxseg2_mask_nxv4i64_nxv4i16( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i64.nxv4i16( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -6849,6 +6945,7 @@ define @test_vluxseg2_mask_nxv4i16_nxv4i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i16.nxv4i32( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -6878,6 +6975,7 @@ define @test_vluxseg2_mask_nxv4i16_nxv4i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i16.nxv4i8( %val, %val, i16* 
%base, %index, %mask, i64 %vl, i64 1) @@ -6907,6 +7005,7 @@ define @test_vluxseg2_mask_nxv4i16_nxv4i64( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i16.nxv4i64( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -6936,6 +7035,7 @@ define @test_vluxseg2_mask_nxv4i16_nxv4i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv4i16.nxv4i16( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -6963,9 +7063,10 @@ define @test_vluxseg3_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vluxseg3_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i16.nxv4i32( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -6992,11 +7093,12 @@ entry: define @test_vluxseg3_mask_nxv4i16_nxv4i8( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i16.nxv4i8( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7024,9 +7126,10 @@ define @test_vluxseg3_mask_nxv4i16_nxv4i64( ; CHECK-LABEL: test_vluxseg3_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg3ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i16.nxv4i64( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7053,11 +7156,12 @@ entry: define @test_vluxseg3_mask_nxv4i16_nxv4i16( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4i16.nxv4i16( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7084,12 +7188,13 @@ entry: define @test_vluxseg4_mask_nxv4i16_nxv4i32( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: 
vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vluxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: vluxseg4ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv4i16.nxv4i32( %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7117,9 +7222,9 @@ define @test_vluxseg4_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vluxseg4_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7150,10 +7255,11 @@ define @test_vluxseg4_mask_nxv4i16_nxv4i64( ; CHECK-LABEL: test_vluxseg4_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv4i16.nxv4i64( %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7181,9 +7287,9 @@ define @test_vluxseg4_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vluxseg4_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7214,10 +7320,10 @@ define @test_vluxseg5_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vluxseg5_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -7248,10 +7354,10 @@ define @test_vluxseg5_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vluxseg5_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7282,11 +7388,12 @@ define @test_vluxseg5_mask_nxv4i16_nxv4i64( ; CHECK-LABEL: test_vluxseg5_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 +; CHECK-NEXT: vmv1r.v v9, v7 +; CHECK-NEXT: 
vmv1r.v v10, v7 +; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg5ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vluxseg5.mask.nxv4i16.nxv4i64( %val, %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7314,10 +7421,10 @@ define @test_vluxseg5_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vluxseg5_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7348,11 +7455,11 @@ define @test_vluxseg6_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vluxseg6_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg6ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -7383,11 +7490,11 @@ define @test_vluxseg6_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vluxseg6_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7417,14 +7524,15 @@ entry: define @test_vluxseg6_mask_nxv4i16_nxv4i64( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg6_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vluxseg6ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: vluxseg6ei64.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv1r.v v8, v17 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vluxseg6.mask.nxv4i16.nxv4i64( %val, %val, %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -7452,11 +7560,11 @@ define @test_vluxseg6_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vluxseg6_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; 
CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7487,12 +7595,12 @@ define @test_vluxseg7_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vluxseg7_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -7523,12 +7631,12 @@ define @test_vluxseg7_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vluxseg7_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7559,12 +7667,12 @@ define @test_vluxseg7_mask_nxv4i16_nxv4i64( ; CHECK-LABEL: test_vluxseg7_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg7ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -7595,12 +7703,12 @@ define @test_vluxseg7_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vluxseg7_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7631,13 +7739,13 @@ define @test_vluxseg8_mask_nxv4i16_nxv4i32( ; CHECK-LABEL: test_vluxseg8_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; 
CHECK-NEXT: vluxseg8ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -7668,13 +7776,13 @@ define @test_vluxseg8_mask_nxv4i16_nxv4i8( ; CHECK-LABEL: test_vluxseg8_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7705,13 +7813,13 @@ define @test_vluxseg8_mask_nxv4i16_nxv4i64( ; CHECK-LABEL: test_vluxseg8_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg8ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: vmv1r.v v8, v17 @@ -7742,13 +7850,13 @@ define @test_vluxseg8_mask_nxv4i16_nxv4i16( ; CHECK-LABEL: test_vluxseg8_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -7781,6 +7889,7 @@ define @test_vluxseg2_mask_nxv1i8_nxv1i64( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i8.nxv1i64( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7810,6 +7919,7 @@ define @test_vluxseg2_mask_nxv1i8_nxv1i32( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i8.nxv1i32( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7839,6 +7949,7 @@ define @test_vluxseg2_mask_nxv1i8_nxv1i16( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i8.nxv1i16( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7868,6 +7979,7 @@ define @test_vluxseg2_mask_nxv1i8_nxv1i8( %va ; 
CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv1i8.nxv1i8( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7894,11 +8006,12 @@ entry: define @test_vluxseg3_mask_nxv1i8_nxv1i64( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu -; CHECK-NEXT: vluxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i8.nxv1i64( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7925,11 +8038,12 @@ entry: define @test_vluxseg3_mask_nxv1i8_nxv1i32( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i8.nxv1i32( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7956,11 +8070,12 @@ entry: define @test_vluxseg3_mask_nxv1i8_nxv1i16( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i8.nxv1i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -7987,11 +8102,12 @@ entry: define @test_vluxseg3_mask_nxv1i8_nxv1i8( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1i8.nxv1i8( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8019,9 +8135,9 @@ define @test_vluxseg4_mask_nxv1i8_nxv1i64( %v ; CHECK-LABEL: test_vluxseg4_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; 
CHECK-NEXT: vluxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8052,9 +8168,9 @@ define @test_vluxseg4_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vluxseg4_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8085,9 +8201,9 @@ define @test_vluxseg4_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vluxseg4_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8118,9 +8234,9 @@ define @test_vluxseg4_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vluxseg4_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8151,10 +8267,10 @@ define @test_vluxseg5_mask_nxv1i8_nxv1i64( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8185,10 +8301,10 @@ define @test_vluxseg5_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8219,10 +8335,10 @@ define @test_vluxseg5_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8253,10 +8369,10 @@ define @test_vluxseg5_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vluxseg5_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; 
CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8287,11 +8403,11 @@ define @test_vluxseg6_mask_nxv1i8_nxv1i64( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8322,11 +8438,11 @@ define @test_vluxseg6_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8357,11 +8473,11 @@ define @test_vluxseg6_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8392,11 +8508,11 @@ define @test_vluxseg6_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vluxseg6_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8427,12 +8543,12 @@ define @test_vluxseg7_mask_nxv1i8_nxv1i64( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8463,12 +8579,12 @@ define @test_vluxseg7_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; 
CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8499,12 +8615,12 @@ define @test_vluxseg7_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8535,12 +8651,12 @@ define @test_vluxseg7_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vluxseg7_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8571,13 +8687,13 @@ define @test_vluxseg8_mask_nxv1i8_nxv1i64( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8608,13 +8724,13 @@ define @test_vluxseg8_mask_nxv1i8_nxv1i32( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8645,13 +8761,13 @@ define @test_vluxseg8_mask_nxv1i8_nxv1i16( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v 
v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8682,13 +8798,13 @@ define @test_vluxseg8_mask_nxv1i8_nxv1i8( %va ; CHECK-LABEL: test_vluxseg8_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8721,6 +8837,7 @@ define @test_vluxseg2_mask_nxv2i8_nxv2i32( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i8.nxv2i32( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8750,6 +8867,7 @@ define @test_vluxseg2_mask_nxv2i8_nxv2i8( %va ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i8.nxv2i8( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8779,6 +8897,7 @@ define @test_vluxseg2_mask_nxv2i8_nxv2i16( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i8.nxv2i16( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8808,6 +8927,7 @@ define @test_vluxseg2_mask_nxv2i8_nxv2i64( %v ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i8.nxv2i64( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8834,11 +8954,12 @@ entry: define @test_vluxseg3_mask_nxv2i8_nxv2i32( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i8.nxv2i32( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ 
-8865,11 +8986,12 @@ entry: define @test_vluxseg3_mask_nxv2i8_nxv2i8( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i8.nxv2i8( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8896,11 +9018,12 @@ entry: define @test_vluxseg3_mask_nxv2i8_nxv2i16( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i8.nxv2i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8928,9 +9051,10 @@ define @test_vluxseg3_mask_nxv2i8_nxv2i64( %v ; CHECK-LABEL: test_vluxseg3_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i8.nxv2i64( %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -8958,9 +9082,9 @@ define @test_vluxseg4_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vluxseg4_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -8991,9 +9115,9 @@ define @test_vluxseg4_mask_nxv2i8_nxv2i8( %va ; CHECK-LABEL: test_vluxseg4_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9024,9 +9148,9 @@ define @test_vluxseg4_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vluxseg4_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9056,12 +9180,13 @@ entry: define @test_vluxseg4_mask_nxv2i8_nxv2i64( %val, i8* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: 
test_vluxseg4_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu -; CHECK-NEXT: vluxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: vluxseg4ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv2i8.nxv2i64( %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -9089,10 +9214,10 @@ define @test_vluxseg5_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9123,10 +9248,10 @@ define @test_vluxseg5_mask_nxv2i8_nxv2i8( %va ; CHECK-LABEL: test_vluxseg5_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9157,10 +9282,10 @@ define @test_vluxseg5_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9191,10 +9316,10 @@ define @test_vluxseg5_mask_nxv2i8_nxv2i64( %v ; CHECK-LABEL: test_vluxseg5_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg5ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -9225,11 +9350,11 @@ define @test_vluxseg6_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9260,11 +9385,11 @@ define @test_vluxseg6_mask_nxv2i8_nxv2i8( 
%va ; CHECK-LABEL: test_vluxseg6_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9295,11 +9420,11 @@ define @test_vluxseg6_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9330,11 +9455,11 @@ define @test_vluxseg6_mask_nxv2i8_nxv2i64( %v ; CHECK-LABEL: test_vluxseg6_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg6ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -9365,12 +9490,12 @@ define @test_vluxseg7_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9401,12 +9526,12 @@ define @test_vluxseg7_mask_nxv2i8_nxv2i8( %va ; CHECK-LABEL: test_vluxseg7_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9437,12 +9562,12 @@ define @test_vluxseg7_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; 
CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9473,12 +9598,12 @@ define @test_vluxseg7_mask_nxv2i8_nxv2i64( %v ; CHECK-LABEL: test_vluxseg7_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg7ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -9509,13 +9634,13 @@ define @test_vluxseg8_mask_nxv2i8_nxv2i32( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9546,13 +9671,13 @@ define @test_vluxseg8_mask_nxv2i8_nxv2i8( %va ; CHECK-LABEL: test_vluxseg8_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9583,13 +9708,13 @@ define @test_vluxseg8_mask_nxv2i8_nxv2i16( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -9620,13 +9745,13 @@ define @test_vluxseg8_mask_nxv2i8_nxv2i64( %v ; CHECK-LABEL: test_vluxseg8_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v 
v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vluxseg8ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -9659,6 +9784,7 @@ define @test_vluxseg2_mask_nxv8i32_nxv8i16( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i32.nxv8i16( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -9688,6 +9814,7 @@ define @test_vluxseg2_mask_nxv8i32_nxv8i8( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i32.nxv8i8( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -9717,6 +9844,7 @@ define @test_vluxseg2_mask_nxv8i32_nxv8i64( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v4, (a0), v16, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i32.nxv8i64( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -9746,6 +9874,7 @@ define @test_vluxseg2_mask_nxv8i32_nxv8i32( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv8i32.nxv8i32( %val, %val, i32* %base, %index, %mask, i64 %vl, i64 1) @@ -9775,6 +9904,7 @@ define @test_vluxseg2_mask_nxv32i8_nxv32i16(,} @llvm.riscv.vluxseg2.mask.nxv32i8.nxv32i16( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -9804,6 +9934,7 @@ define @test_vluxseg2_mask_nxv32i8_nxv32i8( ; CHECK-NEXT: vmv4r.v v4, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v4, (a0), v12, v0.t +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv32i8.nxv32i8( %val, %val, i8* %base, %index, %mask, i64 %vl, i64 1) @@ -9833,6 +9964,7 @@ define @test_vluxseg2_mask_nxv2i16_nxv2i32( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i16.nxv2i32( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -9862,6 +9994,7 @@ define @test_vluxseg2_mask_nxv2i16_nxv2i8( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i16.nxv2i8( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -9891,6 +10024,7 @@ define @test_vluxseg2_mask_nxv2i16_nxv2i16( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v7, (a0), v9, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 
killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i16.nxv2i16( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -9920,6 +10054,7 @@ define @test_vluxseg2_mask_nxv2i16_nxv2i64( ; CHECK-NEXT: vmv1r.v v7, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i16.nxv2i64( %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -9946,11 +10081,12 @@ entry: define @test_vluxseg3_mask_nxv2i16_nxv2i32( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i16.nxv2i32( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -9977,11 +10113,12 @@ entry: define @test_vluxseg3_mask_nxv2i16_nxv2i8( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i16.nxv2i8( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -10008,11 +10145,12 @@ entry: define @test_vluxseg3_mask_nxv2i16_nxv2i16( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i16.nxv2i16( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -10040,9 +10178,10 @@ define @test_vluxseg3_mask_nxv2i16_nxv2i64( ; CHECK-LABEL: test_vluxseg3_mask_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i16.nxv2i64( %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -10070,9 +10209,9 @@ define @test_vluxseg4_mask_nxv2i16_nxv2i32( ; CHECK-LABEL: test_vluxseg4_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, 
v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10103,9 +10242,9 @@ define @test_vluxseg4_mask_nxv2i16_nxv2i8( ; CHECK-LABEL: test_vluxseg4_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10136,9 +10275,9 @@ define @test_vluxseg4_mask_nxv2i16_nxv2i16( ; CHECK-LABEL: test_vluxseg4_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10168,12 +10307,13 @@ entry: define @test_vluxseg4_mask_nxv2i16_nxv2i64( %val, i16* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: vluxseg4ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv2i16.nxv2i64( %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl, i64 1) @@ -10201,10 +10341,10 @@ define @test_vluxseg5_mask_nxv2i16_nxv2i32( ; CHECK-LABEL: test_vluxseg5_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10235,10 +10375,10 @@ define @test_vluxseg5_mask_nxv2i16_nxv2i8( ; CHECK-LABEL: test_vluxseg5_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10269,10 +10409,10 @@ define @test_vluxseg5_mask_nxv2i16_nxv2i16( ; CHECK-LABEL: test_vluxseg5_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei16.v 
v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10303,10 +10443,10 @@ define @test_vluxseg5_mask_nxv2i16_nxv2i64( ; CHECK-LABEL: test_vluxseg5_mask_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg5ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -10337,11 +10477,11 @@ define @test_vluxseg6_mask_nxv2i16_nxv2i32( ; CHECK-LABEL: test_vluxseg6_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10372,11 +10512,11 @@ define @test_vluxseg6_mask_nxv2i16_nxv2i8( ; CHECK-LABEL: test_vluxseg6_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10407,11 +10547,11 @@ define @test_vluxseg6_mask_nxv2i16_nxv2i16( ; CHECK-LABEL: test_vluxseg6_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10442,11 +10582,11 @@ define @test_vluxseg6_mask_nxv2i16_nxv2i64( ; CHECK-LABEL: test_vluxseg6_mask_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg6ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -10477,12 +10617,12 @@ define @test_vluxseg7_mask_nxv2i16_nxv2i32( ; CHECK-LABEL: test_vluxseg7_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; 
CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10513,12 +10653,12 @@ define @test_vluxseg7_mask_nxv2i16_nxv2i8( ; CHECK-LABEL: test_vluxseg7_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10549,12 +10689,12 @@ define @test_vluxseg7_mask_nxv2i16_nxv2i16( ; CHECK-LABEL: test_vluxseg7_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10585,12 +10725,12 @@ define @test_vluxseg7_mask_nxv2i16_nxv2i64( ; CHECK-LABEL: test_vluxseg7_mask_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg7ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -10621,13 +10761,13 @@ define @test_vluxseg8_mask_nxv2i16_nxv2i32( ; CHECK-LABEL: test_vluxseg8_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10658,13 +10798,13 @@ define @test_vluxseg8_mask_nxv2i16_nxv2i8( ; CHECK-LABEL: test_vluxseg8_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: 
vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10695,13 +10835,13 @@ define @test_vluxseg8_mask_nxv2i16_nxv2i16( ; CHECK-LABEL: test_vluxseg8_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: vmv1r.v v8, v11 @@ -10732,13 +10872,13 @@ define @test_vluxseg8_mask_nxv2i16_nxv2i64( ; CHECK-LABEL: test_vluxseg8_mask_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vluxseg8ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv1r.v v8, v13 @@ -10771,6 +10911,7 @@ define @test_vluxseg2_mask_nxv2i64_nxv2i32( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vluxseg2ei32.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i64.nxv2i32( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10800,6 +10941,7 @@ define @test_vluxseg2_mask_nxv2i64_nxv2i8( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vluxseg2ei8.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i64.nxv2i8( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10829,6 +10971,7 @@ define @test_vluxseg2_mask_nxv2i64_nxv2i16( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vluxseg2ei16.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i64.nxv2i16( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10858,6 +11001,7 @@ define @test_vluxseg2_mask_nxv2i64_nxv2i64( ; CHECK-NEXT: vmv2r.v v6, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vluxseg2ei64.v v6, (a0), v10, v0.t +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vluxseg2.mask.nxv2i64.nxv2i64( %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10884,11 +11028,12 @@ entry: define @test_vluxseg3_mask_nxv2i64_nxv2i32( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i64_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v 
v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i64.nxv2i32( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10915,11 +11060,12 @@ entry: define @test_vluxseg3_mask_nxv2i64_nxv2i8( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i64_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i64.nxv2i8( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10946,11 +11092,12 @@ entry: define @test_vluxseg3_mask_nxv2i64_nxv2i16( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i64_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i64.nxv2i16( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -10977,11 +11124,12 @@ entry: define @test_vluxseg3_mask_nxv2i64_nxv2i64( %val, i64* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2i64_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vluxseg3ei64.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2i64.nxv2i64( %val, %val, %val, i64* %base, %index, %mask, i64 %vl, i64 1) @@ -11009,9 +11157,9 @@ define @test_vluxseg4_mask_nxv2i64_nxv2i32( ; CHECK-LABEL: test_vluxseg4_mask_nxv2i64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vluxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -11042,9 +11190,9 @@ define @test_vluxseg4_mask_nxv2i64_nxv2i8( ; CHECK-LABEL: test_vluxseg4_mask_nxv2i64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vluxseg4ei8.v v12, (a0), v10, 
v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -11075,9 +11223,9 @@ define @test_vluxseg4_mask_nxv2i64_nxv2i16( ; CHECK-LABEL: test_vluxseg4_mask_nxv2i64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vluxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -11108,9 +11256,9 @@ define @test_vluxseg4_mask_nxv2i64_nxv2i64( ; CHECK-LABEL: test_vluxseg4_mask_nxv2i64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vluxseg4ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: vmv2r.v v8, v14 @@ -11143,6 +11291,7 @@ define @test_vluxseg2_mask_nxv16f16_nxv16i16(,} @llvm.riscv.vluxseg2.mask.nxv16f16.nxv16i16( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -11172,6 +11321,7 @@ define @test_vluxseg2_mask_nxv16f16_nxv16i8(,} @llvm.riscv.vluxseg2.mask.nxv16f16.nxv16i8( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -11201,6 +11351,7 @@ define @test_vluxseg2_mask_nxv16f16_nxv16i32(,} @llvm.riscv.vluxseg2.mask.nxv16f16.nxv16i32( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -11230,6 +11381,7 @@ define @test_vluxseg2_mask_nxv4f64_nxv4i32(,} @llvm.riscv.vluxseg2.mask.nxv4f64.nxv4i32( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11259,6 +11411,7 @@ define @test_vluxseg2_mask_nxv4f64_nxv4i8(,} @llvm.riscv.vluxseg2.mask.nxv4f64.nxv4i8( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11288,6 +11441,7 @@ define @test_vluxseg2_mask_nxv4f64_nxv4i64(,} @llvm.riscv.vluxseg2.mask.nxv4f64.nxv4i64( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11317,6 +11471,7 @@ define @test_vluxseg2_mask_nxv4f64_nxv4i16(,} @llvm.riscv.vluxseg2.mask.nxv4f64.nxv4i16( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11346,6 +11501,7 @@ define @test_vluxseg2_mask_nxv1f64_nxv1i64(,} @llvm.riscv.vluxseg2.mask.nxv1f64.nxv1i64( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11375,6 +11531,7 @@ define @test_vluxseg2_mask_nxv1f64_nxv1i32(,} @llvm.riscv.vluxseg2.mask.nxv1f64.nxv1i32( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11404,6 +11561,7 @@ define @test_vluxseg2_mask_nxv1f64_nxv1i16(,} @llvm.riscv.vluxseg2.mask.nxv1f64.nxv1i16( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11433,6 +11591,7 @@ define @test_vluxseg2_mask_nxv1f64_nxv1i8(,} @llvm.riscv.vluxseg2.mask.nxv1f64.nxv1i8( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11459,11 +11618,12 @@ entry: define @test_vluxseg3_mask_nxv1f64_nxv1i64( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vluxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} 
@llvm.riscv.vluxseg3.mask.nxv1f64.nxv1i64( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11490,11 +11650,12 @@ entry: define @test_vluxseg3_mask_nxv1f64_nxv1i32( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f64.nxv1i32( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11521,11 +11682,12 @@ entry: define @test_vluxseg3_mask_nxv1f64_nxv1i16( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f64.nxv1i16( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11552,11 +11714,12 @@ entry: define @test_vluxseg3_mask_nxv1f64_nxv1i8( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f64.nxv1i8( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -11584,9 +11747,9 @@ define @test_vluxseg4_mask_nxv1f64_nxv1i64( @test_vluxseg4_mask_nxv1f64_nxv1i32( @test_vluxseg4_mask_nxv1f64_nxv1i16( @test_vluxseg4_mask_nxv1f64_nxv1i8( @test_vluxseg5_mask_nxv1f64_nxv1i64( @test_vluxseg5_mask_nxv1f64_nxv1i32( @test_vluxseg5_mask_nxv1f64_nxv1i16( @test_vluxseg5_mask_nxv1f64_nxv1i8( @test_vluxseg6_mask_nxv1f64_nxv1i64( @test_vluxseg6_mask_nxv1f64_nxv1i32( @test_vluxseg6_mask_nxv1f64_nxv1i16( @test_vluxseg6_mask_nxv1f64_nxv1i8( @test_vluxseg7_mask_nxv1f64_nxv1i64( @test_vluxseg7_mask_nxv1f64_nxv1i32( @test_vluxseg7_mask_nxv1f64_nxv1i16( @test_vluxseg7_mask_nxv1f64_nxv1i8( @test_vluxseg8_mask_nxv1f64_nxv1i64( @test_vluxseg8_mask_nxv1f64_nxv1i32( @test_vluxseg8_mask_nxv1f64_nxv1i16( @test_vluxseg8_mask_nxv1f64_nxv1i8( @test_vluxseg2_mask_nxv2f32_nxv2i32(,} @llvm.riscv.vluxseg2.mask.nxv2f32.nxv2i32( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12315,6 +12479,7 @@ define @test_vluxseg2_mask_nxv2f32_nxv2i8(,} @llvm.riscv.vluxseg2.mask.nxv2f32.nxv2i8( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12344,6 +12509,7 @@ define @test_vluxseg2_mask_nxv2f32_nxv2i16(,} @llvm.riscv.vluxseg2.mask.nxv2f32.nxv2i16( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12373,6 +12539,7 @@ define 
@test_vluxseg2_mask_nxv2f32_nxv2i64(,} @llvm.riscv.vluxseg2.mask.nxv2f32.nxv2i64( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12399,11 +12566,12 @@ entry: define @test_vluxseg3_mask_nxv2f32_nxv2i32( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f32.nxv2i32( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12430,11 +12598,12 @@ entry: define @test_vluxseg3_mask_nxv2f32_nxv2i8( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f32.nxv2i8( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12461,11 +12630,12 @@ entry: define @test_vluxseg3_mask_nxv2f32_nxv2i16( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f32.nxv2i16( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12493,9 +12663,10 @@ define @test_vluxseg3_mask_nxv2f32_nxv2i64(,,} @llvm.riscv.vluxseg3.mask.nxv2f32.nxv2i64( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12523,9 +12694,9 @@ define @test_vluxseg4_mask_nxv2f32_nxv2i32( @test_vluxseg4_mask_nxv2f32_nxv2i8( @test_vluxseg4_mask_nxv2f32_nxv2i16( @test_vluxseg4_mask_nxv2f32_nxv2i64( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv2f32_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu -; CHECK-NEXT: vluxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: vluxseg4ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv2f32.nxv2i64( %val, %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -12654,10 +12826,10 @@ define @test_vluxseg5_mask_nxv2f32_nxv2i32( @test_vluxseg5_mask_nxv2f32_nxv2i8( @test_vluxseg5_mask_nxv2f32_nxv2i16( @test_vluxseg5_mask_nxv2f32_nxv2i64( 
@test_vluxseg6_mask_nxv2f32_nxv2i32( @test_vluxseg6_mask_nxv2f32_nxv2i8( @test_vluxseg6_mask_nxv2f32_nxv2i16( @test_vluxseg6_mask_nxv2f32_nxv2i64( @test_vluxseg7_mask_nxv2f32_nxv2i32( @test_vluxseg7_mask_nxv2f32_nxv2i8( @test_vluxseg7_mask_nxv2f32_nxv2i16( @test_vluxseg7_mask_nxv2f32_nxv2i64( @test_vluxseg8_mask_nxv2f32_nxv2i32( @test_vluxseg8_mask_nxv2f32_nxv2i8( @test_vluxseg8_mask_nxv2f32_nxv2i16( @test_vluxseg8_mask_nxv2f32_nxv2i64( @test_vluxseg2_mask_nxv1f16_nxv1i64(,} @llvm.riscv.vluxseg2.mask.nxv1f16.nxv1i64( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13253,6 +13426,7 @@ define @test_vluxseg2_mask_nxv1f16_nxv1i32(,} @llvm.riscv.vluxseg2.mask.nxv1f16.nxv1i32( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13282,6 +13456,7 @@ define @test_vluxseg2_mask_nxv1f16_nxv1i16(,} @llvm.riscv.vluxseg2.mask.nxv1f16.nxv1i16( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13311,6 +13486,7 @@ define @test_vluxseg2_mask_nxv1f16_nxv1i8(,} @llvm.riscv.vluxseg2.mask.nxv1f16.nxv1i8( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13337,11 +13513,12 @@ entry: define @test_vluxseg3_mask_nxv1f16_nxv1i64( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f16.nxv1i64( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13368,11 +13545,12 @@ entry: define @test_vluxseg3_mask_nxv1f16_nxv1i32( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f16.nxv1i32( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13399,11 +13577,12 @@ entry: define @test_vluxseg3_mask_nxv1f16_nxv1i16( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f16.nxv1i16( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13430,11 +13609,12 @@ entry: define @test_vluxseg3_mask_nxv1f16_nxv1i8( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; 
CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f16.nxv1i8( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -13462,9 +13642,9 @@ define @test_vluxseg4_mask_nxv1f16_nxv1i64( @test_vluxseg4_mask_nxv1f16_nxv1i32( @test_vluxseg4_mask_nxv1f16_nxv1i16( @test_vluxseg4_mask_nxv1f16_nxv1i8( @test_vluxseg5_mask_nxv1f16_nxv1i64( @test_vluxseg5_mask_nxv1f16_nxv1i32( @test_vluxseg5_mask_nxv1f16_nxv1i16( @test_vluxseg5_mask_nxv1f16_nxv1i8( @test_vluxseg6_mask_nxv1f16_nxv1i64( @test_vluxseg6_mask_nxv1f16_nxv1i32( @test_vluxseg6_mask_nxv1f16_nxv1i16( @test_vluxseg6_mask_nxv1f16_nxv1i8( @test_vluxseg7_mask_nxv1f16_nxv1i64( @test_vluxseg7_mask_nxv1f16_nxv1i32( @test_vluxseg7_mask_nxv1f16_nxv1i16( @test_vluxseg7_mask_nxv1f16_nxv1i8( @test_vluxseg8_mask_nxv1f16_nxv1i64( @test_vluxseg8_mask_nxv1f16_nxv1i32( @test_vluxseg8_mask_nxv1f16_nxv1i16( @test_vluxseg8_mask_nxv1f16_nxv1i8( @test_vluxseg2_mask_nxv1f32_nxv1i64(,} @llvm.riscv.vluxseg2.mask.nxv1f32.nxv1i64( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14193,6 +14374,7 @@ define @test_vluxseg2_mask_nxv1f32_nxv1i32(,} @llvm.riscv.vluxseg2.mask.nxv1f32.nxv1i32( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14222,6 +14404,7 @@ define @test_vluxseg2_mask_nxv1f32_nxv1i16(,} @llvm.riscv.vluxseg2.mask.nxv1f32.nxv1i16( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14251,6 +14434,7 @@ define @test_vluxseg2_mask_nxv1f32_nxv1i8(,} @llvm.riscv.vluxseg2.mask.nxv1f32.nxv1i8( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14277,11 +14461,12 @@ entry: define @test_vluxseg3_mask_nxv1f32_nxv1i64( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei64.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei64.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f32.nxv1i64( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14308,11 +14493,12 @@ entry: define @test_vluxseg3_mask_nxv1f32_nxv1i32( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f32.nxv1i32( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14339,11 +14525,12 @@ entry: define @test_vluxseg3_mask_nxv1f32_nxv1i16( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v 
v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f32.nxv1i16( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14370,11 +14557,12 @@ entry: define @test_vluxseg3_mask_nxv1f32_nxv1i8( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv1f32.nxv1i8( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -14402,9 +14590,9 @@ define @test_vluxseg4_mask_nxv1f32_nxv1i64( @test_vluxseg4_mask_nxv1f32_nxv1i32( @test_vluxseg4_mask_nxv1f32_nxv1i16( @test_vluxseg4_mask_nxv1f32_nxv1i8( @test_vluxseg5_mask_nxv1f32_nxv1i64( @test_vluxseg5_mask_nxv1f32_nxv1i32( @test_vluxseg5_mask_nxv1f32_nxv1i16( @test_vluxseg5_mask_nxv1f32_nxv1i8( @test_vluxseg6_mask_nxv1f32_nxv1i64( @test_vluxseg6_mask_nxv1f32_nxv1i32( @test_vluxseg6_mask_nxv1f32_nxv1i16( @test_vluxseg6_mask_nxv1f32_nxv1i8( @test_vluxseg7_mask_nxv1f32_nxv1i64( @test_vluxseg7_mask_nxv1f32_nxv1i32( @test_vluxseg7_mask_nxv1f32_nxv1i16( @test_vluxseg7_mask_nxv1f32_nxv1i8( @test_vluxseg8_mask_nxv1f32_nxv1i64( @test_vluxseg8_mask_nxv1f32_nxv1i32( @test_vluxseg8_mask_nxv1f32_nxv1i16( @test_vluxseg8_mask_nxv1f32_nxv1i8( @test_vluxseg2_mask_nxv8f16_nxv8i16(,} @llvm.riscv.vluxseg2.mask.nxv8f16.nxv8i16( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15133,6 +15322,7 @@ define @test_vluxseg2_mask_nxv8f16_nxv8i8(,} @llvm.riscv.vluxseg2.mask.nxv8f16.nxv8i8( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15162,6 +15352,7 @@ define @test_vluxseg2_mask_nxv8f16_nxv8i64(,} @llvm.riscv.vluxseg2.mask.nxv8f16.nxv8i64( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15191,6 +15382,7 @@ define @test_vluxseg2_mask_nxv8f16_nxv8i32(,} @llvm.riscv.vluxseg2.mask.nxv8f16.nxv8i32( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15217,11 +15409,12 @@ entry: define @test_vluxseg3_mask_nxv8f16_nxv8i16( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8f16.nxv8i16( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15248,11 +15441,12 @@ entry: define @test_vluxseg3_mask_nxv8f16_nxv8i8( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: 
vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv8f16.nxv8i8( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15280,9 +15474,10 @@ define @test_vluxseg3_mask_nxv8f16_nxv8i64(,,} @llvm.riscv.vluxseg3.mask.nxv8f16.nxv8i64( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15310,9 +15505,10 @@ define @test_vluxseg3_mask_nxv8f16_nxv8i32(,,} @llvm.riscv.vluxseg3.mask.nxv8f16.nxv8i32( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15340,9 +15536,9 @@ define @test_vluxseg4_mask_nxv8f16_nxv8i16( @test_vluxseg4_mask_nxv8f16_nxv8i8( @test_vluxseg4_mask_nxv8f16_nxv8i64(,,,} @llvm.riscv.vluxseg4.mask.nxv8f16.nxv8i64( %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15436,12 +15633,13 @@ entry: define @test_vluxseg4_mask_nxv8f16_nxv8i32( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vluxseg4ei32.v v6, (a0), v16, v0.t +; CHECK-NEXT: vluxseg4ei32.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv2r.v v8, v18 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv8f16.nxv8i32( %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15471,6 +15669,7 @@ define @test_vluxseg2_mask_nxv8f32_nxv8i16(,} @llvm.riscv.vluxseg2.mask.nxv8f32.nxv8i16( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -15500,6 +15699,7 @@ define @test_vluxseg2_mask_nxv8f32_nxv8i8(,} @llvm.riscv.vluxseg2.mask.nxv8f32.nxv8i8( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -15529,6 +15729,7 @@ define @test_vluxseg2_mask_nxv8f32_nxv8i64(,} @llvm.riscv.vluxseg2.mask.nxv8f32.nxv8i64( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -15558,6 +15759,7 @@ define @test_vluxseg2_mask_nxv8f32_nxv8i32(,} @llvm.riscv.vluxseg2.mask.nxv8f32.nxv8i32( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -15587,6 +15789,7 @@ define @test_vluxseg2_mask_nxv2f64_nxv2i32(,} @llvm.riscv.vluxseg2.mask.nxv2f64.nxv2i32( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15616,6 +15819,7 @@ define @test_vluxseg2_mask_nxv2f64_nxv2i8(,} @llvm.riscv.vluxseg2.mask.nxv2f64.nxv2i8( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15645,6 +15849,7 @@ define @test_vluxseg2_mask_nxv2f64_nxv2i16(,} @llvm.riscv.vluxseg2.mask.nxv2f64.nxv2i16( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15674,6 +15879,7 @@ define @test_vluxseg2_mask_nxv2f64_nxv2i64(,} @llvm.riscv.vluxseg2.mask.nxv2f64.nxv2i64( %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15700,11 +15906,12 @@ entry: define @test_vluxseg3_mask_nxv2f64_nxv2i32( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f64.nxv2i32( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15731,11 +15938,12 @@ entry: define @test_vluxseg3_mask_nxv2f64_nxv2i8( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei8.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f64.nxv2i8( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15762,11 +15970,12 @@ entry: define @test_vluxseg3_mask_nxv2f64_nxv2i16( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei16.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f64.nxv2i16( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15793,11 +16002,12 @@ entry: define @test_vluxseg3_mask_nxv2f64_nxv2i64( %val, double* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu -; CHECK-NEXT: vluxseg3ei64.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f64.nxv2i64( %val, %val, %val, double* %base, %index, %mask, i64 %vl, i64 1) @@ -15825,9 +16035,9 @@ define @test_vluxseg4_mask_nxv2f64_nxv2i32( @test_vluxseg4_mask_nxv2f64_nxv2i8( @test_vluxseg4_mask_nxv2f64_nxv2i16( @test_vluxseg4_mask_nxv2f64_nxv2i64( @test_vluxseg2_mask_nxv4f16_nxv4i32(,} @llvm.riscv.vluxseg2.mask.nxv4f16.nxv4i32( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -15988,6 +16199,7 @@ define @test_vluxseg2_mask_nxv4f16_nxv4i8(,} @llvm.riscv.vluxseg2.mask.nxv4f16.nxv4i8( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16017,6 +16229,7 @@ define @test_vluxseg2_mask_nxv4f16_nxv4i64(,} @llvm.riscv.vluxseg2.mask.nxv4f16.nxv4i64( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16046,6 +16259,7 @@ define @test_vluxseg2_mask_nxv4f16_nxv4i16(,} @llvm.riscv.vluxseg2.mask.nxv4f16.nxv4i16( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16073,9 +16287,10 @@ define @test_vluxseg3_mask_nxv4f16_nxv4i32(,,} @llvm.riscv.vluxseg3.mask.nxv4f16.nxv4i32( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16102,11 +16317,12 @@ entry: define 
@test_vluxseg3_mask_nxv4f16_nxv4i8( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4f16.nxv4i8( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16134,9 +16350,10 @@ define @test_vluxseg3_mask_nxv4f16_nxv4i64(,,} @llvm.riscv.vluxseg3.mask.nxv4f16.nxv4i64( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16163,11 +16380,12 @@ entry: define @test_vluxseg3_mask_nxv4f16_nxv4i16( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4f16.nxv4i16( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16194,12 +16412,13 @@ entry: define @test_vluxseg4_mask_nxv4f16_nxv4i32( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vluxseg4ei32.v v7, (a0), v12, v0.t +; CHECK-NEXT: vluxseg4ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv4f16.nxv4i32( %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16227,9 +16446,9 @@ define @test_vluxseg4_mask_nxv4f16_nxv4i8( @test_vluxseg4_mask_nxv4f16_nxv4i64(,,,} @llvm.riscv.vluxseg4.mask.nxv4f16.nxv4i64( %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16291,9 +16511,9 @@ define @test_vluxseg4_mask_nxv4f16_nxv4i16( @test_vluxseg5_mask_nxv4f16_nxv4i32( @test_vluxseg5_mask_nxv4f16_nxv4i8( @test_vluxseg5_mask_nxv4f16_nxv4i64(,,,,} @llvm.riscv.vluxseg5.mask.nxv4f16.nxv4i64( %val, %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16424,10 +16645,10 @@ define @test_vluxseg5_mask_nxv4f16_nxv4i16( @test_vluxseg6_mask_nxv4f16_nxv4i32( @test_vluxseg6_mask_nxv4f16_nxv4i8( @test_vluxseg6_mask_nxv4f16_nxv4i64( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg6_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; 
CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vluxseg6ei64.v v7, (a0), v16, v0.t +; CHECK-NEXT: vluxseg6ei64.v v16, (a0), v12, v0.t +; CHECK-NEXT: vmv1r.v v8, v17 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vluxseg6.mask.nxv4f16.nxv4i64( %val, %val, %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16562,11 +16784,11 @@ define @test_vluxseg6_mask_nxv4f16_nxv4i16( @test_vluxseg7_mask_nxv4f16_nxv4i32( @test_vluxseg7_mask_nxv4f16_nxv4i8( @test_vluxseg7_mask_nxv4f16_nxv4i64( @test_vluxseg7_mask_nxv4f16_nxv4i16( @test_vluxseg8_mask_nxv4f16_nxv4i32( @test_vluxseg8_mask_nxv4f16_nxv4i8( @test_vluxseg8_mask_nxv4f16_nxv4i64( @test_vluxseg8_mask_nxv4f16_nxv4i16( @test_vluxseg2_mask_nxv2f16_nxv2i32(,} @llvm.riscv.vluxseg2.mask.nxv2f16.nxv2i32( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16920,6 +17143,7 @@ define @test_vluxseg2_mask_nxv2f16_nxv2i8(,} @llvm.riscv.vluxseg2.mask.nxv2f16.nxv2i8( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16949,6 +17173,7 @@ define @test_vluxseg2_mask_nxv2f16_nxv2i16(,} @llvm.riscv.vluxseg2.mask.nxv2f16.nxv2i16( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -16978,6 +17203,7 @@ define @test_vluxseg2_mask_nxv2f16_nxv2i64(,} @llvm.riscv.vluxseg2.mask.nxv2f16.nxv2i64( %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -17004,11 +17230,12 @@ entry: define @test_vluxseg3_mask_nxv2f16_nxv2i32( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei32.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f16.nxv2i32( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -17035,11 +17262,12 @@ entry: define @test_vluxseg3_mask_nxv2f16_nxv2i8( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei8.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f16.nxv2i8( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -17066,11 +17294,12 @@ entry: define @test_vluxseg3_mask_nxv2f16_nxv2i16( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v10, v9 -; CHECK-NEXT: vmv1r.v v9, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg3ei16.v v7, (a0), v10, v0.t +; CHECK-NEXT: vluxseg3ei16.v v10, (a0), v9, v0.t +; CHECK-NEXT: vmv1r.v v8, v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv2f16.nxv2i16( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -17098,9 +17327,10 @@ define 
@test_vluxseg3_mask_nxv2f16_nxv2i64(,,} @llvm.riscv.vluxseg3.mask.nxv2f16.nxv2i64( %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -17128,9 +17358,9 @@ define @test_vluxseg4_mask_nxv2f16_nxv2i32( @test_vluxseg4_mask_nxv2f16_nxv2i8( @test_vluxseg4_mask_nxv2f16_nxv2i16( @test_vluxseg4_mask_nxv2f16_nxv2i64( %val, half* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg4_mask_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v7, v8 -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vluxseg4ei64.v v7, (a0), v12, v0.t +; CHECK-NEXT: vluxseg4ei64.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv1r.v v8, v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv2f16.nxv2i64( %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl, i64 1) @@ -17259,10 +17490,10 @@ define @test_vluxseg5_mask_nxv2f16_nxv2i32( @test_vluxseg5_mask_nxv2f16_nxv2i8( @test_vluxseg5_mask_nxv2f16_nxv2i16( @test_vluxseg5_mask_nxv2f16_nxv2i64( @test_vluxseg6_mask_nxv2f16_nxv2i32( @test_vluxseg6_mask_nxv2f16_nxv2i8( @test_vluxseg6_mask_nxv2f16_nxv2i16( @test_vluxseg6_mask_nxv2f16_nxv2i64( @test_vluxseg7_mask_nxv2f16_nxv2i32( @test_vluxseg7_mask_nxv2f16_nxv2i8( @test_vluxseg7_mask_nxv2f16_nxv2i16( @test_vluxseg7_mask_nxv2f16_nxv2i64( @test_vluxseg8_mask_nxv2f16_nxv2i32( @test_vluxseg8_mask_nxv2f16_nxv2i8( @test_vluxseg8_mask_nxv2f16_nxv2i16( @test_vluxseg8_mask_nxv2f16_nxv2i64( @test_vluxseg2_mask_nxv4f32_nxv4i32(,} @llvm.riscv.vluxseg2.mask.nxv4f32.nxv4i32( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -17858,6 +18090,7 @@ define @test_vluxseg2_mask_nxv4f32_nxv4i8(,} @llvm.riscv.vluxseg2.mask.nxv4f32.nxv4i8( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -17887,6 +18120,7 @@ define @test_vluxseg2_mask_nxv4f32_nxv4i64(,} @llvm.riscv.vluxseg2.mask.nxv4f32.nxv4i64( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -17916,6 +18150,7 @@ define @test_vluxseg2_mask_nxv4f32_nxv4i16(,} @llvm.riscv.vluxseg2.mask.nxv4f32.nxv4i16( %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -17942,11 +18177,12 @@ entry: define @test_vluxseg3_mask_nxv4f32_nxv4i32( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4f32_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vluxseg3ei32.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei32.v v12, (a0), v10, v0.t +; CHECK-NEXT: vmv2r.v v8, v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4f32.nxv4i32( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1) @@ -17973,11 +18209,12 @@ entry: define @test_vluxseg3_mask_nxv4f32_nxv4i8( %val, float* %base, %index, i64 %vl, %mask) { ; CHECK-LABEL: test_vluxseg3_mask_nxv4f32_nxv4i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v6, v8 -; CHECK-NEXT: vmv1r.v v12, v10 -; CHECK-NEXT: vmv2r.v v10, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu -; CHECK-NEXT: vluxseg3ei8.v v6, (a0), v12, v0.t +; CHECK-NEXT: vluxseg3ei8.v v12, (a0), v10, 
v0.t
+; CHECK-NEXT: vmv2r.v v8, v14
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4f32.nxv4i8( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1)
@@ -18005,9 +18242,10 @@ define @test_vluxseg3_mask_nxv4f32_nxv4i64(,,} @llvm.riscv.vluxseg3.mask.nxv4f32.nxv4i64( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1)
@@ -18034,11 +18272,12 @@ entry:
define @test_vluxseg3_mask_nxv4f32_nxv4i16( %val, float* %base, %index, i64 %vl, %mask) {
; CHECK-LABEL: test_vluxseg3_mask_nxv4f32_nxv4i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv1r.v v12, v10
-; CHECK-NEXT: vmv2r.v v10, v8
+; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT: vluxseg3ei16.v v6, (a0), v12, v0.t
+; CHECK-NEXT: vluxseg3ei16.v v12, (a0), v10, v0.t
+; CHECK-NEXT: vmv2r.v v8, v14
; CHECK-NEXT: ret
entry:
%0 = tail call {,,} @llvm.riscv.vluxseg3.mask.nxv4f32.nxv4i16( %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1)
@@ -18066,9 +18305,9 @@ define @test_vluxseg4_mask_nxv4f32_nxv4i32( @test_vluxseg4_mask_nxv4f32_nxv4i8( @test_vluxseg4_mask_nxv4f32_nxv4i64( %val, float* %base, %index, i64 %vl, %mask) {
; CHECK-LABEL: test_vluxseg4_mask_nxv4f32_nxv4i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v6, v8
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT: vluxseg4ei64.v v6, (a0), v16, v0.t
+; CHECK-NEXT: vluxseg4ei64.v v16, (a0), v12, v0.t
+; CHECK-NEXT: vmv2r.v v8, v18
; CHECK-NEXT: ret
entry:
%0 = tail call {,,,} @llvm.riscv.vluxseg4.mask.nxv4f32.nxv4i64( %val, %val, %val, %val, float* %base, %index, %mask, i64 %vl, i64 1)
@@ -18164,9 +18404,9 @@ define @test_vluxseg4_mask_nxv4f32_nxv4i16( @vpgather_baseidx_nxv32i8(i8* %base, @vpgather_baseidx_nxv32i8(i8* %base, @vpgather_baseidx_nxv32i8(i8* %base, %idxs
%v = call @llvm.vp.gather.nxv32i8.nxv32p0i8( %ptrs, %m, i32 %evl)
diff --git a/llvm/test/CodeGen/RISCV/rvv/vrgatherei16-subreg-liveness.ll b/llvm/test/CodeGen/RISCV/rvv/vrgatherei16-subreg-liveness.ll
index 78770822ae663..48bc1d2bd8430 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vrgatherei16-subreg-liveness.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vrgatherei16-subreg-liveness.ll
@@ -1,6 +1,6 @@
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
-; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s | FileCheck %s
-; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s -riscv-enable-subreg-liveness=false | FileCheck %s --check-prefix NOSUBREG
+; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s | FileCheck %s --check-prefix NOSUBREG
+; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s -riscv-enable-subreg-liveness=true | FileCheck %s --check-prefix SUBREG
; This test checks that vrgatherei16 instructions are correctly
; register-allocated. The LMUL=1 destination register groups may not overlap
@@ -10,26 +10,6 @@
; allocation!
define internal void @foo( %v15, %0, %vs12.i.i.i, %1, %v37) {
-; CHECK-LABEL: foo:
-; CHECK: # %bb.0: # %loopIR.preheader.i.i
-; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, mu
-; CHECK-NEXT: vmv.v.i v14, 0
-; CHECK-NEXT: vsetvli zero, zero, e8, m1, ta, mu
-; CHECK-NEXT: vmv.v.i v9, 0
-; CHECK-NEXT: vsetivli zero, 4, e8, m1, tu, mu
-; CHECK-NEXT: vmv1r.v v8, v9
-; CHECK-NEXT: vrgatherei16.vv v8, v9, v14
-; CHECK-NEXT: .LBB0_1: # %loopIR3.i.i
-; CHECK-NEXT: # =>This Inner Loop Header: Depth=1
-; CHECK-NEXT: vl1r.v v9, (zero)
-; CHECK-NEXT: vsetivli zero, 4, e8, m1, tu, mu
-; CHECK-NEXT: vmv1r.v v11, v12
-; CHECK-NEXT: vrgatherei16.vv v11, v9, v10
-; CHECK-NEXT: vsetvli a0, zero, e8, m1, ta, mu
-; CHECK-NEXT: vand.vv v9, v8, v11
-; CHECK-NEXT: vs1r.v v9, (zero)
-; CHECK-NEXT: j .LBB0_1
-;
; NOSUBREG-LABEL: foo:
; NOSUBREG: # %bb.0: # %loopIR.preheader.i.i
; NOSUBREG-NEXT: # kill: def $v10 killed $v10 def $v10m2
@@ -50,6 +30,26 @@ define internal void @foo( %v15, %0,
; NOSUBREG-NEXT: vand.vv v9, v8, v11
; NOSUBREG-NEXT: vs1r.v v9, (zero)
; NOSUBREG-NEXT: j .LBB0_1
+;
+; SUBREG-LABEL: foo:
+; SUBREG: # %bb.0: # %loopIR.preheader.i.i
+; SUBREG-NEXT: vsetvli a0, zero, e16, m2, ta, mu
+; SUBREG-NEXT: vmv.v.i v14, 0
+; SUBREG-NEXT: vsetvli zero, zero, e8, m1, ta, mu
+; SUBREG-NEXT: vmv.v.i v9, 0
+; SUBREG-NEXT: vsetivli zero, 4, e8, m1, tu, mu
+; SUBREG-NEXT: vmv1r.v v8, v9
+; SUBREG-NEXT: vrgatherei16.vv v8, v9, v14
+; SUBREG-NEXT: .LBB0_1: # %loopIR3.i.i
+; SUBREG-NEXT: # =>This Inner Loop Header: Depth=1
+; SUBREG-NEXT: vl1r.v v9, (zero)
+; SUBREG-NEXT: vsetivli zero, 4, e8, m1, tu, mu
+; SUBREG-NEXT: vmv1r.v v11, v12
+; SUBREG-NEXT: vrgatherei16.vv v11, v9, v10
+; SUBREG-NEXT: vsetvli a0, zero, e8, m1, ta, mu
+; SUBREG-NEXT: vand.vv v9, v8, v11
+; SUBREG-NEXT: vs1r.v v9, (zero)
+; SUBREG-NEXT: j .LBB0_1
loopIR.preheader.i.i:
%v18 = tail call @llvm.vector.insert.nxv8i16.nxv1i16( poison, %vs12.i.i.i, i64 0)
br label %loopIR3.i.i
diff --git a/llvm/test/CodeGen/RISCV/rvv/vsetvli-insert-crossbb.ll b/llvm/test/CodeGen/RISCV/rvv/vsetvli-insert-crossbb.ll
index adc578ec3dc7d..2184925214c2b 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vsetvli-insert-crossbb.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vsetvli-insert-crossbb.ll
@@ -91,13 +91,11 @@ define @test3(i64 %avl, i8 zeroext %cond, , %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv16i16_nxv16i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -21,6 +22,7 @@ entry:
define void @test_vsoxseg2_mask_nxv16i16_nxv16i16( %val, i16* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv16i16_nxv16i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -37,6 +39,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16i16.nxv16i8(, %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv16i16_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -50,6 +53,7 @@ entry:
define void @test_vsoxseg2_mask_nxv16i16_nxv16i8( %val, i16* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv16i16_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -66,6 +70,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16i16.nxv16i32(, %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv16i16_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli
zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v16 @@ -78,6 +83,7 @@ entry: define void @test_vsoxseg2_mask_nxv16i16_nxv16i32( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv16i16_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v16, v0.t @@ -93,6 +99,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i8.nxv1i8(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -106,6 +113,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i8_nxv1i8( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -122,6 +130,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i8.nxv1i32(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -135,6 +144,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i8_nxv1i32( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -151,6 +161,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i8.nxv1i16(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -164,6 +175,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i8_nxv1i16( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -181,8 +193,8 @@ define void @test_vsoxseg3_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -212,8 +224,8 @@ define void @test_vsoxseg3_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -243,8 +255,8 @@ define void 
@test_vsoxseg3_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -274,9 +286,9 @@ define void @test_vsoxseg4_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -307,9 +319,9 @@ define void @test_vsoxseg4_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -340,9 +352,9 @@ define void @test_vsoxseg4_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -373,10 +385,10 @@ define void @test_vsoxseg5_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -408,10 +420,10 @@ define void @test_vsoxseg5_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -443,10 +455,10 @@ define void @test_vsoxseg5_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli 
zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -478,11 +490,11 @@ define void @test_vsoxseg6_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -515,11 +527,11 @@ define void @test_vsoxseg6_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -552,11 +564,11 @@ define void @test_vsoxseg6_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -589,12 +601,12 @@ define void @test_vsoxseg7_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -628,12 +640,12 @@ define void @test_vsoxseg7_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -667,12 +679,12 @@ define void @test_vsoxseg7_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i8_nxv1i16: ; CHECK: # 
%bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -706,13 +718,13 @@ define void @test_vsoxseg8_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -747,13 +759,13 @@ define void @test_vsoxseg8_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -788,13 +800,13 @@ define void @test_vsoxseg8_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -828,6 +840,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16i8.nxv16i16(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv16i8_nxv16i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsoxseg2ei16.v v8, (a0), v12 @@ -840,6 +853,7 @@ entry: define void @test_vsoxseg2_mask_nxv16i8_nxv16i16( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv16i8_nxv16i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: 
vsoxseg2ei16.v v8, (a0), v12, v0.t @@ -855,6 +869,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16i8.nxv16i8(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu @@ -868,6 +883,7 @@ entry: define void @test_vsoxseg2_mask_nxv16i8_nxv16i8( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu @@ -884,6 +900,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16i8.nxv16i32(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv16i8_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v16 @@ -896,6 +913,7 @@ entry: define void @test_vsoxseg2_mask_nxv16i8_nxv16i32( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv16i8_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v16, v0.t @@ -911,11 +929,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv16i8.nxv16i16(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv16i8_nxv16i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu -; CHECK-NEXT: vsoxseg3ei16.v v8, (a0), v16 +; CHECK-NEXT: vsoxseg3ei16.v v16, (a0), v12 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.nxv16i8.nxv16i16( %val, %val, %val, i8* %base, %index, i32 %vl) @@ -925,11 +943,11 @@ entry: define void @test_vsoxseg3_mask_nxv16i8_nxv16i16( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv16i8_nxv16i16: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu -; CHECK-NEXT: vsoxseg3ei16.v v8, (a0), v16, v0.t +; CHECK-NEXT: vsoxseg3ei16.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.mask.nxv16i8.nxv16i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl) @@ -943,8 +961,8 @@ define void @test_vsoxseg3_nxv16i8_nxv16i8( %val, i8* %base, < ; CHECK-LABEL: test_vsoxseg3_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -957,8 +975,8 @@ define void @test_vsoxseg3_mask_nxv16i8_nxv16i8( %val, i8* %ba ; CHECK-LABEL: test_vsoxseg3_mask_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; 
CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -973,6 +991,7 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv16i8.nxv16i32(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv16i8_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu @@ -986,6 +1005,7 @@ entry: define void @test_vsoxseg3_mask_nxv16i8_nxv16i32( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv16i8_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu @@ -1003,9 +1023,9 @@ define void @test_vsoxseg4_nxv16i8_nxv16i16( %val, i8* %base, ; CHECK-LABEL: test_vsoxseg4_nxv16i8_nxv16i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 -; CHECK-NEXT: vmv2r.v v20, v8 -; CHECK-NEXT: vmv2r.v v22, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -1018,9 +1038,9 @@ define void @test_vsoxseg4_mask_nxv16i8_nxv16i16( %val, i8* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv16i8_nxv16i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 -; CHECK-NEXT: vmv2r.v v20, v8 -; CHECK-NEXT: vmv2r.v v22, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -1036,9 +1056,9 @@ define void @test_vsoxseg4_nxv16i8_nxv16i8( %val, i8* %base, < ; CHECK-LABEL: test_vsoxseg4_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -1051,9 +1071,9 @@ define void @test_vsoxseg4_mask_nxv16i8_nxv16i8( %val, i8* %ba ; CHECK-LABEL: test_vsoxseg4_mask_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -1068,6 +1088,7 @@ declare void @llvm.riscv.vsoxseg4.mask.nxv16i8.nxv16i32(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg4_nxv16i8_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -1082,6 +1103,7 @@ entry: define void @test_vsoxseg4_mask_nxv16i8_nxv16i32( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg4_mask_nxv16i8_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: 
vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -1099,6 +1121,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i32.nxv2i32(, %val, i32* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -1112,6 +1135,7 @@ entry: define void @test_vsoxseg2_mask_nxv2i32_nxv2i32( %val, i32* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -1128,6 +1152,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i32.nxv2i8(, %val, i32* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -1141,6 +1166,7 @@ entry: define void @test_vsoxseg2_mask_nxv2i32_nxv2i8( %val, i32* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -1157,6 +1183,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i32.nxv2i16(, %val, i32* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -1170,6 +1197,7 @@ entry: define void @test_vsoxseg2_mask_nxv2i32_nxv2i16( %val, i32* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -1187,8 +1215,8 @@ define void @test_vsoxseg3_nxv2i32_nxv2i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg3_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1201,8 +1229,8 @@ define void @test_vsoxseg3_mask_nxv2i32_nxv2i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1218,8 +1246,8 @@ define void @test_vsoxseg3_nxv2i32_nxv2i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg3_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1232,8 +1260,8 @@ 
define void @test_vsoxseg3_mask_nxv2i32_nxv2i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1249,8 +1277,8 @@ define void @test_vsoxseg3_nxv2i32_nxv2i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg3_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1263,8 +1291,8 @@ define void @test_vsoxseg3_mask_nxv2i32_nxv2i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1280,9 +1308,9 @@ define void @test_vsoxseg4_nxv2i32_nxv2i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg4_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1295,9 +1323,9 @@ define void @test_vsoxseg4_mask_nxv2i32_nxv2i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1313,9 +1341,9 @@ define void @test_vsoxseg4_nxv2i32_nxv2i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg4_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1328,9 +1356,9 @@ define void @test_vsoxseg4_mask_nxv2i32_nxv2i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1346,9 +1374,9 @@ define void @test_vsoxseg4_nxv2i32_nxv2i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg4_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: 
vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1361,9 +1389,9 @@ define void @test_vsoxseg4_mask_nxv2i32_nxv2i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1379,10 +1407,10 @@ define void @test_vsoxseg5_nxv2i32_nxv2i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg5_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1395,10 +1423,10 @@ define void @test_vsoxseg5_mask_nxv2i32_nxv2i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1414,10 +1442,10 @@ define void @test_vsoxseg5_nxv2i32_nxv2i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg5_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1430,10 +1458,10 @@ define void @test_vsoxseg5_mask_nxv2i32_nxv2i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg5_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1449,10 +1477,10 @@ define void @test_vsoxseg5_nxv2i32_nxv2i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg5_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1465,10 +1493,10 @@ define void @test_vsoxseg5_mask_nxv2i32_nxv2i16( %val, i32* %b ; CHECK-LABEL: 
test_vsoxseg5_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1484,11 +1512,11 @@ define void @test_vsoxseg6_nxv2i32_nxv2i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg6_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1501,11 +1529,11 @@ define void @test_vsoxseg6_mask_nxv2i32_nxv2i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1521,11 +1549,11 @@ define void @test_vsoxseg6_nxv2i32_nxv2i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg6_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1538,11 +1566,11 @@ define void @test_vsoxseg6_mask_nxv2i32_nxv2i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg6_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1558,11 +1586,11 @@ define void @test_vsoxseg6_nxv2i32_nxv2i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg6_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ 
-1575,11 +1603,11 @@ define void @test_vsoxseg6_mask_nxv2i32_nxv2i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1595,12 +1623,12 @@ define void @test_vsoxseg7_nxv2i32_nxv2i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg7_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1613,12 +1641,12 @@ define void @test_vsoxseg7_mask_nxv2i32_nxv2i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1634,12 +1662,12 @@ define void @test_vsoxseg7_nxv2i32_nxv2i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg7_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1652,12 +1680,12 @@ define void @test_vsoxseg7_mask_nxv2i32_nxv2i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg7_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1673,12 +1701,12 @@ define void @test_vsoxseg7_nxv2i32_nxv2i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg7_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: 
vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1691,12 +1719,12 @@ define void @test_vsoxseg7_mask_nxv2i32_nxv2i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1712,13 +1740,13 @@ define void @test_vsoxseg8_nxv2i32_nxv2i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg8_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1731,13 +1759,13 @@ define void @test_vsoxseg8_mask_nxv2i32_nxv2i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv2i32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1753,13 +1781,13 @@ define void @test_vsoxseg8_nxv2i32_nxv2i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg8_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1772,13 +1800,13 @@ define void @test_vsoxseg8_mask_nxv2i32_nxv2i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg8_mask_nxv2i32_nxv2i8: ; CHECK: # %bb.0: # %entry ; 
CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1794,13 +1822,13 @@ define void @test_vsoxseg8_nxv2i32_nxv2i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg8_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1813,13 +1841,13 @@ define void @test_vsoxseg8_mask_nxv2i32_nxv2i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv2i32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1834,6 +1862,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i16.nxv4i16(, %val, i16* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -1847,6 +1876,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i16_nxv4i16( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -1863,6 +1893,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i16.nxv4i8(, %val, i16* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -1876,6 +1907,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i16_nxv4i8( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -1892,6 
+1924,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i16.nxv4i32(, %val, i16* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v10 @@ -1904,6 +1937,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i16_nxv4i32( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v10, v0.t @@ -1920,8 +1954,8 @@ define void @test_vsoxseg3_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg3_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1934,8 +1968,8 @@ define void @test_vsoxseg3_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1951,8 +1985,8 @@ define void @test_vsoxseg3_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg3_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1965,8 +1999,8 @@ define void @test_vsoxseg3_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1981,11 +2015,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv4i16.nxv4i32(, %val, i16* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v12 +; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.nxv4i16.nxv4i32( %val, %val, %val, i16* %base, %index, i32 %vl) @@ -1995,11 +2029,11 @@ entry: define void @test_vsoxseg3_mask_nxv4i16_nxv4i32( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, 
(a0), v12, v0.t +; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.mask.nxv4i16.nxv4i32( %val, %val, %val, i16* %base, %index, %mask, i32 %vl) @@ -2013,9 +2047,9 @@ define void @test_vsoxseg4_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg4_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2028,9 +2062,9 @@ define void @test_vsoxseg4_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2046,9 +2080,9 @@ define void @test_vsoxseg4_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg4_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2061,9 +2095,9 @@ define void @test_vsoxseg4_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2079,9 +2113,9 @@ define void @test_vsoxseg4_nxv4i16_nxv4i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg4_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -2094,9 +2128,9 @@ define void @test_vsoxseg4_mask_nxv4i16_nxv4i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -2112,10 +2146,10 @@ define void @test_vsoxseg5_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg5_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, 
v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2128,10 +2162,10 @@ define void @test_vsoxseg5_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2147,10 +2181,10 @@ define void @test_vsoxseg5_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg5_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2163,10 +2197,10 @@ define void @test_vsoxseg5_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2182,10 +2216,10 @@ define void @test_vsoxseg5_nxv4i16_nxv4i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg5_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -2198,10 +2232,10 @@ define void @test_vsoxseg5_mask_nxv4i16_nxv4i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -2217,11 +2251,11 @@ define void @test_vsoxseg6_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg6_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2234,11 
+2268,11 @@ define void @test_vsoxseg6_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2254,11 +2288,11 @@ define void @test_vsoxseg6_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg6_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2271,11 +2305,11 @@ define void @test_vsoxseg6_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2291,11 +2325,11 @@ define void @test_vsoxseg6_nxv4i16_nxv4i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg6_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -2308,11 +2342,11 @@ define void @test_vsoxseg6_mask_nxv4i16_nxv4i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -2328,12 +2362,12 @@ define void @test_vsoxseg7_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg7_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, 
v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2346,12 +2380,12 @@ define void @test_vsoxseg7_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2367,12 +2401,12 @@ define void @test_vsoxseg7_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg7_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2385,12 +2419,12 @@ define void @test_vsoxseg7_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2406,12 +2440,12 @@ define void @test_vsoxseg7_nxv4i16_nxv4i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg7_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -2424,12 +2458,12 @@ define void @test_vsoxseg7_mask_nxv4i16_nxv4i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, 
mu ; CHECK-NEXT: vsoxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -2445,13 +2479,13 @@ define void @test_vsoxseg8_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg8_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2464,13 +2498,13 @@ define void @test_vsoxseg8_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2486,13 +2520,13 @@ define void @test_vsoxseg8_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg8_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2505,13 +2539,13 @@ define void @test_vsoxseg8_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg8_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2527,13 +2561,13 @@ define void @test_vsoxseg8_nxv4i16_nxv4i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg8_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; 
CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -2546,13 +2580,13 @@ define void @test_vsoxseg8_mask_nxv4i16_nxv4i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -2567,6 +2601,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i32.nxv1i8(, %val, i32* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -2580,6 +2615,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i32_nxv1i8( %val, i32* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -2596,6 +2632,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i32.nxv1i32(, %val, i32* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -2609,6 +2646,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i32_nxv1i32( %val, i32* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -2625,6 +2663,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i32.nxv1i16(, %val, i32* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -2638,6 +2677,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i32_nxv1i16( %val, i32* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -2655,8 +2695,8 @@ define void @test_vsoxseg3_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg3_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, 
ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2669,8 +2709,8 @@ define void @test_vsoxseg3_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2686,8 +2726,8 @@ define void @test_vsoxseg3_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg3_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2700,8 +2740,8 @@ define void @test_vsoxseg3_mask_nxv1i32_nxv1i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2717,8 +2757,8 @@ define void @test_vsoxseg3_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg3_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2731,8 +2771,8 @@ define void @test_vsoxseg3_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2748,9 +2788,9 @@ define void @test_vsoxseg4_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg4_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2763,9 +2803,9 @@ define void @test_vsoxseg4_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2781,9 +2821,9 @@ define void @test_vsoxseg4_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg4_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; 
CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2796,9 +2836,9 @@ define void @test_vsoxseg4_mask_nxv1i32_nxv1i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2814,9 +2854,9 @@ define void @test_vsoxseg4_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg4_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2829,9 +2869,9 @@ define void @test_vsoxseg4_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2847,10 +2887,10 @@ define void @test_vsoxseg5_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg5_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2863,10 +2903,10 @@ define void @test_vsoxseg5_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2882,10 +2922,10 @@ define void @test_vsoxseg5_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg5_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2898,10 +2938,10 @@ define void @test_vsoxseg5_mask_nxv1i32_nxv1i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: 
vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2917,10 +2957,10 @@ define void @test_vsoxseg5_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg5_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2933,10 +2973,10 @@ define void @test_vsoxseg5_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2952,11 +2992,11 @@ define void @test_vsoxseg6_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg6_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2969,11 +3009,11 @@ define void @test_vsoxseg6_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2989,11 +3029,11 @@ define void @test_vsoxseg6_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg6_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -3006,11 +3046,11 @@ define void @test_vsoxseg6_mask_nxv1i32_nxv1i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; 
CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3026,11 +3066,11 @@ define void @test_vsoxseg6_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg6_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -3043,11 +3083,11 @@ define void @test_vsoxseg6_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3063,12 +3103,12 @@ define void @test_vsoxseg7_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg7_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -3081,12 +3121,12 @@ define void @test_vsoxseg7_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3102,12 +3142,12 @@ define void @test_vsoxseg7_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg7_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu 
 ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -3120,12 +3160,12 @@ define void @test_vsoxseg7_mask_nxv1i32_nxv1i32( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -3141,12 +3181,12 @@ define void @test_vsoxseg7_nxv1i32_nxv1i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg7_nxv1i32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -3159,12 +3199,12 @@ define void @test_vsoxseg7_mask_nxv1i32_nxv1i16( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -3180,13 +3220,13 @@ define void @test_vsoxseg8_nxv1i32_nxv1i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsoxseg8_nxv1i32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -3199,13 +3239,13 @@ define void @test_vsoxseg8_mask_nxv1i32_nxv1i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -3221,13 +3261,13 @@ define void @test_vsoxseg8_nxv1i32_nxv1i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg8_nxv1i32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -3240,13 +3280,13 @@ define void @test_vsoxseg8_mask_nxv1i32_nxv1i32( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -3262,13 +3302,13 @@ define void @test_vsoxseg8_nxv1i32_nxv1i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg8_nxv1i32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -3281,13 +3321,13 @@ define void @test_vsoxseg8_mask_nxv1i32_nxv1i16( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -3302,6 +3342,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i16.nxv8i16(,
 %val, i16* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8i16_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -3315,6 +3356,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8i16_nxv8i16( %val, i16* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i16_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -3331,6 +3373,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i16.nxv8i8(,
 %val, i16* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8i16_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -3344,6 +3387,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8i16_nxv8i8( %val, i16* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i16_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -3360,6 +3404,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i16.nxv8i32(,
 %val, i16* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8i16_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v12
@@ -3372,6 +3417,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8i16_nxv8i32( %val, i16* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i16_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v12, v0.t
@@ -3388,8 +3434,8 @@ define void @test_vsoxseg3_nxv8i16_nxv8i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg3_nxv8i16_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -3402,8 +3448,8 @@ define void @test_vsoxseg3_mask_nxv8i16_nxv8i16( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i16_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -3419,8 +3465,8 @@ define void @test_vsoxseg3_nxv8i16_nxv8i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsoxseg3_nxv8i16_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -3433,8 +3479,8 @@ define void @test_vsoxseg3_mask_nxv8i16_nxv8i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i16_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -3449,11 +3495,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv8i16.nxv8i32(,
 %val, i16* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_nxv8i16_nxv8i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
-; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v16
+; CHECK-NEXT: vsoxseg3ei32.v v16, (a0), v12
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsoxseg3.nxv8i16.nxv8i32( %val, %val, %val, i16* %base, %index, i32 %vl)
@@ -3463,11 +3509,11 @@ entry:
 define void @test_vsoxseg3_mask_nxv8i16_nxv8i32( %val, i16* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i16_nxv8i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
-; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v16, v0.t
+; CHECK-NEXT: vsoxseg3ei32.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsoxseg3.mask.nxv8i16.nxv8i32( %val, %val, %val, i16* %base, %index, %mask, i32 %vl)
@@ -3481,9 +3527,9 @@ define void @test_vsoxseg4_nxv8i16_nxv8i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg4_nxv8i16_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -3496,9 +3542,9 @@ define void @test_vsoxseg4_mask_nxv8i16_nxv8i16( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i16_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -3514,9 +3560,9 @@ define void @test_vsoxseg4_nxv8i16_nxv8i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsoxseg4_nxv8i16_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -3529,9 +3575,9 @@ define void @test_vsoxseg4_mask_nxv8i16_nxv8i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i16_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -3547,9 +3593,9 @@ define void @test_vsoxseg4_nxv8i16_nxv8i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg4_nxv8i16_nxv8i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei32.v v16, (a0), v12
 ; CHECK-NEXT: ret
@@ -3562,9 +3608,9 @@ define void @test_vsoxseg4_mask_nxv8i16_nxv8i32( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i16_nxv8i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei32.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
@@ -3579,6 +3625,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i8.nxv8i16(,
 %val, i8* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei16.v v8, (a0), v10
@@ -3591,6 +3638,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8i8_nxv8i16( %val, i8* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei16.v v8, (a0), v10, v0.t
@@ -3606,6 +3654,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i8.nxv8i8(,
 %val, i8* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -3619,6 +3668,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8i8_nxv8i8( %val, i8* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -3635,6 +3685,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i8.nxv8i32(,
 %val, i8* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v12
@@ -3647,6 +3698,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v12, v0.t
@@ -3662,11 +3714,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv8i8.nxv8i16(,
 %val, i8* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsoxseg3ei16.v v8, (a0), v12
+; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsoxseg3.nxv8i8.nxv8i16( %val, %val, %val, i8* %base, %index, i32 %vl)
@@ -3676,11 +3728,11 @@ entry:
 define void @test_vsoxseg3_mask_nxv8i8_nxv8i16( %val, i8* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsoxseg3ei16.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsoxseg3.mask.nxv8i8.nxv8i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl)
@@ -3694,8 +3746,8 @@ define void @test_vsoxseg3_nxv8i8_nxv8i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -3724,6 +3776,7 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv8i8.nxv8i32(,
 %val, i8* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -3737,6 +3790,7 @@ entry:
 define void @test_vsoxseg3_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -3754,9 +3808,9 @@ define void @test_vsoxseg4_nxv8i8_nxv8i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -3787,9 +3841,9 @@ define void @test_vsoxseg4_nxv8i8_nxv8i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -3819,6 +3873,7 @@ declare void @llvm.riscv.vsoxseg4.mask.nxv8i8.nxv8i32(,
 %val, i8* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg4_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3833,6 +3888,7 @@ entry:
 define void @test_vsoxseg4_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3851,10 +3907,10 @@ define void @test_vsoxseg5_nxv8i8_nxv8i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg5ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -3886,10 +3942,10 @@ define void @test_vsoxseg5_nxv8i8_nxv8i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -3920,13 +3976,13 @@ declare void @llvm.riscv.vsoxseg5.mask.nxv8i8.nxv8i32(,
 %val, i8* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg5_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsoxseg5ei32.v v8, (a0), v16
+; CHECK-NEXT: vsoxseg5ei32.v v16, (a0), v12
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsoxseg5.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, i8* %base, %index, i32 %vl)
@@ -3936,13 +3992,13 @@ entry:
 define void @test_vsoxseg5_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsoxseg5ei32.v v8, (a0), v16, v0.t
+; CHECK-NEXT: vsoxseg5ei32.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsoxseg5.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl)
@@ -3956,11 +4012,11 @@ define void @test_vsoxseg6_nxv8i8_nxv8i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -3993,11 +4049,11 @@ define void @test_vsoxseg6_nxv8i8_nxv8i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4030,11 +4086,11 @@ define void @test_vsoxseg6_nxv8i8_nxv8i32( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei32.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
@@ -4067,12 +4123,12 @@ define void @test_vsoxseg7_nxv8i8_nxv8i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -4106,12 +4162,12 @@ define void @test_vsoxseg7_nxv8i8_nxv8i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4145,12 +4201,12 @@ define void @test_vsoxseg7_nxv8i8_nxv8i32( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
-; CHECK-NEXT: vmv1r.v v22, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
+; CHECK-NEXT: vmv1r.v v22, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei32.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
@@ -4184,13 +4240,13 @@ define void @test_vsoxseg8_nxv8i8_nxv8i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg8ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -4225,13 +4281,13 @@ define void @test_vsoxseg8_nxv8i8_nxv8i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4266,13 +4322,13 @@ define void @test_vsoxseg8_nxv8i8_nxv8i32( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
-; CHECK-NEXT: vmv1r.v v22, v8
-; CHECK-NEXT: vmv1r.v v23, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
+; CHECK-NEXT: vmv1r.v v22, v16
+; CHECK-NEXT: vmv1r.v v23, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg8ei32.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
@@ -4306,6 +4362,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i32.nxv8i16(,
 %val, i32* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8i32_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -4319,6 +4376,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8i32_nxv8i16( %val, i32* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i32_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -4335,6 +4393,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i32.nxv8i8(,
 %val, i32* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8i32_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -4348,6 +4407,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8i32_nxv8i8( %val, i32* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i32_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -4364,6 +4424,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i32.nxv8i32(,
 %val, i32* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8i32_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -4377,6 +4438,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8i32_nxv8i32( %val, i32* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i32_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -4393,6 +4455,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i8.nxv4i16(,
 %val, i8* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv4i8_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
@@ -4406,6 +4469,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv4i8_nxv4i16( %val, i8* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i8_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
@@ -4422,6 +4486,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i8.nxv4i8(,
 %val, i8* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv4i8_nxv4i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
@@ -4435,6 +4500,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv4i8_nxv4i8( %val, i8* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i8_nxv4i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
@@ -4451,6 +4517,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i8.nxv4i32(,
 %val, i8* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv4i8_nxv4i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v10
@@ -4463,6 +4530,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv4i8_nxv4i32( %val, i8* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i8_nxv4i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v10, v0.t
@@ -4479,8 +4547,8 @@ define void @test_vsoxseg3_nxv4i8_nxv4i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i8_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4510,8 +4578,8 @@ define void @test_vsoxseg3_nxv4i8_nxv4i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i8_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4540,11 +4608,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv4i8.nxv4i32(,
 %val, i8* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_nxv4i8_nxv4i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
-; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v12
+; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsoxseg3.nxv4i8.nxv4i32( %val, %val, %val, i8* %base, %index, i32 %vl)
@@ -4554,11 +4622,11 @@ entry:
 define void @test_vsoxseg3_mask_nxv4i8_nxv4i32( %val, i8* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i8_nxv4i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
-; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsoxseg3.mask.nxv4i8.nxv4i32( %val, %val, %val, i8* %base, %index, %mask, i32 %vl)
@@ -4572,9 +4640,9 @@ define void @test_vsoxseg4_nxv4i8_nxv4i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i8_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4605,9 +4673,9 @@ define void @test_vsoxseg4_nxv4i8_nxv4i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i8_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4638,9 +4706,9 @@ define void @test_vsoxseg4_nxv4i8_nxv4i32( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i8_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -4671,10 +4739,10 @@ define void @test_vsoxseg5_nxv4i8_nxv4i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i8_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4706,10 +4774,10 @@ define void @test_vsoxseg5_nxv4i8_nxv4i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i8_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4741,10 +4809,10 @@ define void @test_vsoxseg5_nxv4i8_nxv4i32( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i8_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg5ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -4776,11 +4844,11 @@ define void @test_vsoxseg6_nxv4i8_nxv4i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i8_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4813,11 +4881,11 @@ define void @test_vsoxseg6_nxv4i8_nxv4i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i8_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4850,11 +4918,11 @@ define void @test_vsoxseg6_nxv4i8_nxv4i32( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i8_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg6ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -4887,12 +4955,12 @@ define void @test_vsoxseg7_nxv4i8_nxv4i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i8_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4926,12 +4994,12 @@ define void @test_vsoxseg7_nxv4i8_nxv4i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i8_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4965,12 +5033,12 @@ define void @test_vsoxseg7_nxv4i8_nxv4i32( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i8_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg7ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -5004,13 +5072,13 @@ define void @test_vsoxseg8_nxv4i8_nxv4i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv4i8_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5045,13 +5113,13 @@ define void @test_vsoxseg8_nxv4i8_nxv4i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv4i8_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5086,13 +5154,13 @@ define void @test_vsoxseg8_nxv4i8_nxv4i32( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv4i8_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg8ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -5126,6 +5194,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i16.nxv1i8(,
 %val, i16* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -5139,6 +5208,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv1i16_nxv1i8( %val, i16* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -5155,6 +5225,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i16.nxv1i32(,
 %val, i16* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -5168,6 +5239,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv1i16_nxv1i32( %val, i16* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -5184,6 +5256,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i16.nxv1i16(,
 %val, i16* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -5197,6 +5270,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv1i16_nxv1i16( %val, i16* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -5214,8 +5288,8 @@ define void @test_vsoxseg3_nxv1i16_nxv1i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsoxseg3_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5228,8 +5302,8 @@ define void @test_vsoxseg3_mask_nxv1i16_nxv1i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5245,8 +5319,8 @@ define void @test_vsoxseg3_nxv1i16_nxv1i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg3_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5259,8 +5333,8 @@ define void @test_vsoxseg3_mask_nxv1i16_nxv1i32( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5276,8 +5350,8 @@ define void @test_vsoxseg3_nxv1i16_nxv1i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg3_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5290,8 +5364,8 @@ define void @test_vsoxseg3_mask_nxv1i16_nxv1i16( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5307,9 +5381,9 @@ define void @test_vsoxseg4_nxv1i16_nxv1i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsoxseg4_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5322,9 +5396,9 @@ define void @test_vsoxseg4_mask_nxv1i16_nxv1i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5340,9 +5414,9 @@ define void @test_vsoxseg4_nxv1i16_nxv1i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg4_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5355,9 +5429,9 @@ define void @test_vsoxseg4_mask_nxv1i16_nxv1i32( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5373,9 +5447,9 @@ define void @test_vsoxseg4_nxv1i16_nxv1i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg4_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5388,9 +5462,9 @@ define void @test_vsoxseg4_mask_nxv1i16_nxv1i16( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5406,10 +5480,10 @@ define void @test_vsoxseg5_nxv1i16_nxv1i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsoxseg5_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5422,10 +5496,10 @@ define void @test_vsoxseg5_mask_nxv1i16_nxv1i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5441,10 +5515,10 @@ define void @test_vsoxseg5_nxv1i16_nxv1i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg5_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5457,10 +5531,10 @@ define void @test_vsoxseg5_mask_nxv1i16_nxv1i32( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5476,10 +5550,10 @@ define void @test_vsoxseg5_nxv1i16_nxv1i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg5_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5492,10 +5566,10 @@ define void @test_vsoxseg5_mask_nxv1i16_nxv1i16( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5511,11 +5585,11 @@ define void @test_vsoxseg6_nxv1i16_nxv1i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsoxseg6_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5528,11 +5602,11 @@ define void @test_vsoxseg6_mask_nxv1i16_nxv1i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5548,11 +5622,11 @@ define void @test_vsoxseg6_nxv1i16_nxv1i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg6_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5565,11 +5639,11 @@ define void @test_vsoxseg6_mask_nxv1i16_nxv1i32( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5585,11 +5659,11 @@ define void @test_vsoxseg6_nxv1i16_nxv1i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg6_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5602,11 +5676,11 @@ define void @test_vsoxseg6_mask_nxv1i16_nxv1i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5622,12 +5696,12 @@ define void @test_vsoxseg7_nxv1i16_nxv1i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg7_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5640,12 +5714,12 @@ define void @test_vsoxseg7_mask_nxv1i16_nxv1i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5661,12 +5735,12 @@ define void @test_vsoxseg7_nxv1i16_nxv1i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg7_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5679,12 +5753,12 @@ define void @test_vsoxseg7_mask_nxv1i16_nxv1i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, 
mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5700,12 +5774,12 @@ define void @test_vsoxseg7_nxv1i16_nxv1i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg7_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5718,12 +5792,12 @@ define void @test_vsoxseg7_mask_nxv1i16_nxv1i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5739,13 +5813,13 @@ define void @test_vsoxseg8_nxv1i16_nxv1i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg8_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5758,13 +5832,13 @@ define void @test_vsoxseg8_mask_nxv1i16_nxv1i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5780,13 +5854,13 @@ define void @test_vsoxseg8_nxv1i16_nxv1i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg8_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5799,13 +5873,13 @@ define void @test_vsoxseg8_mask_nxv1i16_nxv1i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5821,13 +5895,13 @@ define void @test_vsoxseg8_nxv1i16_nxv1i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg8_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5840,13 +5914,13 @@ define void @test_vsoxseg8_mask_nxv1i16_nxv1i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5861,6 +5935,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv32i8.nxv32i16(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv32i8_nxv32i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu ; CHECK-NEXT: vsoxseg2ei16.v v8, (a0), v16 @@ -5873,6 +5948,7 @@ entry: define void @test_vsoxseg2_mask_nxv32i8_nxv32i16( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv32i8_nxv32i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu ; CHECK-NEXT: vsoxseg2ei16.v v8, (a0), v16, v0.t @@ -5888,6 +5964,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv32i8.nxv32i8(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv32i8_nxv32i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu @@ -5901,6 +5978,7 @@ entry: define void 
@test_vsoxseg2_mask_nxv32i8_nxv32i8( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv32i8_nxv32i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu @@ -5917,6 +5995,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i8.nxv2i32(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -5930,6 +6009,7 @@ entry: define void @test_vsoxseg2_mask_nxv2i8_nxv2i32( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -5946,6 +6026,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i8.nxv2i8(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -5959,6 +6040,7 @@ entry: define void @test_vsoxseg2_mask_nxv2i8_nxv2i8( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -5975,6 +6057,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i8.nxv2i16(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -5988,6 +6071,7 @@ entry: define void @test_vsoxseg2_mask_nxv2i8_nxv2i16( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -6005,8 +6089,8 @@ define void @test_vsoxseg3_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6036,8 +6120,8 @@ define void @test_vsoxseg3_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6067,8 +6151,8 @@ define void @test_vsoxseg3_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: 
test_vsoxseg3_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6098,9 +6182,9 @@ define void @test_vsoxseg4_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6131,9 +6215,9 @@ define void @test_vsoxseg4_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6164,9 +6248,9 @@ define void @test_vsoxseg4_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6197,10 +6281,10 @@ define void @test_vsoxseg5_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg5_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6232,10 +6316,10 @@ define void @test_vsoxseg5_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg5_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6267,10 +6351,10 @@ define void @test_vsoxseg5_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg5_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, 
v0.t ; CHECK-NEXT: ret @@ -6302,11 +6386,11 @@ define void @test_vsoxseg6_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg6_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6339,11 +6423,11 @@ define void @test_vsoxseg6_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg6_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6376,11 +6460,11 @@ define void @test_vsoxseg6_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg6_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6413,12 +6497,12 @@ define void @test_vsoxseg7_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg7_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6452,12 +6536,12 @@ define void @test_vsoxseg7_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg7_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6491,12 +6575,12 @@ define void @test_vsoxseg7_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg7_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: 
vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6530,13 +6614,13 @@ define void @test_vsoxseg8_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg8_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6571,13 +6655,13 @@ define void @test_vsoxseg8_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg8_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6612,13 +6696,13 @@ define void @test_vsoxseg8_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg8_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6652,6 +6736,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i16.nxv2i32(, %val, i16* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -6665,6 +6750,7 @@ entry: define void @test_vsoxseg2_mask_nxv2i16_nxv2i32( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -6681,6 +6767,7 @@ declare void 
@llvm.riscv.vsoxseg2.mask.nxv2i16.nxv2i8(, %val, i16* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -6694,6 +6781,7 @@ entry: define void @test_vsoxseg2_mask_nxv2i16_nxv2i8( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -6710,6 +6798,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i16.nxv2i16(, %val, i16* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -6723,6 +6812,7 @@ entry: define void @test_vsoxseg2_mask_nxv2i16_nxv2i16( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -6740,8 +6830,8 @@ define void @test_vsoxseg3_nxv2i16_nxv2i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg3_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6754,8 +6844,8 @@ define void @test_vsoxseg3_mask_nxv2i16_nxv2i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6771,8 +6861,8 @@ define void @test_vsoxseg3_nxv2i16_nxv2i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg3_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6785,8 +6875,8 @@ define void @test_vsoxseg3_mask_nxv2i16_nxv2i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6802,8 +6892,8 @@ define void @test_vsoxseg3_nxv2i16_nxv2i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg3_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), 
v9 ; CHECK-NEXT: ret @@ -6816,8 +6906,8 @@ define void @test_vsoxseg3_mask_nxv2i16_nxv2i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6833,9 +6923,9 @@ define void @test_vsoxseg4_nxv2i16_nxv2i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg4_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6848,9 +6938,9 @@ define void @test_vsoxseg4_mask_nxv2i16_nxv2i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6866,9 +6956,9 @@ define void @test_vsoxseg4_nxv2i16_nxv2i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg4_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6881,9 +6971,9 @@ define void @test_vsoxseg4_mask_nxv2i16_nxv2i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6899,9 +6989,9 @@ define void @test_vsoxseg4_nxv2i16_nxv2i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg4_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6914,9 +7004,9 @@ define void @test_vsoxseg4_mask_nxv2i16_nxv2i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6932,10 +7022,10 @@ define void @test_vsoxseg5_nxv2i16_nxv2i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg5_nxv2i16_nxv2i32: ; CHECK: # %bb.0: 
# %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6948,10 +7038,10 @@ define void @test_vsoxseg5_mask_nxv2i16_nxv2i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6967,10 +7057,10 @@ define void @test_vsoxseg5_nxv2i16_nxv2i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg5_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6983,10 +7073,10 @@ define void @test_vsoxseg5_mask_nxv2i16_nxv2i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg5_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7002,10 +7092,10 @@ define void @test_vsoxseg5_nxv2i16_nxv2i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg5_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7018,10 +7108,10 @@ define void @test_vsoxseg5_mask_nxv2i16_nxv2i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7037,11 +7127,11 @@ define void @test_vsoxseg6_nxv2i16_nxv2i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg6_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: 
vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7054,11 +7144,11 @@ define void @test_vsoxseg6_mask_nxv2i16_nxv2i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7074,11 +7164,11 @@ define void @test_vsoxseg6_nxv2i16_nxv2i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg6_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7091,11 +7181,11 @@ define void @test_vsoxseg6_mask_nxv2i16_nxv2i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg6_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7111,11 +7201,11 @@ define void @test_vsoxseg6_nxv2i16_nxv2i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg6_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7128,11 +7218,11 @@ define void @test_vsoxseg6_mask_nxv2i16_nxv2i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7148,12 +7238,12 @@ define void @test_vsoxseg7_nxv2i16_nxv2i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg7_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; 
CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7166,12 +7256,12 @@ define void @test_vsoxseg7_mask_nxv2i16_nxv2i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7187,12 +7277,12 @@ define void @test_vsoxseg7_nxv2i16_nxv2i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg7_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7205,12 +7295,12 @@ define void @test_vsoxseg7_mask_nxv2i16_nxv2i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg7_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7226,12 +7316,12 @@ define void @test_vsoxseg7_nxv2i16_nxv2i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg7_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7244,12 +7334,12 @@ define void @test_vsoxseg7_mask_nxv2i16_nxv2i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: 
vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7265,13 +7355,13 @@ define void @test_vsoxseg8_nxv2i16_nxv2i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg8_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7284,13 +7374,13 @@ define void @test_vsoxseg8_mask_nxv2i16_nxv2i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7306,13 +7396,13 @@ define void @test_vsoxseg8_nxv2i16_nxv2i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg8_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7325,13 +7415,13 @@ define void @test_vsoxseg8_mask_nxv2i16_nxv2i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg8_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7347,13 +7437,13 @@ define void @test_vsoxseg8_nxv2i16_nxv2i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg8_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, 
v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7366,13 +7456,13 @@ define void @test_vsoxseg8_mask_nxv2i16_nxv2i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7387,6 +7477,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i32.nxv4i16(, %val, i32* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -7400,6 +7491,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i32_nxv4i16( %val, i32* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -7416,6 +7508,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i32.nxv4i8(, %val, i32* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -7429,6 +7522,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i32_nxv4i8( %val, i32* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -7445,6 +7539,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i32.nxv4i32(, %val, i32* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -7458,6 +7553,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i32_nxv4i32( %val, i32* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli 
zero, a1, e32, m2, ta, mu @@ -7475,8 +7571,8 @@ define void @test_vsoxseg3_nxv4i32_nxv4i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg3_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7489,8 +7585,8 @@ define void @test_vsoxseg3_mask_nxv4i32_nxv4i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7506,8 +7602,8 @@ define void @test_vsoxseg3_nxv4i32_nxv4i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg3_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7520,8 +7616,8 @@ define void @test_vsoxseg3_mask_nxv4i32_nxv4i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7537,8 +7633,8 @@ define void @test_vsoxseg3_nxv4i32_nxv4i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg3_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7551,8 +7647,8 @@ define void @test_vsoxseg3_mask_nxv4i32_nxv4i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7568,9 +7664,9 @@ define void @test_vsoxseg4_nxv4i32_nxv4i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg4_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7583,9 +7679,9 @@ define void @test_vsoxseg4_mask_nxv4i32_nxv4i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), 
v10, v0.t
 ; CHECK-NEXT:    ret
@@ -7601,9 +7697,9 @@ define void @test_vsoxseg4_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, i32* %base, <
 ; CHECK-LABEL: test_vsoxseg4_nxv4i32_nxv4i8:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
-; CHECK-NEXT:    vmv2r.v v18, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
+; CHECK-NEXT:    vmv2r.v v18, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsoxseg4ei8.v v12, (a0), v10
 ; CHECK-NEXT:    ret
@@ -7616,9 +7712,9 @@ define void @test_vsoxseg4_mask_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, i32* %ba
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i32_nxv4i8:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
-; CHECK-NEXT:    vmv2r.v v18, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
+; CHECK-NEXT:    vmv2r.v v18, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsoxseg4ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT:    ret
@@ -7634,9 +7730,9 @@ define void @test_vsoxseg4_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg4_nxv4i32_nxv4i32:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
-; CHECK-NEXT:    vmv2r.v v18, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
+; CHECK-NEXT:    vmv2r.v v18, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsoxseg4ei32.v v12, (a0), v10
 ; CHECK-NEXT:    ret
@@ -7649,9 +7745,9 @@ define void @test_vsoxseg4_mask_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i32_nxv4i32:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
-; CHECK-NEXT:    vmv2r.v v18, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
+; CHECK-NEXT:    vmv2r.v v18, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsoxseg4ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT:    ret
@@ -7666,6 +7762,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16f16.nxv16i16(<vscale x 16 x half>,<v
 define void @test_vsoxseg2_nxv16f16_nxv16i16(<vscale x 16 x half> %val, half* %base, <vscale x 16 x i16> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv16f16_nxv16i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, mu
@@ -7679,6 +7776,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv16f16_nxv16i16(<vscale x 16 x half> %val, half* %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv16f16_nxv16i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, mu
@@ -7695,6 +7793,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16f16.nxv16i8(<vscale x 16 x half>,<vs
 define void @test_vsoxseg2_nxv16f16_nxv16i8(<vscale x 16 x half> %val, half* %base, <vscale x 16 x i8> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv16f16_nxv16i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv2r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, mu
@@ -7708,6 +7807,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv16f16_nxv16i8(<vscale x 16 x half> %val, half* %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv16f16_nxv16i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv2r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, mu
@@ -7724,6 +7824,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16f16.nxv16i32(<vscale x 16 x half>,<v
 define void @test_vsoxseg2_nxv16f16_nxv16i32(<vscale x 16 x half> %val, half* %base, <vscale x 16 x i32> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv16f16_nxv16i32:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT:    vsoxseg2ei32.v v8, (a0), v16
@@ -7736,6 +7837,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv16f16_nxv16i32(<vscale x 16 x half> %val, half* %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv16f16_nxv16i32:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT:    vsoxseg2ei32.v v8, (a0), v16, v0.t
@@ -7751,6 +7853,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f64.nxv4i16(<vscale x 4 x double>,<vs
 define void @test_vsoxseg2_nxv4f64_nxv4i16(<vscale x 4 x double> %val, double* %base, <vscale x 4 x i16> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv4f64_nxv4i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv1r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m4, ta, mu
@@ -7764,6 +7867,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv4f64_nxv4i16(<vscale x 4 x double> %val, double* %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv4f64_nxv4i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv1r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m4, ta, mu
@@ -7780,6 +7884,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f64.nxv4i8(<vscale x 4 x double>,<vsc
 define void @test_vsoxseg2_nxv4f64_nxv4i8(<vscale x 4 x double> %val, double* %base, <vscale x 4 x i8> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv4f64_nxv4i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv1r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m4, ta, mu
@@ -7793,6 +7898,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv4f64_nxv4i8(<vscale x 4 x double> %val, double* %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv4f64_nxv4i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv1r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m4, ta, mu
@@ -7809,6 +7915,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f64.nxv4i32(<vscale x 4 x double>,<vs
 define void @test_vsoxseg2_nxv4f64_nxv4i32(<vscale x 4 x double> %val, double* %base, <vscale x 4 x i32> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv4f64_nxv4i32:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv2r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m4, ta, mu
@@ -7822,6 +7929,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv4f64_nxv4i32(<vscale x 4 x double> %val, double* %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv4f64_nxv4i32:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv2r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m4, ta, mu
@@ -7838,6 +7946,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f64.nxv1i8(<vscale x 1 x double>,<vsc
 define void @test_vsoxseg2_nxv1f64_nxv1i8(<vscale x 1 x double> %val, double* %base, <vscale x 1 x i8> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv1f64_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT:    vmv1r.v v10, v9
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, mu
@@ -7851,6 +7960,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv1f64_nxv1i8(<vscale x 1 x double> %val, double* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f64_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT:    vmv1r.v v10, v9
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, mu
@@ -7867,6 +7977,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f64.nxv1i32(<vscale x 1 x double>,<vs
 define void @test_vsoxseg2_nxv1f64_nxv1i32(<vscale x 1 x double> %val, double* %base, <vscale x 1 x i32> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv1f64_nxv1i32:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT:    vmv1r.v v10, v9
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, mu
@@ -7880,6 +7991,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv1f64_nxv1i32(<vscale x 1 x double> %val, double* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f64_nxv1i32:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT:    vmv1r.v v10, v9
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, mu
@@ -7896,6 +8008,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f64.nxv1i16(<vscale x 1 x double>,<vs
 define void @test_vsoxseg2_nxv1f64_nxv1i16(<vscale x 1 x double> %val, double* %base, <vscale x 1 x i16> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv1f64_nxv1i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT:    vmv1r.v v10, v9
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, mu
@@ -7909,6 +8022,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv1f64_nxv1i16(<vscale x 1 x double> %val, double* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f64_nxv1i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT:    vmv1r.v v10, v9
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, mu
@@ -7926,8 +8040,8 @@ define void @test_vsoxseg3_nxv1f64_nxv1i8(<vscale x 1 x double> %val, double* %b
 ; CHECK-LABEL: test_vsoxseg3_nxv1f64_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv1r.v v10, v8
-; CHECK-NEXT:    vmv1r.v v11, v8
-; CHECK-NEXT:    vmv1r.v v12, v8
+; CHECK-NEXT:    vmv1r.v v11, v10
+; CHECK-NEXT:    vmv1r.v v12, v10
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT:    vsoxseg3ei8.v v10, (a0), v9
 ; CHECK-NEXT:    ret
@@ -7940,8 +8054,8 @@ define void @test_vsoxseg3_mask_nxv1f64_nxv1i8(<vscale x 1 x double> %val, doubl
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f64_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv1r.v v10, v8
-; CHECK-NEXT:    vmv1r.v v11, v8
-; CHECK-NEXT:    vmv1r.v v12, v8
+; CHECK-NEXT:    vmv1r.v v11, v10
+; CHECK-NEXT:    vmv1r.v v12, v10
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT:    vsoxseg3ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT:    ret
@@ -7957,8 +8071,8 @@ define void @test_vsoxseg3_nxv1f64_nxv1i32(<vscale x 1 x double> %val, double* %
 ; CHECK-LABEL: test_vsoxseg3_nxv1f64_nxv1i32:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv1r.v v10, v8
-; CHECK-NEXT:    vmv1r.v v11, v8
-; CHECK-NEXT:    vmv1r.v v12, v8
+; CHECK-NEXT:    vmv1r.v v11, v10
+; CHECK-NEXT:    vmv1r.v v12, v10
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT:    vsoxseg3ei32.v v10, (a0), v9
 ; CHECK-NEXT:    ret
@@ -7971,8 +8085,8 @@ define void @test_vsoxseg3_mask_nxv1f64_nxv1i32(<vscale x 1 x double> %val, doub
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f64_nxv1i32:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv1r.v v10, v8
-; CHECK-NEXT:    vmv1r.v v11, v8
-; CHECK-NEXT:    vmv1r.v v12, v8
+; CHECK-NEXT:    vmv1r.v v11, v10
+; CHECK-NEXT:    vmv1r.v v12, v10
 ; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT:    vsoxseg3ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT:    ret
@@ -7988,8 +8102,8 @@ define void @test_vsoxseg3_nxv1f64_nxv1i16(<vscale x 1 x double> %val, double* %
 ;
CHECK-LABEL: test_vsoxseg3_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8002,8 +8116,8 @@ define void @test_vsoxseg3_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8019,9 +8133,9 @@ define void @test_vsoxseg4_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsoxseg4_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8034,9 +8148,9 @@ define void @test_vsoxseg4_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8052,9 +8166,9 @@ define void @test_vsoxseg4_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsoxseg4_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8067,9 +8181,9 @@ define void @test_vsoxseg4_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8085,9 +8199,9 @@ define void @test_vsoxseg4_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsoxseg4_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8100,9 +8214,9 @@ define void @test_vsoxseg4_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; 
CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8118,10 +8232,10 @@ define void @test_vsoxseg5_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsoxseg5_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8134,10 +8248,10 @@ define void @test_vsoxseg5_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8153,10 +8267,10 @@ define void @test_vsoxseg5_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsoxseg5_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8169,10 +8283,10 @@ define void @test_vsoxseg5_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8188,10 +8302,10 @@ define void @test_vsoxseg5_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsoxseg5_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8204,10 +8318,10 @@ define void @test_vsoxseg5_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8223,11 +8337,11 @@ define void @test_vsoxseg6_nxv1f64_nxv1i8( %val, double* %b ; 
CHECK-LABEL: test_vsoxseg6_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8240,11 +8354,11 @@ define void @test_vsoxseg6_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8260,11 +8374,11 @@ define void @test_vsoxseg6_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsoxseg6_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8277,11 +8391,11 @@ define void @test_vsoxseg6_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8297,11 +8411,11 @@ define void @test_vsoxseg6_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsoxseg6_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8314,11 +8428,11 @@ define void @test_vsoxseg6_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; 
CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8334,12 +8448,12 @@ define void @test_vsoxseg7_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsoxseg7_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8352,12 +8466,12 @@ define void @test_vsoxseg7_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8373,12 +8487,12 @@ define void @test_vsoxseg7_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsoxseg7_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8391,12 +8505,12 @@ define void @test_vsoxseg7_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8412,12 +8526,12 @@ define void @test_vsoxseg7_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsoxseg7_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8430,12 +8544,12 @@ define void @test_vsoxseg7_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: 
test_vsoxseg7_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8451,13 +8565,13 @@ define void @test_vsoxseg8_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsoxseg8_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8470,13 +8584,13 @@ define void @test_vsoxseg8_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8492,13 +8606,13 @@ define void @test_vsoxseg8_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsoxseg8_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8511,13 +8625,13 @@ define void @test_vsoxseg8_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ 
-8533,13 +8647,13 @@ define void @test_vsoxseg8_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsoxseg8_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8552,13 +8666,13 @@ define void @test_vsoxseg8_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8573,6 +8687,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f32.nxv2i32(, %val, float* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -8586,6 +8701,7 @@ entry: define void @test_vsoxseg2_mask_nxv2f32_nxv2i32( %val, float* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -8602,6 +8718,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f32.nxv2i8(, %val, float* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -8615,6 +8732,7 @@ entry: define void @test_vsoxseg2_mask_nxv2f32_nxv2i8( %val, float* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -8631,6 +8749,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f32.nxv2i16(, %val, float* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -8644,6 +8763,7 @@ entry: define void @test_vsoxseg2_mask_nxv2f32_nxv2i16( %val, float* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def 
$v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -8661,8 +8781,8 @@ define void @test_vsoxseg3_nxv2f32_nxv2i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg3_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8675,8 +8795,8 @@ define void @test_vsoxseg3_mask_nxv2f32_nxv2i32( %val, float ; CHECK-LABEL: test_vsoxseg3_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8692,8 +8812,8 @@ define void @test_vsoxseg3_nxv2f32_nxv2i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg3_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8706,8 +8826,8 @@ define void @test_vsoxseg3_mask_nxv2f32_nxv2i8( %val, float* ; CHECK-LABEL: test_vsoxseg3_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8723,8 +8843,8 @@ define void @test_vsoxseg3_nxv2f32_nxv2i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg3_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8737,8 +8857,8 @@ define void @test_vsoxseg3_mask_nxv2f32_nxv2i16( %val, float ; CHECK-LABEL: test_vsoxseg3_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8754,9 +8874,9 @@ define void @test_vsoxseg4_nxv2f32_nxv2i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg4_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8769,9 +8889,9 @@ define void @test_vsoxseg4_mask_nxv2f32_nxv2i32( %val, float ; CHECK-LABEL: test_vsoxseg4_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: 
vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8787,9 +8907,9 @@ define void @test_vsoxseg4_nxv2f32_nxv2i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg4_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8802,9 +8922,9 @@ define void @test_vsoxseg4_mask_nxv2f32_nxv2i8( %val, float* ; CHECK-LABEL: test_vsoxseg4_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8820,9 +8940,9 @@ define void @test_vsoxseg4_nxv2f32_nxv2i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg4_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8835,9 +8955,9 @@ define void @test_vsoxseg4_mask_nxv2f32_nxv2i16( %val, float ; CHECK-LABEL: test_vsoxseg4_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8853,10 +8973,10 @@ define void @test_vsoxseg5_nxv2f32_nxv2i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg5_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8869,10 +8989,10 @@ define void @test_vsoxseg5_mask_nxv2f32_nxv2i32( %val, float ; CHECK-LABEL: test_vsoxseg5_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8888,10 +9008,10 @@ define void @test_vsoxseg5_nxv2f32_nxv2i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg5_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: 
vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8904,10 +9024,10 @@ define void @test_vsoxseg5_mask_nxv2f32_nxv2i8( %val, float* ; CHECK-LABEL: test_vsoxseg5_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8923,10 +9043,10 @@ define void @test_vsoxseg5_nxv2f32_nxv2i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg5_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8939,10 +9059,10 @@ define void @test_vsoxseg5_mask_nxv2f32_nxv2i16( %val, float ; CHECK-LABEL: test_vsoxseg5_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8958,11 +9078,11 @@ define void @test_vsoxseg6_nxv2f32_nxv2i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg6_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8975,11 +9095,11 @@ define void @test_vsoxseg6_mask_nxv2f32_nxv2i32( %val, float ; CHECK-LABEL: test_vsoxseg6_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8995,11 +9115,11 @@ define void @test_vsoxseg6_nxv2f32_nxv2i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg6_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v 
v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9012,11 +9132,11 @@ define void @test_vsoxseg6_mask_nxv2f32_nxv2i8( %val, float* ; CHECK-LABEL: test_vsoxseg6_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9032,11 +9152,11 @@ define void @test_vsoxseg6_nxv2f32_nxv2i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg6_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9049,11 +9169,11 @@ define void @test_vsoxseg6_mask_nxv2f32_nxv2i16( %val, float ; CHECK-LABEL: test_vsoxseg6_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9069,12 +9189,12 @@ define void @test_vsoxseg7_nxv2f32_nxv2i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg7_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9087,12 +9207,12 @@ define void @test_vsoxseg7_mask_nxv2f32_nxv2i32( %val, float ; CHECK-LABEL: test_vsoxseg7_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9108,12 +9228,12 @@ define void @test_vsoxseg7_nxv2f32_nxv2i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg7_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 
-; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9126,12 +9246,12 @@ define void @test_vsoxseg7_mask_nxv2f32_nxv2i8( %val, float* ; CHECK-LABEL: test_vsoxseg7_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9147,12 +9267,12 @@ define void @test_vsoxseg7_nxv2f32_nxv2i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg7_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9165,12 +9285,12 @@ define void @test_vsoxseg7_mask_nxv2f32_nxv2i16( %val, float ; CHECK-LABEL: test_vsoxseg7_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9186,13 +9306,13 @@ define void @test_vsoxseg8_nxv2f32_nxv2i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg8_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9205,13 +9325,13 @@ define void @test_vsoxseg8_mask_nxv2f32_nxv2i32( %val, float ; CHECK-LABEL: test_vsoxseg8_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 
-; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9227,13 +9347,13 @@ define void @test_vsoxseg8_nxv2f32_nxv2i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg8_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9246,13 +9366,13 @@ define void @test_vsoxseg8_mask_nxv2f32_nxv2i8( %val, float* ; CHECK-LABEL: test_vsoxseg8_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9268,13 +9388,13 @@ define void @test_vsoxseg8_nxv2f32_nxv2i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg8_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9287,13 +9407,13 @@ define void @test_vsoxseg8_mask_nxv2f32_nxv2i16( %val, float ; CHECK-LABEL: test_vsoxseg8_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9308,6 +9428,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f16.nxv1i8(, %val, half* %base, %index, i32 %vl) { ; 
CHECK-LABEL: test_vsoxseg2_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -9321,6 +9442,7 @@ entry: define void @test_vsoxseg2_mask_nxv1f16_nxv1i8( %val, half* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -9337,6 +9459,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f16.nxv1i32(, %val, half* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -9350,6 +9473,7 @@ entry: define void @test_vsoxseg2_mask_nxv1f16_nxv1i32( %val, half* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -9366,6 +9490,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f16.nxv1i16(, %val, half* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -9379,6 +9504,7 @@ entry: define void @test_vsoxseg2_mask_nxv1f16_nxv1i16( %val, half* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -9396,8 +9522,8 @@ define void @test_vsoxseg3_nxv1f16_nxv1i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg3_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9410,8 +9536,8 @@ define void @test_vsoxseg3_mask_nxv1f16_nxv1i8( %val, half* % ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9427,8 +9553,8 @@ define void @test_vsoxseg3_nxv1f16_nxv1i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg3_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9441,8 +9567,8 @@ define void @test_vsoxseg3_mask_nxv1f16_nxv1i32( %val, half* ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: 
vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9458,8 +9584,8 @@ define void @test_vsoxseg3_nxv1f16_nxv1i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg3_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9472,8 +9598,8 @@ define void @test_vsoxseg3_mask_nxv1f16_nxv1i16( %val, half* ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9489,9 +9615,9 @@ define void @test_vsoxseg4_nxv1f16_nxv1i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg4_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9504,9 +9630,9 @@ define void @test_vsoxseg4_mask_nxv1f16_nxv1i8( %val, half* % ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9522,9 +9648,9 @@ define void @test_vsoxseg4_nxv1f16_nxv1i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg4_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9537,9 +9663,9 @@ define void @test_vsoxseg4_mask_nxv1f16_nxv1i32( %val, half* ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9555,9 +9681,9 @@ define void @test_vsoxseg4_nxv1f16_nxv1i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg4_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; 
CHECK-NEXT: ret @@ -9570,9 +9696,9 @@ define void @test_vsoxseg4_mask_nxv1f16_nxv1i16( %val, half* ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9588,10 +9714,10 @@ define void @test_vsoxseg5_nxv1f16_nxv1i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg5_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9604,10 +9730,10 @@ define void @test_vsoxseg5_mask_nxv1f16_nxv1i8( %val, half* % ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9623,10 +9749,10 @@ define void @test_vsoxseg5_nxv1f16_nxv1i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg5_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9639,10 +9765,10 @@ define void @test_vsoxseg5_mask_nxv1f16_nxv1i32( %val, half* ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9658,10 +9784,10 @@ define void @test_vsoxseg5_nxv1f16_nxv1i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg5_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9674,10 +9800,10 @@ define void @test_vsoxseg5_mask_nxv1f16_nxv1i16( %val, half* ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: 
vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9693,11 +9819,11 @@ define void @test_vsoxseg6_nxv1f16_nxv1i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg6_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9710,11 +9836,11 @@ define void @test_vsoxseg6_mask_nxv1f16_nxv1i8( %val, half* % ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9730,11 +9856,11 @@ define void @test_vsoxseg6_nxv1f16_nxv1i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg6_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9747,11 +9873,11 @@ define void @test_vsoxseg6_mask_nxv1f16_nxv1i32( %val, half* ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9767,11 +9893,11 @@ define void @test_vsoxseg6_nxv1f16_nxv1i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg6_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9784,11 +9910,11 @@ define void @test_vsoxseg6_mask_nxv1f16_nxv1i16( %val, half* ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: 
vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9804,12 +9930,12 @@ define void @test_vsoxseg7_nxv1f16_nxv1i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg7_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9822,12 +9948,12 @@ define void @test_vsoxseg7_mask_nxv1f16_nxv1i8( %val, half* % ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9843,12 +9969,12 @@ define void @test_vsoxseg7_nxv1f16_nxv1i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg7_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9861,12 +9987,12 @@ define void @test_vsoxseg7_mask_nxv1f16_nxv1i32( %val, half* ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9882,12 +10008,12 @@ define void @test_vsoxseg7_nxv1f16_nxv1i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg7_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; 
CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9900,12 +10026,12 @@ define void @test_vsoxseg7_mask_nxv1f16_nxv1i16( %val, half* ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9921,13 +10047,13 @@ define void @test_vsoxseg8_nxv1f16_nxv1i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg8_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9940,13 +10066,13 @@ define void @test_vsoxseg8_mask_nxv1f16_nxv1i8( %val, half* % ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9962,13 +10088,13 @@ define void @test_vsoxseg8_nxv1f16_nxv1i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg8_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9981,13 +10107,13 @@ define void @test_vsoxseg8_mask_nxv1f16_nxv1i32( %val, half* ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: 
vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -10003,13 +10129,13 @@ define void @test_vsoxseg8_nxv1f16_nxv1i16(<vscale x 1 x half> %val, half* %base
 ; CHECK-LABEL: test_vsoxseg8_nxv1f16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -10022,13 +10148,13 @@ define void @test_vsoxseg8_mask_nxv1f16_nxv1i16(<vscale x 1 x half> %val, half*
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -10043,6 +10169,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f32.nxv1i8(<vscale x 1 x float>,
 define void @test_vsoxseg2_nxv1f32_nxv1i8(<vscale x 1 x float> %val, float* %base, <vscale x 1 x i8> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv1f32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -10056,6 +10183,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv1f32_nxv1i8(<vscale x 1 x float> %val, float* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -10072,6 +10200,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f32.nxv1i32(<vscale x 1 x float>,
 define void @test_vsoxseg2_nxv1f32_nxv1i32(<vscale x 1 x float> %val, float* %base, <vscale x 1 x i32> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv1f32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -10085,6 +10214,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv1f32_nxv1i32(<vscale x 1 x float> %val, float* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -10101,6 +10231,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f32.nxv1i16(<vscale x 1 x float>,
 define void @test_vsoxseg2_nxv1f32_nxv1i16(<vscale x 1 x float> %val, float* %base, <vscale x 1 x i16> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv1f32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -10114,6 +10245,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv1f32_nxv1i16(<vscale x 1 x float> %val, float* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -10131,8 +10263,8 @@ define void @test_vsoxseg3_nxv1f32_nxv1i8(<vscale x 1 x float> %val, float* %bas
 ; CHECK-LABEL: test_vsoxseg3_nxv1f32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -10145,8 +10277,8 @@ define void @test_vsoxseg3_mask_nxv1f32_nxv1i8(<vscale x 1 x float> %val, float*
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -10162,8 +10294,8 @@ define void @test_vsoxseg3_nxv1f32_nxv1i32(<vscale x 1 x float> %val, float* %ba
 ; CHECK-LABEL: test_vsoxseg3_nxv1f32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -10176,8 +10308,8 @@ define void @test_vsoxseg3_mask_nxv1f32_nxv1i32(<vscale x 1 x float> %val, float
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -10193,8 +10325,8 @@ define void @test_vsoxseg3_nxv1f32_nxv1i16(<vscale x 1 x float> %val, float* %ba
 ; CHECK-LABEL: test_vsoxseg3_nxv1f32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -10207,8 +10339,8 @@ define void @test_vsoxseg3_mask_nxv1f32_nxv1i16(<vscale x 1 x float> %val, float
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -10224,9 +10356,9 @@ define void @test_vsoxseg4_nxv1f32_nxv1i8(<vscale x 1 x float> %val, float* %bas
 ; CHECK-LABEL: test_vsoxseg4_nxv1f32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v
v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10239,9 +10371,9 @@ define void @test_vsoxseg4_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10257,9 +10389,9 @@ define void @test_vsoxseg4_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg4_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10272,9 +10404,9 @@ define void @test_vsoxseg4_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10290,9 +10422,9 @@ define void @test_vsoxseg4_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg4_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10305,9 +10437,9 @@ define void @test_vsoxseg4_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10323,10 +10455,10 @@ define void @test_vsoxseg5_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg5_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10339,10 +10471,10 @@ define void @test_vsoxseg5_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; 
CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10358,10 +10490,10 @@ define void @test_vsoxseg5_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg5_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10374,10 +10506,10 @@ define void @test_vsoxseg5_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10393,10 +10525,10 @@ define void @test_vsoxseg5_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg5_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10409,10 +10541,10 @@ define void @test_vsoxseg5_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10428,11 +10560,11 @@ define void @test_vsoxseg6_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg6_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10445,11 +10577,11 @@ define void @test_vsoxseg6_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; 
CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10465,11 +10597,11 @@ define void @test_vsoxseg6_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg6_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10482,11 +10614,11 @@ define void @test_vsoxseg6_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10502,11 +10634,11 @@ define void @test_vsoxseg6_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg6_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10519,11 +10651,11 @@ define void @test_vsoxseg6_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10539,12 +10671,12 @@ define void @test_vsoxseg7_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg7_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10557,12 +10689,12 @@ define void @test_vsoxseg7_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: 
vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10578,12 +10710,12 @@ define void @test_vsoxseg7_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg7_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10596,12 +10728,12 @@ define void @test_vsoxseg7_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10617,12 +10749,12 @@ define void @test_vsoxseg7_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg7_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10635,12 +10767,12 @@ define void @test_vsoxseg7_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10656,13 +10788,13 @@ define void @test_vsoxseg8_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg8_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, 
v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10675,13 +10807,13 @@ define void @test_vsoxseg8_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10697,13 +10829,13 @@ define void @test_vsoxseg8_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg8_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10716,13 +10848,13 @@ define void @test_vsoxseg8_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -10738,13 +10870,13 @@ define void @test_vsoxseg8_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg8_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -10757,13 +10889,13 @@ define void @test_vsoxseg8_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; 
CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -10778,6 +10910,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f16.nxv8i16(<vscale x 8 x half>,
 define void @test_vsoxseg2_nxv8f16_nxv8i16(<vscale x 8 x half> %val, half* %base, <vscale x 8 x i16> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8f16_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -10791,6 +10924,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8f16_nxv8i16(<vscale x 8 x half> %val, half* %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f16_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -10807,6 +10941,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f16.nxv8i8(<vscale x 8 x half>,
 define void @test_vsoxseg2_nxv8f16_nxv8i8(<vscale x 8 x half> %val, half* %base, <vscale x 8 x i8> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8f16_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -10820,6 +10955,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8f16_nxv8i8(<vscale x 8 x half> %val, half* %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f16_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -10836,6 +10972,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f16.nxv8i32(<vscale x 8 x half>,
 define void @test_vsoxseg2_nxv8f16_nxv8i32(<vscale x 8 x half> %val, half* %base, <vscale x 8 x i32> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8f16_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v12
@@ -10848,6 +10985,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8f16_nxv8i32(<vscale x 8 x half> %val, half* %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f16_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v12, v0.t
@@ -10864,8 +11002,8 @@ define void @test_vsoxseg3_nxv8f16_nxv8i16(<vscale x 8 x half> %val, half* %base
 ; CHECK-LABEL: test_vsoxseg3_nxv8f16_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -10878,8 +11016,8 @@ define void @test_vsoxseg3_mask_nxv8f16_nxv8i16(<vscale x 8 x half> %val, half*
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv8f16_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16,
m2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -10895,8 +11033,8 @@ define void @test_vsoxseg3_nxv8f16_nxv8i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg3_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -10909,8 +11047,8 @@ define void @test_vsoxseg3_mask_nxv8f16_nxv8i8( %val, half* % ; CHECK-LABEL: test_vsoxseg3_mask_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -10925,11 +11063,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv8f16.nxv8i32(, %val, half* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v16 +; CHECK-NEXT: vsoxseg3ei32.v v16, (a0), v12 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.nxv8f16.nxv8i32( %val, %val, %val, half* %base, %index, i32 %vl) @@ -10939,11 +11077,11 @@ entry: define void @test_vsoxseg3_mask_nxv8f16_nxv8i32( %val, half* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v16, v0.t +; CHECK-NEXT: vsoxseg3ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.mask.nxv8f16.nxv8i32( %val, %val, %val, half* %base, %index, %mask, i32 %vl) @@ -10957,9 +11095,9 @@ define void @test_vsoxseg4_nxv8f16_nxv8i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg4_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -10972,9 +11110,9 @@ define void @test_vsoxseg4_mask_nxv8f16_nxv8i16( %val, half* ; CHECK-LABEL: test_vsoxseg4_mask_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -10990,9 +11128,9 @@ define void @test_vsoxseg4_nxv8f16_nxv8i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg4_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v 
v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -11005,9 +11143,9 @@ define void @test_vsoxseg4_mask_nxv8f16_nxv8i8(<vscale x 8 x half> %val, half* %
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv8f16_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -11023,9 +11161,9 @@ define void @test_vsoxseg4_nxv8f16_nxv8i32(<vscale x 8 x half> %val, half* %base
 ; CHECK-LABEL: test_vsoxseg4_nxv8f16_nxv8i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei32.v v16, (a0), v12
 ; CHECK-NEXT: ret
@@ -11038,9 +11176,9 @@ define void @test_vsoxseg4_mask_nxv8f16_nxv8i32(<vscale x 8 x half> %val, half*
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv8f16_nxv8i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei32.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
@@ -11055,6 +11193,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f32.nxv8i16(<vscale x 8 x float>,
 define void @test_vsoxseg2_nxv8f32_nxv8i16(<vscale x 8 x float> %val, float* %base, <vscale x 8 x i16> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8f32_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -11068,6 +11207,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8f32_nxv8i16(<vscale x 8 x float> %val, float* %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f32_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -11084,6 +11224,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f32.nxv8i8(<vscale x 8 x float>,
 define void @test_vsoxseg2_nxv8f32_nxv8i8(<vscale x 8 x float> %val, float* %base, <vscale x 8 x i8> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8f32_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -11097,6 +11238,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8f32_nxv8i8(<vscale x 8 x float> %val, float* %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f32_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -11113,6 +11255,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f32.nxv8i32(<vscale x 8 x float>,
 define void @test_vsoxseg2_nxv8f32_nxv8i32(<vscale x 8 x float> %val, float* %base, <vscale x 8 x i32> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8f32_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -11126,6 +11269,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8f32_nxv8i32(<vscale x 8 x float> %val, float* %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f32_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -11142,6 +11286,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f64.nxv2i32(<vscale x 2 x double>,
 define void @test_vsoxseg2_nxv2f64_nxv2i32(<vscale x 2 x double> %val, double* %base, <vscale x 2 x i32> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv2f64_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11155,6 +11300,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv2f64_nxv2i32(<vscale x 2 x double> %val, double* %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv2f64_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11171,6 +11317,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f64.nxv2i8(<vscale x 2 x double>,
 define void @test_vsoxseg2_nxv2f64_nxv2i8(<vscale x 2 x double> %val, double* %base, <vscale x 2 x i8> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv2f64_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11184,6 +11331,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv2f64_nxv2i8(<vscale x 2 x double> %val, double* %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv2f64_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11200,6 +11348,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f64.nxv2i16(<vscale x 2 x double>,
 define void @test_vsoxseg2_nxv2f64_nxv2i16(<vscale x 2 x double> %val, double* %base, <vscale x 2 x i16> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv2f64_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11213,6 +11362,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv2f64_nxv2i16(<vscale x 2 x double> %val, double* %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv2f64_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11230,8 +11380,8 @@ define void @test_vsoxseg3_nxv2f64_nxv2i32(<vscale x 2 x double> %val, double* %
 ; CHECK-LABEL: test_vsoxseg3_nxv2f64_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
 ; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -11244,8 +11394,8 @@ define void @test_vsoxseg3_mask_nxv2f64_nxv2i32(<vscale x 2 x double> %val, doub
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv2f64_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2,
ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11261,8 +11411,8 @@ define void @test_vsoxseg3_nxv2f64_nxv2i8( %val, double* %b ; CHECK-LABEL: test_vsoxseg3_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11275,8 +11425,8 @@ define void @test_vsoxseg3_mask_nxv2f64_nxv2i8( %val, doubl ; CHECK-LABEL: test_vsoxseg3_mask_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11292,8 +11442,8 @@ define void @test_vsoxseg3_nxv2f64_nxv2i16( %val, double* % ; CHECK-LABEL: test_vsoxseg3_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11306,8 +11456,8 @@ define void @test_vsoxseg3_mask_nxv2f64_nxv2i16( %val, doub ; CHECK-LABEL: test_vsoxseg3_mask_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11323,9 +11473,9 @@ define void @test_vsoxseg4_nxv2f64_nxv2i32( %val, double* % ; CHECK-LABEL: test_vsoxseg4_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11338,9 +11488,9 @@ define void @test_vsoxseg4_mask_nxv2f64_nxv2i32( %val, doub ; CHECK-LABEL: test_vsoxseg4_mask_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11356,9 +11506,9 @@ define void @test_vsoxseg4_nxv2f64_nxv2i8( %val, double* %b ; CHECK-LABEL: test_vsoxseg4_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11371,9 +11521,9 @@ define void @test_vsoxseg4_mask_nxv2f64_nxv2i8( %val, doubl ; CHECK-LABEL: test_vsoxseg4_mask_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: 
vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -11389,9 +11539,9 @@ define void @test_vsoxseg4_nxv2f64_nxv2i16(<vscale x 2 x double> %val, double* %
 ; CHECK-LABEL: test_vsoxseg4_nxv2f64_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -11404,9 +11554,9 @@ define void @test_vsoxseg4_mask_nxv2f64_nxv2i16(<vscale x 2 x double> %val, doub
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv2f64_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
 ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -11421,6 +11571,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f16.nxv4i16(<vscale x 4 x half>,
 define void @test_vsoxseg2_nxv4f16_nxv4i16(<vscale x 4 x half> %val, half* %base, <vscale x 4 x i16> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv4f16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -11434,6 +11585,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv4f16_nxv4i16(<vscale x 4 x half> %val, half* %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv4f16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -11450,6 +11602,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f16.nxv4i8(<vscale x 4 x half>,
 define void @test_vsoxseg2_nxv4f16_nxv4i8(<vscale x 4 x half> %val, half* %base, <vscale x 4 x i8> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv4f16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -11463,6 +11616,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv4f16_nxv4i8(<vscale x 4 x half> %val, half* %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv4f16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -11479,6 +11633,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f16.nxv4i32(<vscale x 4 x half>,
 define void @test_vsoxseg2_nxv4f16_nxv4i32(<vscale x 4 x half> %val, half* %base, <vscale x 4 x i32> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv4f16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v10
@@ -11491,6 +11646,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv4f16_nxv4i32(<vscale x 4 x half> %val, half* %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv4f16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v10, v0.t
@@ -11507,8 +11663,8 @@ define void @test_vsoxseg3_nxv4f16_nxv4i16(<vscale x 4 x half> %val, half* %base
 ; CHECK-LABEL: test_vsoxseg3_nxv4f16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -11521,8 +11677,8 @@ define void @test_vsoxseg3_mask_nxv4f16_nxv4i16(<vscale x 4 x half> %val, half*
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv4f16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -11538,8 +11694,8 @@ define void @test_vsoxseg3_nxv4f16_nxv4i8(<vscale x 4 x half> %val, half* %base,
 ; CHECK-LABEL: test_vsoxseg3_nxv4f16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -11552,8 +11708,8 @@ define void @test_vsoxseg3_mask_nxv4f16_nxv4i8(<vscale x 4 x half> %val, half* %
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv4f16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -11568,11 +11724,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv4f16.nxv4i32(<vscale x 4 x half>,
 define void @test_vsoxseg3_nxv4f16_nxv4i32(<vscale x 4 x half> %val, half* %base, <vscale x 4 x i32> %index, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_nxv4f16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
-; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v12
+; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10
 ; CHECK-NEXT: ret
 entry:
  tail call void @llvm.riscv.vsoxseg3.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, half* %base, <vscale x 4 x i32> %index, i32 %vl)
@@ -11582,11 +11738,11 @@ entry:
 define void @test_vsoxseg3_mask_nxv4f16_nxv4i32(<vscale x 4 x half> %val, half* %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv4f16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
-; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
 entry:
  tail call void @llvm.riscv.vsoxseg3.mask.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, half* %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
@@ -11600,9 +11756,9 @@ define void @test_vsoxseg4_nxv4f16_nxv4i16(<vscale x 4 x half> %val, half* %base
 ; CHECK-LABEL: test_vsoxseg4_nxv4f16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -11615,9 +11771,9 @@ define void @test_vsoxseg4_mask_nxv4f16_nxv4i16(<vscale x 4 x half> %val, half*
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv4f16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ;
CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11633,9 +11789,9 @@ define void @test_vsoxseg4_nxv4f16_nxv4i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg4_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11648,9 +11804,9 @@ define void @test_vsoxseg4_mask_nxv4f16_nxv4i8( %val, half* %
; CHECK-LABEL: test_vsoxseg4_mask_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11666,9 +11822,9 @@ define void @test_vsoxseg4_nxv4f16_nxv4i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg4_nxv4f16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11681,9 +11837,9 @@ define void @test_vsoxseg4_mask_nxv4f16_nxv4i32( %val, half*
; CHECK-LABEL: test_vsoxseg4_mask_nxv4f16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11699,10 +11855,10 @@ define void @test_vsoxseg5_nxv4f16_nxv4i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg5_nxv4f16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11715,10 +11871,10 @@ define void @test_vsoxseg5_mask_nxv4f16_nxv4i16( %val, half*
; CHECK-LABEL: test_vsoxseg5_mask_nxv4f16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11734,10 +11890,10 @@ define void @test_vsoxseg5_nxv4f16_nxv4i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg5_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11750,10 +11906,10 @@ define void @test_vsoxseg5_mask_nxv4f16_nxv4i8( %val, half* %
; CHECK-LABEL: test_vsoxseg5_mask_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11769,10 +11925,10 @@ define void @test_vsoxseg5_nxv4f16_nxv4i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg5_nxv4f16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11785,10 +11941,10 @@ define void @test_vsoxseg5_mask_nxv4f16_nxv4i32( %val, half*
; CHECK-LABEL: test_vsoxseg5_mask_nxv4f16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11804,11 +11960,11 @@ define void @test_vsoxseg6_nxv4f16_nxv4i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg6_nxv4f16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11821,11 +11977,11 @@ define void @test_vsoxseg6_mask_nxv4f16_nxv4i16( %val, half*
; CHECK-LABEL: test_vsoxseg6_mask_nxv4f16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11841,11 +11997,11 @@ define void @test_vsoxseg6_nxv4f16_nxv4i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg6_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11858,11 +12014,11 @@ define void @test_vsoxseg6_mask_nxv4f16_nxv4i8( %val, half* %
; CHECK-LABEL: test_vsoxseg6_mask_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11878,11 +12034,11 @@ define void @test_vsoxseg6_nxv4f16_nxv4i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg6_nxv4f16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11895,11 +12051,11 @@ define void @test_vsoxseg6_mask_nxv4f16_nxv4i32( %val, half*
; CHECK-LABEL: test_vsoxseg6_mask_nxv4f16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11915,12 +12071,12 @@ define void @test_vsoxseg7_nxv4f16_nxv4i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg7_nxv4f16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11933,12 +12089,12 @@ define void @test_vsoxseg7_mask_nxv4f16_nxv4i16( %val, half*
; CHECK-LABEL: test_vsoxseg7_mask_nxv4f16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11954,12 +12110,12 @@ define void @test_vsoxseg7_nxv4f16_nxv4i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg7_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11972,12 +12128,12 @@ define void @test_vsoxseg7_mask_nxv4f16_nxv4i8( %val, half* %
; CHECK-LABEL: test_vsoxseg7_mask_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11993,12 +12149,12 @@ define void @test_vsoxseg7_nxv4f16_nxv4i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg7_nxv4f16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -12011,12 +12167,12 @@ define void @test_vsoxseg7_mask_nxv4f16_nxv4i32( %val, half*
; CHECK-LABEL: test_vsoxseg7_mask_nxv4f16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -12032,13 +12188,13 @@ define void @test_vsoxseg8_nxv4f16_nxv4i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg8_nxv4f16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12051,13 +12207,13 @@ define void @test_vsoxseg8_mask_nxv4f16_nxv4i16( %val, half*
; CHECK-LABEL: test_vsoxseg8_mask_nxv4f16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12073,13 +12229,13 @@ define void @test_vsoxseg8_nxv4f16_nxv4i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg8_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12092,13 +12248,13 @@ define void @test_vsoxseg8_mask_nxv4f16_nxv4i8( %val, half* %
; CHECK-LABEL: test_vsoxseg8_mask_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12114,13 +12270,13 @@ define void @test_vsoxseg8_nxv4f16_nxv4i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg8_nxv4f16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -12133,13 +12289,13 @@ define void @test_vsoxseg8_mask_nxv4f16_nxv4i32( %val, half*
; CHECK-LABEL: test_vsoxseg8_mask_nxv4f16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -12154,6 +12310,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f16.nxv2i32(,
define void @test_vsoxseg2_nxv2f16_nxv2i32( %val, half* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -12167,6 +12324,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2f16_nxv2i32( %val, half* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -12183,6 +12341,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f16.nxv2i8(,
define void @test_vsoxseg2_nxv2f16_nxv2i8( %val, half* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -12196,6 +12355,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2f16_nxv2i8( %val, half* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -12212,6 +12372,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f16.nxv2i16(,
define void @test_vsoxseg2_nxv2f16_nxv2i16( %val, half* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -12225,6 +12386,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2f16_nxv2i16( %val, half* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -12242,8 +12404,8 @@ define void @test_vsoxseg3_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg3_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12256,8 +12418,8 @@ define void @test_vsoxseg3_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsoxseg3_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12273,8 +12435,8 @@ define void @test_vsoxseg3_nxv2f16_nxv2i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg3_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12287,8 +12449,8 @@ define void @test_vsoxseg3_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsoxseg3_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12304,8 +12466,8 @@ define void @test_vsoxseg3_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg3_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12318,8 +12480,8 @@ define void @test_vsoxseg3_mask_nxv2f16_nxv2i16( %val, half*
; CHECK-LABEL: test_vsoxseg3_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12335,9 +12497,9 @@ define void @test_vsoxseg4_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg4_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12350,9 +12512,9 @@ define void @test_vsoxseg4_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsoxseg4_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12368,9 +12530,9 @@ define void @test_vsoxseg4_nxv2f16_nxv2i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg4_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12383,9 +12545,9 @@ define void @test_vsoxseg4_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsoxseg4_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12401,9 +12563,9 @@ define void @test_vsoxseg4_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg4_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12416,9 +12578,9 @@ define void @test_vsoxseg4_mask_nxv2f16_nxv2i16( %val, half*
; CHECK-LABEL: test_vsoxseg4_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12434,10 +12596,10 @@ define void @test_vsoxseg5_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg5_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12450,10 +12612,10 @@ define void @test_vsoxseg5_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsoxseg5_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12469,10 +12631,10 @@ define void @test_vsoxseg5_nxv2f16_nxv2i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg5_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12485,10 +12647,10 @@ define void @test_vsoxseg5_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsoxseg5_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12504,10 +12666,10 @@ define void @test_vsoxseg5_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg5_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12520,10 +12682,10 @@ define void @test_vsoxseg5_mask_nxv2f16_nxv2i16( %val, half*
; CHECK-LABEL: test_vsoxseg5_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12539,11 +12701,11 @@ define void @test_vsoxseg6_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg6_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12556,11 +12718,11 @@ define void @test_vsoxseg6_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsoxseg6_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12576,11 +12738,11 @@ define void @test_vsoxseg6_nxv2f16_nxv2i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg6_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12593,11 +12755,11 @@ define void @test_vsoxseg6_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsoxseg6_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12613,11 +12775,11 @@ define void @test_vsoxseg6_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg6_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12630,11 +12792,11 @@ define void @test_vsoxseg6_mask_nxv2f16_nxv2i16( %val, half*
; CHECK-LABEL: test_vsoxseg6_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12650,12 +12812,12 @@ define void @test_vsoxseg7_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg7_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12668,12 +12830,12 @@ define void @test_vsoxseg7_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsoxseg7_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12689,12 +12851,12 @@ define void @test_vsoxseg7_nxv2f16_nxv2i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg7_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12707,12 +12869,12 @@ define void @test_vsoxseg7_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsoxseg7_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12728,12 +12890,12 @@ define void @test_vsoxseg7_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg7_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12746,12 +12908,12 @@ define void @test_vsoxseg7_mask_nxv2f16_nxv2i16( %val, half*
; CHECK-LABEL: test_vsoxseg7_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12767,13 +12929,13 @@ define void @test_vsoxseg8_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg8_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12786,13 +12948,13 @@ define void @test_vsoxseg8_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsoxseg8_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12808,13 +12970,13 @@ define void @test_vsoxseg8_nxv2f16_nxv2i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg8_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12827,13 +12989,13 @@ define void @test_vsoxseg8_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsoxseg8_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12849,13 +13011,13 @@ define void @test_vsoxseg8_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg8_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12868,13 +13030,13 @@ define void @test_vsoxseg8_mask_nxv2f16_nxv2i16( %val, half*
; CHECK-LABEL: test_vsoxseg8_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12889,6 +13051,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f32.nxv4i16(,
define void @test_vsoxseg2_nxv4f32_nxv4i16( %val, float* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4f32_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -12902,6 +13065,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4f32_nxv4i16( %val, float* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4f32_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -12918,6 +13082,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f32.nxv4i8(,
define void @test_vsoxseg2_nxv4f32_nxv4i8( %val, float* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4f32_nxv4i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -12931,6 +13096,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4f32_nxv4i8( %val, float* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4f32_nxv4i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -12947,6 +13113,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f32.nxv4i32(,
define void @test_vsoxseg2_nxv4f32_nxv4i32( %val, float* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4f32_nxv4i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -12960,6 +13127,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4f32_nxv4i32( %val, float* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4f32_nxv4i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -12977,8 +13145,8 @@ define void @test_vsoxseg3_nxv4f32_nxv4i16( %val, float* %ba
; CHECK-LABEL: test_vsoxseg3_nxv4f32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -12991,8 +13159,8 @@ define void @test_vsoxseg3_mask_nxv4f32_nxv4i16( %val, float
; CHECK-LABEL: test_vsoxseg3_mask_nxv4f32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13008,8 +13176,8 @@ define void @test_vsoxseg3_nxv4f32_nxv4i8( %val, float* %bas
; CHECK-LABEL: test_vsoxseg3_nxv4f32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13022,8 +13190,8 @@ define void @test_vsoxseg3_mask_nxv4f32_nxv4i8( %val, float*
; CHECK-LABEL: test_vsoxseg3_mask_nxv4f32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13039,8 +13207,8 @@ define void @test_vsoxseg3_nxv4f32_nxv4i32( %val, float* %ba
; CHECK-LABEL: test_vsoxseg3_nxv4f32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13053,8 +13221,8 @@ define void @test_vsoxseg3_mask_nxv4f32_nxv4i32( %val, float
; CHECK-LABEL: test_vsoxseg3_mask_nxv4f32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13070,9 +13238,9 @@ define void @test_vsoxseg4_nxv4f32_nxv4i16( %val, float* %ba
; CHECK-LABEL: test_vsoxseg4_nxv4f32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13085,9 +13253,9 @@ define void @test_vsoxseg4_mask_nxv4f32_nxv4i16( %val, float
; CHECK-LABEL: test_vsoxseg4_mask_nxv4f32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13103,9 +13271,9 @@ define void @test_vsoxseg4_nxv4f32_nxv4i8( %val, float* %bas
; CHECK-LABEL: test_vsoxseg4_nxv4f32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13118,9 +13286,9 @@ define void @test_vsoxseg4_mask_nxv4f32_nxv4i8( %val, float*
; CHECK-LABEL: test_vsoxseg4_mask_nxv4f32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13136,9 +13304,9 @@ define void @test_vsoxseg4_nxv4f32_nxv4i32( %val, float* %ba
; CHECK-LABEL: test_vsoxseg4_nxv4f32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13151,9 +13319,9 @@ define void @test_vsoxseg4_mask_nxv4f32_nxv4i32( %val, float
; CHECK-LABEL: test_vsoxseg4_mask_nxv4f32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
diff --git a/llvm/test/CodeGen/RISCV/rvv/vsoxseg-rv64.ll b/llvm/test/CodeGen/RISCV/rvv/vsoxseg-rv64.ll
index c0a753abbc134..c07b5427ce2bb 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vsoxseg-rv64.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vsoxseg-rv64.ll
@@ -8,6 +8,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16i16.nxv16i16(,
define void @test_vsoxseg2_nxv16i16_nxv16i16( %val, i16* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv16i16_nxv16i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -21,6 +22,7 @@ entry:
define void @test_vsoxseg2_mask_nxv16i16_nxv16i16( %val, i16* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv16i16_nxv16i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -37,6 +39,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16i16.nxv16i8(,
define void @test_vsoxseg2_nxv16i16_nxv16i8( %val, i16* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv16i16_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -50,6 +53,7 @@ entry:
define void @test_vsoxseg2_mask_nxv16i16_nxv16i8( %val, i16* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv16i16_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -66,6 +70,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16i16.nxv16i32(,
define void @test_vsoxseg2_nxv16i16_nxv16i32( %val, i16* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv16i16_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v16
@@ -78,6 +83,7 @@ entry:
define void @test_vsoxseg2_mask_nxv16i16_nxv16i32( %val, i16* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv16i16_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v16, v0.t
@@ -93,6 +99,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i32.nxv4i32(,
define void @test_vsoxseg2_nxv4i32_nxv4i32( %val, i32* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4i32_nxv4i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -106,6 +113,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4i32_nxv4i32( %val, i32* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4i32_nxv4i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -122,6 +130,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i32.nxv4i8(,
define void @test_vsoxseg2_nxv4i32_nxv4i8( %val, i32* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4i32_nxv4i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -135,6 +144,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4i32_nxv4i8( %val, i32* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4i32_nxv4i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -151,6 +161,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i32.nxv4i64(,
define void @test_vsoxseg2_nxv4i32_nxv4i64( %val, i32* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4i32_nxv4i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v12
@@ -163,6 +174,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4i32_nxv4i64( %val, i32* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4i32_nxv4i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v12, v0.t
@@ -178,6 +190,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i32.nxv4i16(,
define void @test_vsoxseg2_nxv4i32_nxv4i16( %val, i32* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4i32_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -191,6 +204,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4i32_nxv4i16( %val, i32* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4i32_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -208,8 +222,8 @@ define void @test_vsoxseg3_nxv4i32_nxv4i32( %val, i32* %base,
; CHECK-LABEL: test_vsoxseg3_nxv4i32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -222,8 +236,8 @@ define void @test_vsoxseg3_mask_nxv4i32_nxv4i32( %val, i32* %b
; CHECK-LABEL: test_vsoxseg3_mask_nxv4i32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -239,8 +253,8 @@ define void @test_vsoxseg3_nxv4i32_nxv4i8( %val, i32* %base, <
; CHECK-LABEL: test_vsoxseg3_nxv4i32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -253,8 +267,8 @@ define void @test_vsoxseg3_mask_nxv4i32_nxv4i8( %val, i32* %ba
; CHECK-LABEL: test_vsoxseg3_mask_nxv4i32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -269,11 +283,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv4i32.nxv4i64(,
define void @test_vsoxseg3_nxv4i32_nxv4i64( %val, i32* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg3_nxv4i32_nxv4i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v16
+; CHECK-NEXT: vsoxseg3ei64.v v16, (a0), v12
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsoxseg3.nxv4i32.nxv4i64( %val, %val, %val, i32* %base, %index, i64 %vl)
@@ -283,11 +297,11 @@ entry:
define void @test_vsoxseg3_mask_nxv4i32_nxv4i64( %val, i32* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg3_mask_nxv4i32_nxv4i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v16, v0.t
+; CHECK-NEXT: vsoxseg3ei64.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsoxseg3.mask.nxv4i32.nxv4i64( %val, %val, %val, i32* %base, %index, %mask, i64 %vl)
@@ -301,8 +315,8 @@ define void @test_vsoxseg3_nxv4i32_nxv4i16( %val, i32* %base,
; CHECK-LABEL: test_vsoxseg3_nxv4i32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -315,8 +329,8 @@ define void @test_vsoxseg3_mask_nxv4i32_nxv4i16( %val, i32* %b
; CHECK-LABEL: test_vsoxseg3_mask_nxv4i32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -332,9 +346,9 @@ define void @test_vsoxseg4_nxv4i32_nxv4i32( %val, i32* %base,
; CHECK-LABEL: test_vsoxseg4_nxv4i32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -347,9 +361,9 @@ define void @test_vsoxseg4_mask_nxv4i32_nxv4i32( %val, i32* %b
; CHECK-LABEL: test_vsoxseg4_mask_nxv4i32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -365,9 +379,9 @@ define void @test_vsoxseg4_nxv4i32_nxv4i8( %val, i32* %base, <
; CHECK-LABEL: test_vsoxseg4_nxv4i32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -380,9 +394,9 @@ define void @test_vsoxseg4_mask_nxv4i32_nxv4i8( %val, i32* %ba
; CHECK-LABEL: test_vsoxseg4_mask_nxv4i32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -398,9 +412,9 @@ define void @test_vsoxseg4_nxv4i32_nxv4i64( %val, i32* %base,
; CHECK-LABEL: test_vsoxseg4_nxv4i32_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v16, (a0), v12
; CHECK-NEXT: ret
@@ -413,9 +427,9 @@ define void @test_vsoxseg4_mask_nxv4i32_nxv4i64( %val, i32* %b
; CHECK-LABEL: test_vsoxseg4_mask_nxv4i32_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
@@ -431,9 +445,9 @@ define void @test_vsoxseg4_nxv4i32_nxv4i16( %val, i32* %base,
; CHECK-LABEL: test_vsoxseg4_nxv4i32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -446,9 +460,9 @@ define void @test_vsoxseg4_mask_nxv4i32_nxv4i16( %val, i32* %b
; CHECK-LABEL: test_vsoxseg4_mask_nxv4i32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -463,6 +477,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16i8.nxv16i16(,
define void @test_vsoxseg2_nxv16i8_nxv16i16( %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsoxseg2ei16.v v8, (a0), v12
@@ -475,6 +490,7 @@ entry:
define void @test_vsoxseg2_mask_nxv16i8_nxv16i16( %val, i8* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsoxseg2ei16.v v8, (a0), v12, v0.t
@@ -490,6 +506,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16i8.nxv16i8(,
define void @test_vsoxseg2_nxv16i8_nxv16i8( %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
@@ -503,6 +520,7 @@ entry:
define void @test_vsoxseg2_mask_nxv16i8_nxv16i8( %val, i8* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
@@ -519,6 +537,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16i8.nxv16i32(,
define void @test_vsoxseg2_nxv16i8_nxv16i32( %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v16
@@ -531,6 +550,7 @@ entry:
define void @test_vsoxseg2_mask_nxv16i8_nxv16i32( %val, i8* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v16, v0.t
@@ -546,11 +566,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv16i8.nxv16i16(,
define void @test_vsoxseg3_nxv16i8_nxv16i16( %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg3_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
-; CHECK-NEXT: vsoxseg3ei16.v v8, (a0), v16
+; CHECK-NEXT: vsoxseg3ei16.v v16, (a0), v12
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsoxseg3.nxv16i8.nxv16i16( %val, %val, %val, i8* %base, %index, i64 %vl)
@@ -560,11 +580,11 @@ entry:
define void @test_vsoxseg3_mask_nxv16i8_nxv16i16( %val, i8* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg3_mask_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
-; CHECK-NEXT: vsoxseg3ei16.v v8, (a0), v16, v0.t
+; CHECK-NEXT: vsoxseg3ei16.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsoxseg3.mask.nxv16i8.nxv16i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl)
@@ -578,8 +598,8 @@ define void @test_vsoxseg3_nxv16i8_nxv16i8( %val, i8* %base, <
; CHECK-LABEL: test_vsoxseg3_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -592,8 +612,8 @@ define void @test_vsoxseg3_mask_nxv16i8_nxv16i8( %val, i8* %ba
; CHECK-LABEL: test_vsoxseg3_mask_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -608,6 +628,7 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv16i8.nxv16i32(,
define void @test_vsoxseg3_nxv16i8_nxv16i32( %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg3_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vmv2r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
@@ -621,6 +642,7 @@ entry:
define void @test_vsoxseg3_mask_nxv16i8_nxv16i32( %val, i8* %base, %index, %mask, i64
%vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv16i8_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu @@ -638,9 +660,9 @@ define void @test_vsoxseg4_nxv16i8_nxv16i16( %val, i8* %base, ; CHECK-LABEL: test_vsoxseg4_nxv16i8_nxv16i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 -; CHECK-NEXT: vmv2r.v v20, v8 -; CHECK-NEXT: vmv2r.v v22, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -653,9 +675,9 @@ define void @test_vsoxseg4_mask_nxv16i8_nxv16i16( %val, i8* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv16i8_nxv16i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 -; CHECK-NEXT: vmv2r.v v20, v8 -; CHECK-NEXT: vmv2r.v v22, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -671,9 +693,9 @@ define void @test_vsoxseg4_nxv16i8_nxv16i8( %val, i8* %base, < ; CHECK-LABEL: test_vsoxseg4_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -686,9 +708,9 @@ define void @test_vsoxseg4_mask_nxv16i8_nxv16i8( %val, i8* %ba ; CHECK-LABEL: test_vsoxseg4_mask_nxv16i8_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -703,6 +725,7 @@ declare void @llvm.riscv.vsoxseg4.mask.nxv16i8.nxv16i32(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg4_nxv16i8_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -717,6 +740,7 @@ entry: define void @test_vsoxseg4_mask_nxv16i8_nxv16i32( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg4_mask_nxv16i8_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -734,6 +758,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i64.nxv1i64(, %val, i64* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -747,6 +772,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i64_nxv1i64( %val, i64* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def 
$v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -763,6 +789,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i64.nxv1i32(, %val, i64* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -776,6 +803,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i64_nxv1i32( %val, i64* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -792,6 +820,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i64.nxv1i16(, %val, i64* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -805,6 +834,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i64_nxv1i16( %val, i64* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -821,6 +851,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i64.nxv1i8(, %val, i64* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -834,6 +865,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i64_nxv1i8( %val, i64* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -851,8 +883,8 @@ define void @test_vsoxseg3_nxv1i64_nxv1i64( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg3_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -865,8 +897,8 @@ define void @test_vsoxseg3_mask_nxv1i64_nxv1i64( %val, i64* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -882,8 +914,8 @@ define void @test_vsoxseg3_nxv1i64_nxv1i32( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg3_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, 
(a0), v9 ; CHECK-NEXT: ret @@ -896,8 +928,8 @@ define void @test_vsoxseg3_mask_nxv1i64_nxv1i32( %val, i64* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -913,8 +945,8 @@ define void @test_vsoxseg3_nxv1i64_nxv1i16( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg3_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -927,8 +959,8 @@ define void @test_vsoxseg3_mask_nxv1i64_nxv1i16( %val, i64* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -944,8 +976,8 @@ define void @test_vsoxseg3_nxv1i64_nxv1i8( %val, i64* %base, < ; CHECK-LABEL: test_vsoxseg3_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -958,8 +990,8 @@ define void @test_vsoxseg3_mask_nxv1i64_nxv1i8( %val, i64* %ba ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -975,9 +1007,9 @@ define void @test_vsoxseg4_nxv1i64_nxv1i64( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg4_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -990,9 +1022,9 @@ define void @test_vsoxseg4_mask_nxv1i64_nxv1i64( %val, i64* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1008,9 +1040,9 @@ define void @test_vsoxseg4_nxv1i64_nxv1i32( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg4_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, 
m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1023,9 +1055,9 @@ define void @test_vsoxseg4_mask_nxv1i64_nxv1i32( %val, i64* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1041,9 +1073,9 @@ define void @test_vsoxseg4_nxv1i64_nxv1i16( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg4_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1056,9 +1088,9 @@ define void @test_vsoxseg4_mask_nxv1i64_nxv1i16( %val, i64* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1074,9 +1106,9 @@ define void @test_vsoxseg4_nxv1i64_nxv1i8( %val, i64* %base, < ; CHECK-LABEL: test_vsoxseg4_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1089,9 +1121,9 @@ define void @test_vsoxseg4_mask_nxv1i64_nxv1i8( %val, i64* %ba ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1107,10 +1139,10 @@ define void @test_vsoxseg5_nxv1i64_nxv1i64( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg5_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1123,10 +1155,10 @@ define void @test_vsoxseg5_mask_nxv1i64_nxv1i64( %val, i64* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, 
a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1142,10 +1174,10 @@ define void @test_vsoxseg5_nxv1i64_nxv1i32( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg5_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1158,10 +1190,10 @@ define void @test_vsoxseg5_mask_nxv1i64_nxv1i32( %val, i64* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1177,10 +1209,10 @@ define void @test_vsoxseg5_nxv1i64_nxv1i16( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg5_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1193,10 +1225,10 @@ define void @test_vsoxseg5_mask_nxv1i64_nxv1i16( %val, i64* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1212,10 +1244,10 @@ define void @test_vsoxseg5_nxv1i64_nxv1i8( %val, i64* %base, < ; CHECK-LABEL: test_vsoxseg5_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1228,10 +1260,10 @@ define void @test_vsoxseg5_mask_nxv1i64_nxv1i8( %val, i64* %ba ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1247,11 +1279,11 @@ define void @test_vsoxseg6_nxv1i64_nxv1i64( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg6_nxv1i64_nxv1i64: ; 
CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1264,11 +1296,11 @@ define void @test_vsoxseg6_mask_nxv1i64_nxv1i64( %val, i64* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1284,11 +1316,11 @@ define void @test_vsoxseg6_nxv1i64_nxv1i32( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg6_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1301,11 +1333,11 @@ define void @test_vsoxseg6_mask_nxv1i64_nxv1i32( %val, i64* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1321,11 +1353,11 @@ define void @test_vsoxseg6_nxv1i64_nxv1i16( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg6_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1338,11 +1370,11 @@ define void @test_vsoxseg6_mask_nxv1i64_nxv1i16( %val, i64* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), 
v9, v0.t ; CHECK-NEXT: ret @@ -1358,11 +1390,11 @@ define void @test_vsoxseg6_nxv1i64_nxv1i8( %val, i64* %base, < ; CHECK-LABEL: test_vsoxseg6_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1375,11 +1407,11 @@ define void @test_vsoxseg6_mask_nxv1i64_nxv1i8( %val, i64* %ba ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1395,12 +1427,12 @@ define void @test_vsoxseg7_nxv1i64_nxv1i64( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg7_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1413,12 +1445,12 @@ define void @test_vsoxseg7_mask_nxv1i64_nxv1i64( %val, i64* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1434,12 +1466,12 @@ define void @test_vsoxseg7_nxv1i64_nxv1i32( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg7_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1452,12 +1484,12 @@ define void @test_vsoxseg7_mask_nxv1i64_nxv1i32( %val, i64* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v 
v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1473,12 +1505,12 @@ define void @test_vsoxseg7_nxv1i64_nxv1i16( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg7_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1491,12 +1523,12 @@ define void @test_vsoxseg7_mask_nxv1i64_nxv1i16( %val, i64* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1512,12 +1544,12 @@ define void @test_vsoxseg7_nxv1i64_nxv1i8( %val, i64* %base, < ; CHECK-LABEL: test_vsoxseg7_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1530,12 +1562,12 @@ define void @test_vsoxseg7_mask_nxv1i64_nxv1i8( %val, i64* %ba ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1551,13 +1583,13 @@ define void @test_vsoxseg8_nxv1i64_nxv1i64( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg8_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: 
vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1570,13 +1602,13 @@ define void @test_vsoxseg8_mask_nxv1i64_nxv1i64( %val, i64* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1592,13 +1624,13 @@ define void @test_vsoxseg8_nxv1i64_nxv1i32( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg8_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1611,13 +1643,13 @@ define void @test_vsoxseg8_mask_nxv1i64_nxv1i32( %val, i64* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1633,13 +1665,13 @@ define void @test_vsoxseg8_nxv1i64_nxv1i16( %val, i64* %base, ; CHECK-LABEL: test_vsoxseg8_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1652,13 +1684,13 @@ define void @test_vsoxseg8_mask_nxv1i64_nxv1i16( %val, i64* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 
-; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1674,13 +1706,13 @@ define void @test_vsoxseg8_nxv1i64_nxv1i8( %val, i64* %base, < ; CHECK-LABEL: test_vsoxseg8_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1693,13 +1725,13 @@ define void @test_vsoxseg8_mask_nxv1i64_nxv1i8( %val, i64* %ba ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1714,6 +1746,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i32.nxv1i64(, %val, i32* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -1727,6 +1760,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i32_nxv1i64( %val, i32* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -1743,6 +1777,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i32.nxv1i32(, %val, i32* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -1756,6 +1791,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i32_nxv1i32( %val, i32* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -1772,6 +1808,7 @@ declare void 
@llvm.riscv.vsoxseg2.mask.nxv1i32.nxv1i16(, %val, i32* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -1785,6 +1822,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i32_nxv1i16( %val, i32* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -1801,6 +1839,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i32.nxv1i8(, %val, i32* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -1814,6 +1853,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i32_nxv1i8( %val, i32* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -1831,8 +1871,8 @@ define void @test_vsoxseg3_nxv1i32_nxv1i64( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg3_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1845,8 +1885,8 @@ define void @test_vsoxseg3_mask_nxv1i32_nxv1i64( %val, i32* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1862,8 +1902,8 @@ define void @test_vsoxseg3_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg3_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1876,8 +1916,8 @@ define void @test_vsoxseg3_mask_nxv1i32_nxv1i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1893,8 +1933,8 @@ define void @test_vsoxseg3_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg3_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, 
(a0), v9 ; CHECK-NEXT: ret @@ -1907,8 +1947,8 @@ define void @test_vsoxseg3_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1924,8 +1964,8 @@ define void @test_vsoxseg3_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg3_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1938,8 +1978,8 @@ define void @test_vsoxseg3_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1955,9 +1995,9 @@ define void @test_vsoxseg4_nxv1i32_nxv1i64( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg4_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -1970,9 +2010,9 @@ define void @test_vsoxseg4_mask_nxv1i32_nxv1i64( %val, i32* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -1988,9 +2028,9 @@ define void @test_vsoxseg4_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg4_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2003,9 +2043,9 @@ define void @test_vsoxseg4_mask_nxv1i32_nxv1i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2021,9 +2061,9 @@ define void @test_vsoxseg4_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg4_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: 
vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2036,9 +2076,9 @@ define void @test_vsoxseg4_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2054,9 +2094,9 @@ define void @test_vsoxseg4_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg4_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2069,9 +2109,9 @@ define void @test_vsoxseg4_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2087,10 +2127,10 @@ define void @test_vsoxseg5_nxv1i32_nxv1i64( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg5_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2103,10 +2143,10 @@ define void @test_vsoxseg5_mask_nxv1i32_nxv1i64( %val, i32* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2122,10 +2162,10 @@ define void @test_vsoxseg5_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg5_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2138,10 +2178,10 @@ define void @test_vsoxseg5_mask_nxv1i32_nxv1i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v 
v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2157,10 +2197,10 @@ define void @test_vsoxseg5_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg5_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2173,10 +2213,10 @@ define void @test_vsoxseg5_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2192,10 +2232,10 @@ define void @test_vsoxseg5_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg5_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2208,10 +2248,10 @@ define void @test_vsoxseg5_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2227,11 +2267,11 @@ define void @test_vsoxseg6_nxv1i32_nxv1i64( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg6_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2244,11 +2284,11 @@ define void @test_vsoxseg6_mask_nxv1i32_nxv1i64( %val, i32* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: 
vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2264,11 +2304,11 @@ define void @test_vsoxseg6_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg6_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2281,11 +2321,11 @@ define void @test_vsoxseg6_mask_nxv1i32_nxv1i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2301,11 +2341,11 @@ define void @test_vsoxseg6_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg6_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2318,11 +2358,11 @@ define void @test_vsoxseg6_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2338,11 +2378,11 @@ define void @test_vsoxseg6_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg6_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2355,11 +2395,11 @@ define void @test_vsoxseg6_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; 
CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2375,12 +2415,12 @@ define void @test_vsoxseg7_nxv1i32_nxv1i64( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg7_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2393,12 +2433,12 @@ define void @test_vsoxseg7_mask_nxv1i32_nxv1i64( %val, i32* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2414,12 +2454,12 @@ define void @test_vsoxseg7_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg7_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2432,12 +2472,12 @@ define void @test_vsoxseg7_mask_nxv1i32_nxv1i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2453,12 +2493,12 @@ define void @test_vsoxseg7_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg7_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: 
vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2471,12 +2511,12 @@ define void @test_vsoxseg7_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2492,12 +2532,12 @@ define void @test_vsoxseg7_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg7_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2510,12 +2550,12 @@ define void @test_vsoxseg7_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2531,13 +2571,13 @@ define void @test_vsoxseg8_nxv1i32_nxv1i64( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg8_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2550,13 +2590,13 @@ define void @test_vsoxseg8_mask_nxv1i32_nxv1i64( %val, i32* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 
+; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2572,13 +2612,13 @@ define void @test_vsoxseg8_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg8_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2591,13 +2631,13 @@ define void @test_vsoxseg8_mask_nxv1i32_nxv1i32( %val, i32* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2613,13 +2653,13 @@ define void @test_vsoxseg8_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsoxseg8_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2632,13 +2672,13 @@ define void @test_vsoxseg8_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2654,13 +2694,13 @@ define void @test_vsoxseg8_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsoxseg8_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; 
CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2673,13 +2713,13 @@ define void @test_vsoxseg8_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2694,6 +2734,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i16.nxv8i16(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -2707,6 +2748,7 @@ entry: define void @test_vsoxseg2_mask_nxv8i16_nxv8i16( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -2723,6 +2765,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i16.nxv8i8(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -2736,6 +2779,7 @@ entry: define void @test_vsoxseg2_mask_nxv8i16_nxv8i8( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -2752,6 +2796,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i16.nxv8i64(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v16 @@ -2764,6 +2809,7 @@ entry: define void @test_vsoxseg2_mask_nxv8i16_nxv8i64( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v16, v0.t @@ -2779,6 +2825,7 @@ 
declare void @llvm.riscv.vsoxseg2.mask.nxv8i16.nxv8i32(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v12 @@ -2791,6 +2838,7 @@ entry: define void @test_vsoxseg2_mask_nxv8i16_nxv8i32( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v12, v0.t @@ -2807,8 +2855,8 @@ define void @test_vsoxseg3_nxv8i16_nxv8i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg3_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -2821,8 +2869,8 @@ define void @test_vsoxseg3_mask_nxv8i16_nxv8i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -2838,8 +2886,8 @@ define void @test_vsoxseg3_nxv8i16_nxv8i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg3_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -2852,8 +2900,8 @@ define void @test_vsoxseg3_mask_nxv8i16_nxv8i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -2868,6 +2916,7 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv8i16.nxv8i64(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -2881,6 +2930,7 @@ entry: define void @test_vsoxseg3_mask_nxv8i16_nxv8i64( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -2897,11 +2947,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv8i16.nxv8i32(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; 
CHECK-NEXT: vmv2r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v16 +; CHECK-NEXT: vsoxseg3ei32.v v16, (a0), v12 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.nxv8i16.nxv8i32( %val, %val, %val, i16* %base, %index, i64 %vl) @@ -2911,11 +2961,11 @@ entry: define void @test_vsoxseg3_mask_nxv8i16_nxv8i32( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v16, v0.t +; CHECK-NEXT: vsoxseg3ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.mask.nxv8i16.nxv8i32( %val, %val, %val, i16* %base, %index, %mask, i64 %vl) @@ -2929,9 +2979,9 @@ define void @test_vsoxseg4_nxv8i16_nxv8i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg4_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -2944,9 +2994,9 @@ define void @test_vsoxseg4_mask_nxv8i16_nxv8i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -2962,9 +3012,9 @@ define void @test_vsoxseg4_nxv8i16_nxv8i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg4_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -2977,9 +3027,9 @@ define void @test_vsoxseg4_mask_nxv8i16_nxv8i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -2994,6 +3044,7 @@ declare void @llvm.riscv.vsoxseg4.mask.nxv8i16.nxv8i64(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg4_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3008,6 +3059,7 @@ entry: define void @test_vsoxseg4_mask_nxv8i16_nxv8i64( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def 
$v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3026,9 +3078,9 @@ define void @test_vsoxseg4_nxv8i16_nxv8i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg4_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 -; CHECK-NEXT: vmv2r.v v20, v8 -; CHECK-NEXT: vmv2r.v v22, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -3041,9 +3093,9 @@ define void @test_vsoxseg4_mask_nxv8i16_nxv8i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 -; CHECK-NEXT: vmv2r.v v20, v8 -; CHECK-NEXT: vmv2r.v v22, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -3058,6 +3110,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i8.nxv4i32(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v10 @@ -3070,6 +3123,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i8_nxv4i32( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v10, v0.t @@ -3085,6 +3139,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i8.nxv4i8(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -3098,6 +3153,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i8_nxv4i8( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -3114,6 +3170,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i8.nxv4i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v12 @@ -3126,6 +3183,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i8_nxv4i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v12, v0.t @@ -3141,6 +3199,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i8.nxv4i16(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; 
CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -3154,6 +3213,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i8_nxv4i16( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -3170,11 +3230,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv4i8.nxv4i32(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v12 +; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.nxv4i8.nxv4i32( %val, %val, %val, i8* %base, %index, i64 %vl) @@ -3184,11 +3244,11 @@ entry: define void @test_vsoxseg3_mask_nxv4i8_nxv4i32( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v12, v0.t +; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.mask.nxv4i8.nxv4i32( %val, %val, %val, i8* %base, %index, %mask, i64 %vl) @@ -3202,8 +3262,8 @@ define void @test_vsoxseg3_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3232,6 +3292,7 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv4i8.nxv4i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -3245,6 +3306,7 @@ entry: define void @test_vsoxseg3_mask_nxv4i8_nxv4i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -3262,8 +3324,8 @@ define void @test_vsoxseg3_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3293,9 +3355,9 @@ define void @test_vsoxseg4_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # 
%entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -3326,9 +3388,9 @@ define void @test_vsoxseg4_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3358,6 +3420,7 @@ declare void @llvm.riscv.vsoxseg4.mask.nxv4i8.nxv4i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg4_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3372,6 +3435,7 @@ entry: define void @test_vsoxseg4_mask_nxv4i8_nxv4i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3390,9 +3454,9 @@ define void @test_vsoxseg4_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3423,10 +3487,10 @@ define void @test_vsoxseg5_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -3458,10 +3522,10 @@ define void @test_vsoxseg5_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3492,13 +3556,13 @@ declare void @llvm.riscv.vsoxseg5.mask.nxv4i8.nxv4i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg5_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v 
v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vsoxseg5ei64.v v8, (a0), v16 +; CHECK-NEXT: vsoxseg5ei64.v v16, (a0), v12 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg5.nxv4i8.nxv4i64( %val, %val, %val, %val, %val, i8* %base, %index, i64 %vl) @@ -3508,13 +3572,13 @@ entry: define void @test_vsoxseg5_mask_nxv4i8_nxv4i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vsoxseg5ei64.v v8, (a0), v16, v0.t +; CHECK-NEXT: vsoxseg5ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg5.mask.nxv4i8.nxv4i64( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl) @@ -3528,10 +3592,10 @@ define void @test_vsoxseg5_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3563,11 +3627,11 @@ define void @test_vsoxseg6_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -3600,11 +3664,11 @@ define void @test_vsoxseg6_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3637,11 +3701,11 @@ define void @test_vsoxseg6_nxv4i8_nxv4i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 
+; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -3674,11 +3738,11 @@ define void @test_vsoxseg6_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3711,12 +3775,12 @@ define void @test_vsoxseg7_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -3750,12 +3814,12 @@ define void @test_vsoxseg7_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3789,12 +3853,12 @@ define void @test_vsoxseg7_nxv4i8_nxv4i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -3828,12 +3892,12 @@ define void @test_vsoxseg7_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, 
(a0), v9, v0.t ; CHECK-NEXT: ret @@ -3867,13 +3931,13 @@ define void @test_vsoxseg8_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg8_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -3908,13 +3972,13 @@ define void @test_vsoxseg8_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg8_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3949,13 +4013,13 @@ define void @test_vsoxseg8_nxv4i8_nxv4i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg8_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -3990,13 +4054,13 @@ define void @test_vsoxseg8_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg8_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4030,6 +4094,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i16.nxv1i64(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -4043,6 +4108,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i16_nxv1i64( %val, i16* %base, %index, 
%mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -4059,6 +4125,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i16.nxv1i32(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -4072,6 +4139,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i16_nxv1i32( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -4088,6 +4156,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i16.nxv1i16(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -4101,6 +4170,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i16_nxv1i16( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -4117,6 +4187,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i16.nxv1i8(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -4130,6 +4201,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i16_nxv1i8( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -4147,8 +4219,8 @@ define void @test_vsoxseg3_nxv1i16_nxv1i64( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg3_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4161,8 +4233,8 @@ define void @test_vsoxseg3_mask_nxv1i16_nxv1i64( %val, i16* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4178,8 +4250,8 @@ define void @test_vsoxseg3_nxv1i16_nxv1i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg3_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v 
v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4192,8 +4264,8 @@ define void @test_vsoxseg3_mask_nxv1i16_nxv1i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4209,8 +4281,8 @@ define void @test_vsoxseg3_nxv1i16_nxv1i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg3_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4223,8 +4295,8 @@ define void @test_vsoxseg3_mask_nxv1i16_nxv1i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4240,8 +4312,8 @@ define void @test_vsoxseg3_nxv1i16_nxv1i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg3_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4254,8 +4326,8 @@ define void @test_vsoxseg3_mask_nxv1i16_nxv1i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4271,9 +4343,9 @@ define void @test_vsoxseg4_nxv1i16_nxv1i64( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg4_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4286,9 +4358,9 @@ define void @test_vsoxseg4_mask_nxv1i16_nxv1i64( %val, i16* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4304,9 +4376,9 @@ define void @test_vsoxseg4_nxv1i16_nxv1i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg4_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v 
v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4319,9 +4391,9 @@ define void @test_vsoxseg4_mask_nxv1i16_nxv1i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4337,9 +4409,9 @@ define void @test_vsoxseg4_nxv1i16_nxv1i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg4_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4352,9 +4424,9 @@ define void @test_vsoxseg4_mask_nxv1i16_nxv1i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4370,9 +4442,9 @@ define void @test_vsoxseg4_nxv1i16_nxv1i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg4_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4385,9 +4457,9 @@ define void @test_vsoxseg4_mask_nxv1i16_nxv1i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4403,10 +4475,10 @@ define void @test_vsoxseg5_nxv1i16_nxv1i64( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg5_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4419,10 +4491,10 @@ define void @test_vsoxseg5_mask_nxv1i16_nxv1i64( %val, i16* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; 
CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4438,10 +4510,10 @@ define void @test_vsoxseg5_nxv1i16_nxv1i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg5_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4454,10 +4526,10 @@ define void @test_vsoxseg5_mask_nxv1i16_nxv1i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4473,10 +4545,10 @@ define void @test_vsoxseg5_nxv1i16_nxv1i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg5_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4489,10 +4561,10 @@ define void @test_vsoxseg5_mask_nxv1i16_nxv1i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4508,10 +4580,10 @@ define void @test_vsoxseg5_nxv1i16_nxv1i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg5_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4524,10 +4596,10 @@ define void @test_vsoxseg5_mask_nxv1i16_nxv1i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, 
a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4543,11 +4615,11 @@ define void @test_vsoxseg6_nxv1i16_nxv1i64( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg6_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4560,11 +4632,11 @@ define void @test_vsoxseg6_mask_nxv1i16_nxv1i64( %val, i16* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4580,11 +4652,11 @@ define void @test_vsoxseg6_nxv1i16_nxv1i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg6_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4597,11 +4669,11 @@ define void @test_vsoxseg6_mask_nxv1i16_nxv1i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4617,11 +4689,11 @@ define void @test_vsoxseg6_nxv1i16_nxv1i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg6_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4634,11 +4706,11 @@ define void @test_vsoxseg6_mask_nxv1i16_nxv1i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v 
v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4654,11 +4726,11 @@ define void @test_vsoxseg6_nxv1i16_nxv1i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg6_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4671,11 +4743,11 @@ define void @test_vsoxseg6_mask_nxv1i16_nxv1i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4691,12 +4763,12 @@ define void @test_vsoxseg7_nxv1i16_nxv1i64( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg7_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4709,12 +4781,12 @@ define void @test_vsoxseg7_mask_nxv1i16_nxv1i64( %val, i16* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4730,12 +4802,12 @@ define void @test_vsoxseg7_nxv1i16_nxv1i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg7_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -4748,12 +4820,12 
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4769,12 +4841,12 @@ define void @test_vsoxseg7_nxv1i16_nxv1i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg7_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4787,12 +4859,12 @@ define void @test_vsoxseg7_mask_nxv1i16_nxv1i16( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4808,12 +4880,12 @@ define void @test_vsoxseg7_nxv1i16_nxv1i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsoxseg7_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4826,12 +4898,12 @@ define void @test_vsoxseg7_mask_nxv1i16_nxv1i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4847,13 +4919,13 @@ define void @test_vsoxseg8_nxv1i16_nxv1i64( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg8_nxv1i16_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg8ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4866,13 +4938,13 @@ define void @test_vsoxseg8_mask_nxv1i16_nxv1i64( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i16_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg8ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4888,13 +4960,13 @@ define void @test_vsoxseg8_nxv1i16_nxv1i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg8_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4907,13 +4979,13 @@ define void @test_vsoxseg8_mask_nxv1i16_nxv1i32( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4929,13 +5001,13 @@ define void @test_vsoxseg8_nxv1i16_nxv1i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsoxseg8_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4948,13 +5020,13 @@ define void @test_vsoxseg8_mask_nxv1i16_nxv1i16( %val, i16* %b
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4970,13 +5042,13 @@ define void @test_vsoxseg8_nxv1i16_nxv1i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsoxseg8_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4989,13 +5061,13 @@ define void @test_vsoxseg8_mask_nxv1i16_nxv1i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5010,6 +5082,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i32.nxv2i32(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -5023,6 +5096,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv2i32_nxv2i32( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -5039,6 +5113,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i32.nxv2i8(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -5052,6 +5127,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv2i32_nxv2i8( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -5068,6 +5144,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i32.nxv2i16(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -5081,6 +5158,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv2i32_nxv2i16( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -5097,6 +5175,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i32.nxv2i64(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v10
@@ -5109,6 +5188,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv2i32_nxv2i64( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v10, v0.t
@@ -5125,8 +5205,8 @@ define void @test_vsoxseg3_nxv2i32_nxv2i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg3_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5139,8 +5219,8 @@ define void @test_vsoxseg3_mask_nxv2i32_nxv2i32( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5156,8 +5236,8 @@ define void @test_vsoxseg3_nxv2i32_nxv2i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsoxseg3_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5170,8 +5250,8 @@ define void @test_vsoxseg3_mask_nxv2i32_nxv2i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5187,8 +5267,8 @@ define void @test_vsoxseg3_nxv2i32_nxv2i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg3_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5201,8 +5281,8 @@ define void @test_vsoxseg3_mask_nxv2i32_nxv2i16( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5217,11 +5297,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv2i32.nxv2i64(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v12
+; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10
 ; CHECK-NEXT: ret
 entry:
   tail call void @llvm.riscv.vsoxseg3.nxv2i32.nxv2i64( %val, %val, %val, i32* %base, %index, i64 %vl)
@@ -5231,11 +5311,11 @@ entry:
 define void @test_vsoxseg3_mask_nxv2i32_nxv2i64( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
 entry:
   tail call void @llvm.riscv.vsoxseg3.mask.nxv2i32.nxv2i64( %val, %val, %val, i32* %base, %index, %mask, i64 %vl)
@@ -5249,9 +5329,9 @@ define void @test_vsoxseg4_nxv2i32_nxv2i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg4_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5264,9 +5344,9 @@ define void @test_vsoxseg4_mask_nxv2i32_nxv2i32( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5282,9 +5362,9 @@ define void @test_vsoxseg4_nxv2i32_nxv2i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsoxseg4_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5297,9 +5377,9 @@ define void @test_vsoxseg4_mask_nxv2i32_nxv2i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5315,9 +5395,9 @@ define void @test_vsoxseg4_nxv2i32_nxv2i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg4_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5330,9 +5410,9 @@ define void @test_vsoxseg4_mask_nxv2i32_nxv2i16( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5348,9 +5428,9 @@ define void @test_vsoxseg4_nxv2i32_nxv2i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg4_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg4ei64.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -5363,9 +5443,9 @@ define void @test_vsoxseg4_mask_nxv2i32_nxv2i64( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg4ei64.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -5381,10 +5461,10 @@ define void @test_vsoxseg5_nxv2i32_nxv2i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg5_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5397,10 +5477,10 @@ define void @test_vsoxseg5_mask_nxv2i32_nxv2i32( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5416,10 +5496,10 @@ define void @test_vsoxseg5_nxv2i32_nxv2i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsoxseg5_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5432,10 +5512,10 @@ define void @test_vsoxseg5_mask_nxv2i32_nxv2i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5451,10 +5531,10 @@ define void @test_vsoxseg5_nxv2i32_nxv2i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg5_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5467,10 +5547,10 @@ define void @test_vsoxseg5_mask_nxv2i32_nxv2i16( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5486,10 +5566,10 @@ define void @test_vsoxseg5_nxv2i32_nxv2i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg5_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg5ei64.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -5502,10 +5582,10 @@ define void @test_vsoxseg5_mask_nxv2i32_nxv2i64( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg5ei64.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -5521,11 +5601,11 @@ define void @test_vsoxseg6_nxv2i32_nxv2i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg6_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5538,11 +5618,11 @@ define void @test_vsoxseg6_mask_nxv2i32_nxv2i32( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5558,11 +5638,11 @@ define void @test_vsoxseg6_nxv2i32_nxv2i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsoxseg6_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5575,11 +5655,11 @@ define void @test_vsoxseg6_mask_nxv2i32_nxv2i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5595,11 +5675,11 @@ define void @test_vsoxseg6_nxv2i32_nxv2i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg6_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5612,11 +5692,11 @@ define void @test_vsoxseg6_mask_nxv2i32_nxv2i16( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5632,11 +5712,11 @@ define void @test_vsoxseg6_nxv2i32_nxv2i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg6_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei64.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -5649,11 +5729,11 @@ define void @test_vsoxseg6_mask_nxv2i32_nxv2i64( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei64.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -5669,12 +5749,12 @@ define void @test_vsoxseg7_nxv2i32_nxv2i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg7_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5687,12 +5767,12 @@ define void @test_vsoxseg7_mask_nxv2i32_nxv2i32( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5708,12 +5788,12 @@ define void @test_vsoxseg7_nxv2i32_nxv2i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsoxseg7_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5726,12 +5806,12 @@ define void @test_vsoxseg7_mask_nxv2i32_nxv2i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5747,12 +5827,12 @@ define void @test_vsoxseg7_nxv2i32_nxv2i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg7_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5765,12 +5845,12 @@ define void @test_vsoxseg7_mask_nxv2i32_nxv2i16( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5786,12 +5866,12 @@ define void @test_vsoxseg7_nxv2i32_nxv2i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg7_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei64.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -5804,12 +5884,12 @@ define void @test_vsoxseg7_mask_nxv2i32_nxv2i64( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei64.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -5825,13 +5905,13 @@ define void @test_vsoxseg8_nxv2i32_nxv2i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg8_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5844,13 +5924,13 @@ define void @test_vsoxseg8_mask_nxv2i32_nxv2i32( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5866,13 +5946,13 @@ define void @test_vsoxseg8_nxv2i32_nxv2i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsoxseg8_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5885,13 +5965,13 @@ define void @test_vsoxseg8_mask_nxv2i32_nxv2i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5907,13 +5987,13 @@ define void @test_vsoxseg8_nxv2i32_nxv2i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg8_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5926,13 +6006,13 @@ define void @test_vsoxseg8_mask_nxv2i32_nxv2i16( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5948,13 +6028,13 @@ define void @test_vsoxseg8_nxv2i32_nxv2i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsoxseg8_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg8ei64.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -5967,13 +6047,13 @@ define void @test_vsoxseg8_mask_nxv2i32_nxv2i64( %val, i32* %b
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsoxseg8ei64.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -5988,6 +6068,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i8.nxv8i16(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei16.v v8, (a0), v10
@@ -6000,6 +6081,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8i8_nxv8i16( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei16.v v8, (a0), v10, v0.t
@@ -6015,6 +6097,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i8.nxv8i8(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -6028,6 +6111,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8i8_nxv8i8( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -6044,6 +6128,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i8.nxv8i64(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v16
@@ -6056,6 +6141,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v16, v0.t
@@ -6071,6 +6157,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i8.nxv8i32(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v12
@@ -6083,6 +6170,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v12, v0.t
@@ -6098,11 +6186,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv8i8.nxv8i16(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsoxseg3ei16.v v8, (a0), v12
+; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10
 ; CHECK-NEXT: ret
 entry:
   tail call void @llvm.riscv.vsoxseg3.nxv8i8.nxv8i16( %val, %val, %val, i8* %base, %index, i64 %vl)
@@ -6112,11 +6200,11 @@ entry:
 define void @test_vsoxseg3_mask_nxv8i8_nxv8i16( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsoxseg3ei16.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
 entry:
   tail call void @llvm.riscv.vsoxseg3.mask.nxv8i8.nxv8i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl)
@@ -6130,8 +6218,8 @@ define void @test_vsoxseg3_nxv8i8_nxv8i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -6160,6 +6248,7 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv8i8.nxv8i64(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -6173,6 +6262,7 @@ entry:
 define void @test_vsoxseg3_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -6189,6 +6279,7 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv8i8.nxv8i32(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -6202,6 +6293,7 @@ entry:
 define void @test_vsoxseg3_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg3_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -6219,9 +6311,9 @@ define void @test_vsoxseg4_nxv8i8_nxv8i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -6252,9 +6344,9 @@ define void @test_vsoxseg4_nxv8i8_nxv8i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -6284,6 +6376,7 @@ declare void @llvm.riscv.vsoxseg4.mask.nxv8i8.nxv8i64(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg4_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6298,6 +6391,7 @@ entry:
 define void @test_vsoxseg4_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6315,6 +6409,7 @@ declare void @llvm.riscv.vsoxseg4.mask.nxv8i8.nxv8i32(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg4_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6329,6 +6424,7 @@ entry:
 define void @test_vsoxseg4_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg4_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6347,10 +6443,10 @@ define void @test_vsoxseg5_nxv8i8_nxv8i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg5ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -6382,10 +6478,10 @@ define void @test_vsoxseg5_nxv8i8_nxv8i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -6416,6 +6512,7 @@ declare void @llvm.riscv.vsoxseg5.mask.nxv8i8.nxv8i64(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg5_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6431,6 +6528,7 @@ entry:
 define void @test_vsoxseg5_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6449,13 +6547,13 @@ declare void @llvm.riscv.vsoxseg5.mask.nxv8i8.nxv8i32(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg5_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsoxseg5ei32.v v8, (a0), v16
+; CHECK-NEXT: vsoxseg5ei32.v v16, (a0), v12
 ; CHECK-NEXT: ret
 entry:
   tail call void @llvm.riscv.vsoxseg5.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, i8* %base, %index, i64 %vl)
@@ -6465,13 +6563,13 @@ entry:
 define void @test_vsoxseg5_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg5_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsoxseg5ei32.v v8, (a0), v16, v0.t
+; CHECK-NEXT: vsoxseg5ei32.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
 entry:
   tail call void @llvm.riscv.vsoxseg5.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl)
@@ -6485,11 +6583,11 @@ define void @test_vsoxseg6_nxv8i8_nxv8i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -6522,11 +6620,11 @@ define void @test_vsoxseg6_nxv8i8_nxv8i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -6558,6 +6656,7 @@ declare void @llvm.riscv.vsoxseg6.mask.nxv8i8.nxv8i64(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg6_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6574,6 +6673,7 @@ entry:
 define void @test_vsoxseg6_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6594,11 +6694,11 @@ define void @test_vsoxseg6_nxv8i8_nxv8i32( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg6_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg6ei32.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
@@ -6631,12 +6731,12 @@ define void @test_vsoxseg7_nxv8i8_nxv8i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -6670,12 +6770,12 @@ define void @test_vsoxseg7_nxv8i8_nxv8i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -6708,6 +6808,7 @@ declare void @llvm.riscv.vsoxseg7.mask.nxv8i8.nxv8i64(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg7_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6725,6 +6826,7 @@ entry:
 define void @test_vsoxseg7_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6746,12 +6848,12 @@ define void @test_vsoxseg7_nxv8i8_nxv8i32( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg7_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
-; CHECK-NEXT: vmv1r.v v22, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
+; CHECK-NEXT: vmv1r.v v22, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg7ei32.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
@@ -6785,13 +6887,13 @@ define void @test_vsoxseg8_nxv8i8_nxv8i16( %val, i8* %base,
 %val, i8* %base
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg8ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -6826,13 +6928,13 @@ define void @test_vsoxseg8_nxv8i8_nxv8i8( %val, i8* %base,
 %val, i8* %base,
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -6866,6 +6968,7 @@ declare void @llvm.riscv.vsoxseg8.mask.nxv8i8.nxv8i64(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg8_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6884,6 +6987,7 @@ entry:
 define void @test_vsoxseg8_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsoxseg8_mask_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -6906,13 +7010,13 @@ define void @test_vsoxseg8_nxv8i8_nxv8i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg8_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -6946,6 +7050,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i64.nxv4i32(, %val, i64* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i64_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -6959,6 +7064,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i64_nxv4i32( %val, i64* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i64_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -6975,6 +7081,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i64.nxv4i8(, %val, i64* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i64_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -6988,6 +7095,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i64_nxv4i8( %val, i64* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i64_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -7004,6 +7112,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i64.nxv4i64(, %val, i64* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i64_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -7017,6 +7126,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i64_nxv4i64( %val, i64* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i64_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -7033,6 +7143,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i64.nxv4i16(, %val, i64* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i64_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -7046,6 +7157,7 @@ 
entry: define void @test_vsoxseg2_mask_nxv4i64_nxv4i16( %val, i64* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i64_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -7062,6 +7174,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i16.nxv4i32(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v10 @@ -7074,6 +7187,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i16_nxv4i32( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v10, v0.t @@ -7089,6 +7203,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i16.nxv4i8(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -7102,6 +7217,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i16_nxv4i8( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -7118,6 +7234,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i16.nxv4i64(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v12 @@ -7130,6 +7247,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i16_nxv4i64( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v12, v0.t @@ -7145,6 +7263,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4i16.nxv4i16(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -7158,6 +7277,7 @@ entry: define void @test_vsoxseg2_mask_nxv4i16_nxv4i16( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -7174,11 +7294,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv4i16.nxv4i32(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; 
CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v12 +; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.nxv4i16.nxv4i32( %val, %val, %val, i16* %base, %index, i64 %vl) @@ -7188,11 +7308,11 @@ entry: define void @test_vsoxseg3_mask_nxv4i16_nxv4i32( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v12, v0.t +; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.mask.nxv4i16.nxv4i32( %val, %val, %val, i16* %base, %index, %mask, i64 %vl) @@ -7206,8 +7326,8 @@ define void @test_vsoxseg3_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg3_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7220,8 +7340,8 @@ define void @test_vsoxseg3_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7236,6 +7356,7 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv4i16.nxv4i64(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -7249,6 +7370,7 @@ entry: define void @test_vsoxseg3_mask_nxv4i16_nxv4i64( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -7266,8 +7388,8 @@ define void @test_vsoxseg3_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg3_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7280,8 +7402,8 @@ define void @test_vsoxseg3_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg3_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7297,9 +7419,9 @@ define void 
@test_vsoxseg4_nxv4i16_nxv4i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg4_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7312,9 +7434,9 @@ define void @test_vsoxseg4_mask_nxv4i16_nxv4i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7330,9 +7452,9 @@ define void @test_vsoxseg4_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg4_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7345,9 +7467,9 @@ define void @test_vsoxseg4_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7362,6 +7484,7 @@ declare void @llvm.riscv.vsoxseg4.mask.nxv4i16.nxv4i64(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg4_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -7376,6 +7499,7 @@ entry: define void @test_vsoxseg4_mask_nxv4i16_nxv4i64( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -7394,9 +7518,9 @@ define void @test_vsoxseg4_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg4_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7409,9 +7533,9 @@ define void @test_vsoxseg4_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg4_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: 
vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7427,10 +7551,10 @@ define void @test_vsoxseg5_nxv4i16_nxv4i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg5_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7443,10 +7567,10 @@ define void @test_vsoxseg5_mask_nxv4i16_nxv4i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7462,10 +7586,10 @@ define void @test_vsoxseg5_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg5_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7478,10 +7602,10 @@ define void @test_vsoxseg5_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7496,13 +7620,13 @@ declare void @llvm.riscv.vsoxseg5.mask.nxv4i16.nxv4i64(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg5_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsoxseg5ei64.v v8, (a0), v16 +; CHECK-NEXT: vsoxseg5ei64.v v16, (a0), v12 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg5.nxv4i16.nxv4i64( %val, %val, %val, %val, %val, i16* %base, %index, i64 %vl) @@ -7512,13 +7636,13 @@ entry: define void @test_vsoxseg5_mask_nxv4i16_nxv4i64( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: 
vmv1r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsoxseg5ei64.v v8, (a0), v16, v0.t +; CHECK-NEXT: vsoxseg5ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg5.mask.nxv4i16.nxv4i64( %val, %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl) @@ -7532,10 +7656,10 @@ define void @test_vsoxseg5_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg5_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7548,10 +7672,10 @@ define void @test_vsoxseg5_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg5_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7567,11 +7691,11 @@ define void @test_vsoxseg6_nxv4i16_nxv4i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg6_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7584,11 +7708,11 @@ define void @test_vsoxseg6_mask_nxv4i16_nxv4i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7604,11 +7728,11 @@ define void @test_vsoxseg6_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg6_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7621,11 +7745,11 @@ define void @test_vsoxseg6_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: 
vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7641,11 +7765,11 @@ define void @test_vsoxseg6_nxv4i16_nxv4i64( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg6_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -7658,11 +7782,11 @@ define void @test_vsoxseg6_mask_nxv4i16_nxv4i64( %val, i16* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -7678,11 +7802,11 @@ define void @test_vsoxseg6_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg6_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7695,11 +7819,11 @@ define void @test_vsoxseg6_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg6_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7715,12 +7839,12 @@ define void @test_vsoxseg7_nxv4i16_nxv4i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg7_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7733,12 +7857,12 @@ define void 
@test_vsoxseg7_mask_nxv4i16_nxv4i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7754,12 +7878,12 @@ define void @test_vsoxseg7_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg7_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7772,12 +7896,12 @@ define void @test_vsoxseg7_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7793,12 +7917,12 @@ define void @test_vsoxseg7_nxv4i16_nxv4i64( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg7_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -7811,12 +7935,12 @@ define void @test_vsoxseg7_mask_nxv4i16_nxv4i64( %val, i16* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -7832,12 +7956,12 @@ define void @test_vsoxseg7_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg7_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: 
vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7850,12 +7974,12 @@ define void @test_vsoxseg7_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg7_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7871,13 +7995,13 @@ define void @test_vsoxseg8_nxv4i16_nxv4i32( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg8_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7890,13 +8014,13 @@ define void @test_vsoxseg8_mask_nxv4i16_nxv4i32( %val, i16* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7912,13 +8036,13 @@ define void @test_vsoxseg8_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsoxseg8_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7931,13 +8055,13 @@ define void @test_vsoxseg8_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: 
test_vsoxseg8_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7953,13 +8077,13 @@ define void @test_vsoxseg8_nxv4i16_nxv4i64( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg8_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei64.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -7972,13 +8096,13 @@ define void @test_vsoxseg8_mask_nxv4i16_nxv4i64( %val, i16* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -7994,13 +8118,13 @@ define void @test_vsoxseg8_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsoxseg8_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8013,13 +8137,13 @@ define void @test_vsoxseg8_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsoxseg8_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, 
ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8034,6 +8158,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i8.nxv1i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8047,6 +8172,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i8_nxv1i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8063,6 +8189,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i8.nxv1i32(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8076,6 +8203,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i8_nxv1i32( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8092,6 +8220,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i8.nxv1i16(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8105,6 +8234,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i8_nxv1i16( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8121,6 +8251,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1i8.nxv1i8(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8134,6 +8265,7 @@ entry: define void @test_vsoxseg2_mask_nxv1i8_nxv1i8( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8151,8 +8283,8 @@ define void @test_vsoxseg3_nxv1i8_nxv1i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg3ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8182,8 +8314,8 @@ define void @test_vsoxseg3_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: 
test_vsoxseg3_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8213,8 +8345,8 @@ define void @test_vsoxseg3_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8244,8 +8376,8 @@ define void @test_vsoxseg3_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg3_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8275,9 +8407,9 @@ define void @test_vsoxseg4_nxv1i8_nxv1i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8308,9 +8440,9 @@ define void @test_vsoxseg4_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8341,9 +8473,9 @@ define void @test_vsoxseg4_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8374,9 +8506,9 @@ define void @test_vsoxseg4_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg4_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8407,10 +8539,10 @@ define void @test_vsoxseg5_nxv1i8_nxv1i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, 
v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8442,10 +8574,10 @@ define void @test_vsoxseg5_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8477,10 +8609,10 @@ define void @test_vsoxseg5_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8512,10 +8644,10 @@ define void @test_vsoxseg5_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg5_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8547,11 +8679,11 @@ define void @test_vsoxseg6_nxv1i8_nxv1i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8584,11 +8716,11 @@ define void @test_vsoxseg6_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8621,11 +8753,11 @@ define void @test_vsoxseg6_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: 
vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8658,11 +8790,11 @@ define void @test_vsoxseg6_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg6_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8695,12 +8827,12 @@ define void @test_vsoxseg7_nxv1i8_nxv1i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8734,12 +8866,12 @@ define void @test_vsoxseg7_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8773,12 +8905,12 @@ define void @test_vsoxseg7_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8812,12 +8944,12 @@ define void @test_vsoxseg7_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg7_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; 
CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8851,13 +8983,13 @@ define void @test_vsoxseg8_nxv1i8_nxv1i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8892,13 +9024,13 @@ define void @test_vsoxseg8_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8933,13 +9065,13 @@ define void @test_vsoxseg8_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8974,13 +9106,13 @@ define void @test_vsoxseg8_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg8_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9014,6 +9146,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i8.nxv2i32(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; 
CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -9027,6 +9160,7 @@ entry: define void @test_vsoxseg2_mask_nxv2i8_nxv2i32( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -9043,6 +9177,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i8.nxv2i8(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -9056,6 +9191,7 @@ entry: define void @test_vsoxseg2_mask_nxv2i8_nxv2i8( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -9072,6 +9208,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i8.nxv2i16(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -9085,6 +9222,7 @@ entry: define void @test_vsoxseg2_mask_nxv2i8_nxv2i16( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -9101,6 +9239,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i8.nxv2i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v10 @@ -9113,6 +9252,7 @@ entry: define void @test_vsoxseg2_mask_nxv2i8_nxv2i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v10, v0.t @@ -9129,8 +9269,8 @@ define void @test_vsoxseg3_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9160,8 +9300,8 @@ define void @test_vsoxseg3_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: 
ret @@ -9191,8 +9331,8 @@ define void @test_vsoxseg3_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9221,11 +9361,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv2i8.nxv2i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu -; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v12 +; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.nxv2i8.nxv2i64( %val, %val, %val, i8* %base, %index, i64 %vl) @@ -9235,11 +9375,11 @@ entry: define void @test_vsoxseg3_mask_nxv2i8_nxv2i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu -; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v12, v0.t +; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.mask.nxv2i8.nxv2i64( %val, %val, %val, i8* %base, %index, %mask, i64 %vl) @@ -9253,9 +9393,9 @@ define void @test_vsoxseg4_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9286,9 +9426,9 @@ define void @test_vsoxseg4_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9319,9 +9459,9 @@ define void @test_vsoxseg4_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -9352,9 +9492,9 @@ define void @test_vsoxseg4_nxv2i8_nxv2i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsoxseg4_mask_nxv2i8_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; 
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -9385,10 +9525,10 @@ define void @test_vsoxseg5_nxv2i8_nxv2i32( %val, i8* %base,
define void @test_vsoxseg5_mask_nxv2i8_nxv2i32( %val, i8* %base
; CHECK-LABEL: test_vsoxseg5_mask_nxv2i8_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9420,10 +9560,10 @@ define void @test_vsoxseg5_nxv2i8_nxv2i8( %val, i8* %base,
define void @test_vsoxseg5_mask_nxv2i8_nxv2i8( %val, i8* %base,
; CHECK-LABEL: test_vsoxseg5_mask_nxv2i8_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9455,10 +9595,10 @@ define void @test_vsoxseg5_nxv2i8_nxv2i16( %val, i8* %base,
define void @test_vsoxseg5_mask_nxv2i8_nxv2i16( %val, i8* %base
; CHECK-LABEL: test_vsoxseg5_mask_nxv2i8_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9490,10 +9630,10 @@ define void @test_vsoxseg5_nxv2i8_nxv2i64( %val, i8* %base,
define void @test_vsoxseg5_mask_nxv2i8_nxv2i64( %val, i8* %base
; CHECK-LABEL: test_vsoxseg5_mask_nxv2i8_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg5ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -9525,11 +9665,11 @@ define void @test_vsoxseg6_nxv2i8_nxv2i32( %val, i8* %base,
define void @test_vsoxseg6_mask_nxv2i8_nxv2i32( %val, i8* %base
; CHECK-LABEL: test_vsoxseg6_mask_nxv2i8_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9562,11 +9702,11 @@ define void @test_vsoxseg6_nxv2i8_nxv2i8( %val, i8* %base,
define void @test_vsoxseg6_mask_nxv2i8_nxv2i8( %val, i8* %base,
; CHECK-LABEL: test_vsoxseg6_mask_nxv2i8_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9599,11 +9739,11 @@ define void @test_vsoxseg6_nxv2i8_nxv2i16( %val, i8* %base,
define void @test_vsoxseg6_mask_nxv2i8_nxv2i16( %val, i8* %base
; CHECK-LABEL: test_vsoxseg6_mask_nxv2i8_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9636,11 +9776,11 @@ define void @test_vsoxseg6_nxv2i8_nxv2i64( %val, i8* %base,
define void @test_vsoxseg6_mask_nxv2i8_nxv2i64( %val, i8* %base
; CHECK-LABEL: test_vsoxseg6_mask_nxv2i8_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg6ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -9673,12 +9813,12 @@ define void @test_vsoxseg7_nxv2i8_nxv2i32( %val, i8* %base,
define void @test_vsoxseg7_mask_nxv2i8_nxv2i32( %val, i8* %base
; CHECK-LABEL: test_vsoxseg7_mask_nxv2i8_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9712,12 +9852,12 @@ define void @test_vsoxseg7_nxv2i8_nxv2i8( %val, i8* %base,
define void @test_vsoxseg7_mask_nxv2i8_nxv2i8( %val, i8* %base,
; CHECK-LABEL: test_vsoxseg7_mask_nxv2i8_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9751,12 +9891,12 @@ define void @test_vsoxseg7_nxv2i8_nxv2i16( %val, i8* %base,
define void @test_vsoxseg7_mask_nxv2i8_nxv2i16( %val, i8* %base
; CHECK-LABEL: test_vsoxseg7_mask_nxv2i8_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9790,12 +9930,12 @@ define void @test_vsoxseg7_nxv2i8_nxv2i64( %val, i8* %base,
define void @test_vsoxseg7_mask_nxv2i8_nxv2i64( %val, i8* %base
; CHECK-LABEL: test_vsoxseg7_mask_nxv2i8_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg7ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -9829,13 +9969,13 @@ define void @test_vsoxseg8_nxv2i8_nxv2i32( %val, i8* %base,
define void @test_vsoxseg8_mask_nxv2i8_nxv2i32( %val, i8* %base
; CHECK-LABEL: test_vsoxseg8_mask_nxv2i8_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9870,13 +10010,13 @@ define void @test_vsoxseg8_nxv2i8_nxv2i8( %val, i8* %base,
define void @test_vsoxseg8_mask_nxv2i8_nxv2i8( %val, i8* %base,
; CHECK-LABEL: test_vsoxseg8_mask_nxv2i8_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9911,13 +10051,13 @@ define void @test_vsoxseg8_nxv2i8_nxv2i16( %val, i8* %base,
define void @test_vsoxseg8_mask_nxv2i8_nxv2i16( %val, i8* %base
; CHECK-LABEL: test_vsoxseg8_mask_nxv2i8_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9952,13 +10092,13 @@ define void @test_vsoxseg8_nxv2i8_nxv2i64( %val, i8* %base,
define void @test_vsoxseg8_mask_nxv2i8_nxv2i64( %val, i8* %base
; CHECK-LABEL: test_vsoxseg8_mask_nxv2i8_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsoxseg8ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -9992,6 +10132,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i32.nxv8i16(,
define void @test_vsoxseg2_nxv8i32_nxv8i16( %val, i32* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv8i32_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -10005,6 +10146,7 @@ entry:
define void @test_vsoxseg2_mask_nxv8i32_nxv8i16( %val, i32* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv8i32_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -10021,6 +10163,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i32.nxv8i8(,
define void @test_vsoxseg2_nxv8i32_nxv8i8( %val, i32* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv8i32_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -10034,6 +10177,7 @@ entry:
define void @test_vsoxseg2_mask_nxv8i32_nxv8i8( %val, i32* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv8i32_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -10050,6 +10194,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i32.nxv8i64(,
define void @test_vsoxseg2_nxv8i32_nxv8i64( %val, i32* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv8i32_nxv8i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v16
@@ -10062,6 +10207,7 @@ entry:
define void @test_vsoxseg2_mask_nxv8i32_nxv8i64( %val, i32* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv8i32_nxv8i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v16, v0.t
@@ -10077,6 +10223,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8i32.nxv8i32(,
define void @test_vsoxseg2_nxv8i32_nxv8i32( %val, i32* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv8i32_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -10090,6 +10237,7 @@ entry:
define void @test_vsoxseg2_mask_nxv8i32_nxv8i32( %val, i32* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv8i32_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -10106,6 +10254,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv32i8.nxv32i16(,
define void @test_vsoxseg2_nxv32i8_nxv32i16( %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv32i8_nxv32i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu
; CHECK-NEXT: vsoxseg2ei16.v v8, (a0), v16
@@ -10118,6 +10267,7 @@ entry:
define void @test_vsoxseg2_mask_nxv32i8_nxv32i16( %val, i8* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv32i8_nxv32i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu
; CHECK-NEXT: vsoxseg2ei16.v v8, (a0), v16, v0.t
@@ -10133,6 +10283,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv32i8.nxv32i8(,
define void @test_vsoxseg2_nxv32i8_nxv32i8( %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv32i8_nxv32i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu
@@ -10146,6 +10297,7 @@ entry:
define void @test_vsoxseg2_mask_nxv32i8_nxv32i8( %val, i8* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv32i8_nxv32i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu
@@ -10162,6 +10314,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i16.nxv2i32(,
define void @test_vsoxseg2_nxv2i16_nxv2i32( %val, i16* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -10175,6 +10328,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2i16_nxv2i32( %val, i16* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -10191,6 +10345,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i16.nxv2i8(,
define void @test_vsoxseg2_nxv2i16_nxv2i8( %val, i16* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -10204,6 +10359,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2i16_nxv2i8( %val, i16* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -10220,6 +10376,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i16.nxv2i16(,
define void @test_vsoxseg2_nxv2i16_nxv2i16( %val, i16* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -10233,6 +10390,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2i16_nxv2i16( %val, i16* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -10249,6 +10407,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i16.nxv2i64(,
define void @test_vsoxseg2_nxv2i16_nxv2i64( %val, i16* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v10
@@ -10261,6 +10420,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2i16_nxv2i64( %val, i16* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v10, v0.t
@@ -10277,8 +10437,8 @@ define void @test_vsoxseg3_nxv2i16_nxv2i32( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg3_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10291,8 +10451,8 @@ define void @test_vsoxseg3_mask_nxv2i16_nxv2i32( %val, i16* %b
; CHECK-LABEL: test_vsoxseg3_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10308,8 +10468,8 @@ define void @test_vsoxseg3_nxv2i16_nxv2i8( %val, i16* %base, <
; CHECK-LABEL: test_vsoxseg3_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10322,8 +10482,8 @@ define void @test_vsoxseg3_mask_nxv2i16_nxv2i8( %val, i16* %ba
; CHECK-LABEL: test_vsoxseg3_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10339,8 +10499,8 @@ define void @test_vsoxseg3_nxv2i16_nxv2i16( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg3_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10353,8 +10513,8 @@ define void @test_vsoxseg3_mask_nxv2i16_nxv2i16( %val, i16* %b
; CHECK-LABEL: test_vsoxseg3_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10369,11 +10529,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv2i16.nxv2i64(,
define void @test_vsoxseg3_nxv2i16_nxv2i64( %val, i16* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg3_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
-; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v12
+; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsoxseg3.nxv2i16.nxv2i64( %val, %val, %val, i16* %base, %index, i64 %vl)
@@ -10383,11 +10543,11 @@ entry:
define void @test_vsoxseg3_mask_nxv2i16_nxv2i64( %val, i16* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg3_mask_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
-; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsoxseg3.mask.nxv2i16.nxv2i64( %val, %val, %val, i16* %base, %index, %mask, i64 %vl)
@@ -10401,9 +10561,9 @@ define void @test_vsoxseg4_nxv2i16_nxv2i32( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg4_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10416,9 +10576,9 @@ define void @test_vsoxseg4_mask_nxv2i16_nxv2i32( %val, i16* %b
; CHECK-LABEL: test_vsoxseg4_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10434,9 +10594,9 @@ define void @test_vsoxseg4_nxv2i16_nxv2i8( %val, i16* %base, <
; CHECK-LABEL: test_vsoxseg4_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10449,9 +10609,9 @@ define void @test_vsoxseg4_mask_nxv2i16_nxv2i8( %val, i16* %ba
; CHECK-LABEL: test_vsoxseg4_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10467,9 +10627,9 @@ define void @test_vsoxseg4_nxv2i16_nxv2i16( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg4_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10482,9 +10642,9 @@ define void @test_vsoxseg4_mask_nxv2i16_nxv2i16( %val, i16* %b
; CHECK-LABEL: test_vsoxseg4_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10500,9 +10660,9 @@ define void @test_vsoxseg4_nxv2i16_nxv2i64( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg4_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -10515,9 +10675,9 @@ define void @test_vsoxseg4_mask_nxv2i16_nxv2i64( %val, i16* %b
; CHECK-LABEL: test_vsoxseg4_mask_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -10533,10 +10693,10 @@ define void @test_vsoxseg5_nxv2i16_nxv2i32( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg5_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10549,10 +10709,10 @@ define void @test_vsoxseg5_mask_nxv2i16_nxv2i32( %val, i16* %b
; CHECK-LABEL: test_vsoxseg5_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10568,10 +10728,10 @@ define void @test_vsoxseg5_nxv2i16_nxv2i8( %val, i16* %base, <
; CHECK-LABEL: test_vsoxseg5_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10584,10 +10744,10 @@ define void @test_vsoxseg5_mask_nxv2i16_nxv2i8( %val, i16* %ba
; CHECK-LABEL: test_vsoxseg5_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10603,10 +10763,10 @@ define void @test_vsoxseg5_nxv2i16_nxv2i16( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg5_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10619,10 +10779,10 @@ define void @test_vsoxseg5_mask_nxv2i16_nxv2i16( %val, i16* %b
; CHECK-LABEL: test_vsoxseg5_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10638,10 +10798,10 @@ define void @test_vsoxseg5_nxv2i16_nxv2i64( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg5_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -10654,10 +10814,10 @@ define void @test_vsoxseg5_mask_nxv2i16_nxv2i64( %val, i16* %b
; CHECK-LABEL: test_vsoxseg5_mask_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -10673,11 +10833,11 @@ define void @test_vsoxseg6_nxv2i16_nxv2i32( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg6_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10690,11 +10850,11 @@ define void @test_vsoxseg6_mask_nxv2i16_nxv2i32( %val, i16* %b
; CHECK-LABEL: test_vsoxseg6_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10710,11 +10870,11 @@ define void @test_vsoxseg6_nxv2i16_nxv2i8( %val, i16* %base, <
; CHECK-LABEL: test_vsoxseg6_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10727,11 +10887,11 @@ define void @test_vsoxseg6_mask_nxv2i16_nxv2i8( %val, i16* %ba
; CHECK-LABEL: test_vsoxseg6_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10747,11 +10907,11 @@ define void @test_vsoxseg6_nxv2i16_nxv2i16( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg6_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10764,11 +10924,11 @@ define void @test_vsoxseg6_mask_nxv2i16_nxv2i16( %val, i16* %b
; CHECK-LABEL: test_vsoxseg6_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10784,11 +10944,11 @@ define void @test_vsoxseg6_nxv2i16_nxv2i64( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg6_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -10801,11 +10961,11 @@ define void @test_vsoxseg6_mask_nxv2i16_nxv2i64( %val, i16* %b
; CHECK-LABEL: test_vsoxseg6_mask_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -10821,12 +10981,12 @@ define void @test_vsoxseg7_nxv2i16_nxv2i32( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg7_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10839,12 +10999,12 @@ define void @test_vsoxseg7_mask_nxv2i16_nxv2i32( %val, i16* %b
; CHECK-LABEL: test_vsoxseg7_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10860,12 +11020,12 @@ define void @test_vsoxseg7_nxv2i16_nxv2i8( %val, i16* %base, <
; CHECK-LABEL: test_vsoxseg7_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10878,12 +11038,12 @@ define void @test_vsoxseg7_mask_nxv2i16_nxv2i8( %val, i16* %ba
; CHECK-LABEL: test_vsoxseg7_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10899,12 +11059,12 @@ define void @test_vsoxseg7_nxv2i16_nxv2i16( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg7_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10917,12 +11077,12 @@ define void @test_vsoxseg7_mask_nxv2i16_nxv2i16( %val, i16* %b
; CHECK-LABEL: test_vsoxseg7_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10938,12 +11098,12 @@ define void @test_vsoxseg7_nxv2i16_nxv2i64( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg7_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -10956,12 +11116,12 @@ define void @test_vsoxseg7_mask_nxv2i16_nxv2i64( %val, i16* %b
; CHECK-LABEL: test_vsoxseg7_mask_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -10977,13 +11137,13 @@ define void @test_vsoxseg8_nxv2i16_nxv2i32( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg8_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10996,13 +11156,13 @@ define void @test_vsoxseg8_mask_nxv2i16_nxv2i32( %val, i16* %b
; CHECK-LABEL: test_vsoxseg8_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11018,13 +11178,13 @@ define void @test_vsoxseg8_nxv2i16_nxv2i8( %val, i16* %base, <
; CHECK-LABEL: test_vsoxseg8_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11037,13 +11197,13 @@ define void @test_vsoxseg8_mask_nxv2i16_nxv2i8( %val, i16* %ba
; CHECK-LABEL: test_vsoxseg8_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11059,13 +11219,13 @@ define void @test_vsoxseg8_nxv2i16_nxv2i16( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg8_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11078,13 +11238,13 @@ define void @test_vsoxseg8_mask_nxv2i16_nxv2i16( %val, i16* %b
; CHECK-LABEL: test_vsoxseg8_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11100,13 +11260,13 @@ define void @test_vsoxseg8_nxv2i16_nxv2i64( %val, i16* %base,
; CHECK-LABEL: test_vsoxseg8_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11119,13 +11279,13 @@ define void @test_vsoxseg8_mask_nxv2i16_nxv2i64( %val, i16* %b
; CHECK-LABEL: test_vsoxseg8_mask_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11140,6 +11300,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i64.nxv2i32(,
define void @test_vsoxseg2_nxv2i64_nxv2i32( %val, i64* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2i64_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11153,6 +11314,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2i64_nxv2i32( %val, i64* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2i64_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11169,6 +11331,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i64.nxv2i8(,
define void @test_vsoxseg2_nxv2i64_nxv2i8( %val, i64* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2i64_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11182,6 +11345,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2i64_nxv2i8( %val, i64* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2i64_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11198,6 +11362,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i64.nxv2i16(,
define void @test_vsoxseg2_nxv2i64_nxv2i16( %val, i64* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2i64_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11211,6 +11376,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2i64_nxv2i16( %val, i64* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2i64_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11227,6 +11393,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2i64.nxv2i64(,
define void @test_vsoxseg2_nxv2i64_nxv2i64( %val, i64* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2i64_nxv2i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11240,6 +11407,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2i64_nxv2i64( %val, i64* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2i64_nxv2i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11257,8 +11425,8 @@ define void @test_vsoxseg3_nxv2i64_nxv2i32( %val, i64* %base,
; CHECK-LABEL: test_vsoxseg3_nxv2i64_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11271,8 +11439,8 @@ define void @test_vsoxseg3_mask_nxv2i64_nxv2i32( %val, i64* %b
; CHECK-LABEL: test_vsoxseg3_mask_nxv2i64_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11288,8 +11456,8 @@ define void @test_vsoxseg3_nxv2i64_nxv2i8( %val, i64* %base, <
; CHECK-LABEL: test_vsoxseg3_nxv2i64_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11302,8 +11470,8 @@ define void @test_vsoxseg3_mask_nxv2i64_nxv2i8( %val, i64* %ba
; CHECK-LABEL: test_vsoxseg3_mask_nxv2i64_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11319,8 +11487,8 @@ define void @test_vsoxseg3_nxv2i64_nxv2i16( %val, i64* %base,
; CHECK-LABEL: test_vsoxseg3_nxv2i64_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11333,8 +11501,8 @@ define void @test_vsoxseg3_mask_nxv2i64_nxv2i16( %val, i64* %b
; CHECK-LABEL: test_vsoxseg3_mask_nxv2i64_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11350,8 +11518,8 @@ define void @test_vsoxseg3_nxv2i64_nxv2i64( %val, i64* %base,
; CHECK-LABEL: test_vsoxseg3_nxv2i64_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11364,8 +11532,8 @@ define void @test_vsoxseg3_mask_nxv2i64_nxv2i64( %val, i64* %b
; CHECK-LABEL: test_vsoxseg3_mask_nxv2i64_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11381,9 +11549,9 @@ define void @test_vsoxseg4_nxv2i64_nxv2i32( %val, i64* %base,
; CHECK-LABEL: test_vsoxseg4_nxv2i64_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11396,9 +11564,9 @@ define void @test_vsoxseg4_mask_nxv2i64_nxv2i32( %val, i64* %b
; CHECK-LABEL: test_vsoxseg4_mask_nxv2i64_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11414,9 +11582,9 @@ define void @test_vsoxseg4_nxv2i64_nxv2i8( %val, i64* %base, <
; CHECK-LABEL: test_vsoxseg4_nxv2i64_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11429,9 +11597,9 @@ define void @test_vsoxseg4_mask_nxv2i64_nxv2i8( %val, i64* %ba
; CHECK-LABEL: test_vsoxseg4_mask_nxv2i64_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11447,9 +11615,9 @@ define void @test_vsoxseg4_nxv2i64_nxv2i16( %val, i64* %base,
; CHECK-LABEL: test_vsoxseg4_nxv2i64_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11462,9 +11630,9 @@ define void @test_vsoxseg4_mask_nxv2i64_nxv2i16( %val, i64* %b
; CHECK-LABEL: test_vsoxseg4_mask_nxv2i64_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11480,9 +11648,9 @@ define void @test_vsoxseg4_nxv2i64_nxv2i64( %val, i64* %base,
; CHECK-LABEL: test_vsoxseg4_nxv2i64_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11495,9 +11663,9 @@ define void @test_vsoxseg4_mask_nxv2i64_nxv2i64( %val, i64* %b
; CHECK-LABEL: test_vsoxseg4_mask_nxv2i64_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11512,6 +11680,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16f16.nxv16i16(,
define void @test_vsoxseg2_nxv16f16_nxv16i16( %val, half* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv16f16_nxv16i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -11525,6 +11694,7 @@ entry:
define void @test_vsoxseg2_mask_nxv16f16_nxv16i16( %val, half* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv16f16_nxv16i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -11541,6 +11711,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16f16.nxv16i8(,
define void @test_vsoxseg2_nxv16f16_nxv16i8( %val, half* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv16f16_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -11554,6 +11725,7 @@ entry:
define void @test_vsoxseg2_mask_nxv16f16_nxv16i8( %val, half* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv16f16_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -11570,6 +11742,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16f16.nxv16i32(,
define void @test_vsoxseg2_nxv16f16_nxv16i32( %val, half* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv16f16_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v16
@@ -11582,6 +11755,7 @@ entry:
define void @test_vsoxseg2_mask_nxv16f16_nxv16i32( %val, half* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv16f16_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v16, v0.t
@@ -11597,6 +11771,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f64.nxv4i32(,
define void @test_vsoxseg2_nxv4f64_nxv4i32( %val, double* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4f64_nxv4i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -11610,6 +11785,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4f64_nxv4i32( %val, double* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4f64_nxv4i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -11626,6 +11802,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f64.nxv4i8(,
define void @test_vsoxseg2_nxv4f64_nxv4i8( %val, double* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4f64_nxv4i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -11639,6 +11816,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4f64_nxv4i8( %val, double* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4f64_nxv4i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -11655,6 +11833,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f64.nxv4i64(,
define void @test_vsoxseg2_nxv4f64_nxv4i64( %val, double* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4f64_nxv4i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -11668,6 +11847,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4f64_nxv4i64( %val, double* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4f64_nxv4i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -11684,6 +11864,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f64.nxv4i16(,
define void @test_vsoxseg2_nxv4f64_nxv4i16( %val, double* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4f64_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -11697,6 +11878,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4f64_nxv4i16( %val, double* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4f64_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -11713,6 +11895,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f64.nxv1i64(,
define void @test_vsoxseg2_nxv1f64_nxv1i64( %val, double* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -11726,6 +11909,7 @@ entry:
define void @test_vsoxseg2_mask_nxv1f64_nxv1i64( %val, double* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -11742,6 +11926,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f64.nxv1i32(,
define void @test_vsoxseg2_nxv1f64_nxv1i32( %val, double* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -11755,6 +11940,7 @@ entry:
define void @test_vsoxseg2_mask_nxv1f64_nxv1i32( %val, double* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -11771,6 +11957,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f64.nxv1i16(,
define void @test_vsoxseg2_nxv1f64_nxv1i16( %val, double* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -11784,6 +11971,7 @@ entry:
define void @test_vsoxseg2_mask_nxv1f64_nxv1i16( %val, double* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -11800,6 +11988,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f64.nxv1i8(,
define void @test_vsoxseg2_nxv1f64_nxv1i8( %val, double* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -11813,6 +12002,7 @@ entry:
define void @test_vsoxseg2_mask_nxv1f64_nxv1i8( %val, double* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -11830,8 +12020,8 @@ define void @test_vsoxseg3_nxv1f64_nxv1i64( %val, double* %
; CHECK-LABEL: test_vsoxseg3_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11844,8 +12034,8 @@ define void @test_vsoxseg3_mask_nxv1f64_nxv1i64( %val, doub
; CHECK-LABEL: test_vsoxseg3_mask_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11861,8 +12051,8 @@ define void @test_vsoxseg3_nxv1f64_nxv1i32( %val, double* %
; CHECK-LABEL: test_vsoxseg3_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11875,8 +12065,8 @@ define void @test_vsoxseg3_mask_nxv1f64_nxv1i32( %val, doub
; CHECK-LABEL: test_vsoxseg3_mask_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11892,8 +12082,8 @@ define void @test_vsoxseg3_nxv1f64_nxv1i16( %val, double* %
; CHECK-LABEL: test_vsoxseg3_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11906,8 +12096,8 @@ define void @test_vsoxseg3_mask_nxv1f64_nxv1i16( %val, doub
; CHECK-LABEL: test_vsoxseg3_mask_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11923,8 +12113,8 @@ define void @test_vsoxseg3_nxv1f64_nxv1i8( %val, double* %b
; CHECK-LABEL: test_vsoxseg3_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11937,8 +12127,8 @@ define void @test_vsoxseg3_mask_nxv1f64_nxv1i8( %val, doubl
; CHECK-LABEL: test_vsoxseg3_mask_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11954,9 +12144,9 @@ define void @test_vsoxseg4_nxv1f64_nxv1i64( %val, double* %
; CHECK-LABEL: test_vsoxseg4_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -11969,9 +12159,9 @@ define void @test_vsoxseg4_mask_nxv1f64_nxv1i64( %val, doub
; CHECK-LABEL: test_vsoxseg4_mask_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -11987,9 +12177,9 @@ define void @test_vsoxseg4_nxv1f64_nxv1i32( %val, double* %
; CHECK-LABEL: test_vsoxseg4_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12002,9 +12192,9 @@ define void @test_vsoxseg4_mask_nxv1f64_nxv1i32( %val, doub
; CHECK-LABEL: test_vsoxseg4_mask_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12020,9 +12210,9 @@ define void @test_vsoxseg4_nxv1f64_nxv1i16( %val, double* %
; CHECK-LABEL: test_vsoxseg4_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12035,9 +12225,9 @@ define void @test_vsoxseg4_mask_nxv1f64_nxv1i16( %val, doub
; CHECK-LABEL: test_vsoxseg4_mask_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12053,9 +12243,9 @@ define void @test_vsoxseg4_nxv1f64_nxv1i8( %val, double* %b
; CHECK-LABEL: test_vsoxseg4_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12068,9 +12258,9 @@ define void @test_vsoxseg4_mask_nxv1f64_nxv1i8( %val, doubl
; CHECK-LABEL: test_vsoxseg4_mask_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12086,10 +12276,10 @@ define void @test_vsoxseg5_nxv1f64_nxv1i64( %val, double* %
; CHECK-LABEL: test_vsoxseg5_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12102,10 +12292,10 @@ define void @test_vsoxseg5_mask_nxv1f64_nxv1i64( %val, doub
; CHECK-LABEL: test_vsoxseg5_mask_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12121,10 +12311,10 @@ define void @test_vsoxseg5_nxv1f64_nxv1i32( %val, double* %
; CHECK-LABEL: test_vsoxseg5_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12137,10 +12327,10 @@ define void @test_vsoxseg5_mask_nxv1f64_nxv1i32( %val, doub
; CHECK-LABEL: test_vsoxseg5_mask_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12156,10 +12346,10 @@ define void @test_vsoxseg5_nxv1f64_nxv1i16( %val, double* %
; CHECK-LABEL: test_vsoxseg5_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12172,10 +12362,10 @@ define void @test_vsoxseg5_mask_nxv1f64_nxv1i16( %val, doub
; CHECK-LABEL: test_vsoxseg5_mask_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12191,10 +12381,10 @@ define void @test_vsoxseg5_nxv1f64_nxv1i8( %val, double* %b
; CHECK-LABEL: test_vsoxseg5_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12207,10 +12397,10 @@ define void @test_vsoxseg5_mask_nxv1f64_nxv1i8( %val, doubl
; CHECK-LABEL: test_vsoxseg5_mask_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12226,11 +12416,11 @@ define void @test_vsoxseg6_nxv1f64_nxv1i64( %val, double* %
; CHECK-LABEL: test_vsoxseg6_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12243,11 +12433,11 @@ define void @test_vsoxseg6_mask_nxv1f64_nxv1i64( %val, doub
; CHECK-LABEL: test_vsoxseg6_mask_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12263,11 +12453,11 @@ define void @test_vsoxseg6_nxv1f64_nxv1i32( %val, double* %
; CHECK-LABEL: test_vsoxseg6_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12280,11 +12470,11 @@ define void @test_vsoxseg6_mask_nxv1f64_nxv1i32( %val, doub
; CHECK-LABEL: test_vsoxseg6_mask_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12300,11 +12490,11 @@ define void @test_vsoxseg6_nxv1f64_nxv1i16( %val, double* %
; CHECK-LABEL: test_vsoxseg6_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12317,11 +12507,11 @@ define void @test_vsoxseg6_mask_nxv1f64_nxv1i16( %val, doub
; CHECK-LABEL: test_vsoxseg6_mask_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12337,11 +12527,11 @@ define void @test_vsoxseg6_nxv1f64_nxv1i8( %val, double* %b
; CHECK-LABEL: test_vsoxseg6_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12354,11 +12544,11 @@ define void @test_vsoxseg6_mask_nxv1f64_nxv1i8( %val, doubl
; CHECK-LABEL: test_vsoxseg6_mask_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12374,12 +12564,12 @@ define void @test_vsoxseg7_nxv1f64_nxv1i64( %val, double* %
; CHECK-LABEL: test_vsoxseg7_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12392,12 +12582,12 @@ define void @test_vsoxseg7_mask_nxv1f64_nxv1i64( %val, doub
; CHECK-LABEL: test_vsoxseg7_mask_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12413,12 +12603,12 @@ define void @test_vsoxseg7_nxv1f64_nxv1i32( %val, double* %
; CHECK-LABEL: test_vsoxseg7_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12431,12 +12621,12 @@ define void @test_vsoxseg7_mask_nxv1f64_nxv1i32( %val, doub
; CHECK-LABEL: test_vsoxseg7_mask_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12452,12 +12642,12 @@ define void @test_vsoxseg7_nxv1f64_nxv1i16( %val, double* %
; CHECK-LABEL: test_vsoxseg7_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12470,12 +12660,12 @@ define void @test_vsoxseg7_mask_nxv1f64_nxv1i16( %val, doub
; CHECK-LABEL: test_vsoxseg7_mask_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12491,12 +12681,12 @@ define void @test_vsoxseg7_nxv1f64_nxv1i8( %val, double* %b
; CHECK-LABEL: test_vsoxseg7_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12509,12 +12699,12 @@ define void @test_vsoxseg7_mask_nxv1f64_nxv1i8( %val, doubl
; CHECK-LABEL: test_vsoxseg7_mask_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12530,13 +12720,13 @@ define void @test_vsoxseg8_nxv1f64_nxv1i64( %val, double* %
; CHECK-LABEL: test_vsoxseg8_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12549,13 +12739,13 @@ define void @test_vsoxseg8_mask_nxv1f64_nxv1i64( %val, doub
; CHECK-LABEL: test_vsoxseg8_mask_nxv1f64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12571,13 +12761,13 @@ define void @test_vsoxseg8_nxv1f64_nxv1i32( %val, double* %
; CHECK-LABEL: test_vsoxseg8_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12590,13 +12780,13 @@ define void @test_vsoxseg8_mask_nxv1f64_nxv1i32( %val, doub
; CHECK-LABEL: test_vsoxseg8_mask_nxv1f64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12612,13 +12802,13 @@ define void @test_vsoxseg8_nxv1f64_nxv1i16( %val, double* %
; CHECK-LABEL: test_vsoxseg8_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12631,13 +12821,13 @@ define void @test_vsoxseg8_mask_nxv1f64_nxv1i16( %val, doub
; CHECK-LABEL: test_vsoxseg8_mask_nxv1f64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12653,13 +12843,13 @@ define void @test_vsoxseg8_nxv1f64_nxv1i8( %val, double* %b
; CHECK-LABEL: test_vsoxseg8_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12672,13 +12862,13 @@ define void @test_vsoxseg8_mask_nxv1f64_nxv1i8( %val, doubl
; CHECK-LABEL: test_vsoxseg8_mask_nxv1f64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12693,6 +12883,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f32.nxv2i32(,
define void @test_vsoxseg2_nxv2f32_nxv2i32( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -12706,6 +12897,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2f32_nxv2i32( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -12722,6 +12914,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f32.nxv2i8(,
define void @test_vsoxseg2_nxv2f32_nxv2i8( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -12735,6 +12928,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2f32_nxv2i8( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -12751,6 +12945,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f32.nxv2i16(,
define void @test_vsoxseg2_nxv2f32_nxv2i16( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -12764,6 +12959,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2f32_nxv2i16( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -12780,6 +12976,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f32.nxv2i64(,
define void @test_vsoxseg2_nxv2f32_nxv2i64( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v10
@@ -12792,6 +12989,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2f32_nxv2i64( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v10, v0.t
@@ -12808,8 +13006,8 @@ define void @test_vsoxseg3_nxv2f32_nxv2i32( %val, float* %ba
; CHECK-LABEL: test_vsoxseg3_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12822,8 +13020,8 @@ define void @test_vsoxseg3_mask_nxv2f32_nxv2i32( %val, float
; CHECK-LABEL: test_vsoxseg3_mask_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12839,8 +13037,8 @@ define void @test_vsoxseg3_nxv2f32_nxv2i8( %val, float* %bas
; CHECK-LABEL: test_vsoxseg3_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12853,8 +13051,8 @@ define void @test_vsoxseg3_mask_nxv2f32_nxv2i8( %val, float*
; CHECK-LABEL: test_vsoxseg3_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12870,8 +13068,8 @@ define void @test_vsoxseg3_nxv2f32_nxv2i16( %val, float* %ba
; CHECK-LABEL: test_vsoxseg3_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12884,8 +13082,8 @@ define void @test_vsoxseg3_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsoxseg3_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12900,11 +13098,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv2f32.nxv2i64(,
define void @test_vsoxseg3_nxv2f32_nxv2i64( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg3_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v12
+; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsoxseg3.nxv2f32.nxv2i64( %val, %val, %val, float* %base, %index, i64 %vl)
@@ -12914,11 +13112,11 @@ entry:
define void @test_vsoxseg3_mask_nxv2f32_nxv2i64( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg3_mask_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsoxseg3.mask.nxv2f32.nxv2i64( %val, %val, %val, float* %base, %index, %mask, i64 %vl)
@@ -12932,9 +13130,9 @@ define void @test_vsoxseg4_nxv2f32_nxv2i32( %val, float* %ba
; CHECK-LABEL: test_vsoxseg4_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12947,9 +13145,9 @@ define void @test_vsoxseg4_mask_nxv2f32_nxv2i32( %val, float
; CHECK-LABEL: test_vsoxseg4_mask_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12965,9 +13163,9 @@ define void @test_vsoxseg4_nxv2f32_nxv2i8( %val, float* %bas
; CHECK-LABEL: test_vsoxseg4_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12980,9 +13178,9 @@ define void @test_vsoxseg4_mask_nxv2f32_nxv2i8( %val, float*
; CHECK-LABEL: test_vsoxseg4_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12998,9 +13196,9 @@ define void @test_vsoxseg4_nxv2f32_nxv2i16( %val, float* %ba
; CHECK-LABEL: test_vsoxseg4_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13013,9 +13211,9 @@ define void @test_vsoxseg4_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsoxseg4_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13031,9 +13229,9 @@ define void @test_vsoxseg4_nxv2f32_nxv2i64( %val, float* %ba
; CHECK-LABEL: test_vsoxseg4_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13046,9 +13244,9 @@ define void @test_vsoxseg4_mask_nxv2f32_nxv2i64( %val, float
; CHECK-LABEL: test_vsoxseg4_mask_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13064,10 +13262,10 @@ define void @test_vsoxseg5_nxv2f32_nxv2i32( %val, float* %ba
; CHECK-LABEL: test_vsoxseg5_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13080,10 +13278,10 @@ define void @test_vsoxseg5_mask_nxv2f32_nxv2i32( %val, float
; CHECK-LABEL: test_vsoxseg5_mask_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13099,10 +13297,10 @@ define void @test_vsoxseg5_nxv2f32_nxv2i8( %val, float* %bas
; CHECK-LABEL: test_vsoxseg5_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13115,10 +13313,10 @@ define void @test_vsoxseg5_mask_nxv2f32_nxv2i8( %val, float*
; CHECK-LABEL: test_vsoxseg5_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13134,10 +13332,10 @@ define void @test_vsoxseg5_nxv2f32_nxv2i16( %val, float* %ba
; CHECK-LABEL: test_vsoxseg5_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13150,10 +13348,10 @@ define void @test_vsoxseg5_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsoxseg5_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13169,10 +13367,10 @@ define void @test_vsoxseg5_nxv2f32_nxv2i64( %val, float* %ba
; CHECK-LABEL: test_vsoxseg5_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13185,10 +13383,10 @@ define void @test_vsoxseg5_mask_nxv2f32_nxv2i64( %val, float
; CHECK-LABEL: test_vsoxseg5_mask_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg5ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13204,11 +13402,11 @@ define void @test_vsoxseg6_nxv2f32_nxv2i32( %val, float* %ba
; CHECK-LABEL: test_vsoxseg6_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13221,11 +13419,11 @@ define void @test_vsoxseg6_mask_nxv2f32_nxv2i32( %val, float
; CHECK-LABEL: test_vsoxseg6_mask_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13241,11 +13439,11 @@ define void @test_vsoxseg6_nxv2f32_nxv2i8( %val, float* %bas
; CHECK-LABEL: test_vsoxseg6_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13258,11 +13456,11 @@ define void @test_vsoxseg6_mask_nxv2f32_nxv2i8( %val, float*
; CHECK-LABEL: test_vsoxseg6_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13278,11 +13476,11 @@ define void @test_vsoxseg6_nxv2f32_nxv2i16( %val, float* %ba
; CHECK-LABEL: test_vsoxseg6_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13295,11 +13493,11 @@ define void @test_vsoxseg6_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsoxseg6_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13315,11 +13513,11 @@ define void @test_vsoxseg6_nxv2f32_nxv2i64( %val, float* %ba
; CHECK-LABEL: test_vsoxseg6_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13332,11 +13530,11 @@ define void @test_vsoxseg6_mask_nxv2f32_nxv2i64( %val, float
; CHECK-LABEL: test_vsoxseg6_mask_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg6ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13352,12 +13550,12 @@ define void @test_vsoxseg7_nxv2f32_nxv2i32( %val, float* %ba
; CHECK-LABEL: test_vsoxseg7_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13370,12 +13568,12 @@ define void @test_vsoxseg7_mask_nxv2f32_nxv2i32( %val, float
; CHECK-LABEL: test_vsoxseg7_mask_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13391,12 +13589,12 @@ define void @test_vsoxseg7_nxv2f32_nxv2i8( %val, float* %bas
; CHECK-LABEL: test_vsoxseg7_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13409,12 +13607,12 @@ define void @test_vsoxseg7_mask_nxv2f32_nxv2i8( %val, float*
; CHECK-LABEL: test_vsoxseg7_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13430,12 +13628,12 @@ define void @test_vsoxseg7_nxv2f32_nxv2i16( %val, float* %ba
; CHECK-LABEL: test_vsoxseg7_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13448,12 +13646,12 @@ define void @test_vsoxseg7_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsoxseg7_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13469,12 +13667,12 @@ define void @test_vsoxseg7_nxv2f32_nxv2i64( %val, float* %ba
; CHECK-LABEL: test_vsoxseg7_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13487,12 +13685,12 @@ define void @test_vsoxseg7_mask_nxv2f32_nxv2i64( %val, float
; CHECK-LABEL: test_vsoxseg7_mask_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13508,13 +13706,13 @@ define void @test_vsoxseg8_nxv2f32_nxv2i32( %val, float* %ba
; CHECK-LABEL: test_vsoxseg8_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9
v10, (a0), v9 ; CHECK-NEXT: ret @@ -13527,13 +13725,13 @@ define void @test_vsoxseg8_mask_nxv2f32_nxv2i32( %val, float ; CHECK-LABEL: test_vsoxseg8_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -13549,13 +13747,13 @@ define void @test_vsoxseg8_nxv2f32_nxv2i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg8_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -13568,13 +13766,13 @@ define void @test_vsoxseg8_mask_nxv2f32_nxv2i8( %val, float* ; CHECK-LABEL: test_vsoxseg8_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -13590,13 +13788,13 @@ define void @test_vsoxseg8_nxv2f32_nxv2i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg8_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -13609,13 +13807,13 @@ define void @test_vsoxseg8_mask_nxv2f32_nxv2i16( %val, float ; CHECK-LABEL: test_vsoxseg8_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; 
CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -13631,13 +13829,13 @@ define void @test_vsoxseg8_nxv2f32_nxv2i64( %val, float* %ba ; CHECK-LABEL: test_vsoxseg8_nxv2f32_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei64.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -13650,13 +13848,13 @@ define void @test_vsoxseg8_mask_nxv2f32_nxv2i64( %val, float ; CHECK-LABEL: test_vsoxseg8_mask_nxv2f32_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsoxseg8ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -13671,6 +13869,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f16.nxv1i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -13684,6 +13883,7 @@ entry: define void @test_vsoxseg2_mask_nxv1f16_nxv1i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -13700,6 +13900,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f16.nxv1i32(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -13713,6 +13914,7 @@ entry: define void @test_vsoxseg2_mask_nxv1f16_nxv1i32( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -13729,6 +13931,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f16.nxv1i16(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, 
mu @@ -13742,6 +13945,7 @@ entry: define void @test_vsoxseg2_mask_nxv1f16_nxv1i16( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -13758,6 +13962,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f16.nxv1i8(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -13771,6 +13976,7 @@ entry: define void @test_vsoxseg2_mask_nxv1f16_nxv1i8( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -13788,8 +13994,8 @@ define void @test_vsoxseg3_nxv1f16_nxv1i64( %val, half* %base ; CHECK-LABEL: test_vsoxseg3_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -13802,8 +14008,8 @@ define void @test_vsoxseg3_mask_nxv1f16_nxv1i64( %val, half* ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -13819,8 +14025,8 @@ define void @test_vsoxseg3_nxv1f16_nxv1i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg3_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -13833,8 +14039,8 @@ define void @test_vsoxseg3_mask_nxv1f16_nxv1i32( %val, half* ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -13850,8 +14056,8 @@ define void @test_vsoxseg3_nxv1f16_nxv1i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg3_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -13864,8 +14070,8 @@ define void @test_vsoxseg3_mask_nxv1f16_nxv1i16( %val, half* ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 
+; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -13881,8 +14087,8 @@ define void @test_vsoxseg3_nxv1f16_nxv1i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg3_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -13895,8 +14101,8 @@ define void @test_vsoxseg3_mask_nxv1f16_nxv1i8( %val, half* % ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -13912,9 +14118,9 @@ define void @test_vsoxseg4_nxv1f16_nxv1i64( %val, half* %base ; CHECK-LABEL: test_vsoxseg4_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -13927,9 +14133,9 @@ define void @test_vsoxseg4_mask_nxv1f16_nxv1i64( %val, half* ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -13945,9 +14151,9 @@ define void @test_vsoxseg4_nxv1f16_nxv1i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg4_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -13960,9 +14166,9 @@ define void @test_vsoxseg4_mask_nxv1f16_nxv1i32( %val, half* ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -13978,9 +14184,9 @@ define void @test_vsoxseg4_nxv1f16_nxv1i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg4_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -13993,9 +14199,9 @@ define void @test_vsoxseg4_mask_nxv1f16_nxv1i16( %val, half* ; 
CHECK-LABEL: test_vsoxseg4_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14011,9 +14217,9 @@ define void @test_vsoxseg4_nxv1f16_nxv1i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg4_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14026,9 +14232,9 @@ define void @test_vsoxseg4_mask_nxv1f16_nxv1i8( %val, half* % ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14044,10 +14250,10 @@ define void @test_vsoxseg5_nxv1f16_nxv1i64( %val, half* %base ; CHECK-LABEL: test_vsoxseg5_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14060,10 +14266,10 @@ define void @test_vsoxseg5_mask_nxv1f16_nxv1i64( %val, half* ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14079,10 +14285,10 @@ define void @test_vsoxseg5_nxv1f16_nxv1i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg5_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14095,10 +14301,10 @@ define void @test_vsoxseg5_mask_nxv1f16_nxv1i32( %val, half* ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, 
ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14114,10 +14320,10 @@ define void @test_vsoxseg5_nxv1f16_nxv1i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg5_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14130,10 +14336,10 @@ define void @test_vsoxseg5_mask_nxv1f16_nxv1i16( %val, half* ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14149,10 +14355,10 @@ define void @test_vsoxseg5_nxv1f16_nxv1i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg5_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14165,10 +14371,10 @@ define void @test_vsoxseg5_mask_nxv1f16_nxv1i8( %val, half* % ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14184,11 +14390,11 @@ define void @test_vsoxseg6_nxv1f16_nxv1i64( %val, half* %base ; CHECK-LABEL: test_vsoxseg6_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14201,11 +14407,11 @@ define void @test_vsoxseg6_mask_nxv1f16_nxv1i64( %val, half* ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ 
-14221,11 +14427,11 @@ define void @test_vsoxseg6_nxv1f16_nxv1i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg6_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14238,11 +14444,11 @@ define void @test_vsoxseg6_mask_nxv1f16_nxv1i32( %val, half* ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14258,11 +14464,11 @@ define void @test_vsoxseg6_nxv1f16_nxv1i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg6_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14275,11 +14481,11 @@ define void @test_vsoxseg6_mask_nxv1f16_nxv1i16( %val, half* ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14295,11 +14501,11 @@ define void @test_vsoxseg6_nxv1f16_nxv1i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg6_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14312,11 +14518,11 @@ define void @test_vsoxseg6_mask_nxv1f16_nxv1i8( %val, half* % ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; 
CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14332,12 +14538,12 @@ define void @test_vsoxseg7_nxv1f16_nxv1i64( %val, half* %base ; CHECK-LABEL: test_vsoxseg7_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14350,12 +14556,12 @@ define void @test_vsoxseg7_mask_nxv1f16_nxv1i64( %val, half* ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14371,12 +14577,12 @@ define void @test_vsoxseg7_nxv1f16_nxv1i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg7_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14389,12 +14595,12 @@ define void @test_vsoxseg7_mask_nxv1f16_nxv1i32( %val, half* ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14410,12 +14616,12 @@ define void @test_vsoxseg7_nxv1f16_nxv1i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg7_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: 
vsoxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14428,12 +14634,12 @@ define void @test_vsoxseg7_mask_nxv1f16_nxv1i16( %val, half* ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14449,12 +14655,12 @@ define void @test_vsoxseg7_nxv1f16_nxv1i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg7_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14467,12 +14673,12 @@ define void @test_vsoxseg7_mask_nxv1f16_nxv1i8( %val, half* % ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14488,13 +14694,13 @@ define void @test_vsoxseg8_nxv1f16_nxv1i64( %val, half* %base ; CHECK-LABEL: test_vsoxseg8_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14507,13 +14713,13 @@ define void @test_vsoxseg8_mask_nxv1f16_nxv1i64( %val, half* ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f16_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: 
vsoxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14529,13 +14735,13 @@ define void @test_vsoxseg8_nxv1f16_nxv1i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg8_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14548,13 +14754,13 @@ define void @test_vsoxseg8_mask_nxv1f16_nxv1i32( %val, half* ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14570,13 +14776,13 @@ define void @test_vsoxseg8_nxv1f16_nxv1i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg8_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14589,13 +14795,13 @@ define void @test_vsoxseg8_mask_nxv1f16_nxv1i16( %val, half* ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14611,13 +14817,13 @@ define void @test_vsoxseg8_nxv1f16_nxv1i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg8_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; 
CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14630,13 +14836,13 @@ define void @test_vsoxseg8_mask_nxv1f16_nxv1i8( %val, half* % ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14651,6 +14857,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f32.nxv1i64(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -14664,6 +14871,7 @@ entry: define void @test_vsoxseg2_mask_nxv1f32_nxv1i64( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -14680,6 +14888,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f32.nxv1i32(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -14693,6 +14902,7 @@ entry: define void @test_vsoxseg2_mask_nxv1f32_nxv1i32( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -14709,6 +14919,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f32.nxv1i16(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -14722,6 +14933,7 @@ entry: define void @test_vsoxseg2_mask_nxv1f32_nxv1i16( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -14738,6 +14950,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv1f32.nxv1i8(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; 
CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -14751,6 +14964,7 @@ entry: define void @test_vsoxseg2_mask_nxv1f32_nxv1i8( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -14768,8 +14982,8 @@ define void @test_vsoxseg3_nxv1f32_nxv1i64( %val, float* %ba ; CHECK-LABEL: test_vsoxseg3_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14782,8 +14996,8 @@ define void @test_vsoxseg3_mask_nxv1f32_nxv1i64( %val, float ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14799,8 +15013,8 @@ define void @test_vsoxseg3_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg3_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14813,8 +15027,8 @@ define void @test_vsoxseg3_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14830,8 +15044,8 @@ define void @test_vsoxseg3_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg3_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14844,8 +15058,8 @@ define void @test_vsoxseg3_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsoxseg3_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14861,8 +15075,8 @@ define void @test_vsoxseg3_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg3_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14875,8 +15089,8 @@ define void @test_vsoxseg3_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: 
test_vsoxseg3_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14892,9 +15106,9 @@ define void @test_vsoxseg4_nxv1f32_nxv1i64( %val, float* %ba ; CHECK-LABEL: test_vsoxseg4_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14907,9 +15121,9 @@ define void @test_vsoxseg4_mask_nxv1f32_nxv1i64( %val, float ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14925,9 +15139,9 @@ define void @test_vsoxseg4_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg4_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14940,9 +15154,9 @@ define void @test_vsoxseg4_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14958,9 +15172,9 @@ define void @test_vsoxseg4_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg4_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -14973,9 +15187,9 @@ define void @test_vsoxseg4_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -14991,9 +15205,9 @@ define void @test_vsoxseg4_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg4_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: 
vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15006,9 +15220,9 @@ define void @test_vsoxseg4_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsoxseg4_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15024,10 +15238,10 @@ define void @test_vsoxseg5_nxv1f32_nxv1i64( %val, float* %ba ; CHECK-LABEL: test_vsoxseg5_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15040,10 +15254,10 @@ define void @test_vsoxseg5_mask_nxv1f32_nxv1i64( %val, float ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15059,10 +15273,10 @@ define void @test_vsoxseg5_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg5_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15075,10 +15289,10 @@ define void @test_vsoxseg5_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15094,10 +15308,10 @@ define void @test_vsoxseg5_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg5_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15110,10 +15324,10 @@ define void 
@test_vsoxseg5_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15129,10 +15343,10 @@ define void @test_vsoxseg5_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg5_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15145,10 +15359,10 @@ define void @test_vsoxseg5_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsoxseg5_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15164,11 +15378,11 @@ define void @test_vsoxseg6_nxv1f32_nxv1i64( %val, float* %ba ; CHECK-LABEL: test_vsoxseg6_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15181,11 +15395,11 @@ define void @test_vsoxseg6_mask_nxv1f32_nxv1i64( %val, float ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15201,11 +15415,11 @@ define void @test_vsoxseg6_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg6_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15218,11 +15432,11 @@ define void 
@test_vsoxseg6_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15238,11 +15452,11 @@ define void @test_vsoxseg6_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg6_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15255,11 +15469,11 @@ define void @test_vsoxseg6_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15275,11 +15489,11 @@ define void @test_vsoxseg6_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg6_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15292,11 +15506,11 @@ define void @test_vsoxseg6_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsoxseg6_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15312,12 +15526,12 @@ define void @test_vsoxseg7_nxv1f32_nxv1i64( %val, float* %ba ; CHECK-LABEL: test_vsoxseg7_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: 
vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15330,12 +15544,12 @@ define void @test_vsoxseg7_mask_nxv1f32_nxv1i64( %val, float ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15351,12 +15565,12 @@ define void @test_vsoxseg7_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg7_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15369,12 +15583,12 @@ define void @test_vsoxseg7_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15390,12 +15604,12 @@ define void @test_vsoxseg7_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg7_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15408,12 +15622,12 @@ define void @test_vsoxseg7_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; 
CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15429,12 +15643,12 @@ define void @test_vsoxseg7_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg7_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15447,12 +15661,12 @@ define void @test_vsoxseg7_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsoxseg7_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15468,13 +15682,13 @@ define void @test_vsoxseg8_nxv1f32_nxv1i64( %val, float* %ba ; CHECK-LABEL: test_vsoxseg8_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15487,13 +15701,13 @@ define void @test_vsoxseg8_mask_nxv1f32_nxv1i64( %val, float ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15509,13 +15723,13 @@ define void @test_vsoxseg8_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsoxseg8_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; 
CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15528,13 +15742,13 @@ define void @test_vsoxseg8_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15550,13 +15764,13 @@ define void @test_vsoxseg8_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsoxseg8_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15569,13 +15783,13 @@ define void @test_vsoxseg8_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15591,13 +15805,13 @@ define void @test_vsoxseg8_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsoxseg8_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15610,13 +15824,13 @@ define void @test_vsoxseg8_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsoxseg8_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: 
vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15631,6 +15845,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f16.nxv8i16(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -15644,6 +15859,7 @@ entry: define void @test_vsoxseg2_mask_nxv8f16_nxv8i16( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -15660,6 +15876,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f16.nxv8i8(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -15673,6 +15890,7 @@ entry: define void @test_vsoxseg2_mask_nxv8f16_nxv8i8( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -15689,6 +15907,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f16.nxv8i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv8f16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v16 @@ -15701,6 +15920,7 @@ entry: define void @test_vsoxseg2_mask_nxv8f16_nxv8i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v16, v0.t @@ -15716,6 +15936,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f16.nxv8i32(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v12 @@ -15728,6 +15949,7 @@ entry: define void @test_vsoxseg2_mask_nxv8f16_nxv8i32( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v12, v0.t @@ -15744,8 +15966,8 @@ define void @test_vsoxseg3_nxv8f16_nxv8i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg3_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v 
v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -15758,8 +15980,8 @@ define void @test_vsoxseg3_mask_nxv8f16_nxv8i16( %val, half* ; CHECK-LABEL: test_vsoxseg3_mask_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -15775,8 +15997,8 @@ define void @test_vsoxseg3_nxv8f16_nxv8i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg3_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -15789,8 +16011,8 @@ define void @test_vsoxseg3_mask_nxv8f16_nxv8i8( %val, half* % ; CHECK-LABEL: test_vsoxseg3_mask_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -15805,6 +16027,7 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv8f16.nxv8i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv8f16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -15818,6 +16041,7 @@ entry: define void @test_vsoxseg3_mask_nxv8f16_nxv8i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv8f16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -15834,11 +16058,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv8f16.nxv8i32(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v16 +; CHECK-NEXT: vsoxseg3ei32.v v16, (a0), v12 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.nxv8f16.nxv8i32( %val, %val, %val, half* %base, %index, i64 %vl) @@ -15848,11 +16072,11 @@ entry: define void @test_vsoxseg3_mask_nxv8f16_nxv8i32( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v16, v0.t +; CHECK-NEXT: vsoxseg3ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.mask.nxv8f16.nxv8i32( 
%val, %val, %val, half* %base, %index, %mask, i64 %vl) @@ -15866,9 +16090,9 @@ define void @test_vsoxseg4_nxv8f16_nxv8i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg4_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -15881,9 +16105,9 @@ define void @test_vsoxseg4_mask_nxv8f16_nxv8i16( %val, half* ; CHECK-LABEL: test_vsoxseg4_mask_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -15899,9 +16123,9 @@ define void @test_vsoxseg4_nxv8f16_nxv8i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg4_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -15914,9 +16138,9 @@ define void @test_vsoxseg4_mask_nxv8f16_nxv8i8( %val, half* % ; CHECK-LABEL: test_vsoxseg4_mask_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -15931,6 +16155,7 @@ declare void @llvm.riscv.vsoxseg4.mask.nxv8f16.nxv8i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg4_nxv8f16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -15945,6 +16170,7 @@ entry: define void @test_vsoxseg4_mask_nxv8f16_nxv8i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg4_mask_nxv8f16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -15963,9 +16189,9 @@ define void @test_vsoxseg4_nxv8f16_nxv8i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg4_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 -; CHECK-NEXT: vmv2r.v v20, v8 -; CHECK-NEXT: vmv2r.v v22, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -15978,9 +16204,9 @@ define void @test_vsoxseg4_mask_nxv8f16_nxv8i32( %val, half* ; CHECK-LABEL: test_vsoxseg4_mask_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 -; CHECK-NEXT: vmv2r.v v20, v8 -; CHECK-NEXT: vmv2r.v v22, v8 +; CHECK-NEXT: vmv2r.v v18, v16 
+; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -15995,6 +16221,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f32.nxv8i16(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv8f32_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu @@ -16008,6 +16235,7 @@ entry: define void @test_vsoxseg2_mask_nxv8f32_nxv8i16( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f32_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu @@ -16024,6 +16252,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f32.nxv8i8(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv8f32_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu @@ -16037,6 +16266,7 @@ entry: define void @test_vsoxseg2_mask_nxv8f32_nxv8i8( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f32_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu @@ -16053,6 +16283,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f32.nxv8i64(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv8f32_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v16 @@ -16065,6 +16296,7 @@ entry: define void @test_vsoxseg2_mask_nxv8f32_nxv8i64( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f32_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v16, v0.t @@ -16080,6 +16312,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv8f32.nxv8i32(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv8f32_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu @@ -16093,6 +16326,7 @@ entry: define void @test_vsoxseg2_mask_nxv8f32_nxv8i32( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv8f32_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu @@ -16109,6 +16343,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f64.nxv2i32(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; 
CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16122,6 +16357,7 @@ entry: define void @test_vsoxseg2_mask_nxv2f64_nxv2i32( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16138,6 +16374,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f64.nxv2i8(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16151,6 +16388,7 @@ entry: define void @test_vsoxseg2_mask_nxv2f64_nxv2i8( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16167,6 +16405,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f64.nxv2i16(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16180,6 +16419,7 @@ entry: define void @test_vsoxseg2_mask_nxv2f64_nxv2i16( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16196,6 +16436,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f64.nxv2i64(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16209,6 +16450,7 @@ entry: define void @test_vsoxseg2_mask_nxv2f64_nxv2i64( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16226,8 +16468,8 @@ define void @test_vsoxseg3_nxv2f64_nxv2i32( %val, double* % ; CHECK-LABEL: test_vsoxseg3_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16240,8 +16482,8 @@ define void @test_vsoxseg3_mask_nxv2f64_nxv2i32( %val, doub ; CHECK-LABEL: test_vsoxseg3_mask_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; 
CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16257,8 +16499,8 @@ define void @test_vsoxseg3_nxv2f64_nxv2i8( %val, double* %b ; CHECK-LABEL: test_vsoxseg3_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16271,8 +16513,8 @@ define void @test_vsoxseg3_mask_nxv2f64_nxv2i8( %val, doubl ; CHECK-LABEL: test_vsoxseg3_mask_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16288,8 +16530,8 @@ define void @test_vsoxseg3_nxv2f64_nxv2i16( %val, double* % ; CHECK-LABEL: test_vsoxseg3_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16302,8 +16544,8 @@ define void @test_vsoxseg3_mask_nxv2f64_nxv2i16( %val, doub ; CHECK-LABEL: test_vsoxseg3_mask_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16319,8 +16561,8 @@ define void @test_vsoxseg3_nxv2f64_nxv2i64( %val, double* % ; CHECK-LABEL: test_vsoxseg3_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16333,8 +16575,8 @@ define void @test_vsoxseg3_mask_nxv2f64_nxv2i64( %val, doub ; CHECK-LABEL: test_vsoxseg3_mask_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16350,9 +16592,9 @@ define void @test_vsoxseg4_nxv2f64_nxv2i32( %val, double* % ; CHECK-LABEL: test_vsoxseg4_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16365,9 +16607,9 @@ define void @test_vsoxseg4_mask_nxv2f64_nxv2i32( %val, doub ; CHECK-LABEL: test_vsoxseg4_mask_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; 
CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16383,9 +16625,9 @@ define void @test_vsoxseg4_nxv2f64_nxv2i8( %val, double* %b ; CHECK-LABEL: test_vsoxseg4_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16397,10 +16639,10 @@ entry: define void @test_vsoxseg4_mask_nxv2f64_nxv2i8( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg4_mask_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16416,9 +16658,9 @@ define void @test_vsoxseg4_nxv2f64_nxv2i16( %val, double* % ; CHECK-LABEL: test_vsoxseg4_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16431,9 +16673,9 @@ define void @test_vsoxseg4_mask_nxv2f64_nxv2i16( %val, doub ; CHECK-LABEL: test_vsoxseg4_mask_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16449,9 +16691,9 @@ define void @test_vsoxseg4_nxv2f64_nxv2i64( %val, double* % ; CHECK-LABEL: test_vsoxseg4_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei64.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16464,9 +16706,9 @@ define void @test_vsoxseg4_mask_nxv2f64_nxv2i64( %val, doub ; CHECK-LABEL: test_vsoxseg4_mask_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsoxseg4ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16481,6 +16723,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f16.nxv4i32(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v10 @@ -16493,6 +16736,7 @@ 
entry: define void @test_vsoxseg2_mask_nxv4f16_nxv4i32( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg2ei32.v v8, (a0), v10, v0.t @@ -16508,6 +16752,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f16.nxv4i8(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -16521,6 +16766,7 @@ entry: define void @test_vsoxseg2_mask_nxv4f16_nxv4i8( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -16537,6 +16783,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f16.nxv4i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v12 @@ -16549,6 +16796,7 @@ entry: define void @test_vsoxseg2_mask_nxv4f16_nxv4i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v12, v0.t @@ -16564,6 +16812,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f16.nxv4i16(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -16577,6 +16826,7 @@ entry: define void @test_vsoxseg2_mask_nxv4f16_nxv4i16( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg2_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -16593,11 +16843,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv4f16.nxv4i32(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v12 +; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.nxv4f16.nxv4i32( %val, %val, %val, half* %base, %index, i64 %vl) @@ -16607,11 +16857,11 @@ entry: define void @test_vsoxseg3_mask_nxv4f16_nxv4i32( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v 
v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsoxseg3ei32.v v8, (a0), v12, v0.t +; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg3.mask.nxv4f16.nxv4i32( %val, %val, %val, half* %base, %index, %mask, i64 %vl) @@ -16625,8 +16875,8 @@ define void @test_vsoxseg3_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg3_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -16639,8 +16889,8 @@ define void @test_vsoxseg3_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsoxseg3_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -16655,6 +16905,7 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv4f16.nxv4i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -16668,6 +16919,7 @@ entry: define void @test_vsoxseg3_mask_nxv4f16_nxv4i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg3_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -16685,8 +16937,8 @@ define void @test_vsoxseg3_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg3_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -16699,8 +16951,8 @@ define void @test_vsoxseg3_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsoxseg3_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -16716,9 +16968,9 @@ define void @test_vsoxseg4_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg4_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16731,9 +16983,9 @@ define void @test_vsoxseg4_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: test_vsoxseg4_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: 
vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16749,9 +17001,9 @@ define void @test_vsoxseg4_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg4_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -16764,9 +17016,9 @@ define void @test_vsoxseg4_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsoxseg4_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -16781,6 +17033,7 @@ declare void @llvm.riscv.vsoxseg4.mask.nxv4f16.nxv4i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg4_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -16795,6 +17048,7 @@ entry: define void @test_vsoxseg4_mask_nxv4f16_nxv4i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg4_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -16813,9 +17067,9 @@ define void @test_vsoxseg4_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg4_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -16828,9 +17082,9 @@ define void @test_vsoxseg4_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsoxseg4_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -16846,10 +17100,10 @@ define void @test_vsoxseg5_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg5_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16862,10 +17116,10 @@ define void @test_vsoxseg5_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: 
test_vsoxseg5_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16881,10 +17135,10 @@ define void @test_vsoxseg5_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg5_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -16897,10 +17151,10 @@ define void @test_vsoxseg5_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsoxseg5_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -16915,13 +17169,13 @@ declare void @llvm.riscv.vsoxseg5.mask.nxv4f16.nxv4i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsoxseg5_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsoxseg5ei64.v v8, (a0), v16 +; CHECK-NEXT: vsoxseg5ei64.v v16, (a0), v12 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg5.nxv4f16.nxv4i64( %val, %val, %val, %val, %val, half* %base, %index, i64 %vl) @@ -16931,13 +17185,13 @@ entry: define void @test_vsoxseg5_mask_nxv4f16_nxv4i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsoxseg5_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsoxseg5ei64.v v8, (a0), v16, v0.t +; CHECK-NEXT: vsoxseg5ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsoxseg5.mask.nxv4f16.nxv4i64( %val, %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl) @@ -16951,10 +17205,10 @@ define void @test_vsoxseg5_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg5_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; 
CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -16967,10 +17221,10 @@ define void @test_vsoxseg5_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsoxseg5_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -16986,11 +17240,11 @@ define void @test_vsoxseg6_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg6_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -17003,11 +17257,11 @@ define void @test_vsoxseg6_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: test_vsoxseg6_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -17023,11 +17277,11 @@ define void @test_vsoxseg6_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsoxseg6_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17040,11 +17294,11 @@ define void @test_vsoxseg6_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsoxseg6_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17060,11 +17314,11 @@ define void @test_vsoxseg6_nxv4f16_nxv4i64( %val, half* %base ; CHECK-LABEL: test_vsoxseg6_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, 
v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -17077,11 +17331,11 @@ define void @test_vsoxseg6_mask_nxv4f16_nxv4i64( %val, half* ; CHECK-LABEL: test_vsoxseg6_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -17097,11 +17351,11 @@ define void @test_vsoxseg6_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsoxseg6_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17114,11 +17368,11 @@ define void @test_vsoxseg6_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsoxseg6_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17134,12 +17388,12 @@ define void @test_vsoxseg7_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsoxseg7_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -17152,12 +17406,12 @@ define void @test_vsoxseg7_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: test_vsoxseg7_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsoxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -17173,12 +17427,12 @@ define void 
@test_vsoxseg7_nxv4f16_nxv4i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg7_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17191,12 +17445,12 @@ define void @test_vsoxseg7_mask_nxv4f16_nxv4i8( %val, half* %
; CHECK-LABEL: test_vsoxseg7_mask_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -17212,12 +17466,12 @@ define void @test_vsoxseg7_nxv4f16_nxv4i64( %val, half* %base
; CHECK-LABEL: test_vsoxseg7_nxv4f16_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
-; CHECK-NEXT: vmv1r.v v22, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
+; CHECK-NEXT: vmv1r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei64.v v16, (a0), v12
; CHECK-NEXT: ret
@@ -17230,12 +17484,12 @@ define void @test_vsoxseg7_mask_nxv4f16_nxv4i64( %val, half*
; CHECK-LABEL: test_vsoxseg7_mask_nxv4f16_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
-; CHECK-NEXT: vmv1r.v v22, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
+; CHECK-NEXT: vmv1r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei64.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
@@ -17251,12 +17505,12 @@ define void @test_vsoxseg7_nxv4f16_nxv4i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg7_nxv4f16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17269,12 +17523,12 @@ define void @test_vsoxseg7_mask_nxv4f16_nxv4i16( %val, half*
; CHECK-LABEL: test_vsoxseg7_mask_nxv4f16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -17290,13 +17544,13 @@ define void @test_vsoxseg8_nxv4f16_nxv4i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg8_nxv4f16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -17309,13 +17563,13 @@ define void @test_vsoxseg8_mask_nxv4f16_nxv4i32( %val, half*
; CHECK-LABEL: test_vsoxseg8_mask_nxv4f16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -17331,13 +17585,13 @@ define void @test_vsoxseg8_nxv4f16_nxv4i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg8_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17350,13 +17604,13 @@ define void @test_vsoxseg8_mask_nxv4f16_nxv4i8( %val, half* %
; CHECK-LABEL: test_vsoxseg8_mask_nxv4f16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -17372,13 +17626,13 @@ define void @test_vsoxseg8_nxv4f16_nxv4i64( %val, half* %base
; CHECK-LABEL: test_vsoxseg8_nxv4f16_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
-; CHECK-NEXT: vmv1r.v v22, v8
-; CHECK-NEXT: vmv1r.v v23, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
+; CHECK-NEXT: vmv1r.v v22, v16
+; CHECK-NEXT: vmv1r.v v23, v16
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei64.v v16, (a0), v12
; CHECK-NEXT: ret
@@ -17391,13 +17645,13 @@ define void @test_vsoxseg8_mask_nxv4f16_nxv4i64( %val, half*
; CHECK-LABEL: test_vsoxseg8_mask_nxv4f16_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
-; CHECK-NEXT: vmv1r.v v22, v8
-; CHECK-NEXT: vmv1r.v v23, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
+; CHECK-NEXT: vmv1r.v v22, v16
+; CHECK-NEXT: vmv1r.v v23, v16
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei64.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
@@ -17413,13 +17667,13 @@ define void @test_vsoxseg8_nxv4f16_nxv4i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg8_nxv4f16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17432,13 +17686,13 @@ define void @test_vsoxseg8_mask_nxv4f16_nxv4i16( %val, half*
; CHECK-LABEL: test_vsoxseg8_mask_nxv4f16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -17453,6 +17707,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f16.nxv2i32(,
define void @test_vsoxseg2_nxv2f16_nxv2i32( %val, half* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -17466,6 +17721,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2f16_nxv2i32( %val, half* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -17482,6 +17738,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f16.nxv2i8(,
define void @test_vsoxseg2_nxv2f16_nxv2i8( %val, half* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -17495,6 +17752,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2f16_nxv2i8( %val, half* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -17511,6 +17769,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f16.nxv2i16(,
define void @test_vsoxseg2_nxv2f16_nxv2i16( %val, half* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -17524,6 +17783,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2f16_nxv2i16( %val, half* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -17540,6 +17800,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv2f16.nxv2i64(,
define void @test_vsoxseg2_nxv2f16_nxv2i64( %val, half* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v10
@@ -17552,6 +17813,7 @@ entry:
define void @test_vsoxseg2_mask_nxv2f16_nxv2i64( %val, half* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v10, v0.t
@@ -17568,8 +17830,8 @@ define void @test_vsoxseg3_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg3_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17582,8 +17844,8 @@ define void @test_vsoxseg3_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsoxseg3_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -17599,8 +17861,8 @@ define void @test_vsoxseg3_nxv2f16_nxv2i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg3_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17613,8 +17875,8 @@ define void @test_vsoxseg3_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsoxseg3_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -17630,8 +17892,8 @@ define void @test_vsoxseg3_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg3_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17644,8 +17906,8 @@ define void @test_vsoxseg3_mask_nxv2f16_nxv2i16( %val, half*
; CHECK-LABEL: test_vsoxseg3_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -17660,11 +17922,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv2f16.nxv2i64(,
define void @test_vsoxseg3_nxv2f16_nxv2i64( %val, half* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg3_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
-; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v12
+; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsoxseg3.nxv2f16.nxv2i64( %val, %val, %val, half* %base, %index, i64 %vl)
@@ -17674,11 +17936,11 @@ entry:
define void @test_vsoxseg3_mask_nxv2f16_nxv2i64( %val, half* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg3_mask_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
-; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsoxseg3ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsoxseg3.mask.nxv2f16.nxv2i64( %val, %val, %val, half* %base, %index, %mask, i64 %vl)
@@ -17692,9 +17954,9 @@ define void @test_vsoxseg4_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg4_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17707,9 +17969,9 @@ define void @test_vsoxseg4_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsoxseg4_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -17724,10 +17986,10 @@ declare void @llvm.riscv.vsoxseg4.mask.nxv2f16.nxv2i8(,
define void @test_vsoxseg4_nxv2f16_nxv2i8( %val, half* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg4_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17740,9 +18002,9 @@ define void @test_vsoxseg4_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsoxseg4_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -17758,9 +18020,9 @@ define void @test_vsoxseg4_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg4_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17773,9 +18035,9 @@ define void @test_vsoxseg4_mask_nxv2f16_nxv2i16( %val, half*
; CHECK-LABEL: test_vsoxseg4_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -17791,9 +18053,9 @@ define void @test_vsoxseg4_nxv2f16_nxv2i64( %val, half* %base
; CHECK-LABEL: test_vsoxseg4_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -17806,9 +18068,9 @@ define void @test_vsoxseg4_mask_nxv2f16_nxv2i64( %val, half*
; CHECK-LABEL: test_vsoxseg4_mask_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -17824,10 +18086,10 @@ define void @test_vsoxseg5_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg5_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17840,10 +18102,10 @@ define void @test_vsoxseg5_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsoxseg5_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -17859,10 +18121,10 @@ define void @test_vsoxseg5_nxv2f16_nxv2i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg5_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17875,10 +18137,10 @@ define void @test_vsoxseg5_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsoxseg5_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -17894,10 +18156,10 @@ define void @test_vsoxseg5_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg5_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17910,10 +18172,10 @@ define void @test_vsoxseg5_mask_nxv2f16_nxv2i16( %val, half*
; CHECK-LABEL: test_vsoxseg5_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -17929,10 +18191,10 @@ define void @test_vsoxseg5_nxv2f16_nxv2i64( %val, half* %base
; CHECK-LABEL: test_vsoxseg5_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -17945,10 +18207,10 @@ define void @test_vsoxseg5_mask_nxv2f16_nxv2i64( %val, half*
; CHECK-LABEL: test_vsoxseg5_mask_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg5ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -17964,11 +18226,11 @@ define void @test_vsoxseg6_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg6_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -17981,11 +18243,11 @@ define void @test_vsoxseg6_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsoxseg6_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -18001,11 +18263,11 @@ define void @test_vsoxseg6_nxv2f16_nxv2i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg6_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -18018,11 +18280,11 @@ define void @test_vsoxseg6_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsoxseg6_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -18038,11 +18300,11 @@ define void @test_vsoxseg6_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg6_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -18055,11 +18317,11 @@ define void @test_vsoxseg6_mask_nxv2f16_nxv2i16( %val, half*
; CHECK-LABEL: test_vsoxseg6_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -18075,11 +18337,11 @@ define void @test_vsoxseg6_nxv2f16_nxv2i64( %val, half* %base
; CHECK-LABEL: test_vsoxseg6_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -18092,11 +18354,11 @@ define void @test_vsoxseg6_mask_nxv2f16_nxv2i64( %val, half*
; CHECK-LABEL: test_vsoxseg6_mask_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg6ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -18112,12 +18374,12 @@ define void @test_vsoxseg7_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg7_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -18130,12 +18392,12 @@ define void @test_vsoxseg7_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsoxseg7_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -18151,12 +18413,12 @@ define void @test_vsoxseg7_nxv2f16_nxv2i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg7_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -18169,12 +18431,12 @@ define void @test_vsoxseg7_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsoxseg7_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -18190,12 +18452,12 @@ define void @test_vsoxseg7_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg7_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -18208,12 +18470,12 @@ define void @test_vsoxseg7_mask_nxv2f16_nxv2i16( %val, half*
; CHECK-LABEL: test_vsoxseg7_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -18229,12 +18491,12 @@ define void @test_vsoxseg7_nxv2f16_nxv2i64( %val, half* %base
; CHECK-LABEL: test_vsoxseg7_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -18247,12 +18509,12 @@ define void @test_vsoxseg7_mask_nxv2f16_nxv2i64( %val, half*
; CHECK-LABEL: test_vsoxseg7_mask_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg7ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -18268,13 +18530,13 @@ define void @test_vsoxseg8_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsoxseg8_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -18287,13 +18549,13 @@ define void @test_vsoxseg8_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsoxseg8_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -18309,13 +18571,13 @@ define void @test_vsoxseg8_nxv2f16_nxv2i8( %val, half* %base,
; CHECK-LABEL: test_vsoxseg8_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -18328,13 +18590,13 @@ define void @test_vsoxseg8_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsoxseg8_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -18350,13 +18612,13 @@ define void @test_vsoxseg8_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsoxseg8_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -18369,13 +18631,13 @@ define void @test_vsoxseg8_mask_nxv2f16_nxv2i16( %val, half*
; CHECK-LABEL: test_vsoxseg8_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -18391,13 +18653,13 @@ define void @test_vsoxseg8_nxv2f16_nxv2i64( %val, half* %base
; CHECK-LABEL: test_vsoxseg8_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -18410,13 +18672,13 @@ define void @test_vsoxseg8_mask_nxv2f16_nxv2i64( %val, half*
; CHECK-LABEL: test_vsoxseg8_mask_nxv2f16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsoxseg8ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -18431,6 +18693,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f32.nxv4i32(,
define void @test_vsoxseg2_nxv4f32_nxv4i32( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4f32_nxv4i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -18444,6 +18707,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4f32_nxv4i32( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4f32_nxv4i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -18460,6 +18724,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f32.nxv4i8(,
define void @test_vsoxseg2_nxv4f32_nxv4i8( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4f32_nxv4i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -18473,6 +18738,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4f32_nxv4i8( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4f32_nxv4i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -18489,6 +18755,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f32.nxv4i64(,
define void @test_vsoxseg2_nxv4f32_nxv4i64( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4f32_nxv4i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v12
@@ -18501,6 +18768,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4f32_nxv4i64( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4f32_nxv4i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg2ei64.v v8, (a0), v12, v0.t
@@ -18516,6 +18784,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv4f32.nxv4i16(,
define void @test_vsoxseg2_nxv4f32_nxv4i16( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_nxv4f32_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -18529,6 +18798,7 @@ entry:
define void @test_vsoxseg2_mask_nxv4f32_nxv4i16( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv4f32_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -18546,8 +18816,8 @@ define void @test_vsoxseg3_nxv4f32_nxv4i32( %val, float* %ba
; CHECK-LABEL: test_vsoxseg3_nxv4f32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -18560,8 +18830,8 @@ define void @test_vsoxseg3_mask_nxv4f32_nxv4i32( %val, float
; CHECK-LABEL: test_vsoxseg3_mask_nxv4f32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -18577,8 +18847,8 @@ define void @test_vsoxseg3_nxv4f32_nxv4i8( %val, float* %bas
; CHECK-LABEL: test_vsoxseg3_nxv4f32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -18591,8 +18861,8 @@ define void @test_vsoxseg3_mask_nxv4f32_nxv4i8( %val, float*
; CHECK-LABEL: test_vsoxseg3_mask_nxv4f32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -18607,11 +18877,11 @@ declare void @llvm.riscv.vsoxseg3.mask.nxv4f32.nxv4i64(,
define void @test_vsoxseg3_nxv4f32_nxv4i64( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsoxseg3_nxv4f32_nxv4i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v16
+; CHECK-NEXT: vsoxseg3ei64.v v16, (a0), v12
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsoxseg3.nxv4f32.nxv4i64( %val, %val, %val, float* %base, %index, i64 %vl)
@@ -18621,11 +18891,11 @@ entry:
define void @test_vsoxseg3_mask_nxv4f32_nxv4i64( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsoxseg3_mask_nxv4f32_nxv4i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT: vsoxseg3ei64.v v8, (a0), v16, v0.t
+; CHECK-NEXT: vsoxseg3ei64.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsoxseg3.mask.nxv4f32.nxv4i64( %val, %val, %val, float* %base, %index, %mask, i64 %vl)
@@ -18639,8 +18909,8 @@ define void @test_vsoxseg3_nxv4f32_nxv4i16( %val, float* %ba
; CHECK-LABEL: test_vsoxseg3_nxv4f32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -18653,8 +18923,8 @@ define void @test_vsoxseg3_mask_nxv4f32_nxv4i16( %val, float
; CHECK-LABEL: test_vsoxseg3_mask_nxv4f32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg3ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -18670,9 +18940,9 @@ define void @test_vsoxseg4_nxv4f32_nxv4i32( %val, float* %ba
; CHECK-LABEL: test_vsoxseg4_nxv4f32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -18685,9 +18955,9 @@ define void @test_vsoxseg4_mask_nxv4f32_nxv4i32( %val, float
; CHECK-LABEL: test_vsoxseg4_mask_nxv4f32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -18703,9 +18973,9 @@ define void @test_vsoxseg4_nxv4f32_nxv4i8( %val, float* %bas
; CHECK-LABEL: test_vsoxseg4_nxv4f32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -18718,9 +18988,9 @@ define void @test_vsoxseg4_mask_nxv4f32_nxv4i8( %val, float*
; CHECK-LABEL: test_vsoxseg4_mask_nxv4f32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -18736,9 +19006,9 @@ define void @test_vsoxseg4_nxv4f32_nxv4i64( %val, float* %ba
; CHECK-LABEL: test_vsoxseg4_nxv4f32_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v16, (a0), v12
; CHECK-NEXT: ret
@@ -18751,9 +19021,9 @@ define void @test_vsoxseg4_mask_nxv4f32_nxv4i64( %val, float
; CHECK-LABEL: test_vsoxseg4_mask_nxv4f32_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei64.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
@@ -18769,9 +19039,9 @@ define void @test_vsoxseg4_nxv4f32_nxv4i16( %val, float* %ba
; CHECK-LABEL: test_vsoxseg4_nxv4f32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -18784,9 +19054,9 @@ define void @test_vsoxseg4_mask_nxv4f32_nxv4i16( %val, float
; CHECK-LABEL: test_vsoxseg4_mask_nxv4f32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT: vsoxseg4ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
diff --git a/llvm/test/CodeGen/RISCV/rvv/vsseg-rv32.ll b/llvm/test/CodeGen/RISCV/rvv/vsseg-rv32.ll
index f0e058fd6bfa3..03323f85fd962 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vsseg-rv32.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vsseg-rv32.ll
@@ -8,6 +8,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv16i16(,
define void @test_vsseg2_nxv16i16( %val, i16* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg2_nxv16i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
; CHECK-NEXT: vsseg2e16.v v8, (a0)
@@ -20,6 +21,7 @@ entry:
define void @test_vsseg2_mask_nxv16i16( %val, i16* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg2_mask_nxv16i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t
@@ -35,6 +37,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv1i8(,,
define void @test_vsseg2_nxv1i8( %val, i8* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg2_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsseg2e8.v v8, (a0)
@@ -47,6 +50,7 @@ entry:
define void @test_vsseg2_mask_nxv1i8( %val, i8* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg2_mask_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsseg2e8.v v8, (a0), v0.t
@@ -62,6 +66,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv1i8(,,
define void @test_vsseg3_nxv1i8( %val, i8* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg3_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
@@ -75,6 +80,7 @@ entry:
define void @test_vsseg3_mask_nxv1i8( %val, i8* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg3_mask_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
@@ -91,6 +97,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv1i8(,,
define void @test_vsseg4_nxv1i8( %val, i8* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg4_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -105,6 +112,7 @@ entry:
define void @test_vsseg4_mask_nxv1i8( %val, i8* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg4_mask_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -122,6 +130,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv1i8(,,
define void @test_vsseg5_nxv1i8( %val, i8* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg5_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -137,6 +146,7 @@ entry:
define void @test_vsseg5_mask_nxv1i8( %val, i8* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg5_mask_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -155,6 +165,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv1i8(,,
define void @test_vsseg6_nxv1i8( %val, i8* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg6_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -171,6 +182,7 @@ entry:
define void @test_vsseg6_mask_nxv1i8( %val, i8* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg6_mask_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -190,6 +202,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv1i8(,,
define void @test_vsseg7_nxv1i8( %val, i8* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg7_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -207,6 +220,7 @@ entry:
define void @test_vsseg7_mask_nxv1i8( %val, i8* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg7_mask_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -227,6 +241,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv1i8(,,
define void @test_vsseg8_nxv1i8( %val, i8* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg8_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -245,6 +260,7 @@ entry:
define void @test_vsseg8_mask_nxv1i8( %val, i8* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg8_mask_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -266,6 +282,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv16i8(,
define void @test_vsseg2_nxv16i8( %val, i8* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg2_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsseg2e8.v v8, (a0)
@@ -278,6 +295,7 @@ entry:
define void @test_vsseg2_mask_nxv16i8( %val, i8* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg2_mask_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsseg2e8.v v8, (a0), v0.t
@@ -293,6 +311,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv16i8(,
define void @test_vsseg3_nxv16i8( %val, i8* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg3_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vmv2r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
@@ -306,6 +325,7 @@ entry:
define void @test_vsseg3_mask_nxv16i8( %val, i8* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg3_mask_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vmv2r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
@@ -322,6 +342,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv16i8(,
define void @test_vsseg4_nxv16i8( %val, i8* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg4_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vmv2r.v v12, v8
; CHECK-NEXT: vmv2r.v v14, v8
@@ -336,6 +357,7 @@ entry:
define void @test_vsseg4_mask_nxv16i8( %val, i8* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg4_mask_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vmv2r.v v12, v8
; CHECK-NEXT: vmv2r.v v14, v8
@@ -353,6 +375,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv2i32(,
define void @test_vsseg2_nxv2i32( %val, i32* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg2_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsseg2e32.v v8, (a0)
@@ -365,6 +388,7 @@ entry:
define void @test_vsseg2_mask_nxv2i32( %val, i32* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg2_mask_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t
@@ -380,6 +404,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv2i32(,
define void @test_vsseg3_nxv2i32( %val, i32* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg3_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -393,6 +418,7 @@ entry:
define void @test_vsseg3_mask_nxv2i32( %val, i32* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg3_mask_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -409,6 +435,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv2i32(,
define void @test_vsseg4_nxv2i32( %val, i32* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg4_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -423,6 +450,7 @@ entry:
define void @test_vsseg4_mask_nxv2i32( %val, i32* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg4_mask_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -440,6 +468,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv2i32(,
define void @test_vsseg5_nxv2i32( %val, i32* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg5_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -455,6 +484,7 @@ entry:
define void @test_vsseg5_mask_nxv2i32( %val, i32* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg5_mask_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -473,6 +503,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv2i32(,
define void @test_vsseg6_nxv2i32( %val, i32* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg6_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -489,6 +520,7 @@ entry:
define void @test_vsseg6_mask_nxv2i32( %val, i32* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg6_mask_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -508,6 +540,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv2i32(,
define void @test_vsseg7_nxv2i32( %val, i32* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg7_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -525,6 +558,7 @@ entry:
define void @test_vsseg7_mask_nxv2i32( %val, i32* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg7_mask_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -545,6 +579,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv2i32(,
define void @test_vsseg8_nxv2i32( %val, i32* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg8_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -563,6 +598,7 @@ entry:
define void @test_vsseg8_mask_nxv2i32( %val, i32* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg8_mask_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -584,6 +620,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv4i16(,
define void @test_vsseg2_nxv4i16( %val, i16* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg2_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsseg2e16.v v8, (a0)
@@ -596,6 +633,7 @@ entry:
define void @test_vsseg2_mask_nxv4i16( %val, i16* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg2_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t
@@ -611,6 +649,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv4i16(,
define void @test_vsseg3_nxv4i16( %val, i16* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg3_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -624,6 +663,7 @@ entry:
define void @test_vsseg3_mask_nxv4i16( %val, i16* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg3_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -640,6 +680,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv4i16(,
define void @test_vsseg4_nxv4i16( %val, i16* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg4_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -654,6 +695,7 @@ entry:
define void @test_vsseg4_mask_nxv4i16( %val, i16* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg4_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -671,6 +713,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv4i16(,
define void @test_vsseg5_nxv4i16( %val, i16* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg5_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -686,6 +729,7 @@ entry:
define void @test_vsseg5_mask_nxv4i16( %val, i16* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg5_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -704,6 +748,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv4i16(,
define void @test_vsseg6_nxv4i16( %val, i16* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg6_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -720,6 +765,7 @@ entry:
define void @test_vsseg6_mask_nxv4i16( %val, i16* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg6_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -739,6 +785,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv4i16(,
define void @test_vsseg7_nxv4i16( %val, i16* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg7_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -756,6 +803,7 @@ entry:
define void @test_vsseg7_mask_nxv4i16( %val, i16* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg7_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -776,6 +824,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv4i16(,
define void @test_vsseg8_nxv4i16( %val, i16* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg8_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -794,6 +843,7 @@ entry:
define void @test_vsseg8_mask_nxv4i16( %val, i16* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg8_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -815,6 +865,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv1i32(,
define void @test_vsseg2_nxv1i32( %val, i32* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg2_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsseg2e32.v v8, (a0)
@@ -827,6 +878,7 @@ entry:
define void @test_vsseg2_mask_nxv1i32( %val, i32* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg2_mask_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t
@@ -842,6 +894,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv1i32(,
define void @test_vsseg3_nxv1i32( %val, i32* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg3_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -855,6 +908,7 @@ entry:
define void @test_vsseg3_mask_nxv1i32( %val, i32* %base, %mask, i32 %vl) {
; CHECK-LABEL: test_vsseg3_mask_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -871,6 +925,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv1i32(,
define void @test_vsseg4_nxv1i32( %val, i32* %base, i32 %vl) {
; CHECK-LABEL: test_vsseg4_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill:
def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -885,6 +940,7 @@ entry: define void @test_vsseg4_mask_nxv1i32( %val, i32* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -902,6 +958,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv1i32(, %val, i32* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg5_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -917,6 +974,7 @@ entry: define void @test_vsseg5_mask_nxv1i32( %val, i32* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -935,6 +993,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv1i32(, %val, i32* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg6_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -951,6 +1010,7 @@ entry: define void @test_vsseg6_mask_nxv1i32( %val, i32* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -970,6 +1030,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv1i32(, %val, i32* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg7_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -987,6 +1048,7 @@ entry: define void @test_vsseg7_mask_nxv1i32( %val, i32* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1007,6 +1069,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv1i32(, %val, i32* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg8_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1025,6 +1088,7 @@ entry: define void @test_vsseg8_mask_nxv1i32( %val, i32* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1046,6 +1110,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv8i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0) @@ -1058,6 +1123,7 @@ entry: define void @test_vsseg2_mask_nxv8i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv8i16: ; CHECK: # %bb.0: # 
%entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t @@ -1073,6 +1139,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv8i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -1086,6 +1153,7 @@ entry: define void @test_vsseg3_mask_nxv8i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -1102,6 +1170,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv8i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -1116,6 +1185,7 @@ entry: define void @test_vsseg4_mask_nxv8i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -1133,6 +1203,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv8i8(,, define void @test_vsseg2_nxv8i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0) @@ -1145,6 +1216,7 @@ entry: define void @test_vsseg2_mask_nxv8i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0), v0.t @@ -1160,6 +1232,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv8i8(,, define void @test_vsseg3_nxv8i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu @@ -1173,6 +1246,7 @@ entry: define void @test_vsseg3_mask_nxv8i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu @@ -1189,6 +1263,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv8i8(,, define void @test_vsseg4_nxv8i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1203,6 +1278,7 @@ entry: define void @test_vsseg4_mask_nxv8i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; 
CHECK-NEXT: vmv1r.v v11, v8 @@ -1220,6 +1296,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv8i8(,, define void @test_vsseg5_nxv8i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg5_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1235,6 +1312,7 @@ entry: define void @test_vsseg5_mask_nxv8i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1253,6 +1331,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv8i8(,, define void @test_vsseg6_nxv8i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg6_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1269,6 +1348,7 @@ entry: define void @test_vsseg6_mask_nxv8i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1288,6 +1368,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv8i8(,, define void @test_vsseg7_nxv8i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg7_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1305,6 +1386,7 @@ entry: define void @test_vsseg7_mask_nxv8i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1325,6 +1407,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv8i8(,, define void @test_vsseg8_nxv8i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg8_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1343,6 +1426,7 @@ entry: define void @test_vsseg8_mask_nxv8i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1364,6 +1448,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv8i32(, %val, i32* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0) @@ -1376,6 +1461,7 @@ entry: define void @test_vsseg2_mask_nxv8i32( %val, i32* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t @@ -1391,6 +1477,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv4i8(,, define void @test_vsseg2_nxv4i8( %val, i8* 
%base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0) @@ -1403,6 +1490,7 @@ entry: define void @test_vsseg2_mask_nxv4i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0), v0.t @@ -1418,6 +1506,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv4i8(,, define void @test_vsseg3_nxv4i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -1431,6 +1520,7 @@ entry: define void @test_vsseg3_mask_nxv4i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -1447,6 +1537,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv4i8(,, define void @test_vsseg4_nxv4i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1461,6 +1552,7 @@ entry: define void @test_vsseg4_mask_nxv4i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1478,6 +1570,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv4i8(,, define void @test_vsseg5_nxv4i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg5_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1493,6 +1586,7 @@ entry: define void @test_vsseg5_mask_nxv4i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1511,6 +1605,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv4i8(,, define void @test_vsseg6_nxv4i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg6_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1527,6 +1622,7 @@ entry: define void @test_vsseg6_mask_nxv4i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1546,6 +1642,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv4i8(,, define void @test_vsseg7_nxv4i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg7_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; 
CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1563,6 +1660,7 @@ entry: define void @test_vsseg7_mask_nxv4i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1583,6 +1681,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv4i8(,, define void @test_vsseg8_nxv4i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg8_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1601,6 +1700,7 @@ entry: define void @test_vsseg8_mask_nxv4i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1622,6 +1722,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv1i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0) @@ -1634,6 +1735,7 @@ entry: define void @test_vsseg2_mask_nxv1i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t @@ -1649,6 +1751,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv1i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -1662,6 +1765,7 @@ entry: define void @test_vsseg3_mask_nxv1i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -1678,6 +1782,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv1i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1692,6 +1797,7 @@ entry: define void @test_vsseg4_mask_nxv1i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1709,6 +1815,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv1i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg5_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1724,6 +1831,7 @@ entry: define void @test_vsseg5_mask_nxv1i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv1i16: ; CHECK: # 
%bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1742,6 +1850,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv1i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg6_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1758,6 +1867,7 @@ entry: define void @test_vsseg6_mask_nxv1i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1777,6 +1887,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv1i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg7_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1794,6 +1905,7 @@ entry: define void @test_vsseg7_mask_nxv1i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1814,6 +1926,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv1i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg8_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1832,6 +1945,7 @@ entry: define void @test_vsseg8_mask_nxv1i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1853,6 +1967,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv32i8(, %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv32i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0) @@ -1865,6 +1980,7 @@ entry: define void @test_vsseg2_mask_nxv32i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv32i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0), v0.t @@ -1880,6 +1996,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv2i8(,, define void @test_vsseg2_nxv2i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0) @@ -1892,6 +2009,7 @@ entry: define void @test_vsseg2_mask_nxv2i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0), v0.t @@ -1907,6 +2025,7 @@ declare void 
@llvm.riscv.vsseg3.mask.nxv2i8(,, define void @test_vsseg3_nxv2i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -1920,6 +2039,7 @@ entry: define void @test_vsseg3_mask_nxv2i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -1936,6 +2056,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv2i8(,, define void @test_vsseg4_nxv2i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1950,6 +2071,7 @@ entry: define void @test_vsseg4_mask_nxv2i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1967,6 +2089,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv2i8(,, define void @test_vsseg5_nxv2i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg5_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1982,6 +2105,7 @@ entry: define void @test_vsseg5_mask_nxv2i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2000,6 +2124,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv2i8(,, define void @test_vsseg6_nxv2i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg6_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2016,6 +2141,7 @@ entry: define void @test_vsseg6_mask_nxv2i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2035,6 +2161,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv2i8(,, define void @test_vsseg7_nxv2i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg7_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2052,6 +2179,7 @@ entry: define void @test_vsseg7_mask_nxv2i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2072,6 +2200,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv2i8(,, define void @test_vsseg8_nxv2i8( %val, i8* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg8_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # 
kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2090,6 +2219,7 @@ entry: define void @test_vsseg8_mask_nxv2i8( %val, i8* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2111,6 +2241,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv2i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0) @@ -2123,6 +2254,7 @@ entry: define void @test_vsseg2_mask_nxv2i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t @@ -2138,6 +2270,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv2i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -2151,6 +2284,7 @@ entry: define void @test_vsseg3_mask_nxv2i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -2167,6 +2301,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv2i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2181,6 +2316,7 @@ entry: define void @test_vsseg4_mask_nxv2i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2198,6 +2334,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv2i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg5_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2213,6 +2350,7 @@ entry: define void @test_vsseg5_mask_nxv2i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2231,6 +2369,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv2i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg6_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2247,6 +2386,7 @@ entry: define void @test_vsseg6_mask_nxv2i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: 
test_vsseg6_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2266,6 +2406,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv2i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg7_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2283,6 +2424,7 @@ entry: define void @test_vsseg7_mask_nxv2i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2303,6 +2445,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv2i16(, %val, i16* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg8_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2321,6 +2464,7 @@ entry: define void @test_vsseg8_mask_nxv2i16( %val, i16* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2342,6 +2486,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv4i32(, %val, i32* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0) @@ -2354,6 +2499,7 @@ entry: define void @test_vsseg2_mask_nxv4i32( %val, i32* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t @@ -2369,6 +2515,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv4i32(, %val, i32* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -2382,6 +2529,7 @@ entry: define void @test_vsseg3_mask_nxv4i32( %val, i32* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -2398,6 +2546,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv4i32(, %val, i32* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -2412,6 +2561,7 @@ entry: define void @test_vsseg4_mask_nxv4i32( %val, i32* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: 
vmv2r.v v14, v8 @@ -2429,6 +2579,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv16f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv16f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0) @@ -2441,6 +2592,7 @@ entry: define void @test_vsseg2_mask_nxv16f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv16f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t @@ -2456,6 +2608,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv4f64(, %val, double* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv4f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vsseg2e64.v v8, (a0) @@ -2468,6 +2621,7 @@ entry: define void @test_vsseg2_mask_nxv4f64( %val, double* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv4f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vsseg2e64.v v8, (a0), v0.t @@ -2483,6 +2637,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv1f64(, %val, double* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsseg2e64.v v8, (a0) @@ -2495,6 +2650,7 @@ entry: define void @test_vsseg2_mask_nxv1f64( %val, double* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsseg2e64.v v8, (a0), v0.t @@ -2510,6 +2666,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv1f64(, %val, double* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -2523,6 +2680,7 @@ entry: define void @test_vsseg3_mask_nxv1f64( %val, double* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -2539,6 +2697,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv1f64(, %val, double* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2553,6 +2712,7 @@ entry: define void @test_vsseg4_mask_nxv1f64( %val, double* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2570,6 +2730,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv1f64(, %val, double* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg5_nxv1f64: ; CHECK: # %bb.0: # %entry +; 
CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2585,6 +2746,7 @@ entry: define void @test_vsseg5_mask_nxv1f64( %val, double* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2603,6 +2765,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv1f64(, %val, double* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg6_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2619,6 +2782,7 @@ entry: define void @test_vsseg6_mask_nxv1f64( %val, double* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2638,6 +2802,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv1f64(, %val, double* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg7_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2655,6 +2820,7 @@ entry: define void @test_vsseg7_mask_nxv1f64( %val, double* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2675,6 +2841,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv1f64(, %val, double* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg8_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2693,6 +2860,7 @@ entry: define void @test_vsseg8_mask_nxv1f64( %val, double* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2714,6 +2882,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv2f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0) @@ -2726,6 +2895,7 @@ entry: define void @test_vsseg2_mask_nxv2f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t @@ -2741,6 +2911,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv2f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -2754,6 +2925,7 @@ entry: define void @test_vsseg3_mask_nxv2f32( %val, float* 
%base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -2770,6 +2942,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv2f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2784,6 +2957,7 @@ entry: define void @test_vsseg4_mask_nxv2f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2801,6 +2975,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv2f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg5_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2816,6 +2991,7 @@ entry: define void @test_vsseg5_mask_nxv2f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2834,6 +3010,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv2f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg6_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2850,6 +3027,7 @@ entry: define void @test_vsseg6_mask_nxv2f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2869,6 +3047,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv2f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg7_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2886,6 +3065,7 @@ entry: define void @test_vsseg7_mask_nxv2f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2906,6 +3086,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv2f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg8_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2924,6 +3105,7 @@ entry: define void @test_vsseg8_mask_nxv2f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2945,6 +3127,7 @@ declare 
void @llvm.riscv.vsseg2.mask.nxv1f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0) @@ -2957,6 +3140,7 @@ entry: define void @test_vsseg2_mask_nxv1f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t @@ -2972,6 +3156,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv1f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -2985,6 +3170,7 @@ entry: define void @test_vsseg3_mask_nxv1f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -3001,6 +3187,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv1f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3015,6 +3202,7 @@ entry: define void @test_vsseg4_mask_nxv1f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3032,6 +3220,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv1f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg5_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3047,6 +3236,7 @@ entry: define void @test_vsseg5_mask_nxv1f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3065,6 +3255,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv1f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg6_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3081,6 +3272,7 @@ entry: define void @test_vsseg6_mask_nxv1f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3100,6 +3292,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv1f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg7_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v 
v11, v8 @@ -3117,6 +3310,7 @@ entry: define void @test_vsseg7_mask_nxv1f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3137,6 +3331,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv1f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg8_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3155,6 +3350,7 @@ entry: define void @test_vsseg8_mask_nxv1f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3176,6 +3372,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv1f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0) @@ -3188,6 +3385,7 @@ entry: define void @test_vsseg2_mask_nxv1f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t @@ -3203,6 +3401,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv1f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -3216,6 +3415,7 @@ entry: define void @test_vsseg3_mask_nxv1f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -3232,6 +3432,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv1f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3246,6 +3447,7 @@ entry: define void @test_vsseg4_mask_nxv1f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3263,6 +3465,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv1f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg5_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3278,6 +3481,7 @@ entry: define void @test_vsseg5_mask_nxv1f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; 
CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3296,6 +3500,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv1f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg6_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3312,6 +3517,7 @@ entry: define void @test_vsseg6_mask_nxv1f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3331,6 +3537,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv1f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg7_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3348,6 +3555,7 @@ entry: define void @test_vsseg7_mask_nxv1f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3368,6 +3576,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv1f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg8_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3386,6 +3595,7 @@ entry: define void @test_vsseg8_mask_nxv1f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3407,6 +3617,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv8f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0) @@ -3419,6 +3630,7 @@ entry: define void @test_vsseg2_mask_nxv8f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t @@ -3434,6 +3646,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv8f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -3447,6 +3660,7 @@ entry: define void @test_vsseg3_mask_nxv8f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -3463,6 +3677,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv8f16(, %val, half* %base, i32 %vl) { ; 
CHECK-LABEL: test_vsseg4_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3477,6 +3692,7 @@ entry: define void @test_vsseg4_mask_nxv8f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3494,6 +3710,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv8f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv8f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0) @@ -3506,6 +3723,7 @@ entry: define void @test_vsseg2_mask_nxv8f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv8f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t @@ -3521,6 +3739,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv2f64(, %val, double* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsseg2e64.v v8, (a0) @@ -3533,6 +3752,7 @@ entry: define void @test_vsseg2_mask_nxv2f64( %val, double* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsseg2e64.v v8, (a0), v0.t @@ -3548,6 +3768,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv2f64(, %val, double* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -3561,6 +3782,7 @@ entry: define void @test_vsseg3_mask_nxv2f64( %val, double* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -3577,6 +3799,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv2f64(, %val, double* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3591,6 +3814,7 @@ entry: define void @test_vsseg4_mask_nxv2f64( %val, double* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3608,6 +3832,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv4f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; 
CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0) @@ -3620,6 +3845,7 @@ entry: define void @test_vsseg2_mask_nxv4f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t @@ -3635,6 +3861,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv4f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -3648,6 +3875,7 @@ entry: define void @test_vsseg3_mask_nxv4f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -3664,6 +3892,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv4f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3678,6 +3907,7 @@ entry: define void @test_vsseg4_mask_nxv4f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3695,6 +3925,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv4f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg5_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3710,6 +3941,7 @@ entry: define void @test_vsseg5_mask_nxv4f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3728,6 +3960,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv4f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg6_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3744,6 +3977,7 @@ entry: define void @test_vsseg6_mask_nxv4f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3763,6 +3997,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv4f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg7_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3780,6 +4015,7 @@ entry: define void @test_vsseg7_mask_nxv4f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed 
$v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3800,6 +4036,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv4f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg8_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3818,6 +4055,7 @@ entry: define void @test_vsseg8_mask_nxv4f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3839,6 +4077,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv2f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0) @@ -3851,6 +4090,7 @@ entry: define void @test_vsseg2_mask_nxv2f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t @@ -3866,6 +4106,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv2f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -3879,6 +4120,7 @@ entry: define void @test_vsseg3_mask_nxv2f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -3895,6 +4137,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv2f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3909,6 +4152,7 @@ entry: define void @test_vsseg4_mask_nxv2f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3926,6 +4170,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv2f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg5_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3941,6 +4186,7 @@ entry: define void @test_vsseg5_mask_nxv2f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3959,6 +4205,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv2f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg6_nxv2f16: ; CHECK: # 
%bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3975,6 +4222,7 @@ entry: define void @test_vsseg6_mask_nxv2f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3994,6 +4242,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv2f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg7_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4011,6 +4260,7 @@ entry: define void @test_vsseg7_mask_nxv2f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4031,6 +4281,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv2f16(, %val, half* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg8_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4049,6 +4300,7 @@ entry: define void @test_vsseg8_mask_nxv2f16( %val, half* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4070,6 +4322,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv4f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg2_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0) @@ -4082,6 +4335,7 @@ entry: define void @test_vsseg2_mask_nxv4f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t @@ -4097,6 +4351,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv4f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg3_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -4110,6 +4365,7 @@ entry: define void @test_vsseg3_mask_nxv4f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -4126,6 +4382,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv4f32(, %val, float* %base, i32 %vl) { ; CHECK-LABEL: test_vsseg4_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -4140,6 
+4397,7 @@ entry: define void @test_vsseg4_mask_nxv4f32( %val, float* %base, %mask, i32 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 diff --git a/llvm/test/CodeGen/RISCV/rvv/vsseg-rv64.ll b/llvm/test/CodeGen/RISCV/rvv/vsseg-rv64.ll index 66070a68f38a8..827032644a64e 100644 --- a/llvm/test/CodeGen/RISCV/rvv/vsseg-rv64.ll +++ b/llvm/test/CodeGen/RISCV/rvv/vsseg-rv64.ll @@ -8,6 +8,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv16i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg2_nxv16i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0) @@ -20,6 +21,7 @@ entry: define void @test_vsseg2_mask_nxv16i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv16i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t @@ -35,6 +37,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv4i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg2_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0) @@ -47,6 +50,7 @@ entry: define void @test_vsseg2_mask_nxv4i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t @@ -62,6 +66,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv4i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg3_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -75,6 +80,7 @@ entry: define void @test_vsseg3_mask_nxv4i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -91,6 +97,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv4i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg4_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -105,6 +112,7 @@ entry: define void @test_vsseg4_mask_nxv4i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -122,6 +130,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv16i8(, %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg2_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, 
mu ; CHECK-NEXT: vsseg2e8.v v8, (a0) @@ -134,6 +143,7 @@ entry: define void @test_vsseg2_mask_nxv16i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0), v0.t @@ -149,6 +159,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv16i8(, %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg3_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu @@ -162,6 +173,7 @@ entry: define void @test_vsseg3_mask_nxv16i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu @@ -178,6 +190,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv16i8(, %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg4_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -192,6 +205,7 @@ entry: define void @test_vsseg4_mask_nxv16i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -209,6 +223,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv1i64(, %val, i64* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg2_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsseg2e64.v v8, (a0) @@ -221,6 +236,7 @@ entry: define void @test_vsseg2_mask_nxv1i64( %val, i64* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsseg2e64.v v8, (a0), v0.t @@ -236,6 +252,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv1i64(, %val, i64* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg3_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -249,6 +266,7 @@ entry: define void @test_vsseg3_mask_nxv1i64( %val, i64* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -265,6 +283,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv1i64(, %val, i64* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg4_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -279,6 +298,7 @@ entry: define void @test_vsseg4_mask_nxv1i64( %val, i64* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 
killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -296,6 +316,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv1i64(, %val, i64* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg5_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -311,6 +332,7 @@ entry: define void @test_vsseg5_mask_nxv1i64( %val, i64* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -329,6 +351,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv1i64(, %val, i64* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg6_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -345,6 +368,7 @@ entry: define void @test_vsseg6_mask_nxv1i64( %val, i64* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -364,6 +388,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv1i64(, %val, i64* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg7_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -381,6 +406,7 @@ entry: define void @test_vsseg7_mask_nxv1i64( %val, i64* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -401,6 +427,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv1i64(, %val, i64* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg8_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -419,6 +446,7 @@ entry: define void @test_vsseg8_mask_nxv1i64( %val, i64* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -440,6 +468,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv1i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg2_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0) @@ -452,6 +481,7 @@ entry: define void @test_vsseg2_mask_nxv1i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t @@ -467,6 +497,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv1i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg3_nxv1i32: ; CHECK: # %bb.0: # %entry +; 
CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -480,6 +511,7 @@ entry: define void @test_vsseg3_mask_nxv1i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu @@ -496,6 +528,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv1i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg4_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -510,6 +543,7 @@ entry: define void @test_vsseg4_mask_nxv1i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -527,6 +561,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv1i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg5_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -542,6 +577,7 @@ entry: define void @test_vsseg5_mask_nxv1i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -560,6 +596,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv1i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg6_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -576,6 +613,7 @@ entry: define void @test_vsseg6_mask_nxv1i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -595,6 +633,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv1i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg7_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -612,6 +651,7 @@ entry: define void @test_vsseg7_mask_nxv1i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -632,6 +672,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv1i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg8_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -650,6 +691,7 @@ entry: define void @test_vsseg8_mask_nxv1i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv1i32: ; CHECK: # %bb.0: # 
%entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -671,6 +713,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv8i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg2_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0) @@ -683,6 +726,7 @@ entry: define void @test_vsseg2_mask_nxv8i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t @@ -698,6 +742,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv8i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg3_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -711,6 +756,7 @@ entry: define void @test_vsseg3_mask_nxv8i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -727,6 +773,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv8i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg4_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -741,6 +788,7 @@ entry: define void @test_vsseg4_mask_nxv8i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -758,6 +806,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv4i8(,, define void @test_vsseg2_nxv4i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg2_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0) @@ -770,6 +819,7 @@ entry: define void @test_vsseg2_mask_nxv4i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0), v0.t @@ -785,6 +835,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv4i8(,, define void @test_vsseg3_nxv4i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg3_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -798,6 +849,7 @@ entry: define void @test_vsseg3_mask_nxv4i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, 
a1, e8, mf2, ta, mu @@ -814,6 +866,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv4i8(,, define void @test_vsseg4_nxv4i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg4_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -828,6 +881,7 @@ entry: define void @test_vsseg4_mask_nxv4i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -845,6 +899,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv4i8(,, define void @test_vsseg5_nxv4i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg5_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -860,6 +915,7 @@ entry: define void @test_vsseg5_mask_nxv4i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -878,6 +934,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv4i8(,, define void @test_vsseg6_nxv4i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg6_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -894,6 +951,7 @@ entry: define void @test_vsseg6_mask_nxv4i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -913,6 +971,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv4i8(,, define void @test_vsseg7_nxv4i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg7_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -930,6 +989,7 @@ entry: define void @test_vsseg7_mask_nxv4i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -950,6 +1010,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv4i8(,, define void @test_vsseg8_nxv4i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg8_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -968,6 +1029,7 @@ entry: define void @test_vsseg8_mask_nxv4i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -989,6 +1051,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv1i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg2_nxv1i16: ; CHECK: # %bb.0: # %entry +; 
CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0) @@ -1001,6 +1064,7 @@ entry: define void @test_vsseg2_mask_nxv1i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t @@ -1016,6 +1080,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv1i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg3_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -1029,6 +1094,7 @@ entry: define void @test_vsseg3_mask_nxv1i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -1045,6 +1111,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv1i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg4_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1059,6 +1126,7 @@ entry: define void @test_vsseg4_mask_nxv1i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1076,6 +1144,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv1i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg5_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1091,6 +1160,7 @@ entry: define void @test_vsseg5_mask_nxv1i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1109,6 +1179,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv1i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg6_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1125,6 +1196,7 @@ entry: define void @test_vsseg6_mask_nxv1i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1144,6 +1216,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv1i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg7_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1161,6 +1234,7 @@ entry: define void @test_vsseg7_mask_nxv1i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: 
test_vsseg7_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1181,6 +1255,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv1i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg8_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1199,6 +1274,7 @@ entry: define void @test_vsseg8_mask_nxv1i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1220,6 +1296,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv2i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg2_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0) @@ -1232,6 +1309,7 @@ entry: define void @test_vsseg2_mask_nxv2i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t @@ -1247,6 +1325,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv2i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg3_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -1260,6 +1339,7 @@ entry: define void @test_vsseg3_mask_nxv2i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -1276,6 +1356,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv2i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg4_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1290,6 +1371,7 @@ entry: define void @test_vsseg4_mask_nxv2i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1307,6 +1389,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv2i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg5_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1322,6 +1405,7 @@ entry: define void @test_vsseg5_mask_nxv2i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1340,6 +1424,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv2i32(, 
%val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg6_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1356,6 +1441,7 @@ entry: define void @test_vsseg6_mask_nxv2i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1375,6 +1461,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv2i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg7_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1392,6 +1479,7 @@ entry: define void @test_vsseg7_mask_nxv2i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1412,6 +1500,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv2i32(, %val, i32* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg8_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1430,6 +1519,7 @@ entry: define void @test_vsseg8_mask_nxv2i32( %val, i32* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1451,6 +1541,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv8i8(,, define void @test_vsseg2_nxv8i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg2_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0) @@ -1463,6 +1554,7 @@ entry: define void @test_vsseg2_mask_nxv8i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0), v0.t @@ -1478,6 +1570,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv8i8(,, define void @test_vsseg3_nxv8i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg3_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu @@ -1491,6 +1584,7 @@ entry: define void @test_vsseg3_mask_nxv8i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu @@ -1507,6 +1601,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv8i8(,, define void @test_vsseg4_nxv8i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg4_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: 
vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1521,6 +1616,7 @@ entry: define void @test_vsseg4_mask_nxv8i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1538,6 +1634,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv8i8(,, define void @test_vsseg5_nxv8i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg5_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1553,6 +1650,7 @@ entry: define void @test_vsseg5_mask_nxv8i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1571,6 +1669,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv8i8(,, define void @test_vsseg6_nxv8i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg6_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1587,6 +1686,7 @@ entry: define void @test_vsseg6_mask_nxv8i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1606,6 +1706,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv8i8(,, define void @test_vsseg7_nxv8i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg7_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1623,6 +1724,7 @@ entry: define void @test_vsseg7_mask_nxv8i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1643,6 +1745,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv8i8(,, define void @test_vsseg8_nxv8i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg8_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1661,6 +1764,7 @@ entry: define void @test_vsseg8_mask_nxv8i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1682,6 +1786,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv4i64(, %val, i64* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg2_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vsseg2e64.v v8, (a0) @@ -1694,6 +1799,7 @@ entry: define void @test_vsseg2_mask_nxv4i64( %val, i64* %base, %mask, i64 
%vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu ; CHECK-NEXT: vsseg2e64.v v8, (a0), v0.t @@ -1709,6 +1815,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv4i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg2_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0) @@ -1721,6 +1828,7 @@ entry: define void @test_vsseg2_mask_nxv4i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t @@ -1736,6 +1844,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv4i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg3_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -1749,6 +1858,7 @@ entry: define void @test_vsseg3_mask_nxv4i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -1765,6 +1875,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv4i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg4_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1779,6 +1890,7 @@ entry: define void @test_vsseg4_mask_nxv4i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1796,6 +1908,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv4i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg5_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1811,6 +1924,7 @@ entry: define void @test_vsseg5_mask_nxv4i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg5_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1829,6 +1943,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv4i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg6_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1845,6 +1960,7 @@ entry: define void @test_vsseg6_mask_nxv4i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg6_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1864,6 +1980,7 @@ declare void 
@llvm.riscv.vsseg7.mask.nxv4i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg7_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1881,6 +1998,7 @@ entry: define void @test_vsseg7_mask_nxv4i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg7_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1901,6 +2019,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv4i16(, %val, i16* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg8_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1919,6 +2038,7 @@ entry: define void @test_vsseg8_mask_nxv4i16( %val, i16* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg8_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1940,6 +2060,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv1i8(,, define void @test_vsseg2_nxv1i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg2_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0) @@ -1952,6 +2073,7 @@ entry: define void @test_vsseg2_mask_nxv1i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg2_mask_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsseg2e8.v v8, (a0), v0.t @@ -1967,6 +2089,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv1i8(,, define void @test_vsseg3_nxv1i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg3_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -1980,6 +2103,7 @@ entry: define void @test_vsseg3_mask_nxv1i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg3_mask_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -1996,6 +2120,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv1i8(,, define void @test_vsseg4_nxv1i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg4_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2010,6 +2135,7 @@ entry: define void @test_vsseg4_mask_nxv1i8( %val, i8* %base, %mask, i64 %vl) { ; CHECK-LABEL: test_vsseg4_mask_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2027,6 +2153,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv1i8(,, define void @test_vsseg5_nxv1i8( %val, i8* %base, i64 %vl) { ; CHECK-LABEL: test_vsseg5_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 
killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2042,6 +2169,7 @@ entry:
 define void @test_vsseg5_mask_nxv1i8( %val, i8* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2060,6 +2188,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv1i8(,,
 define void @test_vsseg6_nxv1i8( %val, i8* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2076,6 +2205,7 @@ entry:
 define void @test_vsseg6_mask_nxv1i8( %val, i8* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2095,6 +2225,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv1i8(,,
 define void @test_vsseg7_nxv1i8( %val, i8* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2112,6 +2243,7 @@ entry:
 define void @test_vsseg7_mask_nxv1i8( %val, i8* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2132,6 +2264,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv1i8(,,
 define void @test_vsseg8_nxv1i8( %val, i8* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2150,6 +2283,7 @@ entry:
 define void @test_vsseg8_mask_nxv1i8( %val, i8* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2171,6 +2305,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv2i8(,,
 define void @test_vsseg2_nxv2i8( %val, i8* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vsseg2e8.v v8, (a0)
@@ -2183,6 +2318,7 @@ entry:
 define void @test_vsseg2_mask_nxv2i8( %val, i8* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
 ; CHECK-NEXT: vsseg2e8.v v8, (a0), v0.t
@@ -2198,6 +2334,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv2i8(,,
 define void @test_vsseg3_nxv2i8( %val, i8* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
@@ -2211,6 +2348,7 @@ entry:
 define void @test_vsseg3_mask_nxv2i8( %val, i8* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_mask_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
@@ -2227,6 +2365,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv2i8(,,
 define void @test_vsseg4_nxv2i8( %val, i8* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2241,6 +2380,7 @@ entry:
 define void @test_vsseg4_mask_nxv2i8( %val, i8* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_mask_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2258,6 +2398,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv2i8(,,
 define void @test_vsseg5_nxv2i8( %val, i8* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2273,6 +2414,7 @@ entry:
 define void @test_vsseg5_mask_nxv2i8( %val, i8* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_mask_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2291,6 +2433,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv2i8(,,
 define void @test_vsseg6_nxv2i8( %val, i8* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2307,6 +2450,7 @@ entry:
 define void @test_vsseg6_mask_nxv2i8( %val, i8* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_mask_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2326,6 +2470,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv2i8(,,
 define void @test_vsseg7_nxv2i8( %val, i8* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2343,6 +2488,7 @@ entry:
 define void @test_vsseg7_mask_nxv2i8( %val, i8* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_mask_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2363,6 +2509,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv2i8(,,
 define void @test_vsseg8_nxv2i8( %val, i8* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2381,6 +2528,7 @@ entry:
 define void @test_vsseg8_mask_nxv2i8( %val, i8* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_mask_nxv2i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2402,6 +2550,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv8i32(,
 define void @test_vsseg2_nxv8i32( %val, i32* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
 ; CHECK-NEXT: vsseg2e32.v v8, (a0)
@@ -2414,6 +2563,7 @@ entry:
 define void @test_vsseg2_mask_nxv8i32( %val, i32* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
 ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t
@@ -2429,6 +2579,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv32i8(,
 define void @test_vsseg2_nxv32i8( %val, i8* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv32i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu
 ; CHECK-NEXT: vsseg2e8.v v8, (a0)
@@ -2441,6 +2592,7 @@ entry:
 define void @test_vsseg2_mask_nxv32i8( %val, i8* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv32i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu
 ; CHECK-NEXT: vsseg2e8.v v8, (a0), v0.t
@@ -2456,6 +2608,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv2i16(,
 define void @test_vsseg2_nxv2i16( %val, i16* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vsseg2e16.v v8, (a0)
@@ -2468,6 +2621,7 @@ entry:
 define void @test_vsseg2_mask_nxv2i16( %val, i16* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t
@@ -2483,6 +2637,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv2i16(,
 define void @test_vsseg3_nxv2i16( %val, i16* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -2496,6 +2651,7 @@ entry:
 define void @test_vsseg3_mask_nxv2i16( %val, i16* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_mask_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -2512,6 +2668,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv2i16(,
 define void @test_vsseg4_nxv2i16( %val, i16* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2526,6 +2683,7 @@ entry:
 define void @test_vsseg4_mask_nxv2i16( %val, i16* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_mask_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2543,6 +2701,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv2i16(,
 define void @test_vsseg5_nxv2i16( %val, i16* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2558,6 +2717,7 @@ entry:
 define void @test_vsseg5_mask_nxv2i16( %val, i16* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_mask_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2576,6 +2736,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv2i16(,
 define void @test_vsseg6_nxv2i16( %val, i16* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2592,6 +2753,7 @@ entry:
 define void @test_vsseg6_mask_nxv2i16( %val, i16* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_mask_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2611,6 +2773,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv2i16(,
 define void @test_vsseg7_nxv2i16( %val, i16* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2628,6 +2791,7 @@ entry:
 define void @test_vsseg7_mask_nxv2i16( %val, i16* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_mask_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2648,6 +2812,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv2i16(,
 define void @test_vsseg8_nxv2i16( %val, i16* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2666,6 +2831,7 @@ entry:
 define void @test_vsseg8_mask_nxv2i16( %val, i16* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_mask_nxv2i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2687,6 +2853,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv2i64(,
 define void @test_vsseg2_nxv2i64( %val, i64* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv2i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
 ; CHECK-NEXT: vsseg2e64.v v8, (a0)
@@ -2699,6 +2866,7 @@ entry:
 define void @test_vsseg2_mask_nxv2i64( %val, i64* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv2i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
 ; CHECK-NEXT: vsseg2e64.v v8, (a0), v0.t
@@ -2714,6 +2882,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv2i64(,
 define void @test_vsseg3_nxv2i64( %val, i64* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_nxv2i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -2727,6 +2896,7 @@ entry:
 define void @test_vsseg3_mask_nxv2i64( %val, i64* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_mask_nxv2i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -2743,6 +2913,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv2i64(,
 define void @test_vsseg4_nxv2i64( %val, i64* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_nxv2i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vmv2r.v v14, v8
@@ -2757,6 +2928,7 @@ entry:
 define void @test_vsseg4_mask_nxv2i64( %val, i64* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_mask_nxv2i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vmv2r.v v14, v8
@@ -2774,6 +2946,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv16f16(,
 define void @test_vsseg2_nxv16f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv16f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT: vsseg2e16.v v8, (a0)
@@ -2786,6 +2959,7 @@ entry:
 define void @test_vsseg2_mask_nxv16f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv16f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t
@@ -2801,6 +2975,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv4f64(,
 define void @test_vsseg2_nxv4f64( %val, double* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv4f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
 ; CHECK-NEXT: vsseg2e64.v v8, (a0)
@@ -2813,6 +2988,7 @@ entry:
 define void @test_vsseg2_mask_nxv4f64( %val, double* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv4f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
 ; CHECK-NEXT: vsseg2e64.v v8, (a0), v0.t
@@ -2828,6 +3004,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv1f64(,
 define void @test_vsseg2_nxv1f64( %val, double* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsseg2e64.v v8, (a0)
@@ -2840,6 +3017,7 @@ entry:
 define void @test_vsseg2_mask_nxv1f64( %val, double* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsseg2e64.v v8, (a0), v0.t
@@ -2855,6 +3033,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv1f64(,
 define void @test_vsseg3_nxv1f64( %val, double* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -2868,6 +3047,7 @@ entry:
 define void @test_vsseg3_mask_nxv1f64( %val, double* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_mask_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -2884,6 +3064,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv1f64(,
 define void @test_vsseg4_nxv1f64( %val, double* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2898,6 +3079,7 @@ entry:
 define void @test_vsseg4_mask_nxv1f64( %val, double* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_mask_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2915,6 +3097,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv1f64(,
 define void @test_vsseg5_nxv1f64( %val, double* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2930,6 +3113,7 @@ entry:
 define void @test_vsseg5_mask_nxv1f64( %val, double* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_mask_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2948,6 +3132,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv1f64(,
 define void @test_vsseg6_nxv1f64( %val, double* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2964,6 +3149,7 @@ entry:
 define void @test_vsseg6_mask_nxv1f64( %val, double* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_mask_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -2983,6 +3169,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv1f64(,
 define void @test_vsseg7_nxv1f64( %val, double* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3000,6 +3187,7 @@ entry:
 define void @test_vsseg7_mask_nxv1f64( %val, double* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_mask_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3020,6 +3208,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv1f64(,
 define void @test_vsseg8_nxv1f64( %val, double* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3038,6 +3227,7 @@ entry:
 define void @test_vsseg8_mask_nxv1f64( %val, double* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_mask_nxv1f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3059,6 +3249,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv2f32(,
 define void @test_vsseg2_nxv2f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsseg2e32.v v8, (a0)
@@ -3071,6 +3262,7 @@ entry:
 define void @test_vsseg2_mask_nxv2f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t
@@ -3086,6 +3278,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv2f32(,
 define void @test_vsseg3_nxv2f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -3099,6 +3292,7 @@ entry:
 define void @test_vsseg3_mask_nxv2f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_mask_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -3115,6 +3309,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv2f32(,
 define void @test_vsseg4_nxv2f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3129,6 +3324,7 @@ entry:
 define void @test_vsseg4_mask_nxv2f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_mask_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3146,6 +3342,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv2f32(,
 define void @test_vsseg5_nxv2f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3161,6 +3358,7 @@ entry:
 define void @test_vsseg5_mask_nxv2f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_mask_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3179,6 +3377,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv2f32(,
 define void @test_vsseg6_nxv2f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3195,6 +3394,7 @@ entry:
 define void @test_vsseg6_mask_nxv2f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_mask_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3214,6 +3414,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv2f32(,
 define void @test_vsseg7_nxv2f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3231,6 +3432,7 @@ entry:
 define void @test_vsseg7_mask_nxv2f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_mask_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3251,6 +3453,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv2f32(,
 define void @test_vsseg8_nxv2f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3269,6 +3472,7 @@ entry:
 define void @test_vsseg8_mask_nxv2f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_mask_nxv2f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3290,6 +3494,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv1f16(,
 define void @test_vsseg2_nxv1f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsseg2e16.v v8, (a0)
@@ -3302,6 +3507,7 @@ entry:
 define void @test_vsseg2_mask_nxv1f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t
@@ -3317,6 +3523,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv1f16(,
 define void @test_vsseg3_nxv1f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -3330,6 +3537,7 @@ entry:
 define void @test_vsseg3_mask_nxv1f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_mask_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -3346,6 +3554,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv1f16(,
 define void @test_vsseg4_nxv1f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3360,6 +3569,7 @@ entry:
 define void @test_vsseg4_mask_nxv1f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_mask_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3377,6 +3587,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv1f16(,
 define void @test_vsseg5_nxv1f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3392,6 +3603,7 @@ entry:
 define void @test_vsseg5_mask_nxv1f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_mask_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3410,6 +3622,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv1f16(,
 define void @test_vsseg6_nxv1f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3426,6 +3639,7 @@ entry:
 define void @test_vsseg6_mask_nxv1f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_mask_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3445,6 +3659,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv1f16(,
 define void @test_vsseg7_nxv1f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3462,6 +3677,7 @@ entry:
 define void @test_vsseg7_mask_nxv1f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_mask_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3482,6 +3698,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv1f16(,
 define void @test_vsseg8_nxv1f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3500,6 +3717,7 @@ entry:
 define void @test_vsseg8_mask_nxv1f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_mask_nxv1f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3521,6 +3739,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv1f32(,
 define void @test_vsseg2_nxv1f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsseg2e32.v v8, (a0)
@@ -3533,6 +3752,7 @@ entry:
 define void @test_vsseg2_mask_nxv1f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t
@@ -3548,6 +3768,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv1f32(,
 define void @test_vsseg3_nxv1f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -3561,6 +3782,7 @@ entry:
 define void @test_vsseg3_mask_nxv1f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_mask_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -3577,6 +3799,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv1f32(,
 define void @test_vsseg4_nxv1f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3591,6 +3814,7 @@ entry:
 define void @test_vsseg4_mask_nxv1f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_mask_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3608,6 +3832,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv1f32(,
 define void @test_vsseg5_nxv1f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3623,6 +3848,7 @@ entry:
 define void @test_vsseg5_mask_nxv1f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_mask_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3641,6 +3867,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv1f32(,
 define void @test_vsseg6_nxv1f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3657,6 +3884,7 @@ entry:
 define void @test_vsseg6_mask_nxv1f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_mask_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3676,6 +3904,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv1f32(,
 define void @test_vsseg7_nxv1f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3693,6 +3922,7 @@ entry:
 define void @test_vsseg7_mask_nxv1f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_mask_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3713,6 +3943,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv1f32(,
 define void @test_vsseg8_nxv1f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3731,6 +3962,7 @@ entry:
 define void @test_vsseg8_mask_nxv1f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_mask_nxv1f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -3752,6 +3984,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv8f16(,
 define void @test_vsseg2_nxv8f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv8f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsseg2e16.v v8, (a0)
@@ -3764,6 +3997,7 @@ entry:
 define void @test_vsseg2_mask_nxv8f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv8f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
 ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t
@@ -3779,6 +4013,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv8f16(,
 define void @test_vsseg3_nxv8f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_nxv8f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -3792,6 +4027,7 @@ entry:
 define void @test_vsseg3_mask_nxv8f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_mask_nxv8f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -3808,6 +4044,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv8f16(,
 define void @test_vsseg4_nxv8f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_nxv8f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vmv2r.v v14, v8
@@ -3822,6 +4059,7 @@ entry:
 define void @test_vsseg4_mask_nxv8f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_mask_nxv8f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vmv2r.v v14, v8
@@ -3839,6 +4077,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv8f32(,
 define void @test_vsseg2_nxv8f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv8f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
 ; CHECK-NEXT: vsseg2e32.v v8, (a0)
@@ -3851,6 +4090,7 @@ entry:
 define void @test_vsseg2_mask_nxv8f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv8f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
 ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t
@@ -3866,6 +4106,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv2f64(,
 define void @test_vsseg2_nxv2f64( %val, double* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv2f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
 ; CHECK-NEXT: vsseg2e64.v v8, (a0)
@@ -3878,6 +4119,7 @@ entry:
 define void @test_vsseg2_mask_nxv2f64( %val, double* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv2f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
 ; CHECK-NEXT: vsseg2e64.v v8, (a0), v0.t
@@ -3893,6 +4135,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv2f64(,
 define void @test_vsseg3_nxv2f64( %val, double* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_nxv2f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -3906,6 +4149,7 @@ entry:
 define void @test_vsseg3_mask_nxv2f64( %val, double* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_mask_nxv2f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -3922,6 +4166,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv2f64(,
 define void @test_vsseg4_nxv2f64( %val, double* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_nxv2f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vmv2r.v v14, v8
@@ -3936,6 +4181,7 @@ entry:
 define void @test_vsseg4_mask_nxv2f64( %val, double* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_mask_nxv2f64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vmv2r.v v14, v8
@@ -3953,6 +4199,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv4f16(,
 define void @test_vsseg2_nxv4f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsseg2e16.v v8, (a0)
@@ -3965,6 +4212,7 @@ entry:
 define void @test_vsseg2_mask_nxv4f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t
@@ -3980,6 +4228,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv4f16(,
 define void @test_vsseg3_nxv4f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -3993,6 +4242,7 @@ entry:
 define void @test_vsseg3_mask_nxv4f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -4009,6 +4259,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv4f16(,
 define void @test_vsseg4_nxv4f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4023,6 +4274,7 @@ entry:
 define void @test_vsseg4_mask_nxv4f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4040,6 +4292,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv4f16(,
 define void @test_vsseg5_nxv4f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4055,6 +4308,7 @@ entry:
 define void @test_vsseg5_mask_nxv4f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4073,6 +4327,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv4f16(,
 define void @test_vsseg6_nxv4f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4089,6 +4344,7 @@ entry:
 define void @test_vsseg6_mask_nxv4f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4108,6 +4364,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv4f16(,
 define void @test_vsseg7_nxv4f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4125,6 +4382,7 @@ entry:
 define void @test_vsseg7_mask_nxv4f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4145,6 +4403,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv4f16(,
 define void @test_vsseg8_nxv4f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4163,6 +4422,7 @@ entry:
 define void @test_vsseg8_mask_nxv4f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_mask_nxv4f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4184,6 +4444,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv2f16(,
 define void @test_vsseg2_nxv2f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vsseg2e16.v v8, (a0)
@@ -4196,6 +4457,7 @@ entry:
 define void @test_vsseg2_mask_nxv2f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
 ; CHECK-NEXT: vsseg2e16.v v8, (a0), v0.t
@@ -4211,6 +4473,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv2f16(,
 define void @test_vsseg3_nxv2f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -4224,6 +4487,7 @@ entry:
 define void @test_vsseg3_mask_nxv2f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -4240,6 +4504,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv2f16(,
 define void @test_vsseg4_nxv2f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4254,6 +4519,7 @@ entry:
 define void @test_vsseg4_mask_nxv2f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4271,6 +4537,7 @@ declare void @llvm.riscv.vsseg5.mask.nxv2f16(,
 define void @test_vsseg5_nxv2f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4286,6 +4553,7 @@ entry:
 define void @test_vsseg5_mask_nxv2f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg5_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4304,6 +4572,7 @@ declare void @llvm.riscv.vsseg6.mask.nxv2f16(,
 define void @test_vsseg6_nxv2f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4320,6 +4589,7 @@ entry:
 define void @test_vsseg6_mask_nxv2f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg6_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4339,6 +4609,7 @@ declare void @llvm.riscv.vsseg7.mask.nxv2f16(,
 define void @test_vsseg7_nxv2f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4356,6 +4627,7 @@ entry:
 define void @test_vsseg7_mask_nxv2f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg7_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4376,6 +4648,7 @@ declare void @llvm.riscv.vsseg8.mask.nxv2f16(,
 define void @test_vsseg8_nxv2f16( %val, half* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4394,6 +4667,7 @@ entry:
 define void @test_vsseg8_mask_nxv2f16( %val, half* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg8_mask_nxv2f16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -4415,6 +4689,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv4f32(,
 define void @test_vsseg2_nxv4f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_nxv4f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsseg2e32.v v8, (a0)
@@ -4427,6 +4702,7 @@ entry:
 define void @test_vsseg2_mask_nxv4f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv4f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsseg2e32.v v8, (a0), v0.t
@@ -4442,6 +4718,7 @@ declare void @llvm.riscv.vsseg3.mask.nxv4f32(,
 define void @test_vsseg3_nxv4f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_nxv4f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -4455,6 +4732,7 @@ entry:
 define void @test_vsseg3_mask_nxv4f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg3_mask_nxv4f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -4471,6 +4749,7 @@ declare void @llvm.riscv.vsseg4.mask.nxv4f32(,
 define void @test_vsseg4_nxv4f32( %val, float* %base, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_nxv4f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vmv2r.v v14, v8
@@ -4485,6 +4764,7 @@ entry:
 define void @test_vsseg4_mask_nxv4f32( %val, float* %base, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsseg4_mask_nxv4f32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vmv2r.v v14, v8
diff --git a/llvm/test/CodeGen/RISCV/rvv/vssseg-rv32.ll b/llvm/test/CodeGen/RISCV/rvv/vssseg-rv32.ll
index 64ec98f92f553..ee6fd45ddf5cb 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vssseg-rv32.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vssseg-rv32.ll
@@ -8,6 +8,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv16i16(,
 define void @test_vssseg2_nxv16i16( %val, i16* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_nxv16i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a2, e16, m4, ta, mu
 ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1
@@ -20,6 +21,7 @@ entry:
 define void @test_vssseg2_mask_nxv16i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_mask_nxv16i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a2, e16, m4, ta, mu
 ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t
@@ -35,6 +37,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv1i8(,
 define void @test_vssseg2_nxv1i8( %val, i8* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu
 ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1
@@ -47,6 +50,7 @@ entry:
 define void @test_vssseg2_mask_nxv1i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu
 ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1, v0.t
@@ -62,6 +66,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv1i8(,
 define void @test_vssseg3_nxv1i8( %val, i8* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg3_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu
@@ -75,6 +80,7 @@ entry:
 define void @test_vssseg3_mask_nxv1i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg3_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu
@@ -91,6 +97,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv1i8(,
 define void @test_vssseg4_nxv1i8( %val, i8* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg4_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -105,6 +112,7 @@ entry:
 define void @test_vssseg4_mask_nxv1i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg4_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -122,6 +130,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv1i8(,
 define void @test_vssseg5_nxv1i8( %val, i8* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg5_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -137,6 +146,7 @@ entry:
 define void @test_vssseg5_mask_nxv1i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg5_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -155,6 +165,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv1i8(,
 define void @test_vssseg6_nxv1i8( %val, i8* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg6_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -171,6 +182,7 @@ entry:
 define void @test_vssseg6_mask_nxv1i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg6_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -190,6 +202,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv1i8(,
 define void @test_vssseg7_nxv1i8( %val, i8* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg7_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -207,6 +220,7 @@ entry:
 define void @test_vssseg7_mask_nxv1i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg7_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -227,6 +241,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv1i8(,
 define void @test_vssseg8_nxv1i8( %val, i8* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg8_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -245,6 +260,7 @@ entry:
 define void @test_vssseg8_mask_nxv1i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg8_mask_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -266,6 +282,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv16i8(,
 define void @test_vssseg2_nxv16i8( %val, i8* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_nxv16i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu
 ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1
@@ -278,6 +295,7 @@ entry:
 define void @test_vssseg2_mask_nxv16i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_mask_nxv16i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu
 ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1, v0.t
@@ -293,6 +311,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv16i8(,
 define void @test_vssseg3_nxv16i8( %val, i8* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg3_nxv16i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu
@@ -306,6 +325,7 @@ entry:
 define void @test_vssseg3_mask_nxv16i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg3_mask_nxv16i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu
@@ -322,6 +342,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv16i8(,
 define void @test_vssseg4_nxv16i8( %val, i8* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg4_nxv16i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vmv2r.v v14, v8
@@ -336,6 +357,7 @@ entry:
 define void @test_vssseg4_mask_nxv16i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg4_mask_nxv16i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vmv2r.v v14, v8
@@ -353,6 +375,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv2i32(,
 define void @test_vssseg2_nxv2i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu
 ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1
@@ -365,6 +388,7 @@ entry:
 define void @test_vssseg2_mask_nxv2i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_mask_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu
 ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t
@@ -380,6 +404,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv2i32(,
 define void @test_vssseg3_nxv2i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg3_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu
@@ -393,6 +418,7 @@ entry:
 define void @test_vssseg3_mask_nxv2i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg3_mask_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu
@@ -409,6 +435,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv2i32(,
 define void @test_vssseg4_nxv2i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg4_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -423,6 +450,7 @@ entry:
 define void @test_vssseg4_mask_nxv2i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg4_mask_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -440,6 +468,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv2i32(,
 define void @test_vssseg5_nxv2i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg5_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -455,6 +484,7 @@ entry:
 define void @test_vssseg5_mask_nxv2i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg5_mask_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -473,6 +503,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv2i32(,
 define void @test_vssseg6_nxv2i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg6_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -489,6 +520,7 @@ entry:
 define void @test_vssseg6_mask_nxv2i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg6_mask_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -508,6 +540,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv2i32(,
 define void @test_vssseg7_nxv2i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg7_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -525,6 +558,7 @@ entry:
 define void @test_vssseg7_mask_nxv2i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg7_mask_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -545,6 +579,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv2i32(,
 define void @test_vssseg8_nxv2i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg8_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -563,6 +598,7 @@ entry:
 define void @test_vssseg8_mask_nxv2i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg8_mask_nxv2i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -584,6 +620,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv4i16(,
 define void @test_vssseg2_nxv4i16( %val, i16* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu
 ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1
@@ -596,6 +633,7 @@ entry:
 define void @test_vssseg2_mask_nxv4i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_mask_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu
 ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t
@@ -611,6 +649,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv4i16(,
 define void @test_vssseg3_nxv4i16( %val, i16* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg3_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu
@@ -624,6 +663,7 @@ entry:
 define void @test_vssseg3_mask_nxv4i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg3_mask_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu
@@ -640,6 +680,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv4i16(,
 define void @test_vssseg4_nxv4i16( %val, i16* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg4_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -654,6 +695,7 @@ entry:
 define void @test_vssseg4_mask_nxv4i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg4_mask_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -671,6 +713,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv4i16(,
 define void @test_vssseg5_nxv4i16( %val, i16* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg5_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -686,6 +729,7 @@ entry:
 define void @test_vssseg5_mask_nxv4i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg5_mask_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -704,6 +748,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv4i16(,
 define void @test_vssseg6_nxv4i16( %val, i16* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg6_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -720,6 +765,7 @@ entry:
 define void @test_vssseg6_mask_nxv4i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg6_mask_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -739,6 +785,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv4i16(,
 define void @test_vssseg7_nxv4i16( %val, i16* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg7_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -756,6 +803,7 @@ entry:
 define void @test_vssseg7_mask_nxv4i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg7_mask_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -776,6 +824,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv4i16(,
 define void @test_vssseg8_nxv4i16( %val, i16* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg8_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -794,6 +843,7 @@ entry:
 define void @test_vssseg8_mask_nxv4i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg8_mask_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -815,6 +865,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv1i32(,
 define void @test_vssseg2_nxv1i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu
 ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1
@@ -827,6 +878,7 @@ entry:
 define void @test_vssseg2_mask_nxv1i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_mask_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu
 ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t
@@ -842,6 +894,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv1i32(,
 define void @test_vssseg3_nxv1i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg3_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu
@@ -855,6 +908,7 @@ entry:
 define void @test_vssseg3_mask_nxv1i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg3_mask_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu
@@ -871,6 +925,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv1i32(,
 define void @test_vssseg4_nxv1i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg4_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -885,6 +940,7 @@ entry:
 define void @test_vssseg4_mask_nxv1i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg4_mask_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -902,6 +958,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv1i32(,
 define void @test_vssseg5_nxv1i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg5_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -917,6 +974,7 @@ entry:
 define void @test_vssseg5_mask_nxv1i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg5_mask_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -935,6 +993,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv1i32(,
 define void @test_vssseg6_nxv1i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg6_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -951,6 +1010,7 @@ entry:
 define void @test_vssseg6_mask_nxv1i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg6_mask_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -970,6 +1030,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv1i32(,
 define void @test_vssseg7_nxv1i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg7_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -987,6 +1048,7 @@ entry:
 define void @test_vssseg7_mask_nxv1i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg7_mask_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -1007,6 +1069,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv1i32(,
 define void @test_vssseg8_nxv1i32( %val, i32* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg8_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -1025,6 +1088,7 @@ entry:
 define void @test_vssseg8_mask_nxv1i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg8_mask_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -1046,6 +1110,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv8i16(,
 define void @test_vssseg2_nxv8i16( %val, i16* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu
 ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1
@@ -1058,6 +1123,7 @@ entry:
 define void @test_vssseg2_mask_nxv8i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vssseg2_mask_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu
 ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t
@@ -1073,6 +1139,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv8i16(,
 define void @test_vssseg3_nxv8i16( %val, i16* %base, i32 %offset, i32 %vl) {
 ; CHECK-LABEL: test_vssseg3_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu
@@ -1086,6 +1153,7 @@ entry:
 define void
@test_vssseg3_mask_nxv8i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu @@ -1102,6 +1170,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv8i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -1116,6 +1185,7 @@ entry: define void @test_vssseg4_mask_nxv8i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -1133,6 +1203,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv8i8(, define void @test_vssseg2_nxv8i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1 @@ -1145,6 +1216,7 @@ entry: define void @test_vssseg2_mask_nxv8i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1, v0.t @@ -1160,6 +1232,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv8i8(, define void @test_vssseg3_nxv8i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu @@ -1173,6 +1246,7 @@ entry: define void @test_vssseg3_mask_nxv8i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu @@ -1189,6 +1263,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv8i8(, define void @test_vssseg4_nxv8i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1203,6 +1278,7 @@ entry: define void @test_vssseg4_mask_nxv8i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1220,6 +1296,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv8i8(, define void @test_vssseg5_nxv8i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg5_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1235,6 +1312,7 @@ entry: define 
void @test_vssseg5_mask_nxv8i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1253,6 +1331,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv8i8(, define void @test_vssseg6_nxv8i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg6_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1269,6 +1348,7 @@ entry: define void @test_vssseg6_mask_nxv8i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1288,6 +1368,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv8i8(, define void @test_vssseg7_nxv8i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg7_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1305,6 +1386,7 @@ entry: define void @test_vssseg7_mask_nxv8i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1325,6 +1407,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv8i8(, define void @test_vssseg8_nxv8i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg8_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1343,6 +1426,7 @@ entry: define void @test_vssseg8_mask_nxv8i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1364,6 +1448,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv8i32(, %val, i32* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m4, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1 @@ -1376,6 +1461,7 @@ entry: define void @test_vssseg2_mask_nxv8i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m4, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t @@ -1391,6 +1477,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv4i8(, define void @test_vssseg2_nxv4i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1 @@ 
-1403,6 +1490,7 @@ entry: define void @test_vssseg2_mask_nxv4i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1, v0.t @@ -1418,6 +1506,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv4i8(, define void @test_vssseg3_nxv4i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu @@ -1431,6 +1520,7 @@ entry: define void @test_vssseg3_mask_nxv4i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu @@ -1447,6 +1537,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv4i8(, define void @test_vssseg4_nxv4i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1461,6 +1552,7 @@ entry: define void @test_vssseg4_mask_nxv4i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1478,6 +1570,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv4i8(, define void @test_vssseg5_nxv4i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg5_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1493,6 +1586,7 @@ entry: define void @test_vssseg5_mask_nxv4i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1511,6 +1605,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv4i8(, define void @test_vssseg6_nxv4i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg6_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1527,6 +1622,7 @@ entry: define void @test_vssseg6_mask_nxv4i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1546,6 +1642,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv4i8(, define void @test_vssseg7_nxv4i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg7_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1563,6 
+1660,7 @@ entry: define void @test_vssseg7_mask_nxv4i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1583,6 +1681,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv4i8(, define void @test_vssseg8_nxv4i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg8_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1601,6 +1700,7 @@ entry: define void @test_vssseg8_mask_nxv4i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1622,6 +1722,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv1i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -1634,6 +1735,7 @@ entry: define void @test_vssseg2_mask_nxv1i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -1649,6 +1751,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv1i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu @@ -1662,6 +1765,7 @@ entry: define void @test_vssseg3_mask_nxv1i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu @@ -1678,6 +1782,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv1i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1692,6 +1797,7 @@ entry: define void @test_vssseg4_mask_nxv1i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1709,6 +1815,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv1i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg5_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1724,6 +1831,7 @@ entry: define void @test_vssseg5_mask_nxv1i16( %val, i16* %base, i32 %offset, 
%mask, i32 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1742,6 +1850,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv1i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg6_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1758,6 +1867,7 @@ entry: define void @test_vssseg6_mask_nxv1i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1777,6 +1887,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv1i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg7_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1794,6 +1905,7 @@ entry: define void @test_vssseg7_mask_nxv1i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1814,6 +1926,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv1i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg8_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1832,6 +1945,7 @@ entry: define void @test_vssseg8_mask_nxv1i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1853,6 +1967,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv32i8(, %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv32i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m4, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1 @@ -1865,6 +1980,7 @@ entry: define void @test_vssseg2_mask_nxv32i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv32i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m4, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1, v0.t @@ -1880,6 +1996,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv2i8(, define void @test_vssseg2_nxv2i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1 @@ -1892,6 +2009,7 @@ entry: define void @test_vssseg2_mask_nxv2i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: 
test_vssseg2_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1, v0.t @@ -1907,6 +2025,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv2i8(, define void @test_vssseg3_nxv2i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu @@ -1920,6 +2039,7 @@ entry: define void @test_vssseg3_mask_nxv2i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf4, ta, mu @@ -1936,6 +2056,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv2i8(, define void @test_vssseg4_nxv2i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1950,6 +2071,7 @@ entry: define void @test_vssseg4_mask_nxv2i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1967,6 +2089,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv2i8(, define void @test_vssseg5_nxv2i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg5_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1982,6 +2105,7 @@ entry: define void @test_vssseg5_mask_nxv2i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2000,6 +2124,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv2i8(, define void @test_vssseg6_nxv2i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg6_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2016,6 +2141,7 @@ entry: define void @test_vssseg6_mask_nxv2i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2035,6 +2161,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv2i8(, define void @test_vssseg7_nxv2i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg7_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2052,6 +2179,7 @@ entry: define void @test_vssseg7_mask_nxv2i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: 
test_vssseg7_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2072,6 +2200,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv2i8(, define void @test_vssseg8_nxv2i8( %val, i8* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg8_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2090,6 +2219,7 @@ entry: define void @test_vssseg8_mask_nxv2i8( %val, i8* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2111,6 +2241,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv2i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -2123,6 +2254,7 @@ entry: define void @test_vssseg2_mask_nxv2i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -2138,6 +2270,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv2i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu @@ -2151,6 +2284,7 @@ entry: define void @test_vssseg3_mask_nxv2i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu @@ -2167,6 +2301,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv2i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2181,6 +2316,7 @@ entry: define void @test_vssseg4_mask_nxv2i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2198,6 +2334,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv2i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg5_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2213,6 +2350,7 @@ entry: define void @test_vssseg5_mask_nxv2i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 
killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2231,6 +2369,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv2i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg6_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2247,6 +2386,7 @@ entry: define void @test_vssseg6_mask_nxv2i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2266,6 +2406,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv2i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg7_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2283,6 +2424,7 @@ entry: define void @test_vssseg7_mask_nxv2i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2303,6 +2445,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv2i16(, %val, i16* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg8_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2321,6 +2464,7 @@ entry: define void @test_vssseg8_mask_nxv2i16( %val, i16* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2342,6 +2486,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv4i32(, %val, i32* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1 @@ -2354,6 +2499,7 @@ entry: define void @test_vssseg2_mask_nxv4i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t @@ -2369,6 +2515,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv4i32(, %val, i32* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu @@ -2382,6 +2529,7 @@ entry: define void @test_vssseg3_mask_nxv4i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; 
CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu @@ -2398,6 +2546,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv4i32(, %val, i32* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -2412,6 +2561,7 @@ entry: define void @test_vssseg4_mask_nxv4i32( %val, i32* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -2429,6 +2579,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv16f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv16f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m4, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -2441,6 +2592,7 @@ entry: define void @test_vssseg2_mask_nxv16f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv16f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m4, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -2456,6 +2608,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv4f64(, %val, double* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv4f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m4, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1 @@ -2468,6 +2621,7 @@ entry: define void @test_vssseg2_mask_nxv4f64( %val, double* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv4f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m4, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1, v0.t @@ -2483,6 +2637,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv1f64(, %val, double* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1 @@ -2495,6 +2650,7 @@ entry: define void @test_vssseg2_mask_nxv1f64( %val, double* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1, v0.t @@ -2510,6 +2666,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv1f64(, %val, double* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu @@ -2523,6 +2680,7 @@ entry: define void @test_vssseg3_mask_nxv1f64( %val, double* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; 
CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu @@ -2539,6 +2697,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv1f64(, %val, double* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2553,6 +2712,7 @@ entry: define void @test_vssseg4_mask_nxv1f64( %val, double* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2570,6 +2730,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv1f64(, %val, double* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg5_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2585,6 +2746,7 @@ entry: define void @test_vssseg5_mask_nxv1f64( %val, double* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2603,6 +2765,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv1f64(, %val, double* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg6_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2619,6 +2782,7 @@ entry: define void @test_vssseg6_mask_nxv1f64( %val, double* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2638,6 +2802,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv1f64(, %val, double* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg7_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2655,6 +2820,7 @@ entry: define void @test_vssseg7_mask_nxv1f64( %val, double* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2675,6 +2841,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv1f64(, %val, double* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg8_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2693,6 +2860,7 @@ entry: define void @test_vssseg8_mask_nxv1f64( %val, double* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2714,6 +2882,7 @@ 
declare void @llvm.riscv.vssseg2.mask.nxv2f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1 @@ -2726,6 +2895,7 @@ entry: define void @test_vssseg2_mask_nxv2f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t @@ -2741,6 +2911,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv2f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu @@ -2754,6 +2925,7 @@ entry: define void @test_vssseg3_mask_nxv2f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu @@ -2770,6 +2942,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv2f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2784,6 +2957,7 @@ entry: define void @test_vssseg4_mask_nxv2f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2801,6 +2975,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv2f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg5_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2816,6 +2991,7 @@ entry: define void @test_vssseg5_mask_nxv2f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2834,6 +3010,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv2f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg6_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2850,6 +3027,7 @@ entry: define void @test_vssseg6_mask_nxv2f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2869,6 +3047,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv2f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: 
test_vssseg7_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2886,6 +3065,7 @@ entry: define void @test_vssseg7_mask_nxv2f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2906,6 +3086,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv2f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg8_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2924,6 +3105,7 @@ entry: define void @test_vssseg8_mask_nxv2f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2945,6 +3127,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv1f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -2957,6 +3140,7 @@ entry: define void @test_vssseg2_mask_nxv1f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -2972,6 +3156,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv1f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu @@ -2985,6 +3170,7 @@ entry: define void @test_vssseg3_mask_nxv1f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu @@ -3001,6 +3187,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv1f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3015,6 +3202,7 @@ entry: define void @test_vssseg4_mask_nxv1f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3032,6 +3220,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv1f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg5_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 
def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3047,6 +3236,7 @@ entry: define void @test_vssseg5_mask_nxv1f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3065,6 +3255,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv1f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg6_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3081,6 +3272,7 @@ entry: define void @test_vssseg6_mask_nxv1f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3100,6 +3292,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv1f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg7_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3117,6 +3310,7 @@ entry: define void @test_vssseg7_mask_nxv1f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3137,6 +3331,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv1f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg8_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3155,6 +3350,7 @@ entry: define void @test_vssseg8_mask_nxv1f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3176,6 +3372,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv1f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1 @@ -3188,6 +3385,7 @@ entry: define void @test_vssseg2_mask_nxv1f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t @@ -3203,6 +3401,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv1f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli 
zero, a2, e32, mf2, ta, mu @@ -3216,6 +3415,7 @@ entry: define void @test_vssseg3_mask_nxv1f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu @@ -3232,6 +3432,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv1f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3246,6 +3447,7 @@ entry: define void @test_vssseg4_mask_nxv1f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3263,6 +3465,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv1f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg5_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3278,6 +3481,7 @@ entry: define void @test_vssseg5_mask_nxv1f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3296,6 +3500,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv1f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg6_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3312,6 +3517,7 @@ entry: define void @test_vssseg6_mask_nxv1f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3331,6 +3537,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv1f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg7_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3348,6 +3555,7 @@ entry: define void @test_vssseg7_mask_nxv1f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3368,6 +3576,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv1f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg8_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3386,6 +3595,7 @@ entry: define void @test_vssseg8_mask_nxv1f32( %val, float* %base, i32 %offset, %mask, i32 
%vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3407,6 +3617,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv8f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -3419,6 +3630,7 @@ entry: define void @test_vssseg2_mask_nxv8f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -3434,6 +3646,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv8f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu @@ -3447,6 +3660,7 @@ entry: define void @test_vssseg3_mask_nxv8f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu @@ -3463,6 +3677,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv8f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3477,6 +3692,7 @@ entry: define void @test_vssseg4_mask_nxv8f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3494,6 +3710,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv8f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv8f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m4, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1 @@ -3506,6 +3723,7 @@ entry: define void @test_vssseg2_mask_nxv8f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv8f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m4, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t @@ -3521,6 +3739,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv2f64(, %val, double* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1 @@ -3533,6 +3752,7 @@ entry: define void @test_vssseg2_mask_nxv2f64( %val, 
double* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1, v0.t @@ -3548,6 +3768,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv2f64(, %val, double* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu @@ -3561,6 +3782,7 @@ entry: define void @test_vssseg3_mask_nxv2f64( %val, double* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu @@ -3577,6 +3799,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv2f64(, %val, double* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3591,6 +3814,7 @@ entry: define void @test_vssseg4_mask_nxv2f64( %val, double* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3608,6 +3832,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv4f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -3620,6 +3845,7 @@ entry: define void @test_vssseg2_mask_nxv4f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -3635,6 +3861,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv4f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu @@ -3648,6 +3875,7 @@ entry: define void @test_vssseg3_mask_nxv4f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu @@ -3664,6 +3892,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv4f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3678,6 +3907,7 @@ entry: define void @test_vssseg4_mask_nxv4f16( %val, half* %base, i32 %offset, 
%mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3695,6 +3925,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv4f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg5_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3710,6 +3941,7 @@ entry: define void @test_vssseg5_mask_nxv4f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3728,6 +3960,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv4f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg6_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3744,6 +3977,7 @@ entry: define void @test_vssseg6_mask_nxv4f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3763,6 +3997,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv4f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg7_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3780,6 +4015,7 @@ entry: define void @test_vssseg7_mask_nxv4f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3800,6 +4036,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv4f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg8_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3818,6 +4055,7 @@ entry: define void @test_vssseg8_mask_nxv4f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3839,6 +4077,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv2f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -3851,6 +4090,7 @@ entry: define void @test_vssseg2_mask_nxv2f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 
def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -3866,6 +4106,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv2f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu @@ -3879,6 +4120,7 @@ entry: define void @test_vssseg3_mask_nxv2f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu @@ -3895,6 +4137,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv2f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3909,6 +4152,7 @@ entry: define void @test_vssseg4_mask_nxv2f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3926,6 +4170,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv2f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg5_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3941,6 +4186,7 @@ entry: define void @test_vssseg5_mask_nxv2f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3959,6 +4205,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv2f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg6_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3975,6 +4222,7 @@ entry: define void @test_vssseg6_mask_nxv2f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3994,6 +4242,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv2f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg7_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4011,6 +4260,7 @@ entry: define void @test_vssseg7_mask_nxv2f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4031,6 
+4281,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv2f16(, %val, half* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg8_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4049,6 +4300,7 @@ entry: define void @test_vssseg8_mask_nxv2f16( %val, half* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4070,6 +4322,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv4f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg2_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1 @@ -4082,6 +4335,7 @@ entry: define void @test_vssseg2_mask_nxv4f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t @@ -4097,6 +4351,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv4f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg3_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu @@ -4110,6 +4365,7 @@ entry: define void @test_vssseg3_mask_nxv4f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu @@ -4126,6 +4382,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv4f32(, %val, float* %base, i32 %offset, i32 %vl) { ; CHECK-LABEL: test_vssseg4_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -4140,6 +4397,7 @@ entry: define void @test_vssseg4_mask_nxv4f32( %val, float* %base, i32 %offset, %mask, i32 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 diff --git a/llvm/test/CodeGen/RISCV/rvv/vssseg-rv64.ll b/llvm/test/CodeGen/RISCV/rvv/vssseg-rv64.ll index e34ad01360860..14fe8e535395e 100644 --- a/llvm/test/CodeGen/RISCV/rvv/vssseg-rv64.ll +++ b/llvm/test/CodeGen/RISCV/rvv/vssseg-rv64.ll @@ -8,6 +8,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv16i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv16i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m4, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -20,6 +21,7 @@ entry: define void @test_vssseg2_mask_nxv16i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; 
CHECK-LABEL: test_vssseg2_mask_nxv16i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m4, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -35,6 +37,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv4i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1 @@ -47,6 +50,7 @@ entry: define void @test_vssseg2_mask_nxv4i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t @@ -62,6 +66,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv4i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu @@ -75,6 +80,7 @@ entry: define void @test_vssseg3_mask_nxv4i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu @@ -91,6 +97,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv4i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -105,6 +112,7 @@ entry: define void @test_vssseg4_mask_nxv4i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -122,6 +130,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv16i8(, %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1 @@ -134,6 +143,7 @@ entry: define void @test_vssseg2_mask_nxv16i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1, v0.t @@ -149,6 +159,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv16i8(, %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu @@ -162,6 +173,7 @@ entry: define void @test_vssseg3_mask_nxv16i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; 
CHECK-LABEL: test_vssseg3_mask_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m2, ta, mu @@ -178,6 +190,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv16i8(, %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -192,6 +205,7 @@ entry: define void @test_vssseg4_mask_nxv16i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -209,6 +223,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv1i64(, %val, i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1 @@ -221,6 +236,7 @@ entry: define void @test_vssseg2_mask_nxv1i64( %val, i64* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1, v0.t @@ -236,6 +252,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv1i64(, %val, i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu @@ -249,6 +266,7 @@ entry: define void @test_vssseg3_mask_nxv1i64( %val, i64* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu @@ -265,6 +283,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv1i64(, %val, i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -279,6 +298,7 @@ entry: define void @test_vssseg4_mask_nxv1i64( %val, i64* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -296,6 +316,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv1i64(, %val, i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg5_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -311,6 +332,7 @@ entry: define void @test_vssseg5_mask_nxv1i64( %val, i64* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def 
$v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -329,6 +351,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv1i64(, %val, i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg6_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -345,6 +368,7 @@ entry: define void @test_vssseg6_mask_nxv1i64( %val, i64* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -364,6 +388,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv1i64(, %val, i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -381,6 +406,7 @@ entry: define void @test_vssseg7_mask_nxv1i64( %val, i64* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -401,6 +427,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv1i64(, %val, i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -419,6 +446,7 @@ entry: define void @test_vssseg8_mask_nxv1i64( %val, i64* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -440,6 +468,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv1i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1 @@ -452,6 +481,7 @@ entry: define void @test_vssseg2_mask_nxv1i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t @@ -467,6 +497,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv1i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu @@ -480,6 +511,7 @@ entry: define void @test_vssseg3_mask_nxv1i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu 
@@ -496,6 +528,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv1i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -510,6 +543,7 @@ entry: define void @test_vssseg4_mask_nxv1i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -527,6 +561,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv1i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg5_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -542,6 +577,7 @@ entry: define void @test_vssseg5_mask_nxv1i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -560,6 +596,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv1i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg6_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -576,6 +613,7 @@ entry: define void @test_vssseg6_mask_nxv1i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -595,6 +633,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv1i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -612,6 +651,7 @@ entry: define void @test_vssseg7_mask_nxv1i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -632,6 +672,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv1i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -650,6 +691,7 @@ entry: define void @test_vssseg8_mask_nxv1i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -671,6 +713,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv8i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv8i16: ; CHECK: # %bb.0: # %entry +; 
CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -683,6 +726,7 @@ entry: define void @test_vssseg2_mask_nxv8i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -698,6 +742,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv8i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu @@ -711,6 +756,7 @@ entry: define void @test_vssseg3_mask_nxv8i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu @@ -727,6 +773,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv8i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -741,6 +788,7 @@ entry: define void @test_vssseg4_mask_nxv8i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -758,6 +806,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv4i8(, define void @test_vssseg2_nxv4i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1 @@ -770,6 +819,7 @@ entry: define void @test_vssseg2_mask_nxv4i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1, v0.t @@ -785,6 +835,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv4i8(, define void @test_vssseg3_nxv4i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu @@ -798,6 +849,7 @@ entry: define void @test_vssseg3_mask_nxv4i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf2, ta, mu @@ -814,6 +866,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv4i8(, define void @test_vssseg4_nxv4i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: 
test_vssseg4_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -828,6 +881,7 @@ entry: define void @test_vssseg4_mask_nxv4i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -845,6 +899,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv4i8(, define void @test_vssseg5_nxv4i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg5_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -860,6 +915,7 @@ entry: define void @test_vssseg5_mask_nxv4i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -878,6 +934,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv4i8(, define void @test_vssseg6_nxv4i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg6_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -894,6 +951,7 @@ entry: define void @test_vssseg6_mask_nxv4i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -913,6 +971,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv4i8(, define void @test_vssseg7_nxv4i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -930,6 +989,7 @@ entry: define void @test_vssseg7_mask_nxv4i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -950,6 +1010,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv4i8(, define void @test_vssseg8_nxv4i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -968,6 +1029,7 @@ entry: define void @test_vssseg8_mask_nxv4i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -989,6 +1051,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv1i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # 
kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -1001,6 +1064,7 @@ entry: define void @test_vssseg2_mask_nxv1i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -1016,6 +1080,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv1i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu @@ -1029,6 +1094,7 @@ entry: define void @test_vssseg3_mask_nxv1i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu @@ -1045,6 +1111,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv1i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1059,6 +1126,7 @@ entry: define void @test_vssseg4_mask_nxv1i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1076,6 +1144,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv1i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg5_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1091,6 +1160,7 @@ entry: define void @test_vssseg5_mask_nxv1i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1109,6 +1179,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv1i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg6_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1125,6 +1196,7 @@ entry: define void @test_vssseg6_mask_nxv1i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1144,6 +1216,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv1i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v 
v11, v8 @@ -1161,6 +1234,7 @@ entry: define void @test_vssseg7_mask_nxv1i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1181,6 +1255,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv1i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1199,6 +1274,7 @@ entry: define void @test_vssseg8_mask_nxv1i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1220,6 +1296,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv2i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1 @@ -1232,6 +1309,7 @@ entry: define void @test_vssseg2_mask_nxv2i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t @@ -1247,6 +1325,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv2i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu @@ -1260,6 +1339,7 @@ entry: define void @test_vssseg3_mask_nxv2i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu @@ -1276,6 +1356,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv2i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1290,6 +1371,7 @@ entry: define void @test_vssseg4_mask_nxv2i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1307,6 +1389,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv2i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg5_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1322,6 +1405,7 @@ entry: define void @test_vssseg5_mask_nxv2i32( %val, i32* %base, i64 %offset, %mask, 
i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1340,6 +1424,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv2i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg6_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1356,6 +1441,7 @@ entry: define void @test_vssseg6_mask_nxv2i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1375,6 +1461,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv2i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1392,6 +1479,7 @@ entry: define void @test_vssseg7_mask_nxv2i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1412,6 +1500,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv2i32(, %val, i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1430,6 +1519,7 @@ entry: define void @test_vssseg8_mask_nxv2i32( %val, i32* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1451,6 +1541,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv8i8(, define void @test_vssseg2_nxv8i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1 @@ -1463,6 +1554,7 @@ entry: define void @test_vssseg2_mask_nxv8i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1, v0.t @@ -1478,6 +1570,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv8i8(, define void @test_vssseg3_nxv8i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu @@ -1491,6 +1584,7 @@ entry: define void @test_vssseg3_mask_nxv8i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv8i8: 
; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, m1, ta, mu @@ -1507,6 +1601,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv8i8(, define void @test_vssseg4_nxv8i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1521,6 +1616,7 @@ entry: define void @test_vssseg4_mask_nxv8i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1538,6 +1634,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv8i8(, define void @test_vssseg5_nxv8i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg5_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1553,6 +1650,7 @@ entry: define void @test_vssseg5_mask_nxv8i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1571,6 +1669,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv8i8(, define void @test_vssseg6_nxv8i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg6_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1587,6 +1686,7 @@ entry: define void @test_vssseg6_mask_nxv8i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1606,6 +1706,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv8i8(, define void @test_vssseg7_nxv8i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1623,6 +1724,7 @@ entry: define void @test_vssseg7_mask_nxv8i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1643,6 +1745,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv8i8(, define void @test_vssseg8_nxv8i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1661,6 +1764,7 @@ entry: define void @test_vssseg8_mask_nxv8i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv8i8: ; CHECK: # %bb.0: # 
%entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1682,6 +1786,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv4i64(, %val, i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m4, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1 @@ -1694,6 +1799,7 @@ entry: define void @test_vssseg2_mask_nxv4i64( %val, i64* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m4, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1, v0.t @@ -1709,6 +1815,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv4i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -1721,6 +1828,7 @@ entry: define void @test_vssseg2_mask_nxv4i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -1736,6 +1844,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv4i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu @@ -1749,6 +1858,7 @@ entry: define void @test_vssseg3_mask_nxv4i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu @@ -1765,6 +1875,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv4i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1779,6 +1890,7 @@ entry: define void @test_vssseg4_mask_nxv4i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1796,6 +1908,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv4i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg5_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1811,6 +1924,7 @@ entry: define void @test_vssseg5_mask_nxv4i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; 
CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1829,6 +1943,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv4i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg6_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1845,6 +1960,7 @@ entry: define void @test_vssseg6_mask_nxv4i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1864,6 +1980,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv4i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1881,6 +1998,7 @@ entry: define void @test_vssseg7_mask_nxv4i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1901,6 +2019,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv4i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1919,6 +2038,7 @@ entry: define void @test_vssseg8_mask_nxv4i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -1940,6 +2060,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv1i8(, define void @test_vssseg2_nxv1i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1 @@ -1952,6 +2073,7 @@ entry: define void @test_vssseg2_mask_nxv1i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu ; CHECK-NEXT: vssseg2e8.v v8, (a0), a1, v0.t @@ -1967,6 +2089,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv1i8(, define void @test_vssseg3_nxv1i8( %val, i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e8, mf8, ta, mu @@ -1980,6 +2103,7 @@ entry: define void @test_vssseg3_mask_nxv1i8( %val, i8* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 
 ; CHECK-NEXT:    vsetvli zero, a2, e8, mf8, ta, mu
@@ -1996,6 +2120,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv1i8(<vscale x 1 x i8>,
 define void @test_vssseg4_nxv1i8(<vscale x 1 x i8> %val, i8* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg4_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2010,6 +2135,7 @@ entry:
 define void @test_vssseg4_mask_nxv1i8(<vscale x 1 x i8> %val, i8* %base, i64 %offset, <vscale x 1 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg4_mask_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2027,6 +2153,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv1i8(<vscale x 1 x i8>,
 define void @test_vssseg5_nxv1i8(<vscale x 1 x i8> %val, i8* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg5_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2042,6 +2169,7 @@ entry:
 define void @test_vssseg5_mask_nxv1i8(<vscale x 1 x i8> %val, i8* %base, i64 %offset, <vscale x 1 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg5_mask_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2060,6 +2188,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv1i8(<vscale x 1 x i8>,
 define void @test_vssseg6_nxv1i8(<vscale x 1 x i8> %val, i8* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg6_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2076,6 +2205,7 @@ entry:
 define void @test_vssseg6_mask_nxv1i8(<vscale x 1 x i8> %val, i8* %base, i64 %offset, <vscale x 1 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg6_mask_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2095,6 +2225,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv1i8(<vscale x 1 x i8>,
 define void @test_vssseg7_nxv1i8(<vscale x 1 x i8> %val, i8* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg7_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2112,6 +2243,7 @@ entry:
 define void @test_vssseg7_mask_nxv1i8(<vscale x 1 x i8> %val, i8* %base, i64 %offset, <vscale x 1 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg7_mask_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2132,6 +2264,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv1i8(<vscale x 1 x i8>,
 define void @test_vssseg8_nxv1i8(<vscale x 1 x i8> %val, i8* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg8_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2150,6 +2283,7 @@ entry:
 define void @test_vssseg8_mask_nxv1i8(<vscale x 1 x i8> %val, i8* %base, i64 %offset, <vscale x 1 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg8_mask_nxv1i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2171,6 +2305,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv2i8(<vscale x 2 x i8>,
 define void @test_vssseg2_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg2_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vsetvli zero, a2, e8, mf4, ta, mu
 ; CHECK-NEXT:    vssseg2e8.v v8, (a0), a1
@@ -2183,6 +2318,7 @@ entry:
 define void @test_vssseg2_mask_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, <vscale x 2 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg2_mask_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vsetvli zero, a2, e8, mf4, ta, mu
 ; CHECK-NEXT:    vssseg2e8.v v8, (a0), a1, v0.t
@@ -2198,6 +2334,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv2i8(<vscale x 2 x i8>,
 define void @test_vssseg3_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg3_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vsetvli zero, a2, e8, mf4, ta, mu
@@ -2211,6 +2348,7 @@ entry:
 define void @test_vssseg3_mask_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, <vscale x 2 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg3_mask_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vsetvli zero, a2, e8, mf4, ta, mu
@@ -2227,6 +2365,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv2i8(<vscale x 2 x i8>,
 define void @test_vssseg4_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg4_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2241,6 +2380,7 @@ entry:
 define void @test_vssseg4_mask_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, <vscale x 2 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg4_mask_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2258,6 +2398,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv2i8(<vscale x 2 x i8>,
 define void @test_vssseg5_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg5_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2273,6 +2414,7 @@ entry:
 define void @test_vssseg5_mask_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, <vscale x 2 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg5_mask_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2291,6 +2433,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv2i8(<vscale x 2 x i8>,
 define void @test_vssseg6_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg6_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2307,6 +2450,7 @@ entry:
 define void @test_vssseg6_mask_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, <vscale x 2 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg6_mask_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2326,6 +2470,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv2i8(<vscale x 2 x i8>,
 define void @test_vssseg7_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg7_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2343,6 +2488,7 @@ entry:
 define void @test_vssseg7_mask_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, <vscale x 2 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg7_mask_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2363,6 +2509,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv2i8(<vscale x 2 x i8>,
 define void @test_vssseg8_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg8_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2381,6 +2528,7 @@ entry:
 define void @test_vssseg8_mask_nxv2i8(<vscale x 2 x i8> %val, i8* %base, i64 %offset, <vscale x 2 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg8_mask_nxv2i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vmv1r.v v10, v8
 ; CHECK-NEXT:    vmv1r.v v11, v8
@@ -2402,6 +2550,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv8i32(<vscale x 8 x i32>,
 define void @test_vssseg2_nxv8i32(<vscale x 8 x i32> %val, i32* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg2_nxv8i32:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a2, e32, m4, ta, mu
 ; CHECK-NEXT:    vssseg2e32.v v8, (a0), a1
@@ -2414,6 +2563,7 @@ entry:
 define void @test_vssseg2_mask_nxv8i32(<vscale x 8 x i32> %val, i32* %base, i64 %offset, <vscale x 8 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg2_mask_nxv8i32:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a2, e32, m4, ta, mu
 ; CHECK-NEXT:    vssseg2e32.v v8, (a0), a1, v0.t
@@ -2429,6 +2579,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv32i8(<vscale x 32 x i8>,
 define void @test_vssseg2_nxv32i8(<vscale x 32 x i8> %val, i8* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg2_nxv32i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a2, e8, m4, ta, mu
 ; CHECK-NEXT:    vssseg2e8.v v8, (a0), a1
@@ -2441,6 +2592,7 @@ entry:
 define void @test_vssseg2_mask_nxv32i8(<vscale x 32 x i8> %val, i8* %base, i64 %offset, <vscale x 32 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg2_mask_nxv32i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetvli zero, a2, e8, m4, ta, mu
 ; CHECK-NEXT:    vssseg2e8.v v8, (a0), a1, v0.t
@@ -2456,6 +2608,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv2i16(<vscale x 2 x i16>,
 define void @test_vssseg2_nxv2i16(<vscale x 2 x i16> %val, i16* %base, i64 %offset, i64 %vl) {
 ; CHECK-LABEL: test_vssseg2_nxv2i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:    vsetvli zero, a2, e16, mf2, ta, mu
 ; CHECK-NEXT:    vssseg2e16.v v8, (a0), a1
@@ -2468,6 +2621,7 @@ entry:
 define void @test_vssseg2_mask_nxv2i16(<vscale x 2 x i16> %val, i16* %base, i64 %offset, <vscale x 2 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vssseg2_mask_nxv2i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT:    vmv1r.v v9, v8
 ; CHECK-NEXT:
vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -2483,6 +2637,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv2i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu @@ -2496,6 +2651,7 @@ entry: define void @test_vssseg3_mask_nxv2i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu @@ -2512,6 +2668,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv2i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2526,6 +2683,7 @@ entry: define void @test_vssseg4_mask_nxv2i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2543,6 +2701,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv2i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg5_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2558,6 +2717,7 @@ entry: define void @test_vssseg5_mask_nxv2i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2576,6 +2736,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv2i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg6_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2592,6 +2753,7 @@ entry: define void @test_vssseg6_mask_nxv2i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2611,6 +2773,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv2i16(, %val, i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2628,6 +2791,7 @@ entry: define void @test_vssseg7_mask_nxv2i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2648,6 +2812,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv2i16(, %val, i16* 
%base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2666,6 +2831,7 @@ entry: define void @test_vssseg8_mask_nxv2i16( %val, i16* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2687,6 +2853,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv2i64(, %val, i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1 @@ -2699,6 +2866,7 @@ entry: define void @test_vssseg2_mask_nxv2i64( %val, i64* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1, v0.t @@ -2714,6 +2882,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv2i64(, %val, i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu @@ -2727,6 +2896,7 @@ entry: define void @test_vssseg3_mask_nxv2i64( %val, i64* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu @@ -2743,6 +2913,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv2i64(, %val, i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -2757,6 +2928,7 @@ entry: define void @test_vssseg4_mask_nxv2i64( %val, i64* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -2774,6 +2946,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv16f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv16f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m4, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -2786,6 +2959,7 @@ entry: define void @test_vssseg2_mask_nxv16f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv16f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m4, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -2801,6 +2975,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv4f64(, 
%val, double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv4f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m4, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1 @@ -2813,6 +2988,7 @@ entry: define void @test_vssseg2_mask_nxv4f64( %val, double* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv4f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m4, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1, v0.t @@ -2828,6 +3004,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv1f64(, %val, double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1 @@ -2840,6 +3017,7 @@ entry: define void @test_vssseg2_mask_nxv1f64( %val, double* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1, v0.t @@ -2855,6 +3033,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv1f64(, %val, double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu @@ -2868,6 +3047,7 @@ entry: define void @test_vssseg3_mask_nxv1f64( %val, double* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m1, ta, mu @@ -2884,6 +3064,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv1f64(, %val, double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2898,6 +3079,7 @@ entry: define void @test_vssseg4_mask_nxv1f64( %val, double* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2915,6 +3097,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv1f64(, %val, double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg5_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2930,6 +3113,7 @@ entry: define void @test_vssseg5_mask_nxv1f64( %val, double* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2948,6 +3132,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv1f64(, %val, double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: 
test_vssseg6_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2964,6 +3149,7 @@ entry: define void @test_vssseg6_mask_nxv1f64( %val, double* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -2983,6 +3169,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv1f64(, %val, double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3000,6 +3187,7 @@ entry: define void @test_vssseg7_mask_nxv1f64( %val, double* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3020,6 +3208,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv1f64(, %val, double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3038,6 +3227,7 @@ entry: define void @test_vssseg8_mask_nxv1f64( %val, double* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv1f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3059,6 +3249,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv2f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1 @@ -3071,6 +3262,7 @@ entry: define void @test_vssseg2_mask_nxv2f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t @@ -3086,6 +3278,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv2f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu @@ -3099,6 +3292,7 @@ entry: define void @test_vssseg3_mask_nxv2f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m1, ta, mu @@ -3115,6 +3309,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv2f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # 
kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3129,6 +3324,7 @@ entry: define void @test_vssseg4_mask_nxv2f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3146,6 +3342,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv2f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg5_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3161,6 +3358,7 @@ entry: define void @test_vssseg5_mask_nxv2f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3179,6 +3377,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv2f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg6_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3195,6 +3394,7 @@ entry: define void @test_vssseg6_mask_nxv2f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3214,6 +3414,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv2f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3231,6 +3432,7 @@ entry: define void @test_vssseg7_mask_nxv2f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3251,6 +3453,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv2f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3269,6 +3472,7 @@ entry: define void @test_vssseg8_mask_nxv2f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv2f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3290,6 +3494,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv1f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vssseg2e16.v 
v8, (a0), a1 @@ -3302,6 +3507,7 @@ entry: define void @test_vssseg2_mask_nxv1f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -3317,6 +3523,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv1f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu @@ -3330,6 +3537,7 @@ entry: define void @test_vssseg3_mask_nxv1f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf4, ta, mu @@ -3346,6 +3554,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv1f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3360,6 +3569,7 @@ entry: define void @test_vssseg4_mask_nxv1f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3377,6 +3587,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv1f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg5_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3392,6 +3603,7 @@ entry: define void @test_vssseg5_mask_nxv1f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3410,6 +3622,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv1f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg6_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3426,6 +3639,7 @@ entry: define void @test_vssseg6_mask_nxv1f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3445,6 +3659,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv1f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3462,6 +3677,7 @@ entry: define void @test_vssseg7_mask_nxv1f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; 
CHECK-LABEL: test_vssseg7_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3482,6 +3698,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv1f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3500,6 +3717,7 @@ entry: define void @test_vssseg8_mask_nxv1f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv1f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3521,6 +3739,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv1f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1 @@ -3533,6 +3752,7 @@ entry: define void @test_vssseg2_mask_nxv1f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t @@ -3548,6 +3768,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv1f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu @@ -3561,6 +3782,7 @@ entry: define void @test_vssseg3_mask_nxv1f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, mf2, ta, mu @@ -3577,6 +3799,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv1f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3591,6 +3814,7 @@ entry: define void @test_vssseg4_mask_nxv1f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3608,6 +3832,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv1f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg5_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3623,6 +3848,7 @@ entry: define void @test_vssseg5_mask_nxv1f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def 
$v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3641,6 +3867,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv1f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg6_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3657,6 +3884,7 @@ entry: define void @test_vssseg6_mask_nxv1f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3676,6 +3904,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv1f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3693,6 +3922,7 @@ entry: define void @test_vssseg7_mask_nxv1f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3713,6 +3943,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv1f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3731,6 +3962,7 @@ entry: define void @test_vssseg8_mask_nxv1f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv1f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3752,6 +3984,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv8f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -3764,6 +3997,7 @@ entry: define void @test_vssseg2_mask_nxv8f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -3779,6 +4013,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv8f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu @@ -3792,6 +4027,7 @@ entry: define void @test_vssseg3_mask_nxv8f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: 
vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m2, ta, mu @@ -3808,6 +4044,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv8f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3822,6 +4059,7 @@ entry: define void @test_vssseg4_mask_nxv8f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv8f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3839,6 +4077,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv8f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv8f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m4, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1 @@ -3851,6 +4090,7 @@ entry: define void @test_vssseg2_mask_nxv8f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv8f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m4, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t @@ -3866,6 +4106,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv2f64(, %val, double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1 @@ -3878,6 +4119,7 @@ entry: define void @test_vssseg2_mask_nxv2f64( %val, double* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu ; CHECK-NEXT: vssseg2e64.v v8, (a0), a1, v0.t @@ -3893,6 +4135,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv2f64(, %val, double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu @@ -3906,6 +4149,7 @@ entry: define void @test_vssseg3_mask_nxv2f64( %val, double* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e64, m2, ta, mu @@ -3922,6 +4166,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv2f64(, %val, double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3936,6 +4181,7 @@ entry: define void @test_vssseg4_mask_nxv2f64( %val, double* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv2f64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 
def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3953,6 +4199,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv4f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -3965,6 +4212,7 @@ entry: define void @test_vssseg2_mask_nxv4f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -3980,6 +4228,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv4f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu @@ -3993,6 +4242,7 @@ entry: define void @test_vssseg3_mask_nxv4f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, m1, ta, mu @@ -4009,6 +4259,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv4f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4023,6 +4274,7 @@ entry: define void @test_vssseg4_mask_nxv4f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4040,6 +4292,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv4f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg5_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4055,6 +4308,7 @@ entry: define void @test_vssseg5_mask_nxv4f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4073,6 +4327,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv4f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg6_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4089,6 +4344,7 @@ entry: define void @test_vssseg6_mask_nxv4f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4108,6 
+4364,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv4f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4125,6 +4382,7 @@ entry: define void @test_vssseg7_mask_nxv4f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4145,6 +4403,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv4f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4163,6 +4422,7 @@ entry: define void @test_vssseg8_mask_nxv4f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv4f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4184,6 +4444,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv2f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1 @@ -4196,6 +4457,7 @@ entry: define void @test_vssseg2_mask_nxv2f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu ; CHECK-NEXT: vssseg2e16.v v8, (a0), a1, v0.t @@ -4211,6 +4473,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv2f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu @@ -4224,6 +4487,7 @@ entry: define void @test_vssseg3_mask_nxv2f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e16, mf2, ta, mu @@ -4240,6 +4504,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv2f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4254,6 +4519,7 @@ entry: define void @test_vssseg4_mask_nxv2f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4271,6 +4537,7 @@ declare void @llvm.riscv.vssseg5.mask.nxv2f16(, %val, half* %base, i64 %offset, i64 
%vl) { ; CHECK-LABEL: test_vssseg5_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4286,6 +4553,7 @@ entry: define void @test_vssseg5_mask_nxv2f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg5_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4304,6 +4572,7 @@ declare void @llvm.riscv.vssseg6.mask.nxv2f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg6_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4320,6 +4589,7 @@ entry: define void @test_vssseg6_mask_nxv2f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg6_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4339,6 +4609,7 @@ declare void @llvm.riscv.vssseg7.mask.nxv2f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg7_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4356,6 +4627,7 @@ entry: define void @test_vssseg7_mask_nxv2f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg7_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4376,6 +4648,7 @@ declare void @llvm.riscv.vssseg8.mask.nxv2f16(, %val, half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg8_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4394,6 +4667,7 @@ entry: define void @test_vssseg8_mask_nxv2f16( %val, half* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg8_mask_nxv2f16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -4415,6 +4689,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv4f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg2_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1 @@ -4427,6 +4702,7 @@ entry: define void @test_vssseg2_mask_nxv4f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg2_mask_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu ; CHECK-NEXT: vssseg2e32.v v8, (a0), a1, v0.t @@ -4442,6 +4718,7 @@ declare void @llvm.riscv.vssseg3.mask.nxv4f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg3_nxv4f32: ; CHECK: # %bb.0: # %entry +; 
CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu @@ -4455,6 +4732,7 @@ entry: define void @test_vssseg3_mask_nxv4f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg3_mask_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a2, e32, m2, ta, mu @@ -4471,6 +4749,7 @@ declare void @llvm.riscv.vssseg4.mask.nxv4f32(, %val, float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vssseg4_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -4485,6 +4764,7 @@ entry: define void @test_vssseg4_mask_nxv4f32( %val, float* %base, i64 %offset, %mask, i64 %vl) { ; CHECK-LABEL: test_vssseg4_mask_nxv4f32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 diff --git a/llvm/test/CodeGen/RISCV/rvv/vsuxseg-rv32.ll b/llvm/test/CodeGen/RISCV/rvv/vsuxseg-rv32.ll index adc8dadc9228f..d333314d92a36 100644 --- a/llvm/test/CodeGen/RISCV/rvv/vsuxseg-rv32.ll +++ b/llvm/test/CodeGen/RISCV/rvv/vsuxseg-rv32.ll @@ -8,6 +8,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i16(, %val, i16* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu @@ -21,6 +22,7 @@ entry: define void @test_vsuxseg2_mask_nxv16i16_nxv16i16( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu @@ -37,6 +39,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i8(, %val, i16* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu @@ -50,6 +53,7 @@ entry: define void @test_vsuxseg2_mask_nxv16i16_nxv16i8( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu @@ -66,6 +70,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i32(, %val, i16* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16 @@ -78,6 +83,7 @@ entry: define void @test_vsuxseg2_mask_nxv16i16_nxv16i32( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i32: 
; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16, v0.t @@ -93,6 +99,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i8(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -106,6 +113,7 @@ entry: define void @test_vsuxseg2_mask_nxv1i8_nxv1i8( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -122,6 +130,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i32(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -135,6 +144,7 @@ entry: define void @test_vsuxseg2_mask_nxv1i8_nxv1i32( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -151,6 +161,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i16(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -164,6 +175,7 @@ entry: define void @test_vsuxseg2_mask_nxv1i8_nxv1i16( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -181,8 +193,8 @@ define void @test_vsuxseg3_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -212,8 +224,8 @@ define void @test_vsuxseg3_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -243,8 +255,8 @@ define void @test_vsuxseg3_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; 
CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -274,9 +286,9 @@ define void @test_vsuxseg4_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -307,9 +319,9 @@ define void @test_vsuxseg4_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -340,9 +352,9 @@ define void @test_vsuxseg4_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -373,10 +385,10 @@ define void @test_vsuxseg5_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -408,10 +420,10 @@ define void @test_vsuxseg5_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -443,10 +455,10 @@ define void @test_vsuxseg5_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -478,11 +490,11 @@ define void @test_vsuxseg6_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: 
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i8_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -515,11 +527,11 @@ define void @test_vsuxseg6_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i8_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -552,11 +564,11 @@ define void @test_vsuxseg6_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i8_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -589,12 +601,12 @@ define void @test_vsuxseg7_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i8_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -628,12 +640,12 @@ define void @test_vsuxseg7_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i8_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -667,12 +679,12 @@ define void @test_vsuxseg7_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i8_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -706,13 +718,13 @@ define void @test_vsuxseg8_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg8_mask_nxv1i8_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -747,13 +759,13 @@ define void @test_vsuxseg8_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg8_mask_nxv1i8_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -788,13 +800,13 @@ define void @test_vsuxseg8_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg8_mask_nxv1i8_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -828,6 +840,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i16(, %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12
@@ -840,6 +853,7 @@ entry:
define void @test_vsuxseg2_mask_nxv16i8_nxv16i16( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12, v0.t
@@ -855,6 +869,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i8(, %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
@@ -868,6 +883,7 @@ entry:
define void @test_vsuxseg2_mask_nxv16i8_nxv16i8( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
@@ -884,6 +900,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i32(, %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16
@@ -896,6 +913,7 @@ entry:
define void @test_vsuxseg2_mask_nxv16i8_nxv16i32( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16, v0.t
@@ -911,11 +929,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i16(, %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
-; CHECK-NEXT: vsuxseg3ei16.v v8, (a0), v16
+; CHECK-NEXT: vsuxseg3ei16.v v16, (a0), v12
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i16( %val, %val, %val, i8* %base, %index, i32 %vl)
@@ -925,11 +943,11 @@ entry:
define void @test_vsuxseg3_mask_nxv16i8_nxv16i16( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
-; CHECK-NEXT: vsuxseg3ei16.v v8, (a0), v16, v0.t
+; CHECK-NEXT: vsuxseg3ei16.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl)
@@ -943,8 +961,8 @@ define void @test_vsuxseg3_nxv16i8_nxv16i8( %val, i8* %base, <
; CHECK-LABEL: test_vsuxseg3_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -957,8 +975,8 @@ define void @test_vsuxseg3_mask_nxv16i8_nxv16i8( %val, i8* %ba
; CHECK-LABEL: test_vsuxseg3_mask_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -973,6 +991,7 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i32(, %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vmv2r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
@@ -986,6 +1005,7 @@ entry:
define void @test_vsuxseg3_mask_nxv16i8_nxv16i32( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vmv2r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
@@ -1003,9 +1023,9 @@ define void @test_vsuxseg4_nxv16i8_nxv16i16( %val, i8* %base,
; CHECK-LABEL: test_vsuxseg4_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v16, (a0), v12
; CHECK-NEXT: ret
@@ -1018,9 +1038,9 @@ define void @test_vsuxseg4_mask_nxv16i8_nxv16i16( %val, i8* %b
; CHECK-LABEL: test_vsuxseg4_mask_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
@@ -1036,9 +1056,9 @@ define void @test_vsuxseg4_nxv16i8_nxv16i8( %val, i8* %base, <
; CHECK-LABEL: test_vsuxseg4_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -1051,9 +1071,9 @@ define void @test_vsuxseg4_mask_nxv16i8_nxv16i8( %val, i8* %ba
; CHECK-LABEL: test_vsuxseg4_mask_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -1068,6 +1088,7 @@ declare void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i32(, %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vmv2r.v v12, v8
; CHECK-NEXT: vmv2r.v v14, v8
@@ -1082,6 +1103,7 @@ entry:
define void @test_vsuxseg4_mask_nxv16i8_nxv16i32( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vmv2r.v v12, v8
; CHECK-NEXT: vmv2r.v v14, v8
@@ -1099,6 +1121,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i32(, %val, i32* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -1112,6 +1135,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2i32_nxv2i32( %val, i32* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -1128,6 +1152,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i8(, %val, i32* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -1141,6 +1166,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2i32_nxv2i8( %val, i32* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -1157,6 +1183,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i16(, %val, i32* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -1170,6 +1197,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2i32_nxv2i16( %val, i32* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
@@ -1187,8 +1215,8 @@ define void @test_vsuxseg3_nxv2i32_nxv2i32( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg3_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1201,8 +1229,8 @@ define void @test_vsuxseg3_mask_nxv2i32_nxv2i32( %val, i32* %b
; CHECK-LABEL: test_vsuxseg3_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1218,8 +1246,8 @@ define void @test_vsuxseg3_nxv2i32_nxv2i8( %val, i32* %base, <
; CHECK-LABEL: test_vsuxseg3_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1232,8 +1260,8 @@ define void @test_vsuxseg3_mask_nxv2i32_nxv2i8( %val, i32* %ba
; CHECK-LABEL: test_vsuxseg3_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1249,8 +1277,8 @@ define void @test_vsuxseg3_nxv2i32_nxv2i16( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg3_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1263,8 +1291,8 @@ define void @test_vsuxseg3_mask_nxv2i32_nxv2i16( %val, i32* %b
; CHECK-LABEL: test_vsuxseg3_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1280,9 +1308,9 @@ define void @test_vsuxseg4_nxv2i32_nxv2i32( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg4_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1295,9 +1323,9 @@ define void @test_vsuxseg4_mask_nxv2i32_nxv2i32( %val, i32* %b
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1313,9 +1341,9 @@ define void @test_vsuxseg4_nxv2i32_nxv2i8( %val, i32* %base, <
; CHECK-LABEL: test_vsuxseg4_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1328,9 +1356,9 @@ define void @test_vsuxseg4_mask_nxv2i32_nxv2i8( %val, i32* %ba
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1346,9 +1374,9 @@ define void @test_vsuxseg4_nxv2i32_nxv2i16( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg4_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1361,9 +1389,9 @@ define void @test_vsuxseg4_mask_nxv2i32_nxv2i16( %val, i32* %b
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1379,10 +1407,10 @@ define void @test_vsuxseg5_nxv2i32_nxv2i32( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg5_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1395,10 +1423,10 @@ define void @test_vsuxseg5_mask_nxv2i32_nxv2i32( %val, i32* %b
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1414,10 +1442,10 @@ define void @test_vsuxseg5_nxv2i32_nxv2i8( %val, i32* %base, <
; CHECK-LABEL: test_vsuxseg5_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1430,10 +1458,10 @@ define void @test_vsuxseg5_mask_nxv2i32_nxv2i8( %val, i32* %ba
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1449,10 +1477,10 @@ define void @test_vsuxseg5_nxv2i32_nxv2i16( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg5_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1465,10 +1493,10 @@ define void @test_vsuxseg5_mask_nxv2i32_nxv2i16( %val, i32* %b
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1484,11 +1512,11 @@ define void @test_vsuxseg6_nxv2i32_nxv2i32( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg6_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1501,11 +1529,11 @@ define void @test_vsuxseg6_mask_nxv2i32_nxv2i32( %val, i32* %b
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1521,11 +1549,11 @@ define void @test_vsuxseg6_nxv2i32_nxv2i8( %val, i32* %base, <
; CHECK-LABEL: test_vsuxseg6_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1538,11 +1566,11 @@ define void @test_vsuxseg6_mask_nxv2i32_nxv2i8( %val, i32* %ba
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1558,11 +1586,11 @@ define void @test_vsuxseg6_nxv2i32_nxv2i16( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg6_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1575,11 +1603,11 @@ define void @test_vsuxseg6_mask_nxv2i32_nxv2i16( %val, i32* %b
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1595,12 +1623,12 @@ define void @test_vsuxseg7_nxv2i32_nxv2i32( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg7_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1613,12 +1641,12 @@ define void @test_vsuxseg7_mask_nxv2i32_nxv2i32( %val, i32* %b
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1634,12 +1662,12 @@ define void @test_vsuxseg7_nxv2i32_nxv2i8( %val, i32* %base, <
; CHECK-LABEL: test_vsuxseg7_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1652,12 +1680,12 @@ define void @test_vsuxseg7_mask_nxv2i32_nxv2i8( %val, i32* %ba
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1673,12 +1701,12 @@ define void @test_vsuxseg7_nxv2i32_nxv2i16( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg7_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1691,12 +1719,12 @@ define void @test_vsuxseg7_mask_nxv2i32_nxv2i16( %val, i32* %b
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1712,13 +1740,13 @@ define void @test_vsuxseg8_nxv2i32_nxv2i32( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg8_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1731,13 +1759,13 @@ define void @test_vsuxseg8_mask_nxv2i32_nxv2i32( %val, i32* %b
; CHECK-LABEL: test_vsuxseg8_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1753,13 +1781,13 @@ define void @test_vsuxseg8_nxv2i32_nxv2i8( %val, i32* %base, <
; CHECK-LABEL: test_vsuxseg8_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1772,13 +1800,13 @@ define void @test_vsuxseg8_mask_nxv2i32_nxv2i8( %val, i32* %ba
; CHECK-LABEL: test_vsuxseg8_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1794,13 +1822,13 @@ define void @test_vsuxseg8_nxv2i32_nxv2i16( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg8_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1813,13 +1841,13 @@ define void @test_vsuxseg8_mask_nxv2i32_nxv2i16( %val, i32* %b
; CHECK-LABEL: test_vsuxseg8_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1834,6 +1862,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i16(, %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -1847,6 +1876,7 @@ entry:
define void @test_vsuxseg2_mask_nxv4i16_nxv4i16( %val, i16* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -1863,6 +1893,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i8(, %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -1876,6 +1907,7 @@ entry:
define void @test_vsuxseg2_mask_nxv4i16_nxv4i8( %val, i16* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -1892,6 +1924,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i32(, %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
@@ -1904,6 +1937,7 @@ entry:
define void @test_vsuxseg2_mask_nxv4i16_nxv4i32( %val, i16* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
@@ -1920,8 +1954,8 @@ define void @test_vsuxseg3_nxv4i16_nxv4i16( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg3_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1934,8 +1968,8 @@ define void @test_vsuxseg3_mask_nxv4i16_nxv4i16( %val, i16* %b
; CHECK-LABEL: test_vsuxseg3_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1951,8 +1985,8 @@ define void @test_vsuxseg3_nxv4i16_nxv4i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg3_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -1965,8 +1999,8 @@ define void @test_vsuxseg3_mask_nxv4i16_nxv4i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg3_mask_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -1981,11 +2015,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i32(, %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
-; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12
+; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i32( %val, %val, %val, i16* %base, %index, i32 %vl)
@@ -1995,11 +2029,11 @@ entry:
define void @test_vsuxseg3_mask_nxv4i16_nxv4i32( %val, i16* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
-; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i32( %val, %val, %val, i16* %base, %index, %mask, i32 %vl)
@@ -2013,9 +2047,9 @@ define void @test_vsuxseg4_nxv4i16_nxv4i16( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg4_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2028,9 +2062,9 @@ define void @test_vsuxseg4_mask_nxv4i16_nxv4i16( %val, i16* %b
; CHECK-LABEL: test_vsuxseg4_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2046,9 +2080,9 @@ define void @test_vsuxseg4_nxv4i16_nxv4i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg4_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2061,9 +2095,9 @@ define void @test_vsuxseg4_mask_nxv4i16_nxv4i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg4_mask_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2079,9 +2113,9 @@ define void @test_vsuxseg4_nxv4i16_nxv4i32( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg4_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -2094,9 +2128,9 @@ define void @test_vsuxseg4_mask_nxv4i16_nxv4i32( %val, i16* %b
; CHECK-LABEL: test_vsuxseg4_mask_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -2112,10 +2146,10 @@ define void @test_vsuxseg5_nxv4i16_nxv4i16( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg5_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2128,10 +2162,10 @@ define void @test_vsuxseg5_mask_nxv4i16_nxv4i16( %val, i16* %b
; CHECK-LABEL: test_vsuxseg5_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2147,10 +2181,10 @@ define void @test_vsuxseg5_nxv4i16_nxv4i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg5_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2163,10 +2197,10 @@ define void @test_vsuxseg5_mask_nxv4i16_nxv4i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg5_mask_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2182,10 +2216,10 @@ define void @test_vsuxseg5_nxv4i16_nxv4i32( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg5_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -2198,10 +2232,10 @@ define void @test_vsuxseg5_mask_nxv4i16_nxv4i32( %val, i16* %b
; CHECK-LABEL: test_vsuxseg5_mask_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -2217,11 +2251,11 @@ define void @test_vsuxseg6_nxv4i16_nxv4i16( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg6_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2234,11 +2268,11 @@ define void @test_vsuxseg6_mask_nxv4i16_nxv4i16( %val, i16* %b
; CHECK-LABEL: test_vsuxseg6_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2254,11 +2288,11 @@ define void @test_vsuxseg6_nxv4i16_nxv4i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg6_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2271,11 +2305,11 @@ define void @test_vsuxseg6_mask_nxv4i16_nxv4i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg6_mask_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2291,11 +2325,11 @@ define void @test_vsuxseg6_nxv4i16_nxv4i32( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg6_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -2308,11 +2342,11 @@ define void @test_vsuxseg6_mask_nxv4i16_nxv4i32( %val, i16* %b
; CHECK-LABEL: test_vsuxseg6_mask_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -2328,12 +2362,12 @@ define void @test_vsuxseg7_nxv4i16_nxv4i16( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg7_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2346,12 +2380,12 @@ define void @test_vsuxseg7_mask_nxv4i16_nxv4i16( %val, i16* %b
; CHECK-LABEL: test_vsuxseg7_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2367,12 +2401,12 @@ define void @test_vsuxseg7_nxv4i16_nxv4i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg7_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2385,12 +2419,12 @@ define void @test_vsuxseg7_mask_nxv4i16_nxv4i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg7_mask_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2406,12 +2440,12 @@ define void @test_vsuxseg7_nxv4i16_nxv4i32( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg7_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -2424,12 +2458,12 @@ define void @test_vsuxseg7_mask_nxv4i16_nxv4i32( %val, i16* %b
; CHECK-LABEL: test_vsuxseg7_mask_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -2445,13 +2479,13 @@ define void @test_vsuxseg8_nxv4i16_nxv4i16( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg8_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2464,13 +2498,13 @@ define void @test_vsuxseg8_mask_nxv4i16_nxv4i16( %val, i16* %b
; CHECK-LABEL: test_vsuxseg8_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2486,13 +2520,13 @@ define void @test_vsuxseg8_nxv4i16_nxv4i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg8_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2505,13 +2539,13 @@ define void @test_vsuxseg8_mask_nxv4i16_nxv4i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg8_mask_nxv4i16_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2527,13 +2561,13 @@ define void @test_vsuxseg8_nxv4i16_nxv4i32( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg8_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -2546,13 +2580,13 @@ define void @test_vsuxseg8_mask_nxv4i16_nxv4i32( %val, i16* %b
; CHECK-LABEL: test_vsuxseg8_mask_nxv4i16_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -2567,6 +2601,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i8(,
define void @test_vsuxseg2_nxv1i32_nxv1i8( %val, i32* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -2580,6 +2615,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1i32_nxv1i8( %val, i32* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -2596,6 +2632,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i32(,
define void @test_vsuxseg2_nxv1i32_nxv1i32( %val, i32* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -2609,6 +2646,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1i32_nxv1i32( %val, i32* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -2625,6 +2663,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i16(,
define void @test_vsuxseg2_nxv1i32_nxv1i16( %val, i32* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -2638,6 +2677,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1i32_nxv1i16( %val, i32* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -2655,8 +2695,8 @@ define void @test_vsuxseg3_nxv1i32_nxv1i8( %val, i32* %base, <
; CHECK-LABEL: test_vsuxseg3_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2669,8 +2709,8 @@ define void @test_vsuxseg3_mask_nxv1i32_nxv1i8( %val, i32* %ba
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2686,8 +2726,8 @@ define void @test_vsuxseg3_nxv1i32_nxv1i32( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg3_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2700,8 +2740,8 @@ define void @test_vsuxseg3_mask_nxv1i32_nxv1i32( %val, i32* %b
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2717,8 +2757,8 @@ define void @test_vsuxseg3_nxv1i32_nxv1i16( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg3_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2731,8 +2771,8 @@ define void @test_vsuxseg3_mask_nxv1i32_nxv1i16( %val, i32* %b
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2748,9 +2788,9 @@ define void @test_vsuxseg4_nxv1i32_nxv1i8( %val, i32* %base, <
; CHECK-LABEL: test_vsuxseg4_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2763,9 +2803,9 @@ define void @test_vsuxseg4_mask_nxv1i32_nxv1i8( %val, i32* %ba
; CHECK-LABEL: test_vsuxseg4_mask_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2781,9 +2821,9 @@ define void @test_vsuxseg4_nxv1i32_nxv1i32( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg4_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2796,9 +2836,9 @@ define void @test_vsuxseg4_mask_nxv1i32_nxv1i32( %val, i32* %b
; CHECK-LABEL: test_vsuxseg4_mask_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2814,9 +2854,9 @@ define void @test_vsuxseg4_nxv1i32_nxv1i16( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg4_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2829,9 +2869,9 @@ define void @test_vsuxseg4_mask_nxv1i32_nxv1i16( %val, i32* %b
; CHECK-LABEL: test_vsuxseg4_mask_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2847,10 +2887,10 @@ define void @test_vsuxseg5_nxv1i32_nxv1i8( %val, i32* %base, <
; CHECK-LABEL: test_vsuxseg5_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2863,10 +2903,10 @@ define void @test_vsuxseg5_mask_nxv1i32_nxv1i8( %val, i32* %ba
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2882,10 +2922,10 @@ define void @test_vsuxseg5_nxv1i32_nxv1i32( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg5_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2898,10 +2938,10 @@ define void @test_vsuxseg5_mask_nxv1i32_nxv1i32( %val, i32* %b
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2917,10 +2957,10 @@ define void @test_vsuxseg5_nxv1i32_nxv1i16( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg5_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2933,10 +2973,10 @@ define void @test_vsuxseg5_mask_nxv1i32_nxv1i16( %val, i32* %b
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2952,11 +2992,11 @@ define void @test_vsuxseg6_nxv1i32_nxv1i8( %val, i32* %base, <
; CHECK-LABEL: test_vsuxseg6_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -2969,11 +3009,11 @@ define void @test_vsuxseg6_mask_nxv1i32_nxv1i8( %val, i32* %ba
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -2989,11 +3029,11 @@ define void @test_vsuxseg6_nxv1i32_nxv1i32( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg6_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -3006,11 +3046,11 @@ define void @test_vsuxseg6_mask_nxv1i32_nxv1i32( %val, i32* %b
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -3026,11 +3066,11 @@ define void @test_vsuxseg6_nxv1i32_nxv1i16( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg6_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -3043,11 +3083,11 @@ define void @test_vsuxseg6_mask_nxv1i32_nxv1i16( %val, i32* %b
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -3063,12 +3103,12 @@ define void @test_vsuxseg7_nxv1i32_nxv1i8( %val, i32* %base, <
; CHECK-LABEL: test_vsuxseg7_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -3081,12 +3121,12 @@ define void @test_vsuxseg7_mask_nxv1i32_nxv1i8( %val, i32* %ba
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -3102,12 +3142,12 @@ define void @test_vsuxseg7_nxv1i32_nxv1i32( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg7_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -3120,12 +3160,12 @@ define void @test_vsuxseg7_mask_nxv1i32_nxv1i32( %val, i32* %b
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -3141,12 +3181,12 @@ define void @test_vsuxseg7_nxv1i32_nxv1i16( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg7_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -3159,12 +3199,12 @@ define void @test_vsuxseg7_mask_nxv1i32_nxv1i16( %val, i32* %b
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -3180,13 +3220,13 @@ define void @test_vsuxseg8_nxv1i32_nxv1i8( %val, i32* %base, <
; CHECK-LABEL: test_vsuxseg8_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -3199,13 +3239,13 @@ define void @test_vsuxseg8_mask_nxv1i32_nxv1i8( %val, i32* %ba
; CHECK-LABEL: test_vsuxseg8_mask_nxv1i32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -3221,13 +3261,13 @@ define void @test_vsuxseg8_nxv1i32_nxv1i32( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg8_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -3240,13 +3280,13 @@ define void @test_vsuxseg8_mask_nxv1i32_nxv1i32( %val, i32* %b
; CHECK-LABEL: test_vsuxseg8_mask_nxv1i32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -3262,13 +3302,13 @@ define void @test_vsuxseg8_nxv1i32_nxv1i16( %val, i32* %base,
; CHECK-LABEL: test_vsuxseg8_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -3281,13 +3321,13 @@ define void @test_vsuxseg8_mask_nxv1i32_nxv1i16( %val, i32* %b
; CHECK-LABEL: test_vsuxseg8_mask_nxv1i32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -3302,6 +3342,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i16(,
define void @test_vsuxseg2_nxv8i16_nxv8i16( %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i16_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -3315,6 +3356,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8i16_nxv8i16( %val, i16* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i16_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -3331,6 +3373,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i8(,
define void @test_vsuxseg2_nxv8i16_nxv8i8( %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i16_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -3344,6 +3387,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8i16_nxv8i8( %val, i16* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i16_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -3360,6 +3404,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i32(,
define void @test_vsuxseg2_nxv8i16_nxv8i32( %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i16_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12
@@ -3372,6 +3417,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8i16_nxv8i32( %val, i16* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i16_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12, v0.t
@@ -3388,8 +3434,8 @@ define void @test_vsuxseg3_nxv8i16_nxv8i16( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg3_nxv8i16_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -3402,8 +3448,8 @@ define void @test_vsuxseg3_mask_nxv8i16_nxv8i16( %val, i16* %b
; CHECK-LABEL: test_vsuxseg3_mask_nxv8i16_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -3419,8 +3465,8 @@ define void @test_vsuxseg3_nxv8i16_nxv8i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg3_nxv8i16_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -3433,8 +3479,8 @@ define void @test_vsuxseg3_mask_nxv8i16_nxv8i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg3_mask_nxv8i16_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -3449,11 +3495,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i32(,
define void @test_vsuxseg3_nxv8i16_nxv8i32( %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv8i16_nxv8i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
-; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16
+; CHECK-NEXT: vsuxseg3ei32.v v16, (a0), v12
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i32( %val, %val, %val, i16* %base, %index, i32 %vl)
@@ -3463,11 +3509,11 @@ entry:
define void @test_vsuxseg3_mask_nxv8i16_nxv8i32( %val, i16* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv8i16_nxv8i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
-; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16, v0.t
+; CHECK-NEXT: vsuxseg3ei32.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i32( %val, %val, %val, i16* %base, %index, %mask, i32 %vl)
@@ -3481,9 +3527,9 @@ define void @test_vsuxseg4_nxv8i16_nxv8i16( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg4_nxv8i16_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -3496,9 +3542,9 @@ define void @test_vsuxseg4_mask_nxv8i16_nxv8i16( %val, i16* %b
; CHECK-LABEL: test_vsuxseg4_mask_nxv8i16_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -3514,9 +3560,9 @@ define void @test_vsuxseg4_nxv8i16_nxv8i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg4_nxv8i16_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -3529,9 +3575,9 @@ define void @test_vsuxseg4_mask_nxv8i16_nxv8i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg4_mask_nxv8i16_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -3547,9 +3593,9 @@ define void @test_vsuxseg4_nxv8i16_nxv8i32( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg4_nxv8i16_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v16, (a0), v12
; CHECK-NEXT: ret
@@ -3562,9 +3608,9 @@ define void @test_vsuxseg4_mask_nxv8i16_nxv8i32( %val, i16* %b
; CHECK-LABEL: test_vsuxseg4_mask_nxv8i16_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
@@ -3579,6 +3625,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i16(,
define void @test_vsuxseg2_nxv8i8_nxv8i16( %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
@@ -3591,6 +3638,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8i8_nxv8i16( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
@@ -3606,6 +3654,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i8(,
define void @test_vsuxseg2_nxv8i8_nxv8i8( %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i8_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -3619,6 +3668,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8i8_nxv8i8( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i8_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -3635,6 +3685,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i32(,
define void @test_vsuxseg2_nxv8i8_nxv8i32( %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12
@@ -3647,6 +3698,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12, v0.t
@@ -3662,11 +3714,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i16(,
define void @test_vsuxseg3_nxv8i8_nxv8i16( %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsuxseg3ei16.v v8, (a0), v12
+; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i16( %val, %val, %val, i8* %base, %index, i32 %vl)
@@ -3676,11 +3728,11 @@ entry:
define void @test_vsuxseg3_mask_nxv8i8_nxv8i16( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsuxseg3ei16.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i16( %val, %val, %val, i8* %base, %index, %mask, i32 %vl)
@@ -3694,8 +3746,8 @@ define void @test_vsuxseg3_nxv8i8_nxv8i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg3_mask_nxv8i8_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -3724,6 +3776,7 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i32(,
define void @test_vsuxseg3_nxv8i8_nxv8i32( %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -3737,6 +3790,7 @@ entry:
define void @test_vsuxseg3_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -3754,9 +3808,9 @@ define void @test_vsuxseg4_nxv8i8_nxv8i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg4_mask_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -3787,9 +3841,9 @@ define void @test_vsuxseg4_nxv8i8_nxv8i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg4_mask_nxv8i8_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -3819,6 +3873,7 @@ declare void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i32(,
define void @test_vsuxseg4_nxv8i8_nxv8i32( %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -3833,6 +3888,7 @@ entry:
define void @test_vsuxseg4_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
@@ -3851,10 +3907,10 @@ define void @test_vsuxseg5_nxv8i8_nxv8i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg5_mask_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -3886,10 +3942,10 @@ define void @test_vsuxseg5_nxv8i8_nxv8i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg5_mask_nxv8i8_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -3920,13 +3976,13 @@ declare void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i32(,
define void @test_vsuxseg5_nxv8i8_nxv8i32( %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsuxseg5ei32.v v8, (a0), v16
+; CHECK-NEXT: vsuxseg5ei32.v v16, (a0), v12
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, i8* %base, %index, i32 %vl)
@@ -3936,13 +3992,13 @@ entry:
define void @test_vsuxseg5_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsuxseg5ei32.v v8, (a0), v16, v0.t
+; CHECK-NEXT: vsuxseg5ei32.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i32 %vl)
@@ -3956,11 +4012,11 @@ define void @test_vsuxseg6_nxv8i8_nxv8i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg6_mask_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -3993,11 +4049,11 @@ define void @test_vsuxseg6_nxv8i8_nxv8i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg6_mask_nxv8i8_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -4030,11 +4086,11 @@ define void @test_vsuxseg6_nxv8i8_nxv8i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg6_mask_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
@@ -4067,12 +4123,12 @@ define void @test_vsuxseg7_nxv8i8_nxv8i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg7_mask_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -4106,12 +4162,12 @@ define void @test_vsuxseg7_nxv8i8_nxv8i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg7_mask_nxv8i8_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -4145,12 +4201,12 @@ define void @test_vsuxseg7_nxv8i8_nxv8i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg7_mask_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
-; CHECK-NEXT: vmv1r.v v22, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
+; CHECK-NEXT: vmv1r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
@@ -4184,13 +4240,13 @@ define void @test_vsuxseg8_nxv8i8_nxv8i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg8_mask_nxv8i8_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -4225,13 +4281,13 @@ define void @test_vsuxseg8_nxv8i8_nxv8i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg8_mask_nxv8i8_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -4266,13 +4322,13 @@ define void @test_vsuxseg8_nxv8i8_nxv8i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg8_mask_nxv8i8_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
-; CHECK-NEXT: vmv1r.v v22, v8
-; CHECK-NEXT: vmv1r.v v23, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
+; CHECK-NEXT: vmv1r.v v22, v16
+; CHECK-NEXT: vmv1r.v v23, v16
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
@@ -4306,6 +4362,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i16(,
define void @test_vsuxseg2_nxv8i32_nxv8i16( %val, i32* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i32_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -4319,6 +4376,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8i32_nxv8i16( %val, i32* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i32_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -4335,6 +4393,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i8(,
define void @test_vsuxseg2_nxv8i32_nxv8i8( %val, i32* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i32_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -4348,6 +4407,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8i32_nxv8i8( %val, i32* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i32_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -4364,6 +4424,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i32(,
define void @test_vsuxseg2_nxv8i32_nxv8i32( %val, i32* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i32_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -4377,6 +4438,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8i32_nxv8i32( %val, i32* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i32_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -4393,6 +4455,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i16(,
define void @test_vsuxseg2_nxv4i8_nxv4i16( %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
@@ -4406,6 +4469,7 @@ entry:
define void @test_vsuxseg2_mask_nxv4i8_nxv4i16( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
@@ -4422,6 +4486,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i8(,
define void @test_vsuxseg2_nxv4i8_nxv4i8( %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
@@ -4435,6 +4500,7 @@ entry:
define void @test_vsuxseg2_mask_nxv4i8_nxv4i8( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
@@ -4451,6 +4517,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i32(,
define void @test_vsuxseg2_nxv4i8_nxv4i32( %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
@@ -4463,6 +4530,7 @@ entry:
define void @test_vsuxseg2_mask_nxv4i8_nxv4i32( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
@@ -4479,8 +4547,8 @@ define void @test_vsuxseg3_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg3_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -4510,8 +4578,8 @@ define void @test_vsuxseg3_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg3_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -4540,11 +4608,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i32(,
define void @test_vsuxseg3_nxv4i8_nxv4i32( %val, i8* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
-; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12
+; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i32( %val, %val, %val, i8* %base, %index, i32 %vl)
@@ -4554,11 +4622,11 @@ entry:
define void @test_vsuxseg3_mask_nxv4i8_nxv4i32( %val, i8* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
-; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i32( %val, %val, %val, i8* %base, %index, %mask, i32 %vl)
@@ -4572,9 +4640,9 @@ define void @test_vsuxseg4_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg4_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -4605,9 +4673,9 @@ define void @test_vsuxseg4_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg4_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -4638,9 +4706,9 @@ define void @test_vsuxseg4_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg4_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -4671,10 +4739,10 @@ define void @test_vsuxseg5_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg5_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -4706,10 +4774,10 @@ define void @test_vsuxseg5_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg5_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -4741,10 +4809,10 @@ define void @test_vsuxseg5_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg5_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -4776,11 +4844,11 @@ define void @test_vsuxseg6_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg6_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -4813,11 +4881,11 @@ define void @test_vsuxseg6_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg6_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -4850,11 +4918,11 @@ define void @test_vsuxseg6_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg6_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -4887,12 +4955,12 @@ define void @test_vsuxseg7_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg7_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -4926,12 +4994,12 @@ define void @test_vsuxseg7_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg7_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -4965,12 +5033,12 @@ define void @test_vsuxseg7_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg7_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -5004,13 +5072,13 @@ define void @test_vsuxseg8_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg8_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -5045,13 +5113,13 @@ define void @test_vsuxseg8_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg8_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -5086,13 +5154,13 @@ define void @test_vsuxseg8_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg8_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -5126,6 +5194,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i8(,
define void @test_vsuxseg2_nxv1i16_nxv1i8( %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i16_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -5139,6 +5208,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1i16_nxv1i8( %val, i16* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i16_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -5155,6 +5225,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i32(,
define void @test_vsuxseg2_nxv1i16_nxv1i32( %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i16_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -5168,6 +5239,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1i16_nxv1i32( %val, i16* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i16_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -5184,6 +5256,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i16(,
define void @test_vsuxseg2_nxv1i16_nxv1i16( %val, i16* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i16_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -5197,6 +5270,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1i16_nxv1i16( %val, i16* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i16_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -5214,8 +5288,8 @@ define void @test_vsuxseg3_nxv1i16_nxv1i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg3_nxv1i16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -5228,8 +5302,8 @@ define void @test_vsuxseg3_mask_nxv1i16_nxv1i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -5245,8 +5319,8 @@ define void @test_vsuxseg3_nxv1i16_nxv1i32( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg3_nxv1i16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
;
CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5259,8 +5333,8 @@ define void @test_vsuxseg3_mask_nxv1i16_nxv1i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5276,8 +5350,8 @@ define void @test_vsuxseg3_nxv1i16_nxv1i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg3_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5290,8 +5364,8 @@ define void @test_vsuxseg3_mask_nxv1i16_nxv1i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5307,9 +5381,9 @@ define void @test_vsuxseg4_nxv1i16_nxv1i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg4_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5322,9 +5396,9 @@ define void @test_vsuxseg4_mask_nxv1i16_nxv1i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5340,9 +5414,9 @@ define void @test_vsuxseg4_nxv1i16_nxv1i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg4_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5355,9 +5429,9 @@ define void @test_vsuxseg4_mask_nxv1i16_nxv1i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5373,9 +5447,9 @@ define void @test_vsuxseg4_nxv1i16_nxv1i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg4_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 
-; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5388,9 +5462,9 @@ define void @test_vsuxseg4_mask_nxv1i16_nxv1i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5406,10 +5480,10 @@ define void @test_vsuxseg5_nxv1i16_nxv1i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg5_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5422,10 +5496,10 @@ define void @test_vsuxseg5_mask_nxv1i16_nxv1i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5441,10 +5515,10 @@ define void @test_vsuxseg5_nxv1i16_nxv1i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg5_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5457,10 +5531,10 @@ define void @test_vsuxseg5_mask_nxv1i16_nxv1i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5476,10 +5550,10 @@ define void @test_vsuxseg5_nxv1i16_nxv1i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg5_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9 
; CHECK-NEXT: ret @@ -5492,10 +5566,10 @@ define void @test_vsuxseg5_mask_nxv1i16_nxv1i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5511,11 +5585,11 @@ define void @test_vsuxseg6_nxv1i16_nxv1i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg6_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5528,11 +5602,11 @@ define void @test_vsuxseg6_mask_nxv1i16_nxv1i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5548,11 +5622,11 @@ define void @test_vsuxseg6_nxv1i16_nxv1i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg6_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5565,11 +5639,11 @@ define void @test_vsuxseg6_mask_nxv1i16_nxv1i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5585,11 +5659,11 @@ define void @test_vsuxseg6_nxv1i16_nxv1i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg6_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v 
v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5602,11 +5676,11 @@ define void @test_vsuxseg6_mask_nxv1i16_nxv1i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5622,12 +5696,12 @@ define void @test_vsuxseg7_nxv1i16_nxv1i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg7_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5640,12 +5714,12 @@ define void @test_vsuxseg7_mask_nxv1i16_nxv1i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5661,12 +5735,12 @@ define void @test_vsuxseg7_nxv1i16_nxv1i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg7_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5679,12 +5753,12 @@ define void @test_vsuxseg7_mask_nxv1i16_nxv1i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5700,12 +5774,12 @@ define void @test_vsuxseg7_nxv1i16_nxv1i16( %val, i16* 
%base, ; CHECK-LABEL: test_vsuxseg7_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5718,12 +5792,12 @@ define void @test_vsuxseg7_mask_nxv1i16_nxv1i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5739,13 +5813,13 @@ define void @test_vsuxseg8_nxv1i16_nxv1i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg8_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5758,13 +5832,13 @@ define void @test_vsuxseg8_mask_nxv1i16_nxv1i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i16_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5780,13 +5854,13 @@ define void @test_vsuxseg8_nxv1i16_nxv1i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg8_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5799,13 +5873,13 @@ define void 
@test_vsuxseg8_mask_nxv1i16_nxv1i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i16_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5821,13 +5895,13 @@ define void @test_vsuxseg8_nxv1i16_nxv1i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg8_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -5840,13 +5914,13 @@ define void @test_vsuxseg8_mask_nxv1i16_nxv1i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i16_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -5861,6 +5935,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv32i8.nxv32i16(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv32i8_nxv32i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16 @@ -5873,6 +5948,7 @@ entry: define void @test_vsuxseg2_mask_nxv32i8_nxv32i16( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv32i8_nxv32i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16, v0.t @@ -5888,6 +5964,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv32i8.nxv32i8(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv32i8_nxv32i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu @@ -5901,6 +5978,7 @@ entry: define void @test_vsuxseg2_mask_nxv32i8_nxv32i8( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv32i8_nxv32i8: ; CHECK: # %bb.0: # %entry +; 
CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu @@ -5917,6 +5995,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i32(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -5930,6 +6009,7 @@ entry: define void @test_vsuxseg2_mask_nxv2i8_nxv2i32( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -5946,6 +6026,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i8(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -5959,6 +6040,7 @@ entry: define void @test_vsuxseg2_mask_nxv2i8_nxv2i8( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -5975,6 +6057,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i16(, %val, i8* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -5988,6 +6071,7 @@ entry: define void @test_vsuxseg2_mask_nxv2i8_nxv2i16( %val, i8* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu @@ -6005,8 +6089,8 @@ define void @test_vsuxseg3_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6036,8 +6120,8 @@ define void @test_vsuxseg3_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6067,8 +6151,8 @@ define void @test_vsuxseg3_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v 
v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6098,9 +6182,9 @@ define void @test_vsuxseg4_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6131,9 +6215,9 @@ define void @test_vsuxseg4_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6164,9 +6248,9 @@ define void @test_vsuxseg4_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6197,10 +6281,10 @@ define void @test_vsuxseg5_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6232,10 +6316,10 @@ define void @test_vsuxseg5_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6267,10 +6351,10 @@ define void @test_vsuxseg5_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6302,11 +6386,11 @@ define void @test_vsuxseg6_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i8_nxv2i32: ; 
CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6339,11 +6423,11 @@ define void @test_vsuxseg6_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6376,11 +6460,11 @@ define void @test_vsuxseg6_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6413,12 +6497,12 @@ define void @test_vsuxseg7_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6452,12 +6536,12 @@ define void @test_vsuxseg7_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6491,12 +6575,12 @@ define void @test_vsuxseg7_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; 
CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6530,13 +6614,13 @@ define void @test_vsuxseg8_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6571,13 +6655,13 @@ define void @test_vsuxseg8_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6612,13 +6696,13 @@ define void @test_vsuxseg8_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6652,6 +6736,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i32(, %val, i16* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -6665,6 +6750,7 @@ entry: define void @test_vsuxseg2_mask_nxv2i16_nxv2i32( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -6681,6 +6767,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i8(, %val, i16* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed 
$v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -6694,6 +6781,7 @@ entry: define void @test_vsuxseg2_mask_nxv2i16_nxv2i8( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -6710,6 +6798,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i16(, %val, i16* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -6723,6 +6812,7 @@ entry: define void @test_vsuxseg2_mask_nxv2i16_nxv2i16( %val, i16* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -6740,8 +6830,8 @@ define void @test_vsuxseg3_nxv2i16_nxv2i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg3_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6754,8 +6844,8 @@ define void @test_vsuxseg3_mask_nxv2i16_nxv2i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6771,8 +6861,8 @@ define void @test_vsuxseg3_nxv2i16_nxv2i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg3_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6785,8 +6875,8 @@ define void @test_vsuxseg3_mask_nxv2i16_nxv2i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6802,8 +6892,8 @@ define void @test_vsuxseg3_nxv2i16_nxv2i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg3_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6816,8 +6906,8 @@ define void @test_vsuxseg3_mask_nxv2i16_nxv2i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; 
CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6833,9 +6923,9 @@ define void @test_vsuxseg4_nxv2i16_nxv2i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg4_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6848,9 +6938,9 @@ define void @test_vsuxseg4_mask_nxv2i16_nxv2i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6866,9 +6956,9 @@ define void @test_vsuxseg4_nxv2i16_nxv2i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg4_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6881,9 +6971,9 @@ define void @test_vsuxseg4_mask_nxv2i16_nxv2i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6899,9 +6989,9 @@ define void @test_vsuxseg4_nxv2i16_nxv2i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg4_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6914,9 +7004,9 @@ define void @test_vsuxseg4_mask_nxv2i16_nxv2i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6932,10 +7022,10 @@ define void @test_vsuxseg5_nxv2i16_nxv2i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg5_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v 
v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6948,10 +7038,10 @@ define void @test_vsuxseg5_mask_nxv2i16_nxv2i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6967,10 +7057,10 @@ define void @test_vsuxseg5_nxv2i16_nxv2i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg5_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -6983,10 +7073,10 @@ define void @test_vsuxseg5_mask_nxv2i16_nxv2i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7002,10 +7092,10 @@ define void @test_vsuxseg5_nxv2i16_nxv2i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg5_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7018,10 +7108,10 @@ define void @test_vsuxseg5_mask_nxv2i16_nxv2i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7037,11 +7127,11 @@ define void @test_vsuxseg6_nxv2i16_nxv2i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg6_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, 
mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7054,11 +7144,11 @@ define void @test_vsuxseg6_mask_nxv2i16_nxv2i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7074,11 +7164,11 @@ define void @test_vsuxseg6_nxv2i16_nxv2i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg6_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7091,11 +7181,11 @@ define void @test_vsuxseg6_mask_nxv2i16_nxv2i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7111,11 +7201,11 @@ define void @test_vsuxseg6_nxv2i16_nxv2i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg6_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7128,11 +7218,11 @@ define void @test_vsuxseg6_mask_nxv2i16_nxv2i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7148,12 +7238,12 @@ define void @test_vsuxseg7_nxv2i16_nxv2i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg7_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; 
CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7166,12 +7256,12 @@ define void @test_vsuxseg7_mask_nxv2i16_nxv2i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7187,12 +7277,12 @@ define void @test_vsuxseg7_nxv2i16_nxv2i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg7_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7205,12 +7295,12 @@ define void @test_vsuxseg7_mask_nxv2i16_nxv2i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7226,12 +7316,12 @@ define void @test_vsuxseg7_nxv2i16_nxv2i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg7_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7244,12 +7334,12 @@ define void @test_vsuxseg7_mask_nxv2i16_nxv2i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: 
vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7265,13 +7355,13 @@ define void @test_vsuxseg8_nxv2i16_nxv2i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg8_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7284,13 +7374,13 @@ define void @test_vsuxseg8_mask_nxv2i16_nxv2i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7306,13 +7396,13 @@ define void @test_vsuxseg8_nxv2i16_nxv2i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg8_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7325,13 +7415,13 @@ define void @test_vsuxseg8_mask_nxv2i16_nxv2i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7347,13 +7437,13 @@ define void @test_vsuxseg8_nxv2i16_nxv2i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg8_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; 
CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7366,13 +7456,13 @@ define void @test_vsuxseg8_mask_nxv2i16_nxv2i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7387,6 +7477,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i16(, %val, i32* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -7400,6 +7491,7 @@ entry: define void @test_vsuxseg2_mask_nxv4i32_nxv4i16( %val, i32* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -7416,6 +7508,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i8(, %val, i32* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -7429,6 +7522,7 @@ entry: define void @test_vsuxseg2_mask_nxv4i32_nxv4i8( %val, i32* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -7445,6 +7539,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i32(, %val, i32* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -7458,6 +7553,7 @@ entry: define void @test_vsuxseg2_mask_nxv4i32_nxv4i32( %val, i32* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -7475,8 +7571,8 @@ define void @test_vsuxseg3_nxv4i32_nxv4i16( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry ; 
CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7489,8 +7585,8 @@ define void @test_vsuxseg3_mask_nxv4i32_nxv4i16( %val, i32* %b ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7506,8 +7602,8 @@ define void @test_vsuxseg3_nxv4i32_nxv4i8( %val, i32* %base, < ; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7520,8 +7616,8 @@ define void @test_vsuxseg3_mask_nxv4i32_nxv4i8( %val, i32* %ba ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7537,8 +7633,8 @@ define void @test_vsuxseg3_nxv4i32_nxv4i32( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7551,8 +7647,8 @@ define void @test_vsuxseg3_mask_nxv4i32_nxv4i32( %val, i32* %b ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7568,9 +7664,9 @@ define void @test_vsuxseg4_nxv4i32_nxv4i16( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7583,9 +7679,9 @@ define void @test_vsuxseg4_mask_nxv4i32_nxv4i16( %val, i32* %b ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7601,9 +7697,9 @@ define void @test_vsuxseg4_nxv4i32_nxv4i8( %val, i32* %base, < ; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry ; 
CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7616,9 +7712,9 @@ define void @test_vsuxseg4_mask_nxv4i32_nxv4i8( %val, i32* %ba ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7634,9 +7730,9 @@ define void @test_vsuxseg4_nxv4i32_nxv4i32( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7649,9 +7745,9 @@ define void @test_vsuxseg4_mask_nxv4i32_nxv4i32( %val, i32* %b ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7666,6 +7762,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16f16.nxv16i16(, %val, half* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv16f16_nxv16i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu @@ -7679,6 +7776,7 @@ entry: define void @test_vsuxseg2_mask_nxv16f16_nxv16i16( %val, half* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv16f16_nxv16i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu @@ -7695,6 +7793,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16f16.nxv16i8(, %val, half* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv16f16_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu @@ -7708,6 +7807,7 @@ entry: define void @test_vsuxseg2_mask_nxv16f16_nxv16i8( %val, half* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv16f16_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu @@ -7724,6 +7824,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16f16.nxv16i32(, %val, half* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv16f16_nxv16i32: ; CHECK: # %bb.0: # %entry 
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16 @@ -7736,6 +7837,7 @@ entry: define void @test_vsuxseg2_mask_nxv16f16_nxv16i32( %val, half* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv16f16_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16, v0.t @@ -7751,6 +7853,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i16(, %val, double* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f64_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -7764,6 +7867,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f64_nxv4i16( %val, double* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f64_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -7780,6 +7884,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i8(, %val, double* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f64_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -7793,6 +7898,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f64_nxv4i8( %val, double* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f64_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -7809,6 +7915,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i32(, %val, double* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f64_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -7822,6 +7929,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f64_nxv4i32( %val, double* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f64_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -7838,6 +7946,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i8(, %val, double* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -7851,6 +7960,7 @@ entry: define void @test_vsuxseg2_mask_nxv1f64_nxv1i8( %val, double* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v 
v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -7867,6 +7977,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i32(, %val, double* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -7880,6 +7991,7 @@ entry: define void @test_vsuxseg2_mask_nxv1f64_nxv1i32( %val, double* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -7896,6 +8008,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i16(, %val, double* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -7909,6 +8022,7 @@ entry: define void @test_vsuxseg2_mask_nxv1f64_nxv1i16( %val, double* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -7926,8 +8040,8 @@ define void @test_vsuxseg3_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg3_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7940,8 +8054,8 @@ define void @test_vsuxseg3_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7957,8 +8071,8 @@ define void @test_vsuxseg3_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsuxseg3_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7971,8 +8085,8 @@ define void @test_vsuxseg3_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7988,8 +8102,8 @@ define void @test_vsuxseg3_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsuxseg3_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; 
CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8002,8 +8116,8 @@ define void @test_vsuxseg3_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8019,9 +8133,9 @@ define void @test_vsuxseg4_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg4_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8034,9 +8148,9 @@ define void @test_vsuxseg4_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8052,9 +8166,9 @@ define void @test_vsuxseg4_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsuxseg4_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8067,9 +8181,9 @@ define void @test_vsuxseg4_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8085,9 +8199,9 @@ define void @test_vsuxseg4_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsuxseg4_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8100,9 +8214,9 @@ define void @test_vsuxseg4_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8118,10 +8232,10 @@ define void 
@test_vsuxseg5_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg5_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8134,10 +8248,10 @@ define void @test_vsuxseg5_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8153,10 +8267,10 @@ define void @test_vsuxseg5_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsuxseg5_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8169,10 +8283,10 @@ define void @test_vsuxseg5_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8188,10 +8302,10 @@ define void @test_vsuxseg5_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsuxseg5_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8204,10 +8318,10 @@ define void @test_vsuxseg5_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8223,11 +8337,11 @@ define void @test_vsuxseg6_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg6_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v 
v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8240,11 +8354,11 @@ define void @test_vsuxseg6_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8260,11 +8374,11 @@ define void @test_vsuxseg6_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsuxseg6_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8277,11 +8391,11 @@ define void @test_vsuxseg6_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8297,11 +8411,11 @@ define void @test_vsuxseg6_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsuxseg6_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8314,11 +8428,11 @@ define void @test_vsuxseg6_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8334,12 +8448,12 @@ define void @test_vsuxseg7_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg7_nxv1f64_nxv1i8: ; 
CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8352,12 +8466,12 @@ define void @test_vsuxseg7_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8373,12 +8487,12 @@ define void @test_vsuxseg7_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsuxseg7_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8391,12 +8505,12 @@ define void @test_vsuxseg7_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8412,12 +8526,12 @@ define void @test_vsuxseg7_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsuxseg7_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8430,12 +8544,12 @@ define void @test_vsuxseg7_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; 
CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8451,13 +8565,13 @@ define void @test_vsuxseg8_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg8_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8470,13 +8584,13 @@ define void @test_vsuxseg8_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8492,13 +8606,13 @@ define void @test_vsuxseg8_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsuxseg8_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8511,13 +8625,13 @@ define void @test_vsuxseg8_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8533,13 +8647,13 @@ define void @test_vsuxseg8_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsuxseg8_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; 
CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8552,13 +8666,13 @@ define void @test_vsuxseg8_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8573,6 +8687,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i32(, %val, float* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -8586,6 +8701,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f32_nxv2i32( %val, float* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -8602,6 +8718,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i8(, %val, float* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -8615,6 +8732,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f32_nxv2i8( %val, float* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -8631,6 +8749,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i16(, %val, float* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -8644,6 +8763,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f32_nxv2i16( %val, float* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -8661,8 +8781,8 @@ define void 
@test_vsuxseg3_nxv2f32_nxv2i32( %val, float* %ba ; CHECK-LABEL: test_vsuxseg3_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8675,8 +8795,8 @@ define void @test_vsuxseg3_mask_nxv2f32_nxv2i32( %val, float ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8692,8 +8812,8 @@ define void @test_vsuxseg3_nxv2f32_nxv2i8( %val, float* %bas ; CHECK-LABEL: test_vsuxseg3_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8706,8 +8826,8 @@ define void @test_vsuxseg3_mask_nxv2f32_nxv2i8( %val, float* ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8723,8 +8843,8 @@ define void @test_vsuxseg3_nxv2f32_nxv2i16( %val, float* %ba ; CHECK-LABEL: test_vsuxseg3_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8737,8 +8857,8 @@ define void @test_vsuxseg3_mask_nxv2f32_nxv2i16( %val, float ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8754,9 +8874,9 @@ define void @test_vsuxseg4_nxv2f32_nxv2i32( %val, float* %ba ; CHECK-LABEL: test_vsuxseg4_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8769,9 +8889,9 @@ define void @test_vsuxseg4_mask_nxv2f32_nxv2i32( %val, float ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8787,9 +8907,9 @@ define void 
@test_vsuxseg4_nxv2f32_nxv2i8( %val, float* %bas ; CHECK-LABEL: test_vsuxseg4_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8802,9 +8922,9 @@ define void @test_vsuxseg4_mask_nxv2f32_nxv2i8( %val, float* ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8820,9 +8940,9 @@ define void @test_vsuxseg4_nxv2f32_nxv2i16( %val, float* %ba ; CHECK-LABEL: test_vsuxseg4_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8835,9 +8955,9 @@ define void @test_vsuxseg4_mask_nxv2f32_nxv2i16( %val, float ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8853,10 +8973,10 @@ define void @test_vsuxseg5_nxv2f32_nxv2i32( %val, float* %ba ; CHECK-LABEL: test_vsuxseg5_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8869,10 +8989,10 @@ define void @test_vsuxseg5_mask_nxv2f32_nxv2i32( %val, float ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8888,10 +9008,10 @@ define void @test_vsuxseg5_nxv2f32_nxv2i8( %val, float* %bas ; CHECK-LABEL: test_vsuxseg5_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), 
v9 ; CHECK-NEXT: ret @@ -8904,10 +9024,10 @@ define void @test_vsuxseg5_mask_nxv2f32_nxv2i8( %val, float* ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8923,10 +9043,10 @@ define void @test_vsuxseg5_nxv2f32_nxv2i16( %val, float* %ba ; CHECK-LABEL: test_vsuxseg5_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8939,10 +9059,10 @@ define void @test_vsuxseg5_mask_nxv2f32_nxv2i16( %val, float ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8958,11 +9078,11 @@ define void @test_vsuxseg6_nxv2f32_nxv2i32( %val, float* %ba ; CHECK-LABEL: test_vsuxseg6_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8975,11 +9095,11 @@ define void @test_vsuxseg6_mask_nxv2f32_nxv2i32( %val, float ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8995,11 +9115,11 @@ define void @test_vsuxseg6_nxv2f32_nxv2i8( %val, float* %bas ; CHECK-LABEL: test_vsuxseg6_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -9012,11 +9132,11 
@@ define void @test_vsuxseg6_mask_nxv2f32_nxv2i8( %val, float*
; CHECK-LABEL: test_vsuxseg6_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9032,11 +9152,11 @@ define void @test_vsuxseg6_nxv2f32_nxv2i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg6_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9049,11 +9169,11 @@ define void @test_vsuxseg6_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsuxseg6_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9069,12 +9189,12 @@ define void @test_vsuxseg7_nxv2f32_nxv2i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg7_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9087,12 +9207,12 @@ define void @test_vsuxseg7_mask_nxv2f32_nxv2i32( %val, float
; CHECK-LABEL: test_vsuxseg7_mask_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9108,12 +9228,12 @@ define void @test_vsuxseg7_nxv2f32_nxv2i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg7_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9126,12 +9246,12 @@ define void @test_vsuxseg7_mask_nxv2f32_nxv2i8( %val, float*
; CHECK-LABEL: test_vsuxseg7_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9147,12 +9267,12 @@ define void @test_vsuxseg7_nxv2f32_nxv2i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg7_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9165,12 +9285,12 @@ define void @test_vsuxseg7_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsuxseg7_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9186,13 +9306,13 @@ define void @test_vsuxseg8_nxv2f32_nxv2i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg8_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9205,13 +9325,13 @@ define void @test_vsuxseg8_mask_nxv2f32_nxv2i32( %val, float
; CHECK-LABEL: test_vsuxseg8_mask_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9227,13 +9347,13 @@ define void @test_vsuxseg8_nxv2f32_nxv2i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg8_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9246,13 +9366,13 @@ define void @test_vsuxseg8_mask_nxv2f32_nxv2i8( %val, float*
; CHECK-LABEL: test_vsuxseg8_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9268,13 +9388,13 @@ define void @test_vsuxseg8_nxv2f32_nxv2i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg8_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9287,13 +9407,13 @@ define void @test_vsuxseg8_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsuxseg8_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9308,6 +9428,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f16.nxv1i8(, %val, half* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -9321,6 +9442,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f16_nxv1i8( %val, half* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -9337,6 +9459,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f16.nxv1i32(, %val, half* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -9350,6 +9473,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f16_nxv1i32( %val, half* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -9366,6 +9490,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f16.nxv1i16(, %val, half* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -9379,6 +9504,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f16_nxv1i16( %val, half* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -9396,8 +9522,8 @@ define void @test_vsuxseg3_nxv1f16_nxv1i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg3_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9410,8 +9536,8 @@ define void @test_vsuxseg3_mask_nxv1f16_nxv1i8( %val, half* %
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9427,8 +9553,8 @@ define void @test_vsuxseg3_nxv1f16_nxv1i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg3_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9441,8 +9567,8 @@ define void @test_vsuxseg3_mask_nxv1f16_nxv1i32( %val, half*
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9458,8 +9584,8 @@ define void @test_vsuxseg3_nxv1f16_nxv1i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg3_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9472,8 +9598,8 @@ define void @test_vsuxseg3_mask_nxv1f16_nxv1i16( %val, half*
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9489,9 +9615,9 @@ define void @test_vsuxseg4_nxv1f16_nxv1i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg4_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9504,9 +9630,9 @@ define void @test_vsuxseg4_mask_nxv1f16_nxv1i8( %val, half* %
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9522,9 +9648,9 @@ define void @test_vsuxseg4_nxv1f16_nxv1i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg4_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9537,9 +9663,9 @@ define void @test_vsuxseg4_mask_nxv1f16_nxv1i32( %val, half*
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9555,9 +9681,9 @@ define void @test_vsuxseg4_nxv1f16_nxv1i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg4_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9570,9 +9696,9 @@ define void @test_vsuxseg4_mask_nxv1f16_nxv1i16( %val, half*
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9588,10 +9714,10 @@ define void @test_vsuxseg5_nxv1f16_nxv1i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg5_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9604,10 +9730,10 @@ define void @test_vsuxseg5_mask_nxv1f16_nxv1i8( %val, half* %
; CHECK-LABEL: test_vsuxseg5_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9623,10 +9749,10 @@ define void @test_vsuxseg5_nxv1f16_nxv1i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg5_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9639,10 +9765,10 @@ define void @test_vsuxseg5_mask_nxv1f16_nxv1i32( %val, half*
; CHECK-LABEL: test_vsuxseg5_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9658,10 +9784,10 @@ define void @test_vsuxseg5_nxv1f16_nxv1i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg5_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9674,10 +9800,10 @@ define void @test_vsuxseg5_mask_nxv1f16_nxv1i16( %val, half*
; CHECK-LABEL: test_vsuxseg5_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9693,11 +9819,11 @@ define void @test_vsuxseg6_nxv1f16_nxv1i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg6_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9710,11 +9836,11 @@ define void @test_vsuxseg6_mask_nxv1f16_nxv1i8( %val, half* %
; CHECK-LABEL: test_vsuxseg6_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9730,11 +9856,11 @@ define void @test_vsuxseg6_nxv1f16_nxv1i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg6_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9747,11 +9873,11 @@ define void @test_vsuxseg6_mask_nxv1f16_nxv1i32( %val, half*
; CHECK-LABEL: test_vsuxseg6_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9767,11 +9893,11 @@ define void @test_vsuxseg6_nxv1f16_nxv1i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg6_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9784,11 +9910,11 @@ define void @test_vsuxseg6_mask_nxv1f16_nxv1i16( %val, half*
; CHECK-LABEL: test_vsuxseg6_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9804,12 +9930,12 @@ define void @test_vsuxseg7_nxv1f16_nxv1i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg7_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9822,12 +9948,12 @@ define void @test_vsuxseg7_mask_nxv1f16_nxv1i8( %val, half* %
; CHECK-LABEL: test_vsuxseg7_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9843,12 +9969,12 @@ define void @test_vsuxseg7_nxv1f16_nxv1i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg7_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9861,12 +9987,12 @@ define void @test_vsuxseg7_mask_nxv1f16_nxv1i32( %val, half*
; CHECK-LABEL: test_vsuxseg7_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9882,12 +10008,12 @@ define void @test_vsuxseg7_nxv1f16_nxv1i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg7_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9900,12 +10026,12 @@ define void @test_vsuxseg7_mask_nxv1f16_nxv1i16( %val, half*
; CHECK-LABEL: test_vsuxseg7_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9921,13 +10047,13 @@ define void @test_vsuxseg8_nxv1f16_nxv1i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg8_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9940,13 +10066,13 @@ define void @test_vsuxseg8_mask_nxv1f16_nxv1i8( %val, half* %
; CHECK-LABEL: test_vsuxseg8_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9962,13 +10088,13 @@ define void @test_vsuxseg8_nxv1f16_nxv1i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg8_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -9981,13 +10107,13 @@ define void @test_vsuxseg8_mask_nxv1f16_nxv1i32( %val, half*
; CHECK-LABEL: test_vsuxseg8_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10003,13 +10129,13 @@ define void @test_vsuxseg8_nxv1f16_nxv1i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg8_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10022,13 +10148,13 @@ define void @test_vsuxseg8_mask_nxv1f16_nxv1i16( %val, half*
; CHECK-LABEL: test_vsuxseg8_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10043,6 +10169,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i8(, %val, float* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -10056,6 +10183,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f32_nxv1i8( %val, float* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -10072,6 +10200,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i32(, %val, float* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -10085,6 +10214,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f32_nxv1i32( %val, float* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -10101,6 +10231,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i16(, %val, float* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -10114,6 +10245,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f32_nxv1i16( %val, float* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -10131,8 +10263,8 @@ define void @test_vsuxseg3_nxv1f32_nxv1i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg3_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10145,8 +10277,8 @@ define void @test_vsuxseg3_mask_nxv1f32_nxv1i8( %val, float*
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10162,8 +10294,8 @@ define void @test_vsuxseg3_nxv1f32_nxv1i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg3_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10176,8 +10308,8 @@ define void @test_vsuxseg3_mask_nxv1f32_nxv1i32( %val, float
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10193,8 +10325,8 @@ define void @test_vsuxseg3_nxv1f32_nxv1i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg3_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10207,8 +10339,8 @@ define void @test_vsuxseg3_mask_nxv1f32_nxv1i16( %val, float
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10224,9 +10356,9 @@ define void @test_vsuxseg4_nxv1f32_nxv1i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg4_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10239,9 +10371,9 @@ define void @test_vsuxseg4_mask_nxv1f32_nxv1i8( %val, float*
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10257,9 +10389,9 @@ define void @test_vsuxseg4_nxv1f32_nxv1i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg4_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10272,9 +10404,9 @@ define void @test_vsuxseg4_mask_nxv1f32_nxv1i32( %val, float
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10290,9 +10422,9 @@ define void @test_vsuxseg4_nxv1f32_nxv1i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg4_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10305,9 +10437,9 @@ define void @test_vsuxseg4_mask_nxv1f32_nxv1i16( %val, float
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10323,10 +10455,10 @@ define void @test_vsuxseg5_nxv1f32_nxv1i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg5_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10339,10 +10471,10 @@ define void @test_vsuxseg5_mask_nxv1f32_nxv1i8( %val, float*
; CHECK-LABEL: test_vsuxseg5_mask_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10358,10 +10490,10 @@ define void @test_vsuxseg5_nxv1f32_nxv1i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg5_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10374,10 +10506,10 @@ define void @test_vsuxseg5_mask_nxv1f32_nxv1i32( %val, float
; CHECK-LABEL: test_vsuxseg5_mask_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10393,10 +10525,10 @@ define void @test_vsuxseg5_nxv1f32_nxv1i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg5_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10409,10 +10541,10 @@ define void @test_vsuxseg5_mask_nxv1f32_nxv1i16( %val, float
; CHECK-LABEL: test_vsuxseg5_mask_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10428,11 +10560,11 @@ define void @test_vsuxseg6_nxv1f32_nxv1i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg6_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10445,11 +10577,11 @@ define void @test_vsuxseg6_mask_nxv1f32_nxv1i8( %val, float*
; CHECK-LABEL: test_vsuxseg6_mask_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10465,11 +10597,11 @@ define void @test_vsuxseg6_nxv1f32_nxv1i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg6_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10482,11 +10614,11 @@ define void @test_vsuxseg6_mask_nxv1f32_nxv1i32( %val, float
; CHECK-LABEL: test_vsuxseg6_mask_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10502,11 +10634,11 @@ define void @test_vsuxseg6_nxv1f32_nxv1i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg6_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10519,11 +10651,11 @@ define void @test_vsuxseg6_mask_nxv1f32_nxv1i16( %val, float
; CHECK-LABEL: test_vsuxseg6_mask_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10539,12 +10671,12 @@ define void @test_vsuxseg7_nxv1f32_nxv1i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg7_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10557,12 +10689,12 @@ define void @test_vsuxseg7_mask_nxv1f32_nxv1i8( %val, float*
; CHECK-LABEL: test_vsuxseg7_mask_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10578,12 +10710,12 @@ define void @test_vsuxseg7_nxv1f32_nxv1i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg7_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10596,12 +10728,12 @@ define void @test_vsuxseg7_mask_nxv1f32_nxv1i32( %val, float
; CHECK-LABEL: test_vsuxseg7_mask_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10617,12 +10749,12 @@ define void @test_vsuxseg7_nxv1f32_nxv1i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg7_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10635,12 +10767,12 @@ define void @test_vsuxseg7_mask_nxv1f32_nxv1i16( %val, float
; CHECK-LABEL: test_vsuxseg7_mask_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10656,13 +10788,13 @@ define void @test_vsuxseg8_nxv1f32_nxv1i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg8_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10675,13 +10807,13 @@ define void @test_vsuxseg8_mask_nxv1f32_nxv1i8( %val, float*
; CHECK-LABEL: test_vsuxseg8_mask_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10697,13 +10829,13 @@ define void @test_vsuxseg8_nxv1f32_nxv1i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg8_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10716,13 +10848,13 @@ define void @test_vsuxseg8_mask_nxv1f32_nxv1i32( %val, float
; CHECK-LABEL: test_vsuxseg8_mask_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10738,13 +10870,13 @@ define void @test_vsuxseg8_nxv1f32_nxv1i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg8_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10757,13 +10889,13 @@ define void @test_vsuxseg8_mask_nxv1f32_nxv1i16( %val, float
; CHECK-LABEL: test_vsuxseg8_mask_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10778,6 +10910,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f16.nxv8i16(, %val, half* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8f16_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -10791,6 +10924,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8f16_nxv8i16( %val, half* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8f16_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -10807,6 +10941,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f16.nxv8i8(, %val, half* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8f16_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -10820,6 +10955,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8f16_nxv8i8( %val, half* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8f16_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
@@ -10836,6 +10972,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f16.nxv8i32(, %val, half* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8f16_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12
@@ -10848,6 +10985,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8f16_nxv8i32( %val, half* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8f16_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12, v0.t
@@ -10864,8 +11002,8 @@ define void @test_vsuxseg3_nxv8f16_nxv8i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg3_nxv8f16_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -10878,8 +11016,8 @@ define void @test_vsuxseg3_mask_nxv8f16_nxv8i16( %val, half*
; CHECK-LABEL: test_vsuxseg3_mask_nxv8f16_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -10895,8 +11033,8 @@ define void @test_vsuxseg3_nxv8f16_nxv8i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg3_nxv8f16_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -10909,8 +11047,8 @@ define void @test_vsuxseg3_mask_nxv8f16_nxv8i8( %val, half* %
; CHECK-LABEL: test_vsuxseg3_mask_nxv8f16_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -10925,11 +11063,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv8f16.nxv8i32(, %val, half* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv8f16_nxv8i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
-; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16
+; CHECK-NEXT: vsuxseg3ei32.v v16, (a0), v12
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv8f16.nxv8i32( %val, %val, %val, half* %base, %index, i32 %vl)
@@ -10939,11 +11077,11 @@ entry:
define void @test_vsuxseg3_mask_nxv8f16_nxv8i32( %val, half* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv8f16_nxv8i32:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
-; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16, v0.t
+; CHECK-NEXT: vsuxseg3ei32.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv8f16.nxv8i32( %val, %val, %val, half* %base, %index, %mask, i32 %vl)
@@ -10957,9 +11095,9 @@ define void @test_vsuxseg4_nxv8f16_nxv8i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg4_nxv8f16_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -10972,9 +11110,9 @@ define void @test_vsuxseg4_mask_nxv8f16_nxv8i16( %val, half*
; CHECK-LABEL: test_vsuxseg4_mask_nxv8f16_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -10990,9 +11128,9 @@ define void @test_vsuxseg4_nxv8f16_nxv8i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg4_nxv8f16_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11005,9 +11143,9 @@ define void @test_vsuxseg4_mask_nxv8f16_nxv8i8( %val, half* %
; CHECK-LABEL: test_vsuxseg4_mask_nxv8f16_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11023,9 +11161,9 @@ define void @test_vsuxseg4_nxv8f16_nxv8i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg4_nxv8f16_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v16, (a0), v12
; CHECK-NEXT: ret
@@ -11038,9 +11176,9 @@ define void @test_vsuxseg4_mask_nxv8f16_nxv8i32( %val, half*
; CHECK-LABEL: test_vsuxseg4_mask_nxv8f16_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v16, (a0), v12, v0.t
; CHECK-NEXT: ret
@@ -11055,6 +11193,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i16(, %val, float* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8f32_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -11068,6 +11207,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8f32_nxv8i16( %val, float* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8f32_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -11084,6 +11224,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i8(, %val, float* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8f32_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -11097,6 +11238,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8f32_nxv8i8( %val, float* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8f32_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -11113,6 +11255,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i32(, %val, float* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8f32_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -11126,6 +11269,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8f32_nxv8i32( %val, float* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8f32_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -11142,6 +11286,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i32(, %val, double* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2f64_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11155,6 +11300,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2f64_nxv2i32( %val, double* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2f64_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11171,6 +11317,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i8(, %val, double* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2f64_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11184,6 +11331,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2f64_nxv2i8( %val, double* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2f64_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11200,6 +11348,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i16(, %val, double* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2f64_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11213,6 +11362,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2f64_nxv2i16( %val, double* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2f64_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
@@ -11230,8 +11380,8 @@ define void @test_vsuxseg3_nxv2f64_nxv2i32( %val, double* %
; CHECK-LABEL: test_vsuxseg3_nxv2f64_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11244,8 +11394,8 @@ define void @test_vsuxseg3_mask_nxv2f64_nxv2i32( %val, doub
; CHECK-LABEL: test_vsuxseg3_mask_nxv2f64_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11261,8 +11411,8 @@ define void @test_vsuxseg3_nxv2f64_nxv2i8( %val, double* %b
; CHECK-LABEL: test_vsuxseg3_nxv2f64_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11275,8 +11425,8 @@ define void @test_vsuxseg3_mask_nxv2f64_nxv2i8( %val, doubl
; CHECK-LABEL: test_vsuxseg3_mask_nxv2f64_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11292,8 +11442,8 @@ define void @test_vsuxseg3_nxv2f64_nxv2i16( %val, double* %
; CHECK-LABEL: test_vsuxseg3_nxv2f64_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11306,8 +11456,8 @@ define void @test_vsuxseg3_mask_nxv2f64_nxv2i16( %val, doub
; CHECK-LABEL: test_vsuxseg3_mask_nxv2f64_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11323,9 +11473,9 @@ define void @test_vsuxseg4_nxv2f64_nxv2i32( %val, double* %
; CHECK-LABEL: test_vsuxseg4_nxv2f64_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11338,9 +11488,9 @@ define void @test_vsuxseg4_mask_nxv2f64_nxv2i32( %val, doub
; CHECK-LABEL: test_vsuxseg4_mask_nxv2f64_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -11356,9 +11506,9 @@ define void @test_vsuxseg4_nxv2f64_nxv2i8( %val, double* %b
; CHECK-LABEL: test_vsuxseg4_nxv2f64_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -11371,9 +11521,9 @@ define void @test_vsuxseg4_mask_nxv2f64_nxv2i8( %val, doubl
; CHECK-LABEL: test_vsuxseg4_mask_nxv2f64_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
; CHECK-NEXT:
vsuxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11389,9 +11539,9 @@ define void @test_vsuxseg4_nxv2f64_nxv2i16( %val, double* % ; CHECK-LABEL: test_vsuxseg4_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11404,9 +11554,9 @@ define void @test_vsuxseg4_mask_nxv2f64_nxv2i16( %val, doub ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11421,6 +11571,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f16.nxv4i16(, %val, half* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -11434,6 +11585,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f16_nxv4i16( %val, half* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -11450,6 +11602,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f16.nxv4i8(, %val, half* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -11463,6 +11616,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f16_nxv4i8( %val, half* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -11479,6 +11633,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f16.nxv4i32(, %val, half* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10 @@ -11491,6 +11646,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f16_nxv4i32( %val, half* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t @@ -11507,8 +11663,8 @@ define void @test_vsuxseg3_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg3_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; 
CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11521,8 +11677,8 @@ define void @test_vsuxseg3_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11538,8 +11694,8 @@ define void @test_vsuxseg3_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg3_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11552,8 +11708,8 @@ define void @test_vsuxseg3_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11568,11 +11724,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv4f16.nxv4i32(, %val, half* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg3_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12 +; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg3.nxv4f16.nxv4i32( %val, %val, %val, half* %base, %index, i32 %vl) @@ -11582,11 +11738,11 @@ entry: define void @test_vsuxseg3_mask_nxv4f16_nxv4i32( %val, half* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12, v0.t +; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg3.mask.nxv4f16.nxv4i32( %val, %val, %val, half* %base, %index, %mask, i32 %vl) @@ -11600,9 +11756,9 @@ define void @test_vsuxseg4_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg4_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11615,9 +11771,9 @@ define void @test_vsuxseg4_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, 
v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11633,9 +11789,9 @@ define void @test_vsuxseg4_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg4_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11648,9 +11804,9 @@ define void @test_vsuxseg4_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11666,9 +11822,9 @@ define void @test_vsuxseg4_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg4_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11681,9 +11837,9 @@ define void @test_vsuxseg4_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11699,10 +11855,10 @@ define void @test_vsuxseg5_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg5_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11715,10 +11871,10 @@ define void @test_vsuxseg5_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsuxseg5_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11734,10 +11890,10 @@ define void @test_vsuxseg5_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg5_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; 
CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11750,10 +11906,10 @@ define void @test_vsuxseg5_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsuxseg5_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11769,10 +11925,10 @@ define void @test_vsuxseg5_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg5_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11785,10 +11941,10 @@ define void @test_vsuxseg5_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: test_vsuxseg5_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11804,11 +11960,11 @@ define void @test_vsuxseg6_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg6_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11821,11 +11977,11 @@ define void @test_vsuxseg6_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsuxseg6_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11841,11 +11997,11 @@ define void @test_vsuxseg6_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg6_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, 
v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11858,11 +12014,11 @@ define void @test_vsuxseg6_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsuxseg6_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11878,11 +12034,11 @@ define void @test_vsuxseg6_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg6_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11895,11 +12051,11 @@ define void @test_vsuxseg6_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: test_vsuxseg6_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11915,12 +12071,12 @@ define void @test_vsuxseg7_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg7_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11933,12 +12089,12 @@ define void @test_vsuxseg7_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsuxseg7_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11954,12 +12110,12 @@ define void @test_vsuxseg7_nxv4f16_nxv4i8( %val, half* 
%base, ; CHECK-LABEL: test_vsuxseg7_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11972,12 +12128,12 @@ define void @test_vsuxseg7_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsuxseg7_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11993,12 +12149,12 @@ define void @test_vsuxseg7_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg7_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -12011,12 +12167,12 @@ define void @test_vsuxseg7_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: test_vsuxseg7_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -12032,13 +12188,13 @@ define void @test_vsuxseg8_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg8_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12051,13 +12207,13 @@ define void @test_vsuxseg8_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsuxseg8_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # 
%entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12073,13 +12229,13 @@ define void @test_vsuxseg8_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg8_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12092,13 +12248,13 @@ define void @test_vsuxseg8_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsuxseg8_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12114,13 +12270,13 @@ define void @test_vsuxseg8_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg8_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -12133,13 +12289,13 @@ define void @test_vsuxseg8_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: test_vsuxseg8_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10, 
v0.t
; CHECK-NEXT: ret
@@ -12154,6 +12310,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f16.nxv2i32(, %val, half* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -12167,6 +12324,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2f16_nxv2i32( %val, half* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -12183,6 +12341,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f16.nxv2i8(, %val, half* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -12196,6 +12355,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2f16_nxv2i8( %val, half* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -12212,6 +12372,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f16.nxv2i16(, %val, half* %base, %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -12225,6 +12386,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2f16_nxv2i16( %val, half* %base, %index, %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -12242,8 +12404,8 @@ define void @test_vsuxseg3_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg3_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12256,8 +12418,8 @@ define void @test_vsuxseg3_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsuxseg3_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12273,8 +12435,8 @@ define void @test_vsuxseg3_nxv2f16_nxv2i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg3_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12287,8 +12449,8 @@ define void @test_vsuxseg3_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsuxseg3_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12304,8 +12466,8 @@ define void @test_vsuxseg3_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg3_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12318,8 +12480,8 @@ define void @test_vsuxseg3_mask_nxv2f16_nxv2i16( %val, half*
; CHECK-LABEL: test_vsuxseg3_mask_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12335,9 +12497,9 @@ define void @test_vsuxseg4_nxv2f16_nxv2i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg4_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12350,9 +12512,9 @@ define void @test_vsuxseg4_mask_nxv2f16_nxv2i32( %val, half*
; CHECK-LABEL: test_vsuxseg4_mask_nxv2f16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12368,9 +12530,9 @@ define void @test_vsuxseg4_nxv2f16_nxv2i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg4_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12383,9 +12545,9 @@ define void @test_vsuxseg4_mask_nxv2f16_nxv2i8( %val, half* %
; CHECK-LABEL: test_vsuxseg4_mask_nxv2f16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12401,9 +12563,9 @@ define void @test_vsuxseg4_nxv2f16_nxv2i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg4_nxv2f16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT:
vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12416,9 +12578,9 @@ define void @test_vsuxseg4_mask_nxv2f16_nxv2i16( %val, half* ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12434,10 +12596,10 @@ define void @test_vsuxseg5_nxv2f16_nxv2i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg5_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12450,10 +12612,10 @@ define void @test_vsuxseg5_mask_nxv2f16_nxv2i32( %val, half* ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12469,10 +12631,10 @@ define void @test_vsuxseg5_nxv2f16_nxv2i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg5_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12485,10 +12647,10 @@ define void @test_vsuxseg5_mask_nxv2f16_nxv2i8( %val, half* % ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12504,10 +12666,10 @@ define void @test_vsuxseg5_nxv2f16_nxv2i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg5_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12520,10 +12682,10 @@ define void @test_vsuxseg5_mask_nxv2f16_nxv2i16( %val, 
half* ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12539,11 +12701,11 @@ define void @test_vsuxseg6_nxv2f16_nxv2i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg6_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12556,11 +12718,11 @@ define void @test_vsuxseg6_mask_nxv2f16_nxv2i32( %val, half* ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12576,11 +12738,11 @@ define void @test_vsuxseg6_nxv2f16_nxv2i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg6_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12593,11 +12755,11 @@ define void @test_vsuxseg6_mask_nxv2f16_nxv2i8( %val, half* % ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12613,11 +12775,11 @@ define void @test_vsuxseg6_nxv2f16_nxv2i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg6_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, 
(a0), v9 ; CHECK-NEXT: ret @@ -12630,11 +12792,11 @@ define void @test_vsuxseg6_mask_nxv2f16_nxv2i16( %val, half* ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12650,12 +12812,12 @@ define void @test_vsuxseg7_nxv2f16_nxv2i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg7_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12668,12 +12830,12 @@ define void @test_vsuxseg7_mask_nxv2f16_nxv2i32( %val, half* ; CHECK-LABEL: test_vsuxseg7_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12689,12 +12851,12 @@ define void @test_vsuxseg7_nxv2f16_nxv2i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg7_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12707,12 +12869,12 @@ define void @test_vsuxseg7_mask_nxv2f16_nxv2i8( %val, half* % ; CHECK-LABEL: test_vsuxseg7_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12728,12 +12890,12 @@ define void @test_vsuxseg7_nxv2f16_nxv2i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg7_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; 
CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12746,12 +12908,12 @@ define void @test_vsuxseg7_mask_nxv2f16_nxv2i16( %val, half* ; CHECK-LABEL: test_vsuxseg7_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12767,13 +12929,13 @@ define void @test_vsuxseg8_nxv2f16_nxv2i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg8_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12786,13 +12948,13 @@ define void @test_vsuxseg8_mask_nxv2f16_nxv2i32( %val, half* ; CHECK-LABEL: test_vsuxseg8_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12808,13 +12970,13 @@ define void @test_vsuxseg8_nxv2f16_nxv2i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg8_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12827,13 +12989,13 @@ define void @test_vsuxseg8_mask_nxv2f16_nxv2i8( %val, half* % ; CHECK-LABEL: 
test_vsuxseg8_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12849,13 +13011,13 @@ define void @test_vsuxseg8_nxv2f16_nxv2i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg8_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12868,13 +13030,13 @@ define void @test_vsuxseg8_mask_nxv2f16_nxv2i16( %val, half* ; CHECK-LABEL: test_vsuxseg8_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12889,6 +13051,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i16(, %val, float* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f32_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -12902,6 +13065,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f32_nxv4i16( %val, float* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f32_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -12918,6 +13082,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i8(, %val, float* %base, %index, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f32_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -12931,6 +13096,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f32_nxv4i8( %val, float* %base, %index, %mask, i32 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f32_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed 
$v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -12947,6 +13113,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i32(,
 %val, float* %base, %index, i32 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4f32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -12960,6 +13127,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4f32_nxv4i32( %val, float* %base, %index, %mask, i32 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -12977,8 +13145,8 @@ define void @test_vsuxseg3_nxv4f32_nxv4i16( %val, float* %ba
 ; CHECK-LABEL: test_vsuxseg3_nxv4f32_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -12991,8 +13159,8 @@ define void @test_vsuxseg3_mask_nxv4f32_nxv4i16( %val, float
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f32_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -13008,8 +13176,8 @@ define void @test_vsuxseg3_nxv4f32_nxv4i8( %val, float* %bas
 ; CHECK-LABEL: test_vsuxseg3_nxv4f32_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -13022,8 +13190,8 @@ define void @test_vsuxseg3_mask_nxv4f32_nxv4i8( %val, float*
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f32_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -13039,8 +13207,8 @@ define void @test_vsuxseg3_nxv4f32_nxv4i32( %val, float* %ba
 ; CHECK-LABEL: test_vsuxseg3_nxv4f32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -13053,8 +13221,8 @@ define void @test_vsuxseg3_mask_nxv4f32_nxv4i32( %val, float
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -13070,9 +13238,9 @@ define void @test_vsuxseg4_nxv4f32_nxv4i16( %val, float* %ba
 ; CHECK-LABEL: test_vsuxseg4_nxv4f32_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -13085,9 +13253,9 @@ define void @test_vsuxseg4_mask_nxv4f32_nxv4i16( %val, float
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f32_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -13103,9 +13271,9 @@ define void @test_vsuxseg4_nxv4f32_nxv4i8( %val, float* %bas
 ; CHECK-LABEL: test_vsuxseg4_nxv4f32_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -13118,9 +13286,9 @@ define void @test_vsuxseg4_mask_nxv4f32_nxv4i8( %val, float*
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f32_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -13136,9 +13304,9 @@ define void @test_vsuxseg4_nxv4f32_nxv4i32( %val, float* %ba
 ; CHECK-LABEL: test_vsuxseg4_nxv4f32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -13151,9 +13319,9 @@ define void @test_vsuxseg4_mask_nxv4f32_nxv4i32( %val, float
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
diff --git a/llvm/test/CodeGen/RISCV/rvv/vsuxseg-rv64.ll b/llvm/test/CodeGen/RISCV/rvv/vsuxseg-rv64.ll
index 8fcc16c30fe49..9d384a3c000a5 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vsuxseg-rv64.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vsuxseg-rv64.ll
@@ -8,6 +8,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i16(,
 %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -21,6 +22,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv16i16_nxv16i16( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -37,6 +39,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i8(,
 %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -50,6 +53,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv16i16_nxv16i8( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
@@ -66,6 +70,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i32(,
 %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16
@@ -78,6 +83,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv16i16_nxv16i32( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16, v0.t
@@ -93,6 +99,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i32(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -106,6 +113,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4i32_nxv4i32( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -122,6 +130,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i8(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -135,6 +144,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4i32_nxv4i8( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -151,6 +161,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i64(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v12
@@ -163,6 +174,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4i32_nxv4i64( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v12, v0.t
@@ -178,6 +190,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i16(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -191,6 +204,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4i32_nxv4i16( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
@@ -208,8 +222,8 @@ define void @test_vsuxseg3_nxv4i32_nxv4i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -222,8 +236,8 @@ define void @test_vsuxseg3_mask_nxv4i32_nxv4i32( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -239,8 +253,8 @@ define void @test_vsuxseg3_nxv4i32_nxv4i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -253,8 +267,8 @@ define void @test_vsuxseg3_mask_nxv4i32_nxv4i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -269,11 +283,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i64(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i64:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT: vsuxseg3ei64.v v8, (a0), v16
+; CHECK-NEXT: vsuxseg3ei64.v v16, (a0), v12
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i64( %val, %val, %val, i32* %base, %index, i64 %vl)
@@ -283,11 +297,11 @@ entry:
 define void @test_vsuxseg3_mask_nxv4i32_nxv4i64( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i64:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT: vsuxseg3ei64.v v8, (a0), v16, v0.t
+; CHECK-NEXT: vsuxseg3ei64.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i64( %val, %val, %val, i32* %base, %index, %mask, i64 %vl)
@@ -301,8 +315,8 @@ define void @test_vsuxseg3_nxv4i32_nxv4i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -315,8 +329,8 @@ define void @test_vsuxseg3_mask_nxv4i32_nxv4i16( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -332,9 +346,9 @@ define void @test_vsuxseg4_nxv4i32_nxv4i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -347,9 +361,9 @@ define void @test_vsuxseg4_mask_nxv4i32_nxv4i32( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -365,9 +379,9 @@ define void @test_vsuxseg4_nxv4i32_nxv4i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -380,9 +394,9 @@ define void @test_vsuxseg4_mask_nxv4i32_nxv4i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -398,9 +412,9 @@ define void @test_vsuxseg4_nxv4i32_nxv4i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei64.v v16, (a0), v12
 ; CHECK-NEXT: ret
@@ -413,9 +427,9 @@ define void @test_vsuxseg4_mask_nxv4i32_nxv4i64( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei64.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
@@ -431,9 +445,9 @@ define void @test_vsuxseg4_nxv4i32_nxv4i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -446,9 +460,9 @@ define void @test_vsuxseg4_mask_nxv4i32_nxv4i16( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -463,6 +477,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i16(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv16i8_nxv16i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12
@@ -475,6 +490,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv16i8_nxv16i16( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv16i8_nxv16i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12, v0.t
@@ -490,6 +506,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i8(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv16i8_nxv16i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
@@ -503,6 +520,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv16i8_nxv16i8( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv16i8_nxv16i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v12, v10
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
@@ -519,6 +537,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i32(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv16i8_nxv16i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16
@@ -531,6 +550,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv16i8_nxv16i32( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv16i8_nxv16i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16, v0.t
@@ -546,11 +566,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i16(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_nxv16i8_nxv16i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
-; CHECK-NEXT: vsuxseg3ei16.v v8, (a0), v16
+; CHECK-NEXT: vsuxseg3ei16.v v16, (a0), v12
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i16( %val, %val, %val, i8* %base, %index, i64 %vl)
@@ -560,11 +580,11 @@ entry:
 define void @test_vsuxseg3_mask_nxv16i8_nxv16i16( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv16i8_nxv16i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv2r.v v10, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv2r.v v12, v8
+; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
-; CHECK-NEXT: vsuxseg3ei16.v v8, (a0), v16, v0.t
+; CHECK-NEXT: vsuxseg3ei16.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl)
@@ -578,8 +598,8 @@ define void @test_vsuxseg3_nxv16i8_nxv16i8( %val, i8* %base, <
 ; CHECK-LABEL: test_vsuxseg3_nxv16i8_nxv16i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -592,8 +612,8 @@ define void @test_vsuxseg3_mask_nxv16i8_nxv16i8( %val, i8* %ba
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv16i8_nxv16i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -608,6 +628,7 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i32(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_nxv16i8_nxv16i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
@@ -621,6 +642,7 @@ entry:
 define void @test_vsuxseg3_mask_nxv16i8_nxv16i32( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv16i8_nxv16i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
@@ -638,9 +660,9 @@ define void @test_vsuxseg4_nxv16i8_nxv16i16( %val, i8* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv16i8_nxv16i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v16, (a0), v12
 ; CHECK-NEXT: ret
@@ -653,9 +675,9 @@ define void @test_vsuxseg4_mask_nxv16i8_nxv16i16( %val, i8* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv16i8_nxv16i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
-; CHECK-NEXT: vmv2r.v v20, v8
-; CHECK-NEXT: vmv2r.v v22, v8
+; CHECK-NEXT: vmv2r.v v18, v16
+; CHECK-NEXT: vmv2r.v v20, v16
+; CHECK-NEXT: vmv2r.v v22, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
@@ -671,9 +693,9 @@ define void @test_vsuxseg4_nxv16i8_nxv16i8( %val, i8* %base, <
 ; CHECK-LABEL: test_vsuxseg4_nxv16i8_nxv16i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -686,9 +708,9 @@ define void @test_vsuxseg4_mask_nxv16i8_nxv16i8( %val, i8* %ba
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv16i8_nxv16i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv2r.v v12, v8
-; CHECK-NEXT: vmv2r.v v14, v8
-; CHECK-NEXT: vmv2r.v v16, v8
-; CHECK-NEXT: vmv2r.v v18, v8
+; CHECK-NEXT: vmv2r.v v14, v12
+; CHECK-NEXT: vmv2r.v v16, v12
+; CHECK-NEXT: vmv2r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -703,6 +725,7 @@ declare void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i32(,
 %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg4_nxv16i8_nxv16i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vmv2r.v v14, v8
@@ -717,6 +740,7 @@ entry:
 define void @test_vsuxseg4_mask_nxv16i8_nxv16i32( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv16i8_nxv16i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
 ; CHECK-NEXT: vmv2r.v v10, v8
 ; CHECK-NEXT: vmv2r.v v12, v8
 ; CHECK-NEXT: vmv2r.v v14, v8
@@ -734,6 +758,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i64(,
 %val, i64* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -747,6 +772,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv1i64_nxv1i64( %val, i64* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -763,6 +789,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i32(,
 %val, i64* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -776,6 +803,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv1i64_nxv1i32( %val, i64* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -792,6 +820,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i16(,
 %val, i64* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -805,6 +834,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv1i64_nxv1i16( %val, i64* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -821,6 +851,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i8(,
 %val, i64* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -834,6 +865,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv1i64_nxv1i8( %val, i64* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
@@ -851,8 +883,8 @@ define void @test_vsuxseg3_nxv1i64_nxv1i64( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg3_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -865,8 +897,8 @@ define void @test_vsuxseg3_mask_nxv1i64_nxv1i64( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -882,8 +914,8 @@ define void @test_vsuxseg3_nxv1i64_nxv1i32( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg3_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -896,8 +928,8 @@ define void @test_vsuxseg3_mask_nxv1i64_nxv1i32( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -913,8 +945,8 @@ define void @test_vsuxseg3_nxv1i64_nxv1i16( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg3_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -927,8 +959,8 @@ define void @test_vsuxseg3_mask_nxv1i64_nxv1i16( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -944,8 +976,8 @@ define void @test_vsuxseg3_nxv1i64_nxv1i8( %val, i64* %base, <
 ; CHECK-LABEL: test_vsuxseg3_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -958,8 +990,8 @@ define void @test_vsuxseg3_mask_nxv1i64_nxv1i8( %val, i64* %ba
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -975,9 +1007,9 @@ define void @test_vsuxseg4_nxv1i64_nxv1i64( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -990,9 +1022,9 @@ define void @test_vsuxseg4_mask_nxv1i64_nxv1i64( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1008,9 +1040,9 @@ define void @test_vsuxseg4_nxv1i64_nxv1i32( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1023,9 +1055,9 @@ define void @test_vsuxseg4_mask_nxv1i64_nxv1i32( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1041,9 +1073,9 @@ define void @test_vsuxseg4_nxv1i64_nxv1i16( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1056,9 +1088,9 @@ define void @test_vsuxseg4_mask_nxv1i64_nxv1i16( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1074,9 +1106,9 @@ define void @test_vsuxseg4_nxv1i64_nxv1i8( %val, i64* %base, <
 ; CHECK-LABEL: test_vsuxseg4_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1089,9 +1121,9 @@ define void @test_vsuxseg4_mask_nxv1i64_nxv1i8( %val, i64* %ba
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1107,10 +1139,10 @@ define void @test_vsuxseg5_nxv1i64_nxv1i64( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg5_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1123,10 +1155,10 @@ define void @test_vsuxseg5_mask_nxv1i64_nxv1i64( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1142,10 +1174,10 @@ define void @test_vsuxseg5_nxv1i64_nxv1i32( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg5_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1158,10 +1190,10 @@ define void @test_vsuxseg5_mask_nxv1i64_nxv1i32( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1177,10 +1209,10 @@ define void @test_vsuxseg5_nxv1i64_nxv1i16( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg5_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1193,10 +1225,10 @@ define void @test_vsuxseg5_mask_nxv1i64_nxv1i16( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1212,10 +1244,10 @@ define void @test_vsuxseg5_nxv1i64_nxv1i8( %val, i64* %base, <
 ; CHECK-LABEL: test_vsuxseg5_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1228,10 +1260,10 @@ define void @test_vsuxseg5_mask_nxv1i64_nxv1i8( %val, i64* %ba
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1247,11 +1279,11 @@ define void @test_vsuxseg6_nxv1i64_nxv1i64( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg6_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1264,11 +1296,11 @@ define void @test_vsuxseg6_mask_nxv1i64_nxv1i64( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1284,11 +1316,11 @@ define void @test_vsuxseg6_nxv1i64_nxv1i32( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg6_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1301,11 +1333,11 @@ define void @test_vsuxseg6_mask_nxv1i64_nxv1i32( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1321,11 +1353,11 @@ define void @test_vsuxseg6_nxv1i64_nxv1i16( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg6_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1338,11 +1370,11 @@ define void @test_vsuxseg6_mask_nxv1i64_nxv1i16( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1358,11 +1390,11 @@ define void @test_vsuxseg6_nxv1i64_nxv1i8( %val, i64* %base, <
 ; CHECK-LABEL: test_vsuxseg6_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1375,11 +1407,11 @@ define void @test_vsuxseg6_mask_nxv1i64_nxv1i8( %val, i64* %ba
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1395,12 +1427,12 @@ define void @test_vsuxseg7_nxv1i64_nxv1i64( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg7_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1413,12 +1445,12 @@ define void @test_vsuxseg7_mask_nxv1i64_nxv1i64( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1434,12 +1466,12 @@ define void @test_vsuxseg7_nxv1i64_nxv1i32( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg7_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1452,12 +1484,12 @@ define void @test_vsuxseg7_mask_nxv1i64_nxv1i32( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1473,12 +1505,12 @@ define void @test_vsuxseg7_nxv1i64_nxv1i16( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg7_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1491,12 +1523,12 @@ define void @test_vsuxseg7_mask_nxv1i64_nxv1i16( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1512,12 +1544,12 @@ define void @test_vsuxseg7_nxv1i64_nxv1i8( %val, i64* %base, <
 ; CHECK-LABEL: test_vsuxseg7_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1530,12 +1562,12 @@ define void @test_vsuxseg7_mask_nxv1i64_nxv1i8( %val, i64* %ba
 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1551,13 +1583,13 @@ define void @test_vsuxseg8_nxv1i64_nxv1i64( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg8_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1570,13 +1602,13 @@ define void @test_vsuxseg8_mask_nxv1i64_nxv1i64( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i64_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1592,13 +1624,13 @@ define void @test_vsuxseg8_nxv1i64_nxv1i32( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg8_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1611,13 +1643,13 @@ define void @test_vsuxseg8_mask_nxv1i64_nxv1i32( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i64_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1633,13 +1665,13 @@ define void @test_vsuxseg8_nxv1i64_nxv1i16( %val, i64* %base,
 ; CHECK-LABEL: test_vsuxseg8_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1652,13 +1684,13 @@ define void @test_vsuxseg8_mask_nxv1i64_nxv1i16( %val, i64* %b
 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i64_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1674,13 +1706,13 @@ define void @test_vsuxseg8_nxv1i64_nxv1i8( %val, i64* %base, <
 ; CHECK-LABEL: test_vsuxseg8_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1693,13 +1725,13 @@ define void @test_vsuxseg8_mask_nxv1i64_nxv1i8( %val, i64* %ba
 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i64_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1714,6 +1746,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i64(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv1i32_nxv1i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -1727,6 +1760,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv1i32_nxv1i64( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i32_nxv1i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -1743,6 +1777,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i32(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv1i32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -1756,6 +1791,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv1i32_nxv1i32( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -1772,6 +1808,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i16(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv1i32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -1785,6 +1822,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv1i32_nxv1i16( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -1801,6 +1839,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i8(,
 %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv1i32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -1814,6 +1853,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv1i32_nxv1i8( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -1831,8 +1871,8 @@ define void @test_vsuxseg3_nxv1i32_nxv1i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg3_nxv1i32_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1845,8 +1885,8 @@ define void @test_vsuxseg3_mask_nxv1i32_nxv1i64( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i32_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1862,8 +1902,8 @@ define void @test_vsuxseg3_nxv1i32_nxv1i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg3_nxv1i32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1876,8 +1916,8 @@ define void @test_vsuxseg3_mask_nxv1i32_nxv1i32( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1893,8 +1933,8 @@ define void @test_vsuxseg3_nxv1i32_nxv1i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg3_nxv1i32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1907,8 +1947,8 @@ define void @test_vsuxseg3_mask_nxv1i32_nxv1i16( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1924,8 +1964,8 @@ define void @test_vsuxseg3_nxv1i32_nxv1i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsuxseg3_nxv1i32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1938,8 +1978,8 @@ define void @test_vsuxseg3_mask_nxv1i32_nxv1i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1955,9 +1995,9 @@ define void @test_vsuxseg4_nxv1i32_nxv1i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv1i32_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -1970,9 +2010,9 @@ define void @test_vsuxseg4_mask_nxv1i32_nxv1i64( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i32_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -1988,9 +2028,9 @@ define void @test_vsuxseg4_nxv1i32_nxv1i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv1i32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -2003,9 +2043,9 @@ define void @test_vsuxseg4_mask_nxv1i32_nxv1i32( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -2021,9 +2061,9 @@ define void @test_vsuxseg4_nxv1i32_nxv1i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv1i32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -2036,9 +2076,9 @@ define void @test_vsuxseg4_mask_nxv1i32_nxv1i16( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i32_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -2054,9 +2094,9 @@ define void @test_vsuxseg4_nxv1i32_nxv1i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsuxseg4_nxv1i32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -2069,9 +2109,9 @@ define void @test_vsuxseg4_mask_nxv1i32_nxv1i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i32_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -2087,10 +2127,10 @@ define void @test_vsuxseg5_nxv1i32_nxv1i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg5_nxv1i32_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg5ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -2103,10 +2143,10 @@ define void @test_vsuxseg5_mask_nxv1i32_nxv1i64( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i32_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg5ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -2122,10 +2162,10 @@ define void @test_vsuxseg5_nxv1i32_nxv1i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg5_nxv1i32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -2138,10 +2178,10 @@ define void @test_vsuxseg5_mask_nxv1i32_nxv1i32( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i32_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v
v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2157,10 +2197,10 @@ define void @test_vsuxseg5_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg5_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2173,10 +2213,10 @@ define void @test_vsuxseg5_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2192,10 +2232,10 @@ define void @test_vsuxseg5_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsuxseg5_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2208,10 +2248,10 @@ define void @test_vsuxseg5_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2227,11 +2267,11 @@ define void @test_vsuxseg6_nxv1i32_nxv1i64( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg6_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2244,11 +2284,11 @@ define void @test_vsuxseg6_mask_nxv1i32_nxv1i64( %val, i32* %b ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: 
vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2264,11 +2304,11 @@ define void @test_vsuxseg6_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg6_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2281,11 +2321,11 @@ define void @test_vsuxseg6_mask_nxv1i32_nxv1i32( %val, i32* %b ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2301,11 +2341,11 @@ define void @test_vsuxseg6_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg6_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2318,11 +2358,11 @@ define void @test_vsuxseg6_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2338,11 +2378,11 @@ define void @test_vsuxseg6_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsuxseg6_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2355,11 +2395,11 @@ define void @test_vsuxseg6_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; 
CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2375,12 +2415,12 @@ define void @test_vsuxseg7_nxv1i32_nxv1i64( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg7_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2393,12 +2433,12 @@ define void @test_vsuxseg7_mask_nxv1i32_nxv1i64( %val, i32* %b ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2414,12 +2454,12 @@ define void @test_vsuxseg7_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg7_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2432,12 +2472,12 @@ define void @test_vsuxseg7_mask_nxv1i32_nxv1i32( %val, i32* %b ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2453,12 +2493,12 @@ define void @test_vsuxseg7_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg7_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: 
vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2471,12 +2511,12 @@ define void @test_vsuxseg7_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2492,12 +2532,12 @@ define void @test_vsuxseg7_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsuxseg7_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2510,12 +2550,12 @@ define void @test_vsuxseg7_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2531,13 +2571,13 @@ define void @test_vsuxseg8_nxv1i32_nxv1i64( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg8_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2550,13 +2590,13 @@ define void @test_vsuxseg8_mask_nxv1i32_nxv1i64( %val, i32* %b ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 
+; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2572,13 +2612,13 @@ define void @test_vsuxseg8_nxv1i32_nxv1i32( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg8_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2591,13 +2631,13 @@ define void @test_vsuxseg8_mask_nxv1i32_nxv1i32( %val, i32* %b ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2613,13 +2653,13 @@ define void @test_vsuxseg8_nxv1i32_nxv1i16( %val, i32* %base, ; CHECK-LABEL: test_vsuxseg8_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2632,13 +2672,13 @@ define void @test_vsuxseg8_mask_nxv1i32_nxv1i16( %val, i32* %b ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2654,13 +2694,13 @@ define void @test_vsuxseg8_nxv1i32_nxv1i8( %val, i32* %base, < ; CHECK-LABEL: test_vsuxseg8_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; 
CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -2673,13 +2713,13 @@ define void @test_vsuxseg8_mask_nxv1i32_nxv1i8( %val, i32* %ba ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -2694,6 +2734,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i16(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -2707,6 +2748,7 @@ entry: define void @test_vsuxseg2_mask_nxv8i16_nxv8i16( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -2723,6 +2765,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i8(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -2736,6 +2779,7 @@ entry: define void @test_vsuxseg2_mask_nxv8i16_nxv8i8( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -2752,6 +2796,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i64(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v16 @@ -2764,6 +2809,7 @@ entry: define void @test_vsuxseg2_mask_nxv8i16_nxv8i64( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v16, v0.t @@ -2779,6 +2825,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i32(, %val, 
i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12 @@ -2791,6 +2838,7 @@ entry: define void @test_vsuxseg2_mask_nxv8i16_nxv8i32( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12, v0.t @@ -2807,8 +2855,8 @@ define void @test_vsuxseg3_nxv8i16_nxv8i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg3_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -2821,8 +2869,8 @@ define void @test_vsuxseg3_mask_nxv8i16_nxv8i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg3_mask_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -2838,8 +2886,8 @@ define void @test_vsuxseg3_nxv8i16_nxv8i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg3_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -2852,8 +2900,8 @@ define void @test_vsuxseg3_mask_nxv8i16_nxv8i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg3_mask_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -2868,6 +2916,7 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i64(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -2881,6 +2930,7 @@ entry: define void @test_vsuxseg3_mask_nxv8i16_nxv8i64( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_mask_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -2897,11 +2947,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i32(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, 
mu -; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16 +; CHECK-NEXT: vsuxseg3ei32.v v16, (a0), v12 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i32( %val, %val, %val, i16* %base, %index, i64 %vl) @@ -2911,11 +2961,11 @@ entry: define void @test_vsuxseg3_mask_nxv8i16_nxv8i32( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_mask_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16, v0.t +; CHECK-NEXT: vsuxseg3ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i32( %val, %val, %val, i16* %base, %index, %mask, i64 %vl) @@ -2929,9 +2979,9 @@ define void @test_vsuxseg4_nxv8i16_nxv8i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg4_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -2944,9 +2994,9 @@ define void @test_vsuxseg4_mask_nxv8i16_nxv8i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg4_mask_nxv8i16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -2962,9 +3012,9 @@ define void @test_vsuxseg4_nxv8i16_nxv8i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg4_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -2977,9 +3027,9 @@ define void @test_vsuxseg4_mask_nxv8i16_nxv8i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg4_mask_nxv8i16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -2994,6 +3044,7 @@ declare void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i64(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg4_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -3008,6 +3059,7 @@ entry: define void @test_vsuxseg4_mask_nxv8i16_nxv8i64( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg4_mask_nxv8i16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; 
CHECK-NEXT: vmv2r.v v14, v8 @@ -3026,9 +3078,9 @@ define void @test_vsuxseg4_nxv8i16_nxv8i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg4_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 -; CHECK-NEXT: vmv2r.v v20, v8 -; CHECK-NEXT: vmv2r.v v22, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -3041,9 +3093,9 @@ define void @test_vsuxseg4_mask_nxv8i16_nxv8i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg4_mask_nxv8i16_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 -; CHECK-NEXT: vmv2r.v v20, v8 -; CHECK-NEXT: vmv2r.v v22, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -3058,6 +3110,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i32(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10 @@ -3070,6 +3123,7 @@ entry: define void @test_vsuxseg2_mask_nxv4i8_nxv4i32( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t @@ -3085,6 +3139,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i8(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -3098,6 +3153,7 @@ entry: define void @test_vsuxseg2_mask_nxv4i8_nxv4i8( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -3114,6 +3170,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v12 @@ -3126,6 +3183,7 @@ entry: define void @test_vsuxseg2_mask_nxv4i8_nxv4i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v12, v0.t @@ -3141,6 +3199,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i16(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, 
e8, mf2, ta, mu @@ -3154,6 +3213,7 @@ entry: define void @test_vsuxseg2_mask_nxv4i8_nxv4i16( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -3170,11 +3230,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i32(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12 +; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i32( %val, %val, %val, i8* %base, %index, i64 %vl) @@ -3184,11 +3244,11 @@ entry: define void @test_vsuxseg3_mask_nxv4i8_nxv4i32( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12, v0.t +; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i32( %val, %val, %val, i8* %base, %index, %mask, i64 %vl) @@ -3202,8 +3262,8 @@ define void @test_vsuxseg3_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3232,6 +3292,7 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -3245,6 +3306,7 @@ entry: define void @test_vsuxseg3_mask_nxv4i8_nxv4i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu @@ -3262,8 +3324,8 @@ define void @test_vsuxseg3_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3293,9 +3355,9 @@ define void @test_vsuxseg4_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v 
v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -3326,9 +3388,9 @@ define void @test_vsuxseg4_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3358,6 +3420,7 @@ declare void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg4_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3372,6 +3435,7 @@ entry: define void @test_vsuxseg4_mask_nxv4i8_nxv4i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -3390,9 +3454,9 @@ define void @test_vsuxseg4_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3423,10 +3487,10 @@ define void @test_vsuxseg5_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg5_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -3458,10 +3522,10 @@ define void @test_vsuxseg5_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg5_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3492,13 +3556,13 @@ declare void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg5_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: 
vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vsuxseg5ei64.v v8, (a0), v16 +; CHECK-NEXT: vsuxseg5ei64.v v16, (a0), v12 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i64( %val, %val, %val, %val, %val, i8* %base, %index, i64 %vl) @@ -3508,13 +3572,13 @@ entry: define void @test_vsuxseg5_mask_nxv4i8_nxv4i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg5_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu -; CHECK-NEXT: vsuxseg5ei64.v v8, (a0), v16, v0.t +; CHECK-NEXT: vsuxseg5ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i64( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl) @@ -3528,10 +3592,10 @@ define void @test_vsuxseg5_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg5_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3563,11 +3627,11 @@ define void @test_vsuxseg6_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg6_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -3600,11 +3664,11 @@ define void @test_vsuxseg6_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg6_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3637,11 +3701,11 @@ define void @test_vsuxseg6_nxv4i8_nxv4i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg6_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; 
CHECK-NEXT: vsuxseg6ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -3674,11 +3738,11 @@ define void @test_vsuxseg6_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg6_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3711,12 +3775,12 @@ define void @test_vsuxseg7_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg7_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -3750,12 +3814,12 @@ define void @test_vsuxseg7_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg7_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3789,12 +3853,12 @@ define void @test_vsuxseg7_nxv4i8_nxv4i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg7_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -3828,12 +3892,12 @@ define void @test_vsuxseg7_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg7_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3867,13 +3931,13 @@ define void 
@test_vsuxseg8_nxv4i8_nxv4i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg8_mask_nxv4i8_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -3908,13 +3972,13 @@ define void @test_vsuxseg8_nxv4i8_nxv4i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg8_mask_nxv4i8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -3949,13 +4013,13 @@ define void @test_vsuxseg8_nxv4i8_nxv4i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg8_mask_nxv4i8_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -3990,13 +4054,13 @@ define void @test_vsuxseg8_nxv4i8_nxv4i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg8_mask_nxv4i8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -4030,6 +4094,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i64(, %val, i16* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1i16_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu @@ -4043,6 +4108,7 @@ entry: define void @test_vsuxseg2_mask_nxv1i16_nxv1i64( %val, i16* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i16_nxv1i64: ; 
CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -4059,6 +4125,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i32(,
 define void @test_vsuxseg2_nxv1i16_nxv1i32( %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -4072,6 +4139,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv1i16_nxv1i32( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -4088,6 +4156,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i16(,
 define void @test_vsuxseg2_nxv1i16_nxv1i16( %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -4101,6 +4170,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv1i16_nxv1i16( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -4117,6 +4187,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i8(,
 define void @test_vsuxseg2_nxv1i16_nxv1i8( %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -4130,6 +4201,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv1i16_nxv1i8( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -4147,8 +4219,8 @@ define void @test_vsuxseg3_nxv1i16_nxv1i64( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg3_nxv1i16_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg3ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4161,8 +4233,8 @@ define void @test_vsuxseg3_mask_nxv1i16_nxv1i64( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i16_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg3ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4178,8 +4250,8 @@ define void @test_vsuxseg3_nxv1i16_nxv1i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg3_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4192,8 +4264,8 @@ define void @test_vsuxseg3_mask_nxv1i16_nxv1i32( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4209,8 +4281,8 @@ define void @test_vsuxseg3_nxv1i16_nxv1i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg3_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4223,8 +4295,8 @@ define void @test_vsuxseg3_mask_nxv1i16_nxv1i16( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4240,8 +4312,8 @@ define void @test_vsuxseg3_nxv1i16_nxv1i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsuxseg3_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4254,8 +4326,8 @@ define void @test_vsuxseg3_mask_nxv1i16_nxv1i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4271,9 +4343,9 @@ define void @test_vsuxseg4_nxv1i16_nxv1i64( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv1i16_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg4ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4286,9 +4358,9 @@ define void @test_vsuxseg4_mask_nxv1i16_nxv1i64( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i16_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg4ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4304,9 +4376,9 @@ define void @test_vsuxseg4_nxv1i16_nxv1i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4319,9 +4391,9 @@ define void @test_vsuxseg4_mask_nxv1i16_nxv1i32( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4337,9 +4409,9 @@ define void @test_vsuxseg4_nxv1i16_nxv1i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4352,9 +4424,9 @@ define void @test_vsuxseg4_mask_nxv1i16_nxv1i16( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4370,9 +4442,9 @@ define void @test_vsuxseg4_nxv1i16_nxv1i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsuxseg4_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4385,9 +4457,9 @@ define void @test_vsuxseg4_mask_nxv1i16_nxv1i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4403,10 +4475,10 @@ define void @test_vsuxseg5_nxv1i16_nxv1i64( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg5_nxv1i16_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg5ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4419,10 +4491,10 @@ define void @test_vsuxseg5_mask_nxv1i16_nxv1i64( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i16_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg5ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4438,10 +4510,10 @@ define void @test_vsuxseg5_nxv1i16_nxv1i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg5_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4454,10 +4526,10 @@ define void @test_vsuxseg5_mask_nxv1i16_nxv1i32( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4473,10 +4545,10 @@ define void @test_vsuxseg5_nxv1i16_nxv1i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg5_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4489,10 +4561,10 @@ define void @test_vsuxseg5_mask_nxv1i16_nxv1i16( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4508,10 +4580,10 @@ define void @test_vsuxseg5_nxv1i16_nxv1i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsuxseg5_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4524,10 +4596,10 @@ define void @test_vsuxseg5_mask_nxv1i16_nxv1i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4543,11 +4615,11 @@ define void @test_vsuxseg6_nxv1i16_nxv1i64( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg6_nxv1i16_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg6ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4560,11 +4632,11 @@ define void @test_vsuxseg6_mask_nxv1i16_nxv1i64( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i16_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg6ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4580,11 +4652,11 @@ define void @test_vsuxseg6_nxv1i16_nxv1i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg6_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4597,11 +4669,11 @@ define void @test_vsuxseg6_mask_nxv1i16_nxv1i32( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4617,11 +4689,11 @@ define void @test_vsuxseg6_nxv1i16_nxv1i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg6_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4634,11 +4706,11 @@ define void @test_vsuxseg6_mask_nxv1i16_nxv1i16( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i16_nxv1i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4654,11 +4726,11 @@ define void @test_vsuxseg6_nxv1i16_nxv1i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsuxseg6_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4671,11 +4743,11 @@ define void @test_vsuxseg6_mask_nxv1i16_nxv1i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i16_nxv1i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4691,12 +4763,12 @@ define void @test_vsuxseg7_nxv1i16_nxv1i64( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg7_nxv1i16_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg7ei64.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4709,12 +4781,12 @@ define void @test_vsuxseg7_mask_nxv1i16_nxv1i64( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i16_nxv1i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg7ei64.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -4730,12 +4802,12 @@ define void @test_vsuxseg7_nxv1i16_nxv1i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg7_nxv1i16_nxv1i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -4748,12 +4820,12 @@ define void @test_vsuxseg7_mask_nxv1i16_nxv1i32( %val, i16* %b
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5201,8 +5281,8 @@ define void @test_vsuxseg3_mask_nxv2i32_nxv2i16( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5217,11 +5297,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i64(,
 define void @test_vsuxseg3_nxv2i32_nxv2i64( %val, i32* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vsuxseg3ei64.v v8, (a0), v12
+; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10
 ; CHECK-NEXT: ret
 entry:
   tail call void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i64( %val, %val, %val, i32* %base, %index, i64 %vl)
@@ -5231,11 +5311,11 @@ entry:
 define void @test_vsuxseg3_mask_nxv2i32_nxv2i64( %val, i32* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vsuxseg3ei64.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
 entry:
   tail call void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i64( %val, %val, %val, i32* %base, %index, %mask, i64 %vl)
@@ -5249,9 +5329,9 @@ define void @test_vsuxseg4_nxv2i32_nxv2i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5264,9 +5344,9 @@ define void @test_vsuxseg4_mask_nxv2i32_nxv2i32( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5282,9 +5362,9 @@ define void @test_vsuxseg4_nxv2i32_nxv2i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsuxseg4_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5297,9 +5377,9 @@ define void @test_vsuxseg4_mask_nxv2i32_nxv2i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5315,9 +5395,9 @@ define void @test_vsuxseg4_nxv2i32_nxv2i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5330,9 +5410,9 @@ define void @test_vsuxseg4_mask_nxv2i32_nxv2i16( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5348,9 +5428,9 @@ define void @test_vsuxseg4_nxv2i32_nxv2i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei64.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -5363,9 +5443,9 @@ define void @test_vsuxseg4_mask_nxv2i32_nxv2i64( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei64.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -5381,10 +5461,10 @@ define void @test_vsuxseg5_nxv2i32_nxv2i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg5_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5397,10 +5477,10 @@ define void @test_vsuxseg5_mask_nxv2i32_nxv2i32( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5416,10 +5496,10 @@ define void @test_vsuxseg5_nxv2i32_nxv2i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsuxseg5_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5432,10 +5512,10 @@ define void @test_vsuxseg5_mask_nxv2i32_nxv2i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5451,10 +5531,10 @@ define void @test_vsuxseg5_nxv2i32_nxv2i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg5_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5467,10 +5547,10 @@ define void @test_vsuxseg5_mask_nxv2i32_nxv2i16( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5486,10 +5566,10 @@ define void @test_vsuxseg5_nxv2i32_nxv2i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg5_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei64.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -5502,10 +5582,10 @@ define void @test_vsuxseg5_mask_nxv2i32_nxv2i64( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei64.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -5521,11 +5601,11 @@ define void @test_vsuxseg6_nxv2i32_nxv2i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg6_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5538,11 +5618,11 @@ define void @test_vsuxseg6_mask_nxv2i32_nxv2i32( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5558,11 +5638,11 @@ define void @test_vsuxseg6_nxv2i32_nxv2i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsuxseg6_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5575,11 +5655,11 @@ define void @test_vsuxseg6_mask_nxv2i32_nxv2i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5595,11 +5675,11 @@ define void @test_vsuxseg6_nxv2i32_nxv2i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg6_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5612,11 +5692,11 @@ define void @test_vsuxseg6_mask_nxv2i32_nxv2i16( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5632,11 +5712,11 @@ define void @test_vsuxseg6_nxv2i32_nxv2i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg6_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei64.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -5649,11 +5729,11 @@ define void @test_vsuxseg6_mask_nxv2i32_nxv2i64( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei64.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -5669,12 +5749,12 @@ define void @test_vsuxseg7_nxv2i32_nxv2i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg7_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5687,12 +5767,12 @@ define void @test_vsuxseg7_mask_nxv2i32_nxv2i32( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5708,12 +5788,12 @@ define void @test_vsuxseg7_nxv2i32_nxv2i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsuxseg7_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5726,12 +5806,12 @@ define void @test_vsuxseg7_mask_nxv2i32_nxv2i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5747,12 +5827,12 @@ define void @test_vsuxseg7_nxv2i32_nxv2i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg7_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5765,12 +5845,12 @@ define void @test_vsuxseg7_mask_nxv2i32_nxv2i16( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5786,12 +5866,12 @@ define void @test_vsuxseg7_nxv2i32_nxv2i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg7_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei64.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -5804,12 +5884,12 @@ define void @test_vsuxseg7_mask_nxv2i32_nxv2i64( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg7ei64.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -5825,13 +5905,13 @@ define void @test_vsuxseg8_nxv2i32_nxv2i32( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg8_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5844,13 +5924,13 @@ define void @test_vsuxseg8_mask_nxv2i32_nxv2i32( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i32_nxv2i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5866,13 +5946,13 @@ define void @test_vsuxseg8_nxv2i32_nxv2i8( %val, i32* %base, <
 ; CHECK-LABEL: test_vsuxseg8_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5885,13 +5965,13 @@ define void @test_vsuxseg8_mask_nxv2i32_nxv2i8( %val, i32* %ba
 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i32_nxv2i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5907,13 +5987,13 @@ define void @test_vsuxseg8_nxv2i32_nxv2i16( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg8_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -5926,13 +6006,13 @@ define void @test_vsuxseg8_mask_nxv2i32_nxv2i16( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i32_nxv2i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -5948,13 +6028,13 @@ define void @test_vsuxseg8_nxv2i32_nxv2i64( %val, i32* %base,
 ; CHECK-LABEL: test_vsuxseg8_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei64.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -5967,13 +6047,13 @@ define void @test_vsuxseg8_mask_nxv2i32_nxv2i64( %val, i32* %b
 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i32_nxv2i64:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei64.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -5988,6 +6068,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i16(,
 define void @test_vsuxseg2_nxv8i8_nxv8i16( %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
@@ -6000,6 +6081,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv8i8_nxv8i16( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
@@ -6015,6 +6097,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i8(,
 define void @test_vsuxseg2_nxv8i8_nxv8i8( %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -6028,6 +6111,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv8i8_nxv8i8( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -6044,6 +6128,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i64(,
 define void @test_vsuxseg2_nxv8i8_nxv8i64( %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v16
@@ -6056,6 +6141,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v16, v0.t
@@ -6071,6 +6157,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i32(,
 define void @test_vsuxseg2_nxv8i8_nxv8i32( %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12
@@ -6083,6 +6170,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12, v0.t
@@ -6098,11 +6186,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i16(,
 define void @test_vsuxseg3_nxv8i8_nxv8i16( %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsuxseg3ei16.v v8, (a0), v12
+; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10
 ; CHECK-NEXT: ret
 entry:
   tail call void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i16( %val, %val, %val, i8* %base, %index, i64 %vl)
@@ -6112,11 +6200,11 @@ entry:
 define void @test_vsuxseg3_mask_nxv8i8_nxv8i16( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
-; CHECK-NEXT: vsuxseg3ei16.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
 entry:
   tail call void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i16( %val, %val, %val, i8* %base, %index, %mask, i64 %vl)
@@ -6130,8 +6218,8 @@ define void @test_vsuxseg3_nxv8i8_nxv8i8( %val, i8* %base, %val, i8* %base,
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -6160,6 +6248,7 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i64(,
 define void @test_vsuxseg3_nxv8i8_nxv8i64( %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -6173,6 +6262,7 @@ entry:
 define void @test_vsuxseg3_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -6189,6 +6279,7 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i32(,
 define void @test_vsuxseg3_nxv8i8_nxv8i32( %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -6202,6 +6293,7 @@ entry:
 define void @test_vsuxseg3_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
@@ -6219,9 +6311,9 @@ define void @test_vsuxseg4_nxv8i8_nxv8i16( %val, i8* %base, %val, i8* %base
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -6252,9 +6344,9 @@ define void @test_vsuxseg4_nxv8i8_nxv8i8( %val, i8* %base, %val, i8* %base,
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv8i8_nxv8i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -6284,6 +6376,7 @@ declare void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i64(,
 define void @test_vsuxseg4_nxv8i8_nxv8i64( %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg4_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6298,6 +6391,7 @@ entry:
 define void @test_vsuxseg4_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv8i8_nxv8i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6315,6 +6409,7 @@ declare void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i32(,
 define void @test_vsuxseg4_nxv8i8_nxv8i32( %val, i8* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg4_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6329,6 +6424,7 @@ entry:
 define void @test_vsuxseg4_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6347,10 +6443,10 @@ define void @test_vsuxseg5_nxv8i8_nxv8i16( %val, i8* %base, %val, i8* %base
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv8i8_nxv8i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+;
CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -6382,10 +6478,10 @@ define void @test_vsuxseg5_nxv8i8_nxv8i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg5_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6416,6 +6512,7 @@ declare void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg5_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -6431,6 +6528,7 @@ entry: define void @test_vsuxseg5_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg5_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -6449,13 +6547,13 @@ declare void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i32(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg5_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu -; CHECK-NEXT: vsuxseg5ei32.v v8, (a0), v16 +; CHECK-NEXT: vsuxseg5ei32.v v16, (a0), v12 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, i8* %base, %index, i64 %vl) @@ -6465,13 +6563,13 @@ entry: define void @test_vsuxseg5_mask_nxv8i8_nxv8i32( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg5_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu -; CHECK-NEXT: vsuxseg5ei32.v v8, (a0), v16, v0.t +; CHECK-NEXT: vsuxseg5ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i32( %val, %val, %val, %val, %val, i8* %base, %index, %mask, i64 %vl) @@ -6485,11 +6583,11 @@ define void @test_vsuxseg6_nxv8i8_nxv8i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg6_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; 
CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -6522,11 +6620,11 @@ define void @test_vsuxseg6_nxv8i8_nxv8i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg6_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6558,6 +6656,7 @@ declare void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg6_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -6574,6 +6673,7 @@ entry: define void @test_vsuxseg6_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg6_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -6594,11 +6694,11 @@ define void @test_vsuxseg6_nxv8i8_nxv8i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg6_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -6631,12 +6731,12 @@ define void @test_vsuxseg7_nxv8i8_nxv8i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg7_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -6670,12 +6770,12 @@ define void @test_vsuxseg7_nxv8i8_nxv8i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg7_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6708,6 +6808,7 @@ declare void 
@llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg7_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -6725,6 +6826,7 @@ entry: define void @test_vsuxseg7_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg7_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -6746,12 +6848,12 @@ define void @test_vsuxseg7_nxv8i8_nxv8i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg7_mask_nxv8i8_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -6785,13 +6887,13 @@ define void @test_vsuxseg8_nxv8i8_nxv8i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg8_mask_nxv8i8_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -6826,13 +6928,13 @@ define void @test_vsuxseg8_nxv8i8_nxv8i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg8_mask_nxv8i8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -6866,6 +6968,7 @@ declare void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg8_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -6884,6 +6987,7 @@ entry: define void @test_vsuxseg8_mask_nxv8i8_nxv8i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg8_mask_nxv8i8_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11_v12_v13_v14_v15 ; CHECK-NEXT: vmv1r.v v9, v8 ; 
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -6906,13 +7010,13 @@ define void @test_vsuxseg8_nxv8i8_nxv8i32( %val, i8* %base, %val, i8* %base
 ; CHECK-LABEL: test_vsuxseg8_mask_nxv8i8_nxv8i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
-; CHECK-NEXT: vmv1r.v v20, v8
-; CHECK-NEXT: vmv1r.v v21, v8
-; CHECK-NEXT: vmv1r.v v22, v8
-; CHECK-NEXT: vmv1r.v v23, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
+; CHECK-NEXT: vmv1r.v v21, v16
+; CHECK-NEXT: vmv1r.v v22, v16
+; CHECK-NEXT: vmv1r.v v23, v16
 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
 ; CHECK-NEXT: vsuxseg8ei32.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
@@ -6946,6 +7050,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i64.nxv4i32(, %val, i64* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4i64_nxv4i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -6959,6 +7064,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4i64_nxv4i32( %val, i64* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i64_nxv4i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv2r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -6975,6 +7081,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i64.nxv4i8(, %val, i64* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4i64_nxv4i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -6988,6 +7095,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4i64_nxv4i8( %val, i64* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i64_nxv4i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -7004,6 +7112,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i64.nxv4i64(, %val, i64* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4i64_nxv4i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -7017,6 +7126,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4i64_nxv4i64( %val, i64* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i64_nxv4i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv4r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -7033,6 +7143,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i64.nxv4i16(, %val, i64* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4i64_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -7046,6 +7157,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4i64_nxv4i16( %val, i64* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i64_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vmv4r.v v12, v8
 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
@@ -7062,6 +7174,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i32(, %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4i16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
@@ -7074,6 +7187,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4i16_nxv4i32( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
@@ -7089,6 +7203,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i8(, %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4i16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -7102,6 +7217,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4i16_nxv4i8( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -7118,6 +7234,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i64(, %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4i16_nxv4i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v12
@@ -7130,6 +7247,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4i16_nxv4i64( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i16_nxv4i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v12, v0.t
@@ -7145,6 +7263,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i16(, %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4i16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -7158,6 +7277,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4i16_nxv4i16( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
 ; CHECK-NEXT: vmv1r.v v10, v9
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -7174,11 +7294,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i32(, %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_nxv4i16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
-; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12
+; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i32( %val, %val, %val, i16* %base, %index, i64 %vl)
@@ -7188,11 +7308,11 @@ entry:
 define void @test_vsuxseg3_mask_nxv4i16_nxv4i32( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
-; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i32( %val, %val, %val, i16* %base, %index, %mask, i64 %vl)
@@ -7206,8 +7326,8 @@ define void @test_vsuxseg3_nxv4i16_nxv4i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsuxseg3_nxv4i16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -7220,8 +7340,8 @@ define void @test_vsuxseg3_mask_nxv4i16_nxv4i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -7236,6 +7356,7 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i64(, %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_nxv4i16_nxv4i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -7249,6 +7370,7 @@ entry:
 define void @test_vsuxseg3_mask_nxv4i16_nxv4i64( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i16_nxv4i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
@@ -7266,8 +7388,8 @@ define void @test_vsuxseg3_nxv4i16_nxv4i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg3_nxv4i16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -7280,8 +7402,8 @@ define void @test_vsuxseg3_mask_nxv4i16_nxv4i16( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -7297,9 +7419,9 @@ define void @test_vsuxseg4_nxv4i16_nxv4i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv4i16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -7312,9 +7434,9 @@ define void @test_vsuxseg4_mask_nxv4i16_nxv4i32( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -7330,9 +7452,9 @@ define void @test_vsuxseg4_nxv4i16_nxv4i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsuxseg4_nxv4i16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -7345,9 +7467,9 @@ define void @test_vsuxseg4_mask_nxv4i16_nxv4i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -7362,6 +7484,7 @@ declare void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i64(, %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg4_nxv4i16_nxv4i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -7376,6 +7499,7 @@ entry:
 define void @test_vsuxseg4_mask_nxv4i16_nxv4i64( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i16_nxv4i64:
 ; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11
 ; CHECK-NEXT: vmv1r.v v9, v8
 ; CHECK-NEXT: vmv1r.v v10, v8
 ; CHECK-NEXT: vmv1r.v v11, v8
@@ -7394,9 +7518,9 @@ define void @test_vsuxseg4_nxv4i16_nxv4i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg4_nxv4i16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -7409,9 +7533,9 @@ define void @test_vsuxseg4_mask_nxv4i16_nxv4i16( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -7427,10 +7551,10 @@ define void @test_vsuxseg5_nxv4i16_nxv4i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg5_nxv4i16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -7443,10 +7567,10 @@ define void @test_vsuxseg5_mask_nxv4i16_nxv4i32( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv4i16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -7462,10 +7586,10 @@ define void @test_vsuxseg5_nxv4i16_nxv4i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsuxseg5_nxv4i16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -7478,10 +7602,10 @@ define void @test_vsuxseg5_mask_nxv4i16_nxv4i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv4i16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -7496,13 +7620,13 @@ declare void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i64(, %val, i16* %base, %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg5_nxv4i16_nxv4i64:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
-; CHECK-NEXT: vsuxseg5ei64.v v8, (a0), v16
+; CHECK-NEXT: vsuxseg5ei64.v v16, (a0), v12
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i64( %val, %val, %val, %val, %val, i16* %base, %index, i64 %vl)
@@ -7512,13 +7636,13 @@ entry:
 define void @test_vsuxseg5_mask_nxv4i16_nxv4i64( %val, i16* %base, %index, %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv4i16_nxv4i64:
 ; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv4r.v v16, v12
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v17, v16
+; CHECK-NEXT: vmv1r.v v18, v16
+; CHECK-NEXT: vmv1r.v v19, v16
+; CHECK-NEXT: vmv1r.v v20, v16
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
-; CHECK-NEXT: vsuxseg5ei64.v v8, (a0), v16, v0.t
+; CHECK-NEXT: vsuxseg5ei64.v v16, (a0), v12, v0.t
 ; CHECK-NEXT: ret
 entry:
 tail call void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i64( %val, %val, %val, %val, %val, i16* %base, %index, %mask, i64 %vl)
@@ -7532,10 +7656,10 @@ define void @test_vsuxseg5_nxv4i16_nxv4i16( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg5_nxv4i16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -7548,10 +7672,10 @@ define void @test_vsuxseg5_mask_nxv4i16_nxv4i16( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg5_mask_nxv4i16_nxv4i16:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
 ; CHECK-NEXT: ret
@@ -7567,11 +7691,11 @@ define void @test_vsuxseg6_nxv4i16_nxv4i32( %val, i16* %base,
 ; CHECK-LABEL: test_vsuxseg6_nxv4i16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10
 ; CHECK-NEXT: ret
@@ -7584,11 +7708,11 @@ define void @test_vsuxseg6_mask_nxv4i16_nxv4i32( %val, i16* %b
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv4i16_nxv4i32:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT: ret
@@ -7604,11 +7728,11 @@ define void @test_vsuxseg6_nxv4i16_nxv4i8( %val, i16* %base, <
 ; CHECK-LABEL: test_vsuxseg6_nxv4i16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
 ; CHECK-NEXT: ret
@@ -7621,11 +7745,11 @@ define void @test_vsuxseg6_mask_nxv4i16_nxv4i8( %val, i16* %ba
 ; CHECK-LABEL: test_vsuxseg6_mask_nxv4i16_nxv4i8:
 ; CHECK: # %bb.0: # %entry
 ; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7641,11 +7765,11 @@ define void @test_vsuxseg6_nxv4i16_nxv4i64( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg6_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei64.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -7658,11 +7782,11 @@ define void @test_vsuxseg6_mask_nxv4i16_nxv4i64( %val, i16* %b ; CHECK-LABEL: test_vsuxseg6_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -7678,11 +7802,11 @@ define void @test_vsuxseg6_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg6_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7695,11 +7819,11 @@ define void @test_vsuxseg6_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg6_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7715,12 +7839,12 @@ define void @test_vsuxseg7_nxv4i16_nxv4i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg7_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7733,12 +7857,12 @@ define void 
@test_vsuxseg7_mask_nxv4i16_nxv4i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg7_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7754,12 +7878,12 @@ define void @test_vsuxseg7_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg7_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7772,12 +7896,12 @@ define void @test_vsuxseg7_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg7_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7793,12 +7917,12 @@ define void @test_vsuxseg7_nxv4i16_nxv4i64( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg7_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei64.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -7811,12 +7935,12 @@ define void @test_vsuxseg7_mask_nxv4i16_nxv4i64( %val, i16* %b ; CHECK-LABEL: test_vsuxseg7_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -7832,12 +7956,12 @@ define void @test_vsuxseg7_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg7_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: 
vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7850,12 +7974,12 @@ define void @test_vsuxseg7_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg7_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7871,13 +7995,13 @@ define void @test_vsuxseg8_nxv4i16_nxv4i32( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg8_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -7890,13 +8014,13 @@ define void @test_vsuxseg8_mask_nxv4i16_nxv4i32( %val, i16* %b ; CHECK-LABEL: test_vsuxseg8_mask_nxv4i16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -7912,13 +8036,13 @@ define void @test_vsuxseg8_nxv4i16_nxv4i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg8_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -7931,13 +8055,13 @@ define void @test_vsuxseg8_mask_nxv4i16_nxv4i8( %val, i16* %ba ; CHECK-LABEL: 
test_vsuxseg8_mask_nxv4i16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -7953,13 +8077,13 @@ define void @test_vsuxseg8_nxv4i16_nxv4i64( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg8_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -7972,13 +8096,13 @@ define void @test_vsuxseg8_mask_nxv4i16_nxv4i64( %val, i16* %b ; CHECK-LABEL: test_vsuxseg8_mask_nxv4i16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -7994,13 +8118,13 @@ define void @test_vsuxseg8_nxv4i16_nxv4i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg8_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -8013,13 +8137,13 @@ define void @test_vsuxseg8_mask_nxv4i16_nxv4i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg8_mask_nxv4i16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, 
ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8034,6 +8158,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i64(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8047,6 +8172,7 @@ entry: define void @test_vsuxseg2_mask_nxv1i8_nxv1i64( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8063,6 +8189,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i32(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8076,6 +8203,7 @@ entry: define void @test_vsuxseg2_mask_nxv1i8_nxv1i32( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8092,6 +8220,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i16(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8105,6 +8234,7 @@ entry: define void @test_vsuxseg2_mask_nxv1i8_nxv1i16( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8121,6 +8251,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i8(, %val, i8* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8134,6 +8265,7 @@ entry: define void @test_vsuxseg2_mask_nxv1i8_nxv1i8( %val, i8* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu @@ -8151,8 +8283,8 @@ define void @test_vsuxseg3_nxv1i8_nxv1i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg3ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8182,8 +8314,8 @@ define void @test_vsuxseg3_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: 
test_vsuxseg3_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8213,8 +8345,8 @@ define void @test_vsuxseg3_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8244,8 +8376,8 @@ define void @test_vsuxseg3_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8275,9 +8407,9 @@ define void @test_vsuxseg4_nxv1i8_nxv1i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8308,9 +8440,9 @@ define void @test_vsuxseg4_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8341,9 +8473,9 @@ define void @test_vsuxseg4_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8374,9 +8506,9 @@ define void @test_vsuxseg4_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base, ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -8407,10 +8539,10 @@ define void @test_vsuxseg5_nxv1i8_nxv1i64( %val, i8* %base, %val, i8* %base ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, 
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg5ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8442,10 +8574,10 @@ define void @test_vsuxseg5_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i8_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8477,10 +8609,10 @@ define void @test_vsuxseg5_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i8_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8512,10 +8644,10 @@ define void @test_vsuxseg5_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i8_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8547,11 +8679,11 @@ define void @test_vsuxseg6_nxv1i8_nxv1i64( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i8_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg6ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8584,11 +8716,11 @@ define void @test_vsuxseg6_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i8_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8621,11 +8753,11 @@ define void @test_vsuxseg6_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i8_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8658,11 +8790,11 @@ define void @test_vsuxseg6_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i8_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8695,12 +8827,12 @@ define void @test_vsuxseg7_nxv1i8_nxv1i64( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i8_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg7ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8734,12 +8866,12 @@ define void @test_vsuxseg7_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i8_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8773,12 +8905,12 @@ define void @test_vsuxseg7_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i8_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8812,12 +8944,12 @@ define void @test_vsuxseg7_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i8_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8851,13 +8983,13 @@ define void @test_vsuxseg8_nxv1i8_nxv1i64( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg8_mask_nxv1i8_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg8ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8892,13 +9024,13 @@ define void @test_vsuxseg8_nxv1i8_nxv1i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg8_mask_nxv1i8_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8933,13 +9065,13 @@ define void @test_vsuxseg8_nxv1i8_nxv1i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg8_mask_nxv1i8_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -8974,13 +9106,13 @@ define void @test_vsuxseg8_nxv1i8_nxv1i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg8_mask_nxv1i8_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9014,6 +9146,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i32(, %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2i8_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
@@ -9027,6 +9160,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2i8_nxv2i32( %val, i8* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2i8_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
@@ -9043,6 +9177,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i8(, %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2i8_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
@@ -9056,6 +9191,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2i8_nxv2i8( %val, i8* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2i8_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
@@ -9072,6 +9208,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i16(, %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2i8_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
@@ -9085,6 +9222,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2i8_nxv2i16( %val, i8* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2i8_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
@@ -9101,6 +9239,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i64(, %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2i8_nxv2i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v10
@@ -9113,6 +9252,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2i8_nxv2i64( %val, i8* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2i8_nxv2i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v10, v0.t
@@ -9129,8 +9269,8 @@ define void @test_vsuxseg3_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg3_mask_nxv2i8_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9160,8 +9300,8 @@ define void @test_vsuxseg3_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg3_mask_nxv2i8_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9191,8 +9331,8 @@ define void @test_vsuxseg3_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg3_mask_nxv2i8_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9221,11 +9361,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i64(, %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv2i8_nxv2i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
-; CHECK-NEXT: vsuxseg3ei64.v v8, (a0), v12
+; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i64( %val, %val, %val, i8* %base, %index, i64 %vl)
@@ -9235,11 +9375,11 @@ entry:
define void @test_vsuxseg3_mask_nxv2i8_nxv2i64( %val, i8* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv2i8_nxv2i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
-; CHECK-NEXT: vsuxseg3ei64.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i64( %val, %val, %val, i8* %base, %index, %mask, i64 %vl)
@@ -9253,9 +9393,9 @@ define void @test_vsuxseg4_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i8_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9286,9 +9426,9 @@ define void @test_vsuxseg4_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i8_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9319,9 +9459,9 @@ define void @test_vsuxseg4_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i8_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9352,9 +9492,9 @@ define void @test_vsuxseg4_nxv2i8_nxv2i64( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i8_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -9385,10 +9525,10 @@ define void @test_vsuxseg5_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i8_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9420,10 +9560,10 @@ define void @test_vsuxseg5_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i8_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9455,10 +9595,10 @@ define void @test_vsuxseg5_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i8_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9490,10 +9630,10 @@ define void @test_vsuxseg5_nxv2i8_nxv2i64( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i8_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -9525,11 +9665,11 @@ define void @test_vsuxseg6_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i8_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9562,11 +9702,11 @@ define void @test_vsuxseg6_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i8_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9599,11 +9739,11 @@ define void @test_vsuxseg6_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i8_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9636,11 +9776,11 @@ define void @test_vsuxseg6_nxv2i8_nxv2i64( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i8_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -9673,12 +9813,12 @@ define void @test_vsuxseg7_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i8_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9712,12 +9852,12 @@ define void @test_vsuxseg7_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i8_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9751,12 +9891,12 @@ define void @test_vsuxseg7_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i8_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9790,12 +9930,12 @@ define void @test_vsuxseg7_nxv2i8_nxv2i64( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i8_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -9829,13 +9969,13 @@ define void @test_vsuxseg8_nxv2i8_nxv2i32( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg8_mask_nxv2i8_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9870,13 +10010,13 @@ define void @test_vsuxseg8_nxv2i8_nxv2i8( %val, i8* %base, %val, i8* %base,
; CHECK-LABEL: test_vsuxseg8_mask_nxv2i8_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9911,13 +10051,13 @@ define void @test_vsuxseg8_nxv2i8_nxv2i16( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg8_mask_nxv2i8_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -9952,13 +10092,13 @@ define void @test_vsuxseg8_nxv2i8_nxv2i64( %val, i8* %base, %val, i8* %base
; CHECK-LABEL: test_vsuxseg8_mask_nxv2i8_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -9992,6 +10132,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i16(, %val, i32* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i32_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -10005,6 +10146,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8i32_nxv8i16( %val, i32* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i32_nxv8i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -10021,6 +10163,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i8(, %val, i32* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i32_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -10034,6 +10177,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8i32_nxv8i8( %val, i32* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i32_nxv8i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -10050,6 +10194,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i64(, %val, i32* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i32_nxv8i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v16
@@ -10062,6 +10207,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8i32_nxv8i64( %val, i32* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i32_nxv8i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v16, v0.t
@@ -10077,6 +10223,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i32(, %val, i32* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i32_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -10090,6 +10237,7 @@ entry:
define void @test_vsuxseg2_mask_nxv8i32_nxv8i32( %val, i32* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i32_nxv8i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
@@ -10106,6 +10254,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv32i8.nxv32i16(, %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv32i8_nxv32i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16
@@ -10118,6 +10267,7 @@ entry:
define void @test_vsuxseg2_mask_nxv32i8_nxv32i16( %val, i8* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv32i8_nxv32i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16, v0.t
@@ -10133,6 +10283,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv32i8.nxv32i8(, %val, i8* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv32i8_nxv32i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu
@@ -10146,6 +10297,7 @@ entry:
define void @test_vsuxseg2_mask_nxv32i8_nxv32i8( %val, i8* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv32i8_nxv32i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu
@@ -10162,6 +10314,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i32(, %val, i16* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -10175,6 +10328,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2i16_nxv2i32( %val, i16* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -10191,6 +10345,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i8(, %val, i16* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -10204,6 +10359,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2i16_nxv2i8( %val, i16* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -10220,6 +10376,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i16(, %val, i16* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -10233,6 +10390,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2i16_nxv2i16( %val, i16* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
@@ -10249,6 +10407,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i64(, %val, i16* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v10
@@ -10261,6 +10420,7 @@ entry:
define void @test_vsuxseg2_mask_nxv2i16_nxv2i64( %val, i16* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v10, v0.t
@@ -10277,8 +10437,8 @@ define void @test_vsuxseg3_nxv2i16_nxv2i32( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg3_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10291,8 +10451,8 @@ define void @test_vsuxseg3_mask_nxv2i16_nxv2i32( %val, i16* %b
; CHECK-LABEL: test_vsuxseg3_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10308,8 +10468,8 @@ define void @test_vsuxseg3_nxv2i16_nxv2i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg3_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10322,8 +10482,8 @@ define void @test_vsuxseg3_mask_nxv2i16_nxv2i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg3_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10339,8 +10499,8 @@ define void @test_vsuxseg3_nxv2i16_nxv2i16( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg3_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10353,8 +10513,8 @@ define void @test_vsuxseg3_mask_nxv2i16_nxv2i16( %val, i16* %b
; CHECK-LABEL: test_vsuxseg3_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10369,11 +10529,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i64(, %val, i16* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
-; CHECK-NEXT: vsuxseg3ei64.v v8, (a0), v12
+; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i64( %val, %val, %val, i16* %base, %index, i64 %vl)
@@ -10383,11 +10543,11 @@ entry:
define void @test_vsuxseg3_mask_nxv2i16_nxv2i64( %val, i16* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
-; CHECK-NEXT: vsuxseg3ei64.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i64( %val, %val, %val, i16* %base, %index, %mask, i64 %vl)
@@ -10401,9 +10561,9 @@ define void @test_vsuxseg4_nxv2i16_nxv2i32( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg4_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10416,9 +10576,9 @@ define void @test_vsuxseg4_mask_nxv2i16_nxv2i32( %val, i16* %b
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10434,9 +10594,9 @@ define void @test_vsuxseg4_nxv2i16_nxv2i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg4_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10449,9 +10609,9 @@ define void @test_vsuxseg4_mask_nxv2i16_nxv2i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10467,9 +10627,9 @@ define void @test_vsuxseg4_nxv2i16_nxv2i16( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg4_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10482,9 +10642,9 @@ define void @test_vsuxseg4_mask_nxv2i16_nxv2i16( %val, i16* %b
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10500,9 +10660,9 @@ define void @test_vsuxseg4_nxv2i16_nxv2i64( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg4_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -10515,9 +10675,9 @@ define void @test_vsuxseg4_mask_nxv2i16_nxv2i64( %val, i16* %b
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -10533,10 +10693,10 @@ define void @test_vsuxseg5_nxv2i16_nxv2i32( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg5_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10549,10 +10709,10 @@ define void @test_vsuxseg5_mask_nxv2i16_nxv2i32( %val, i16* %b
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10568,10 +10728,10 @@ define void @test_vsuxseg5_nxv2i16_nxv2i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg5_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10584,10 +10744,10 @@ define void @test_vsuxseg5_mask_nxv2i16_nxv2i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10603,10 +10763,10 @@ define void @test_vsuxseg5_nxv2i16_nxv2i16( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg5_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10619,10 +10779,10 @@ define void @test_vsuxseg5_mask_nxv2i16_nxv2i16( %val, i16* %b
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10638,10 +10798,10 @@ define void @test_vsuxseg5_nxv2i16_nxv2i64( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg5_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -10654,10 +10814,10 @@ define void @test_vsuxseg5_mask_nxv2i16_nxv2i64( %val, i16* %b
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -10673,11 +10833,11 @@ define void @test_vsuxseg6_nxv2i16_nxv2i32( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg6_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10690,11 +10850,11 @@ define void @test_vsuxseg6_mask_nxv2i16_nxv2i32( %val, i16* %b
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10710,11 +10870,11 @@ define void @test_vsuxseg6_nxv2i16_nxv2i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg6_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10727,11 +10887,11 @@ define void @test_vsuxseg6_mask_nxv2i16_nxv2i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10747,11 +10907,11 @@ define void @test_vsuxseg6_nxv2i16_nxv2i16( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg6_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10764,11 +10924,11 @@ define void @test_vsuxseg6_mask_nxv2i16_nxv2i16( %val, i16* %b
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10784,11 +10944,11 @@ define void @test_vsuxseg6_nxv2i16_nxv2i64( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg6_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -10801,11 +10961,11 @@ define void @test_vsuxseg6_mask_nxv2i16_nxv2i64( %val, i16* %b
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg6ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -10821,12 +10981,12 @@ define void @test_vsuxseg7_nxv2i16_nxv2i32( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg7_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10839,12 +10999,12 @@ define void @test_vsuxseg7_mask_nxv2i16_nxv2i32( %val, i16* %b
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10860,12 +11020,12 @@ define void @test_vsuxseg7_nxv2i16_nxv2i8( %val, i16* %base, <
; CHECK-LABEL: test_vsuxseg7_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10878,12 +11038,12 @@ define void @test_vsuxseg7_mask_nxv2i16_nxv2i8( %val, i16* %ba
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i16_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10899,12 +11059,12 @@ define void @test_vsuxseg7_nxv2i16_nxv2i16( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg7_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10917,12 +11077,12 @@ define void @test_vsuxseg7_mask_nxv2i16_nxv2i16( %val, i16* %b
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i16_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -10938,12 +11098,12 @@ define void @test_vsuxseg7_nxv2i16_nxv2i64( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg7_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -10956,12 +11116,12 @@ define void @test_vsuxseg7_mask_nxv2i16_nxv2i64( %val, i16* %b
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i16_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg7ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -10977,13 +11137,13 @@ define void @test_vsuxseg8_nxv2i16_nxv2i32( %val, i16* %base,
; CHECK-LABEL: test_vsuxseg8_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -10996,13 +11156,13 @@ define void @test_vsuxseg8_mask_nxv2i16_nxv2i32( %val, i16* %b
; CHECK-LABEL: test_vsuxseg8_mask_nxv2i16_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11018,13 +11178,13 @@ define void @test_vsuxseg8_nxv2i16_nxv2i8( %val, i16* %base, < ; CHECK-LABEL: test_vsuxseg8_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11037,13 +11197,13 @@ define void @test_vsuxseg8_mask_nxv2i16_nxv2i8( %val, i16* %ba ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11059,13 +11219,13 @@ define void @test_vsuxseg8_nxv2i16_nxv2i16( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg8_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11078,13 +11238,13 @@ define void @test_vsuxseg8_mask_nxv2i16_nxv2i16( %val, i16* %b ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11100,13 +11260,13 @@ define void @test_vsuxseg8_nxv2i16_nxv2i64( %val, i16* %base, ; CHECK-LABEL: test_vsuxseg8_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; 
CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11119,13 +11279,13 @@ define void @test_vsuxseg8_mask_nxv2i16_nxv2i64( %val, i16* %b ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11140,6 +11300,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i64.nxv2i32(, %val, i64* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2i64_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -11153,6 +11314,7 @@ entry: define void @test_vsuxseg2_mask_nxv2i64_nxv2i32( %val, i64* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i64_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -11169,6 +11331,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i64.nxv2i8(, %val, i64* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2i64_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -11182,6 +11345,7 @@ entry: define void @test_vsuxseg2_mask_nxv2i64_nxv2i8( %val, i64* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i64_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -11198,6 +11362,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i64.nxv2i16(, %val, i64* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2i64_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -11211,6 +11376,7 @@ entry: define void @test_vsuxseg2_mask_nxv2i64_nxv2i16( %val, i64* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i64_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -11227,6 +11393,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2i64.nxv2i64(, %val, i64* %base, %index, i64 %vl) { ; 
CHECK-LABEL: test_vsuxseg2_nxv2i64_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -11240,6 +11407,7 @@ entry: define void @test_vsuxseg2_mask_nxv2i64_nxv2i64( %val, i64* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i64_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -11257,8 +11425,8 @@ define void @test_vsuxseg3_nxv2i64_nxv2i32( %val, i64* %base, ; CHECK-LABEL: test_vsuxseg3_nxv2i64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11271,8 +11439,8 @@ define void @test_vsuxseg3_mask_nxv2i64_nxv2i32( %val, i64* %b ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11288,8 +11456,8 @@ define void @test_vsuxseg3_nxv2i64_nxv2i8( %val, i64* %base, < ; CHECK-LABEL: test_vsuxseg3_nxv2i64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11302,8 +11470,8 @@ define void @test_vsuxseg3_mask_nxv2i64_nxv2i8( %val, i64* %ba ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11319,8 +11487,8 @@ define void @test_vsuxseg3_nxv2i64_nxv2i16( %val, i64* %base, ; CHECK-LABEL: test_vsuxseg3_nxv2i64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11333,8 +11501,8 @@ define void @test_vsuxseg3_mask_nxv2i64_nxv2i16( %val, i64* %b ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11350,8 +11518,8 @@ define void @test_vsuxseg3_nxv2i64_nxv2i64( %val, i64* %base, ; CHECK-LABEL: test_vsuxseg3_nxv2i64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; 
CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11364,8 +11532,8 @@ define void @test_vsuxseg3_mask_nxv2i64_nxv2i64( %val, i64* %b ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11381,9 +11549,9 @@ define void @test_vsuxseg4_nxv2i64_nxv2i32( %val, i64* %base, ; CHECK-LABEL: test_vsuxseg4_nxv2i64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11396,9 +11564,9 @@ define void @test_vsuxseg4_mask_nxv2i64_nxv2i32( %val, i64* %b ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11414,9 +11582,9 @@ define void @test_vsuxseg4_nxv2i64_nxv2i8( %val, i64* %base, < ; CHECK-LABEL: test_vsuxseg4_nxv2i64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11429,9 +11597,9 @@ define void @test_vsuxseg4_mask_nxv2i64_nxv2i8( %val, i64* %ba ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11447,9 +11615,9 @@ define void @test_vsuxseg4_nxv2i64_nxv2i16( %val, i64* %base, ; CHECK-LABEL: test_vsuxseg4_nxv2i64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11462,9 +11630,9 @@ define void @test_vsuxseg4_mask_nxv2i64_nxv2i16( %val, i64* %b ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11480,9 +11648,9 @@ define void 
@test_vsuxseg4_nxv2i64_nxv2i64( %val, i64* %base, ; CHECK-LABEL: test_vsuxseg4_nxv2i64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei64.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -11495,9 +11663,9 @@ define void @test_vsuxseg4_mask_nxv2i64_nxv2i64( %val, i64* %b ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -11512,6 +11680,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16f16.nxv16i16(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv16f16_nxv16i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu @@ -11525,6 +11694,7 @@ entry: define void @test_vsuxseg2_mask_nxv16f16_nxv16i16( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv16f16_nxv16i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu @@ -11541,6 +11711,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16f16.nxv16i8(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv16f16_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu @@ -11554,6 +11725,7 @@ entry: define void @test_vsuxseg2_mask_nxv16f16_nxv16i8( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv16f16_nxv16i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu @@ -11570,6 +11742,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16f16.nxv16i32(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv16f16_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16 @@ -11582,6 +11755,7 @@ entry: define void @test_vsuxseg2_mask_nxv16f16_nxv16i32( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv16f16_nxv16i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16, v0.t @@ -11597,6 +11771,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i32(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f64_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def 
$v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -11610,6 +11785,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f64_nxv4i32( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f64_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -11626,6 +11802,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i8(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f64_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -11639,6 +11816,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f64_nxv4i8( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f64_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -11655,6 +11833,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i64(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f64_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -11668,6 +11847,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f64_nxv4i64( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f64_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -11684,6 +11864,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i16(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f64_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -11697,6 +11878,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f64_nxv4i16( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f64_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu @@ -11713,6 +11895,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i64(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -11726,6 +11909,7 @@ entry: define void @test_vsuxseg2_mask_nxv1f64_nxv1i64( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; 
CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -11742,6 +11926,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i32(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -11755,6 +11940,7 @@ entry: define void @test_vsuxseg2_mask_nxv1f64_nxv1i32( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -11771,6 +11957,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i16(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -11784,6 +11971,7 @@ entry: define void @test_vsuxseg2_mask_nxv1f64_nxv1i16( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -11800,6 +11988,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i8(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -11813,6 +12002,7 @@ entry: define void @test_vsuxseg2_mask_nxv1f64_nxv1i8( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu @@ -11830,8 +12020,8 @@ define void @test_vsuxseg3_nxv1f64_nxv1i64( %val, double* % ; CHECK-LABEL: test_vsuxseg3_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11844,8 +12034,8 @@ define void @test_vsuxseg3_mask_nxv1f64_nxv1i64( %val, doub ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11861,8 +12051,8 @@ define void @test_vsuxseg3_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsuxseg3_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9 ; 
CHECK-NEXT: ret @@ -11875,8 +12065,8 @@ define void @test_vsuxseg3_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11892,8 +12082,8 @@ define void @test_vsuxseg3_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsuxseg3_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11906,8 +12096,8 @@ define void @test_vsuxseg3_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11923,8 +12113,8 @@ define void @test_vsuxseg3_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg3_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11937,8 +12127,8 @@ define void @test_vsuxseg3_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11954,9 +12144,9 @@ define void @test_vsuxseg4_nxv1f64_nxv1i64( %val, double* % ; CHECK-LABEL: test_vsuxseg4_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -11969,9 +12159,9 @@ define void @test_vsuxseg4_mask_nxv1f64_nxv1i64( %val, doub ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -11987,9 +12177,9 @@ define void @test_vsuxseg4_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsuxseg4_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, 
ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12002,9 +12192,9 @@ define void @test_vsuxseg4_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12020,9 +12210,9 @@ define void @test_vsuxseg4_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsuxseg4_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12035,9 +12225,9 @@ define void @test_vsuxseg4_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12053,9 +12243,9 @@ define void @test_vsuxseg4_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg4_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12068,9 +12258,9 @@ define void @test_vsuxseg4_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12086,10 +12276,10 @@ define void @test_vsuxseg5_nxv1f64_nxv1i64( %val, double* % ; CHECK-LABEL: test_vsuxseg5_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12102,10 +12292,10 @@ define void @test_vsuxseg5_mask_nxv1f64_nxv1i64( %val, doub ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, 
m1, ta, mu ; CHECK-NEXT: vsuxseg5ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12121,10 +12311,10 @@ define void @test_vsuxseg5_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsuxseg5_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12137,10 +12327,10 @@ define void @test_vsuxseg5_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12156,10 +12346,10 @@ define void @test_vsuxseg5_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsuxseg5_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12172,10 +12362,10 @@ define void @test_vsuxseg5_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12191,10 +12381,10 @@ define void @test_vsuxseg5_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg5_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12207,10 +12397,10 @@ define void @test_vsuxseg5_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12226,11 +12416,11 @@ define void @test_vsuxseg6_nxv1f64_nxv1i64( %val, double* % ; CHECK-LABEL: test_vsuxseg6_nxv1f64_nxv1i64: ; CHECK: # 
%bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12243,11 +12433,11 @@ define void @test_vsuxseg6_mask_nxv1f64_nxv1i64( %val, doub ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12263,11 +12453,11 @@ define void @test_vsuxseg6_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsuxseg6_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12280,11 +12470,11 @@ define void @test_vsuxseg6_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12300,11 +12490,11 @@ define void @test_vsuxseg6_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsuxseg6_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12317,11 +12507,11 @@ define void @test_vsuxseg6_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t ; 
CHECK-NEXT: ret @@ -12337,11 +12527,11 @@ define void @test_vsuxseg6_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg6_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12354,11 +12544,11 @@ define void @test_vsuxseg6_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12374,12 +12564,12 @@ define void @test_vsuxseg7_nxv1f64_nxv1i64( %val, double* % ; CHECK-LABEL: test_vsuxseg7_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12392,12 +12582,12 @@ define void @test_vsuxseg7_mask_nxv1f64_nxv1i64( %val, doub ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12413,12 +12603,12 @@ define void @test_vsuxseg7_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsuxseg7_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12431,12 +12621,12 @@ define void @test_vsuxseg7_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; 
CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12452,12 +12642,12 @@ define void @test_vsuxseg7_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsuxseg7_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12470,12 +12660,12 @@ define void @test_vsuxseg7_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12491,12 +12681,12 @@ define void @test_vsuxseg7_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg7_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12509,12 +12699,12 @@ define void @test_vsuxseg7_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12530,13 +12720,13 @@ define void @test_vsuxseg8_nxv1f64_nxv1i64( %val, double* % ; CHECK-LABEL: test_vsuxseg8_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, 
v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12549,13 +12739,13 @@ define void @test_vsuxseg8_mask_nxv1f64_nxv1i64( %val, doub ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f64_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12571,13 +12761,13 @@ define void @test_vsuxseg8_nxv1f64_nxv1i32( %val, double* % ; CHECK-LABEL: test_vsuxseg8_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12590,13 +12780,13 @@ define void @test_vsuxseg8_mask_nxv1f64_nxv1i32( %val, doub ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f64_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12612,13 +12802,13 @@ define void @test_vsuxseg8_nxv1f64_nxv1i16( %val, double* % ; CHECK-LABEL: test_vsuxseg8_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12631,13 +12821,13 @@ define void @test_vsuxseg8_mask_nxv1f64_nxv1i16( %val, doub ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f64_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: 
vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12653,13 +12843,13 @@ define void @test_vsuxseg8_nxv1f64_nxv1i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg8_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12672,13 +12862,13 @@ define void @test_vsuxseg8_mask_nxv1f64_nxv1i8( %val, doubl ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f64_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12693,6 +12883,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i32(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -12706,6 +12897,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f32_nxv2i32( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -12722,6 +12914,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i8(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -12735,6 +12928,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f32_nxv2i8( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -12751,6 +12945,7 @@ declare void 
@llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i16(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -12764,6 +12959,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f32_nxv2i16( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu @@ -12780,6 +12976,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i64(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f32_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v10 @@ -12792,6 +12989,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f32_nxv2i64( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f32_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v10, v0.t @@ -12808,8 +13006,8 @@ define void @test_vsuxseg3_nxv2f32_nxv2i32( %val, float* %ba ; CHECK-LABEL: test_vsuxseg3_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12822,8 +13020,8 @@ define void @test_vsuxseg3_mask_nxv2f32_nxv2i32( %val, float ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f32_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12839,8 +13037,8 @@ define void @test_vsuxseg3_nxv2f32_nxv2i8( %val, float* %bas ; CHECK-LABEL: test_vsuxseg3_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -12853,8 +13051,8 @@ define void @test_vsuxseg3_mask_nxv2f32_nxv2i8( %val, float* ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f32_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -12870,8 +13068,8 @@ define void @test_vsuxseg3_nxv2f32_nxv2i16( %val, float* %ba ; CHECK-LABEL: test_vsuxseg3_nxv2f32_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu ; CHECK-NEXT: 
vsuxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12884,8 +13082,8 @@ define void @test_vsuxseg3_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsuxseg3_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12900,11 +13098,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv2f32.nxv2i64(,
define void @test_vsuxseg3_nxv2f32_nxv2i64( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vsuxseg3ei64.v v8, (a0), v12
+; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv2f32.nxv2i64( %val, %val, %val, float* %base, %index, i64 %vl)
@@ -12914,11 +13112,11 @@ entry:
define void @test_vsuxseg3_mask_nxv2f32_nxv2i64( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
-; CHECK-NEXT: vmv1r.v v9, v8
-; CHECK-NEXT: vmv2r.v v12, v10
-; CHECK-NEXT: vmv1r.v v10, v8
+; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
-; CHECK-NEXT: vsuxseg3ei64.v v8, (a0), v12, v0.t
+; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv2f32.nxv2i64( %val, %val, %val, float* %base, %index, %mask, i64 %vl)
@@ -12932,9 +13130,9 @@ define void @test_vsuxseg4_nxv2f32_nxv2i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg4_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12947,9 +13145,9 @@ define void @test_vsuxseg4_mask_nxv2f32_nxv2i32( %val, float
; CHECK-LABEL: test_vsuxseg4_mask_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12965,9 +13163,9 @@ define void @test_vsuxseg4_nxv2f32_nxv2i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg4_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -12980,9 +13178,9 @@ define void @test_vsuxseg4_mask_nxv2f32_nxv2i8( %val, float*
; CHECK-LABEL: test_vsuxseg4_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -12998,9 +13196,9 @@ define void @test_vsuxseg4_nxv2f32_nxv2i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg4_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13013,9 +13211,9 @@ define void @test_vsuxseg4_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsuxseg4_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13031,9 +13229,9 @@ define void @test_vsuxseg4_nxv2f32_nxv2i64( %val, float* %ba
; CHECK-LABEL: test_vsuxseg4_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13046,9 +13244,9 @@ define void @test_vsuxseg4_mask_nxv2f32_nxv2i64( %val, float
; CHECK-LABEL: test_vsuxseg4_mask_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg4ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13064,10 +13262,10 @@ define void @test_vsuxseg5_nxv2f32_nxv2i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg5_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13080,10 +13278,10 @@ define void @test_vsuxseg5_mask_nxv2f32_nxv2i32( %val, float
; CHECK-LABEL: test_vsuxseg5_mask_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13099,10 +13297,10 @@ define void @test_vsuxseg5_nxv2f32_nxv2i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg5_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13115,10 +13313,10 @@ define void @test_vsuxseg5_mask_nxv2f32_nxv2i8( %val, float*
; CHECK-LABEL: test_vsuxseg5_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13134,10 +13332,10 @@ define void @test_vsuxseg5_nxv2f32_nxv2i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg5_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13150,10 +13348,10 @@ define void @test_vsuxseg5_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsuxseg5_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13169,10 +13367,10 @@ define void @test_vsuxseg5_nxv2f32_nxv2i64( %val, float* %ba
; CHECK-LABEL: test_vsuxseg5_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13185,10 +13383,10 @@ define void @test_vsuxseg5_mask_nxv2f32_nxv2i64( %val, float
; CHECK-LABEL: test_vsuxseg5_mask_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg5ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13204,11 +13402,11 @@ define void @test_vsuxseg6_nxv2f32_nxv2i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg6_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13221,11 +13419,11 @@ define void @test_vsuxseg6_mask_nxv2f32_nxv2i32( %val, float
; CHECK-LABEL: test_vsuxseg6_mask_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13241,11 +13439,11 @@ define void @test_vsuxseg6_nxv2f32_nxv2i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg6_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13258,11 +13456,11 @@ define void @test_vsuxseg6_mask_nxv2f32_nxv2i8( %val, float*
; CHECK-LABEL: test_vsuxseg6_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13278,11 +13476,11 @@ define void @test_vsuxseg6_nxv2f32_nxv2i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg6_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13295,11 +13493,11 @@ define void @test_vsuxseg6_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsuxseg6_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13315,11 +13513,11 @@ define void @test_vsuxseg6_nxv2f32_nxv2i64( %val, float* %ba
; CHECK-LABEL: test_vsuxseg6_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13332,11 +13530,11 @@ define void @test_vsuxseg6_mask_nxv2f32_nxv2i64( %val, float
; CHECK-LABEL: test_vsuxseg6_mask_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg6ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13352,12 +13550,12 @@ define void @test_vsuxseg7_nxv2f32_nxv2i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg7_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13370,12 +13568,12 @@ define void @test_vsuxseg7_mask_nxv2f32_nxv2i32( %val, float
; CHECK-LABEL: test_vsuxseg7_mask_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13391,12 +13589,12 @@ define void @test_vsuxseg7_nxv2f32_nxv2i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg7_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13409,12 +13607,12 @@ define void @test_vsuxseg7_mask_nxv2f32_nxv2i8( %val, float*
; CHECK-LABEL: test_vsuxseg7_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13430,12 +13628,12 @@ define void @test_vsuxseg7_nxv2f32_nxv2i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg7_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13448,12 +13646,12 @@ define void @test_vsuxseg7_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsuxseg7_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13469,12 +13667,12 @@ define void @test_vsuxseg7_nxv2f32_nxv2i64( %val, float* %ba
; CHECK-LABEL: test_vsuxseg7_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13487,12 +13685,12 @@ define void @test_vsuxseg7_mask_nxv2f32_nxv2i64( %val, float
; CHECK-LABEL: test_vsuxseg7_mask_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg7ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13508,13 +13706,13 @@ define void @test_vsuxseg8_nxv2f32_nxv2i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg8_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13527,13 +13725,13 @@ define void @test_vsuxseg8_mask_nxv2f32_nxv2i32( %val, float
; CHECK-LABEL: test_vsuxseg8_mask_nxv2f32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13549,13 +13747,13 @@ define void @test_vsuxseg8_nxv2f32_nxv2i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg8_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13568,13 +13766,13 @@ define void @test_vsuxseg8_mask_nxv2f32_nxv2i8( %val, float*
; CHECK-LABEL: test_vsuxseg8_mask_nxv2f32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13590,13 +13788,13 @@ define void @test_vsuxseg8_nxv2f32_nxv2i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg8_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13609,13 +13807,13 @@ define void @test_vsuxseg8_mask_nxv2f32_nxv2i16( %val, float
; CHECK-LABEL: test_vsuxseg8_mask_nxv2f32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13631,13 +13829,13 @@ define void @test_vsuxseg8_nxv2f32_nxv2i64( %val, float* %ba
; CHECK-LABEL: test_vsuxseg8_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei64.v v12, (a0), v10
; CHECK-NEXT: ret
@@ -13650,13 +13848,13 @@ define void @test_vsuxseg8_mask_nxv2f32_nxv2i64( %val, float
; CHECK-LABEL: test_vsuxseg8_mask_nxv2f32_nxv2i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
-; CHECK-NEXT: vmv1r.v v18, v8
-; CHECK-NEXT: vmv1r.v v19, v8
+; CHECK-NEXT: vmv1r.v v13, v12
+; CHECK-NEXT: vmv1r.v v14, v12
+; CHECK-NEXT: vmv1r.v v15, v12
+; CHECK-NEXT: vmv1r.v v16, v12
+; CHECK-NEXT: vmv1r.v v17, v12
+; CHECK-NEXT: vmv1r.v v18, v12
+; CHECK-NEXT: vmv1r.v v19, v12
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT: vsuxseg8ei64.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
@@ -13671,6 +13869,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f16.nxv1i64(,
define void @test_vsuxseg2_nxv1f16_nxv1i64( %val, half* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -13684,6 +13883,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f16_nxv1i64( %val, half* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -13700,6 +13900,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f16.nxv1i32(,
define void @test_vsuxseg2_nxv1f16_nxv1i32( %val, half* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -13713,6 +13914,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f16_nxv1i32( %val, half* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -13729,6 +13931,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f16.nxv1i16(,
define void @test_vsuxseg2_nxv1f16_nxv1i16( %val, half* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -13742,6 +13945,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f16_nxv1i16( %val, half* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -13758,6 +13962,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f16.nxv1i8(,
define void @test_vsuxseg2_nxv1f16_nxv1i8( %val, half* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -13771,6 +13976,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f16_nxv1i8( %val, half* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
@@ -13788,8 +13994,8 @@ define void @test_vsuxseg3_nxv1f16_nxv1i64( %val, half* %base
; CHECK-LABEL: test_vsuxseg3_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13802,8 +14008,8 @@ define void @test_vsuxseg3_mask_nxv1f16_nxv1i64( %val, half*
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13819,8 +14025,8 @@ define void @test_vsuxseg3_nxv1f16_nxv1i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg3_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13833,8 +14039,8 @@ define void @test_vsuxseg3_mask_nxv1f16_nxv1i32( %val, half*
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13850,8 +14056,8 @@ define void @test_vsuxseg3_nxv1f16_nxv1i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg3_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13864,8 +14070,8 @@ define void @test_vsuxseg3_mask_nxv1f16_nxv1i16( %val, half*
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13881,8 +14087,8 @@ define void @test_vsuxseg3_nxv1f16_nxv1i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg3_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13895,8 +14101,8 @@ define void @test_vsuxseg3_mask_nxv1f16_nxv1i8( %val, half* %
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13912,9 +14118,9 @@ define void @test_vsuxseg4_nxv1f16_nxv1i64( %val, half* %base
; CHECK-LABEL: test_vsuxseg4_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13927,9 +14133,9 @@ define void @test_vsuxseg4_mask_nxv1f16_nxv1i64( %val, half*
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13945,9 +14151,9 @@ define void @test_vsuxseg4_nxv1f16_nxv1i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg4_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13960,9 +14166,9 @@ define void @test_vsuxseg4_mask_nxv1f16_nxv1i32( %val, half*
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -13978,9 +14184,9 @@ define void @test_vsuxseg4_nxv1f16_nxv1i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg4_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -13993,9 +14199,9 @@ define void @test_vsuxseg4_mask_nxv1f16_nxv1i16( %val, half*
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14011,9 +14217,9 @@ define void @test_vsuxseg4_nxv1f16_nxv1i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg4_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14026,9 +14232,9 @@ define void @test_vsuxseg4_mask_nxv1f16_nxv1i8( %val, half* %
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14044,10 +14250,10 @@ define void @test_vsuxseg5_nxv1f16_nxv1i64( %val, half* %base
; CHECK-LABEL: test_vsuxseg5_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14060,10 +14266,10 @@ define void @test_vsuxseg5_mask_nxv1f16_nxv1i64( %val, half*
; CHECK-LABEL: test_vsuxseg5_mask_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14079,10 +14285,10 @@ define void @test_vsuxseg5_nxv1f16_nxv1i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg5_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14095,10 +14301,10 @@ define void @test_vsuxseg5_mask_nxv1f16_nxv1i32( %val, half*
; CHECK-LABEL: test_vsuxseg5_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14114,10 +14320,10 @@ define void @test_vsuxseg5_nxv1f16_nxv1i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg5_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14130,10 +14336,10 @@ define void @test_vsuxseg5_mask_nxv1f16_nxv1i16( %val, half*
; CHECK-LABEL: test_vsuxseg5_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14149,10 +14355,10 @@ define void @test_vsuxseg5_nxv1f16_nxv1i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg5_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14165,10 +14371,10 @@ define void @test_vsuxseg5_mask_nxv1f16_nxv1i8( %val, half* %
; CHECK-LABEL: test_vsuxseg5_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14184,11 +14390,11 @@ define void @test_vsuxseg6_nxv1f16_nxv1i64( %val, half* %base
; CHECK-LABEL: test_vsuxseg6_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14201,11 +14407,11 @@ define void @test_vsuxseg6_mask_nxv1f16_nxv1i64( %val, half*
; CHECK-LABEL: test_vsuxseg6_mask_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14221,11 +14427,11 @@ define void @test_vsuxseg6_nxv1f16_nxv1i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg6_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14238,11 +14444,11 @@ define void @test_vsuxseg6_mask_nxv1f16_nxv1i32( %val, half*
; CHECK-LABEL: test_vsuxseg6_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14258,11 +14464,11 @@ define void @test_vsuxseg6_nxv1f16_nxv1i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg6_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14275,11 +14481,11 @@ define void @test_vsuxseg6_mask_nxv1f16_nxv1i16( %val, half*
; CHECK-LABEL: test_vsuxseg6_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14295,11 +14501,11 @@ define void @test_vsuxseg6_nxv1f16_nxv1i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg6_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14312,11 +14518,11 @@ define void @test_vsuxseg6_mask_nxv1f16_nxv1i8( %val, half* %
; CHECK-LABEL: test_vsuxseg6_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14332,12 +14538,12 @@ define void @test_vsuxseg7_nxv1f16_nxv1i64( %val, half* %base
; CHECK-LABEL: test_vsuxseg7_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14350,12 +14556,12 @@ define void @test_vsuxseg7_mask_nxv1f16_nxv1i64( %val, half*
; CHECK-LABEL: test_vsuxseg7_mask_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14371,12 +14577,12 @@ define void @test_vsuxseg7_nxv1f16_nxv1i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg7_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14389,12 +14595,12 @@ define void @test_vsuxseg7_mask_nxv1f16_nxv1i32( %val, half*
; CHECK-LABEL: test_vsuxseg7_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14410,12 +14616,12 @@ define void @test_vsuxseg7_nxv1f16_nxv1i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg7_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14428,12 +14634,12 @@ define void @test_vsuxseg7_mask_nxv1f16_nxv1i16( %val, half*
; CHECK-LABEL: test_vsuxseg7_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14449,12 +14655,12 @@ define void @test_vsuxseg7_nxv1f16_nxv1i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg7_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14467,12 +14673,12 @@ define void @test_vsuxseg7_mask_nxv1f16_nxv1i8( %val, half* %
; CHECK-LABEL: test_vsuxseg7_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14488,13 +14694,13 @@ define void @test_vsuxseg8_nxv1f16_nxv1i64( %val, half* %base
; CHECK-LABEL: test_vsuxseg8_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14507,13 +14713,13 @@ define void @test_vsuxseg8_mask_nxv1f16_nxv1i64( %val, half*
; CHECK-LABEL: test_vsuxseg8_mask_nxv1f16_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14529,13 +14735,13 @@ define void @test_vsuxseg8_nxv1f16_nxv1i32( %val, half* %base
; CHECK-LABEL: test_vsuxseg8_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14548,13 +14754,13 @@ define void @test_vsuxseg8_mask_nxv1f16_nxv1i32( %val, half*
; CHECK-LABEL: test_vsuxseg8_mask_nxv1f16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14570,13 +14776,13 @@ define void @test_vsuxseg8_nxv1f16_nxv1i16( %val, half* %base
; CHECK-LABEL: test_vsuxseg8_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14589,13 +14795,13 @@ define void @test_vsuxseg8_mask_nxv1f16_nxv1i16( %val, half*
; CHECK-LABEL: test_vsuxseg8_mask_nxv1f16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14611,13 +14817,13 @@ define void @test_vsuxseg8_nxv1f16_nxv1i8( %val, half* %base,
; CHECK-LABEL: test_vsuxseg8_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14630,13 +14836,13 @@ define void @test_vsuxseg8_mask_nxv1f16_nxv1i8( %val, half* %
; CHECK-LABEL: test_vsuxseg8_mask_nxv1f16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
-; CHECK-NEXT: vmv1r.v v15, v8
-; CHECK-NEXT: vmv1r.v v16, v8
-; CHECK-NEXT: vmv1r.v v17, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
+; CHECK-NEXT: vmv1r.v v15, v10
+; CHECK-NEXT: vmv1r.v v16, v10
+; CHECK-NEXT: vmv1r.v v17, v10
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14651,6 +14857,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i64(,
define void @test_vsuxseg2_nxv1f32_nxv1i64( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f32_nxv1i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -14664,6 +14871,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f32_nxv1i64( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f32_nxv1i64:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -14680,6 +14888,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i32(,
define void @test_vsuxseg2_nxv1f32_nxv1i32( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -14693,6 +14902,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f32_nxv1i32( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -14709,6 +14919,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i16(,
define void @test_vsuxseg2_nxv1f32_nxv1i16( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -14722,6 +14933,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f32_nxv1i16( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -14738,6 +14950,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i8(,
define void @test_vsuxseg2_nxv1f32_nxv1i8( %val, float* %base, %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -14751,6 +14964,7 @@ entry:
define void @test_vsuxseg2_mask_nxv1f32_nxv1i8( %val, float* %base, %index, %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
+; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
@@ -14768,8 +14982,8 @@ define void @test_vsuxseg3_nxv1f32_nxv1i64( %val, float* %ba
; CHECK-LABEL: test_vsuxseg3_nxv1f32_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14782,8 +14996,8 @@ define void @test_vsuxseg3_mask_nxv1f32_nxv1i64( %val, float
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f32_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14799,8 +15013,8 @@ define void @test_vsuxseg3_nxv1f32_nxv1i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg3_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14813,8 +15027,8 @@ define void @test_vsuxseg3_mask_nxv1f32_nxv1i32( %val, float
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14830,8 +15044,8 @@ define void @test_vsuxseg3_nxv1f32_nxv1i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg3_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14844,8 +15058,8 @@ define void @test_vsuxseg3_mask_nxv1f32_nxv1i16( %val, float
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14861,8 +15075,8 @@ define void @test_vsuxseg3_nxv1f32_nxv1i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg3_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14875,8 +15089,8 @@ define void @test_vsuxseg3_mask_nxv1f32_nxv1i8( %val, float*
; CHECK-LABEL: test_vsuxseg3_mask_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14892,9 +15106,9 @@ define void @test_vsuxseg4_nxv1f32_nxv1i64( %val, float* %ba
; CHECK-LABEL: test_vsuxseg4_nxv1f32_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14907,9 +15121,9 @@ define void @test_vsuxseg4_mask_nxv1f32_nxv1i64( %val, float
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f32_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14925,9 +15139,9 @@ define void @test_vsuxseg4_nxv1f32_nxv1i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg4_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14940,9 +15154,9 @@ define void @test_vsuxseg4_mask_nxv1f32_nxv1i32( %val, float
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14958,9 +15172,9 @@ define void @test_vsuxseg4_nxv1f32_nxv1i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg4_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -14973,9 +15187,9 @@ define void @test_vsuxseg4_mask_nxv1f32_nxv1i16( %val, float
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -14991,9 +15205,9 @@ define void @test_vsuxseg4_nxv1f32_nxv1i8( %val, float* %bas
; CHECK-LABEL: test_vsuxseg4_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -15006,9 +15220,9 @@ define void @test_vsuxseg4_mask_nxv1f32_nxv1i8( %val, float*
; CHECK-LABEL: test_vsuxseg4_mask_nxv1f32_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -15024,10 +15238,10 @@ define void @test_vsuxseg5_nxv1f32_nxv1i64( %val, float* %ba
; CHECK-LABEL: test_vsuxseg5_nxv1f32_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei64.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -15040,10 +15254,10 @@ define void @test_vsuxseg5_mask_nxv1f32_nxv1i64( %val, float
; CHECK-LABEL: test_vsuxseg5_mask_nxv1f32_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei64.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -15059,10 +15273,10 @@ define void @test_vsuxseg5_nxv1f32_nxv1i32( %val, float* %ba
; CHECK-LABEL: test_vsuxseg5_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -15075,10 +15289,10 @@ define void @test_vsuxseg5_mask_nxv1f32_nxv1i32( %val, float
; CHECK-LABEL: test_vsuxseg5_mask_nxv1f32_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
@@ -15094,10 +15308,10 @@ define void @test_vsuxseg5_nxv1f32_nxv1i16( %val, float* %ba
; CHECK-LABEL: test_vsuxseg5_nxv1f32_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
-; CHECK-NEXT: vmv1r.v v11, v8
-; CHECK-NEXT: vmv1r.v v12, v8
-; CHECK-NEXT: vmv1r.v v13, v8
-; CHECK-NEXT: vmv1r.v v14, v8
+; CHECK-NEXT: vmv1r.v v11, v10
+; CHECK-NEXT: vmv1r.v v12, v10
+; CHECK-NEXT: vmv1r.v v13, v10
+; CHECK-NEXT: vmv1r.v v14, v10
; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
@@ -15110,10 +15324,10 @@ define void
@test_vsuxseg5_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15129,10 +15343,10 @@ define void @test_vsuxseg5_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsuxseg5_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15145,10 +15359,10 @@ define void @test_vsuxseg5_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15164,11 +15378,11 @@ define void @test_vsuxseg6_nxv1f32_nxv1i64( %val, float* %ba ; CHECK-LABEL: test_vsuxseg6_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15181,11 +15395,11 @@ define void @test_vsuxseg6_mask_nxv1f32_nxv1i64( %val, float ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15201,11 +15415,11 @@ define void @test_vsuxseg6_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsuxseg6_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15218,11 +15432,11 @@ define void 
@test_vsuxseg6_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15238,11 +15452,11 @@ define void @test_vsuxseg6_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsuxseg6_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15255,11 +15469,11 @@ define void @test_vsuxseg6_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15275,11 +15489,11 @@ define void @test_vsuxseg6_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsuxseg6_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15292,11 +15506,11 @@ define void @test_vsuxseg6_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15312,12 +15526,12 @@ define void @test_vsuxseg7_nxv1f32_nxv1i64( %val, float* %ba ; CHECK-LABEL: test_vsuxseg7_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: 
vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15330,12 +15544,12 @@ define void @test_vsuxseg7_mask_nxv1f32_nxv1i64( %val, float ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15351,12 +15565,12 @@ define void @test_vsuxseg7_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsuxseg7_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15369,12 +15583,12 @@ define void @test_vsuxseg7_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15390,12 +15604,12 @@ define void @test_vsuxseg7_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsuxseg7_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15408,12 +15622,12 @@ define void @test_vsuxseg7_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; 
CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15429,12 +15643,12 @@ define void @test_vsuxseg7_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsuxseg7_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15447,12 +15661,12 @@ define void @test_vsuxseg7_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15468,13 +15682,13 @@ define void @test_vsuxseg8_nxv1f32_nxv1i64( %val, float* %ba ; CHECK-LABEL: test_vsuxseg8_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15487,13 +15701,13 @@ define void @test_vsuxseg8_mask_nxv1f32_nxv1i64( %val, float ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f32_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15509,13 +15723,13 @@ define void @test_vsuxseg8_nxv1f32_nxv1i32( %val, float* %ba ; CHECK-LABEL: test_vsuxseg8_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; 
CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15528,13 +15742,13 @@ define void @test_vsuxseg8_mask_nxv1f32_nxv1i32( %val, float ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f32_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15550,13 +15764,13 @@ define void @test_vsuxseg8_nxv1f32_nxv1i16( %val, float* %ba ; CHECK-LABEL: test_vsuxseg8_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15569,13 +15783,13 @@ define void @test_vsuxseg8_mask_nxv1f32_nxv1i16( %val, float ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f32_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15591,13 +15805,13 @@ define void @test_vsuxseg8_nxv1f32_nxv1i8( %val, float* %bas ; CHECK-LABEL: test_vsuxseg8_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -15610,13 +15824,13 @@ define void @test_vsuxseg8_mask_nxv1f32_nxv1i8( %val, float* ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f32_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: 
vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -15631,6 +15845,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f16.nxv8i16(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -15644,6 +15859,7 @@ entry: define void @test_vsuxseg2_mask_nxv8f16_nxv8i16( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -15660,6 +15876,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f16.nxv8i8(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -15673,6 +15890,7 @@ entry: define void @test_vsuxseg2_mask_nxv8f16_nxv8i8( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -15689,6 +15907,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f16.nxv8i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv8f16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v16 @@ -15701,6 +15920,7 @@ entry: define void @test_vsuxseg2_mask_nxv8f16_nxv8i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v16, v0.t @@ -15716,6 +15936,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f16.nxv8i32(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12 @@ -15728,6 +15949,7 @@ entry: define void @test_vsuxseg2_mask_nxv8f16_nxv8i32( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12, v0.t @@ -15744,8 +15966,8 @@ define void @test_vsuxseg3_nxv8f16_nxv8i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg3_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v 
v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -15758,8 +15980,8 @@ define void @test_vsuxseg3_mask_nxv8f16_nxv8i16( %val, half* ; CHECK-LABEL: test_vsuxseg3_mask_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -15775,8 +15997,8 @@ define void @test_vsuxseg3_nxv8f16_nxv8i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg3_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -15789,8 +16011,8 @@ define void @test_vsuxseg3_mask_nxv8f16_nxv8i8( %val, half* % ; CHECK-LABEL: test_vsuxseg3_mask_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -15805,6 +16027,7 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv8f16.nxv8i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_nxv8f16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -15818,6 +16041,7 @@ entry: define void @test_vsuxseg3_mask_nxv8f16_nxv8i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_mask_nxv8f16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu @@ -15834,11 +16058,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv8f16.nxv8i32(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16 +; CHECK-NEXT: vsuxseg3ei32.v v16, (a0), v12 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg3.nxv8f16.nxv8i32( %val, %val, %val, half* %base, %index, i64 %vl) @@ -15848,11 +16072,11 @@ entry: define void @test_vsuxseg3_mask_nxv8f16_nxv8i32( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_mask_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v10, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu -; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16, v0.t +; CHECK-NEXT: vsuxseg3ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg3.mask.nxv8f16.nxv8i32( 
%val, %val, %val, half* %base, %index, %mask, i64 %vl) @@ -15866,9 +16090,9 @@ define void @test_vsuxseg4_nxv8f16_nxv8i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg4_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -15881,9 +16105,9 @@ define void @test_vsuxseg4_mask_nxv8f16_nxv8i16( %val, half* ; CHECK-LABEL: test_vsuxseg4_mask_nxv8f16_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -15899,9 +16123,9 @@ define void @test_vsuxseg4_nxv8f16_nxv8i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg4_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -15914,9 +16138,9 @@ define void @test_vsuxseg4_mask_nxv8f16_nxv8i8( %val, half* % ; CHECK-LABEL: test_vsuxseg4_mask_nxv8f16_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -15931,6 +16155,7 @@ declare void @llvm.riscv.vsuxseg4.mask.nxv8f16.nxv8i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg4_nxv8f16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -15945,6 +16170,7 @@ entry: define void @test_vsuxseg4_mask_nxv8f16_nxv8i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg4_mask_nxv8f16_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vmv2r.v v12, v8 ; CHECK-NEXT: vmv2r.v v14, v8 @@ -15963,9 +16189,9 @@ define void @test_vsuxseg4_nxv8f16_nxv8i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg4_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 -; CHECK-NEXT: vmv2r.v v20, v8 -; CHECK-NEXT: vmv2r.v v22, v8 +; CHECK-NEXT: vmv2r.v v18, v16 +; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -15978,9 +16204,9 @@ define void @test_vsuxseg4_mask_nxv8f16_nxv8i32( %val, half* ; CHECK-LABEL: test_vsuxseg4_mask_nxv8f16_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 -; CHECK-NEXT: vmv2r.v v20, v8 -; CHECK-NEXT: vmv2r.v v22, v8 +; CHECK-NEXT: vmv2r.v v18, v16 
+; CHECK-NEXT: vmv2r.v v20, v16 +; CHECK-NEXT: vmv2r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -15995,6 +16221,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i16(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv8f32_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu @@ -16008,6 +16235,7 @@ entry: define void @test_vsuxseg2_mask_nxv8f32_nxv8i16( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f32_nxv8i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu @@ -16024,6 +16252,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i8(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv8f32_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu @@ -16037,6 +16266,7 @@ entry: define void @test_vsuxseg2_mask_nxv8f32_nxv8i8( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f32_nxv8i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu @@ -16053,6 +16283,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i64(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv8f32_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v16 @@ -16065,6 +16296,7 @@ entry: define void @test_vsuxseg2_mask_nxv8f32_nxv8i64( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f32_nxv8i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v16, v0.t @@ -16080,6 +16312,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i32(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv8f32_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu @@ -16093,6 +16326,7 @@ entry: define void @test_vsuxseg2_mask_nxv8f32_nxv8i32( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f32_nxv8i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4 ; CHECK-NEXT: vmv4r.v v16, v12 ; CHECK-NEXT: vmv4r.v v12, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu @@ -16109,6 +16343,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i32(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; 
CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16122,6 +16357,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f64_nxv2i32( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16138,6 +16374,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i8(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16151,6 +16388,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f64_nxv2i8( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16167,6 +16405,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i16(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16180,6 +16419,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f64_nxv2i16( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16196,6 +16436,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i64(, %val, double* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16209,6 +16450,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f64_nxv2i64( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu @@ -16226,8 +16468,8 @@ define void @test_vsuxseg3_nxv2f64_nxv2i32( %val, double* % ; CHECK-LABEL: test_vsuxseg3_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16240,8 +16482,8 @@ define void @test_vsuxseg3_mask_nxv2f64_nxv2i32( %val, doub ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; 
CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16257,8 +16499,8 @@ define void @test_vsuxseg3_nxv2f64_nxv2i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg3_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16271,8 +16513,8 @@ define void @test_vsuxseg3_mask_nxv2f64_nxv2i8( %val, doubl ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16288,8 +16530,8 @@ define void @test_vsuxseg3_nxv2f64_nxv2i16( %val, double* % ; CHECK-LABEL: test_vsuxseg3_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16302,8 +16544,8 @@ define void @test_vsuxseg3_mask_nxv2f64_nxv2i16( %val, doub ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16319,8 +16561,8 @@ define void @test_vsuxseg3_nxv2f64_nxv2i64( %val, double* % ; CHECK-LABEL: test_vsuxseg3_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16333,8 +16575,8 @@ define void @test_vsuxseg3_mask_nxv2f64_nxv2i64( %val, doub ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16350,9 +16592,9 @@ define void @test_vsuxseg4_nxv2f64_nxv2i32( %val, double* % ; CHECK-LABEL: test_vsuxseg4_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16365,9 +16607,9 @@ define void @test_vsuxseg4_mask_nxv2f64_nxv2i32( %val, doub ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f64_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; 
CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16383,9 +16625,9 @@ define void @test_vsuxseg4_nxv2f64_nxv2i8( %val, double* %b ; CHECK-LABEL: test_vsuxseg4_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16397,10 +16639,10 @@ entry: define void @test_vsuxseg4_mask_nxv2f64_nxv2i8( %val, double* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f64_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v12, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16416,9 +16658,9 @@ define void @test_vsuxseg4_nxv2f64_nxv2i16( %val, double* % ; CHECK-LABEL: test_vsuxseg4_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16431,9 +16673,9 @@ define void @test_vsuxseg4_mask_nxv2f64_nxv2i16( %val, doub ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f64_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16449,9 +16691,9 @@ define void @test_vsuxseg4_nxv2f64_nxv2i64( %val, double* % ; CHECK-LABEL: test_vsuxseg4_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei64.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16464,9 +16706,9 @@ define void @test_vsuxseg4_mask_nxv2f64_nxv2i64( %val, doub ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f64_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv2r.v v12, v8 -; CHECK-NEXT: vmv2r.v v14, v8 -; CHECK-NEXT: vmv2r.v v16, v8 -; CHECK-NEXT: vmv2r.v v18, v8 +; CHECK-NEXT: vmv2r.v v14, v12 +; CHECK-NEXT: vmv2r.v v16, v12 +; CHECK-NEXT: vmv2r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu ; CHECK-NEXT: vsuxseg4ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16481,6 +16723,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f16.nxv4i32(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10 @@ -16493,6 +16736,7 @@ 
entry: define void @test_vsuxseg2_mask_nxv4f16_nxv4i32( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t @@ -16508,6 +16752,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f16.nxv4i8(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -16521,6 +16766,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f16_nxv4i8( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -16537,6 +16783,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f16.nxv4i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v12 @@ -16549,6 +16796,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f16_nxv4i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v12, v0.t @@ -16564,6 +16812,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f16.nxv4i16(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -16577,6 +16826,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f16_nxv4i16( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -16593,11 +16843,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv4f16.nxv4i32(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12 +; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg3.nxv4f16.nxv4i32( %val, %val, %val, half* %base, %index, i64 %vl) @@ -16607,11 +16857,11 @@ entry: define void @test_vsuxseg3_mask_nxv4f16_nxv4i32( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v 
v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12, v0.t +; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg3.mask.nxv4f16.nxv4i32( %val, %val, %val, half* %base, %index, %mask, i64 %vl) @@ -16625,8 +16875,8 @@ define void @test_vsuxseg3_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg3_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -16639,8 +16889,8 @@ define void @test_vsuxseg3_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -16655,6 +16905,7 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv4f16.nxv4i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -16668,6 +16919,7 @@ entry: define void @test_vsuxseg3_mask_nxv4f16_nxv4i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu @@ -16685,8 +16937,8 @@ define void @test_vsuxseg3_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg3_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -16699,8 +16951,8 @@ define void @test_vsuxseg3_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -16716,9 +16968,9 @@ define void @test_vsuxseg4_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg4_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16731,9 +16983,9 @@ define void @test_vsuxseg4_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: 
vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16749,9 +17001,9 @@ define void @test_vsuxseg4_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg4_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -16764,9 +17016,9 @@ define void @test_vsuxseg4_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -16781,6 +17033,7 @@ declare void @llvm.riscv.vsuxseg4.mask.nxv4f16.nxv4i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg4_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -16795,6 +17048,7 @@ entry: define void @test_vsuxseg4_mask_nxv4f16_nxv4i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9_v10_v11 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vmv1r.v v10, v8 ; CHECK-NEXT: vmv1r.v v11, v8 @@ -16813,9 +17067,9 @@ define void @test_vsuxseg4_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg4_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -16828,9 +17082,9 @@ define void @test_vsuxseg4_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -16846,10 +17100,10 @@ define void @test_vsuxseg5_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg5_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -16862,10 +17116,10 @@ define void @test_vsuxseg5_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: 
test_vsuxseg5_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -16881,10 +17135,10 @@ define void @test_vsuxseg5_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg5_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -16897,10 +17151,10 @@ define void @test_vsuxseg5_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsuxseg5_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -16915,13 +17169,13 @@ declare void @llvm.riscv.vsuxseg5.mask.nxv4f16.nxv4i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg5_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsuxseg5ei64.v v8, (a0), v16 +; CHECK-NEXT: vsuxseg5ei64.v v16, (a0), v12 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg5.nxv4f16.nxv4i64( %val, %val, %val, %val, %val, half* %base, %index, i64 %vl) @@ -16931,13 +17185,13 @@ entry: define void @test_vsuxseg5_mask_nxv4f16_nxv4i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg5_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv4r.v v16, v12 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu -; CHECK-NEXT: vsuxseg5ei64.v v8, (a0), v16, v0.t +; CHECK-NEXT: vsuxseg5ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg5.mask.nxv4f16.nxv4i64( %val, %val, %val, %val, %val, half* %base, %index, %mask, i64 %vl) @@ -16951,10 +17205,10 @@ define void @test_vsuxseg5_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg5_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; 
CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -16967,10 +17221,10 @@ define void @test_vsuxseg5_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsuxseg5_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -16986,11 +17240,11 @@ define void @test_vsuxseg6_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg6_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -17003,11 +17257,11 @@ define void @test_vsuxseg6_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: test_vsuxseg6_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -17023,11 +17277,11 @@ define void @test_vsuxseg6_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg6_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17040,11 +17294,11 @@ define void @test_vsuxseg6_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsuxseg6_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17060,11 +17314,11 @@ define void @test_vsuxseg6_nxv4f16_nxv4i64( %val, half* %base ; CHECK-LABEL: test_vsuxseg6_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, 
v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei64.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -17077,11 +17331,11 @@ define void @test_vsuxseg6_mask_nxv4f16_nxv4i64( %val, half* ; CHECK-LABEL: test_vsuxseg6_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -17097,11 +17351,11 @@ define void @test_vsuxseg6_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg6_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17114,11 +17368,11 @@ define void @test_vsuxseg6_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsuxseg6_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17134,12 +17388,12 @@ define void @test_vsuxseg7_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg7_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -17152,12 +17406,12 @@ define void @test_vsuxseg7_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: test_vsuxseg7_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -17173,12 +17427,12 @@ define void 
@test_vsuxseg7_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg7_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17191,12 +17445,12 @@ define void @test_vsuxseg7_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsuxseg7_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17212,12 +17466,12 @@ define void @test_vsuxseg7_nxv4f16_nxv4i64( %val, half* %base ; CHECK-LABEL: test_vsuxseg7_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei64.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -17230,12 +17484,12 @@ define void @test_vsuxseg7_mask_nxv4f16_nxv4i64( %val, half* ; CHECK-LABEL: test_vsuxseg7_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -17251,12 +17505,12 @@ define void @test_vsuxseg7_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg7_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17269,12 +17523,12 @@ define void @test_vsuxseg7_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsuxseg7_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: 
vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17290,13 +17544,13 @@ define void @test_vsuxseg8_nxv4f16_nxv4i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg8_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -17309,13 +17563,13 @@ define void @test_vsuxseg8_mask_nxv4f16_nxv4i32( %val, half* ; CHECK-LABEL: test_vsuxseg8_mask_nxv4f16_nxv4i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -17331,13 +17585,13 @@ define void @test_vsuxseg8_nxv4f16_nxv4i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg8_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17350,13 +17604,13 @@ define void @test_vsuxseg8_mask_nxv4f16_nxv4i8( %val, half* % ; CHECK-LABEL: test_vsuxseg8_mask_nxv4f16_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17372,13 +17626,13 @@ define void 
@test_vsuxseg8_nxv4f16_nxv4i64( %val, half* %base ; CHECK-LABEL: test_vsuxseg8_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v16, (a0), v12 ; CHECK-NEXT: ret @@ -17391,13 +17645,13 @@ define void @test_vsuxseg8_mask_nxv4f16_nxv4i64( %val, half* ; CHECK-LABEL: test_vsuxseg8_mask_nxv4f16_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 -; CHECK-NEXT: vmv1r.v v20, v8 -; CHECK-NEXT: vmv1r.v v21, v8 -; CHECK-NEXT: vmv1r.v v22, v8 -; CHECK-NEXT: vmv1r.v v23, v8 +; CHECK-NEXT: vmv1r.v v17, v16 +; CHECK-NEXT: vmv1r.v v18, v16 +; CHECK-NEXT: vmv1r.v v19, v16 +; CHECK-NEXT: vmv1r.v v20, v16 +; CHECK-NEXT: vmv1r.v v21, v16 +; CHECK-NEXT: vmv1r.v v22, v16 +; CHECK-NEXT: vmv1r.v v23, v16 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v16, (a0), v12, v0.t ; CHECK-NEXT: ret @@ -17413,13 +17667,13 @@ define void @test_vsuxseg8_nxv4f16_nxv4i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg8_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17432,13 +17686,13 @@ define void @test_vsuxseg8_mask_nxv4f16_nxv4i16( %val, half* ; CHECK-LABEL: test_vsuxseg8_mask_nxv4f16_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17453,6 +17707,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f16.nxv2i32(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -17466,6 +17721,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f16_nxv2i32( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 
killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -17482,6 +17738,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f16.nxv2i8(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -17495,6 +17752,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f16_nxv2i8( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -17511,6 +17769,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f16.nxv2i16(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -17524,6 +17783,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f16_nxv2i16( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9 ; CHECK-NEXT: vmv1r.v v10, v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu @@ -17540,6 +17800,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv2f16.nxv2i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v10 @@ -17552,6 +17813,7 @@ entry: define void @test_vsuxseg2_mask_nxv2f16_nxv2i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8 killed $v8 def $v8_v9 ; CHECK-NEXT: vmv1r.v v9, v8 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v10, v0.t @@ -17568,8 +17830,8 @@ define void @test_vsuxseg3_nxv2f16_nxv2i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg3_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17582,8 +17844,8 @@ define void @test_vsuxseg3_mask_nxv2f16_nxv2i32( %val, half* ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17599,8 +17861,8 @@ define void @test_vsuxseg3_nxv2f16_nxv2i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg3_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; 
CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17613,8 +17875,8 @@ define void @test_vsuxseg3_mask_nxv2f16_nxv2i8( %val, half* % ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17630,8 +17892,8 @@ define void @test_vsuxseg3_nxv2f16_nxv2i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg3_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17644,8 +17906,8 @@ define void @test_vsuxseg3_mask_nxv2f16_nxv2i16( %val, half* ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17660,11 +17922,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv2f16.nxv2i64(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vsuxseg3ei64.v v8, (a0), v12 +; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10 ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg3.nxv2f16.nxv2i64( %val, %val, %val, half* %base, %index, i64 %vl) @@ -17674,11 +17936,11 @@ entry: define void @test_vsuxseg3_mask_nxv2f16_nxv2i64( %val, half* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v9, v8 -; CHECK-NEXT: vmv2r.v v12, v10 -; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v12, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu -; CHECK-NEXT: vsuxseg3ei64.v v8, (a0), v12, v0.t +; CHECK-NEXT: vsuxseg3ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret entry: tail call void @llvm.riscv.vsuxseg3.mask.nxv2f16.nxv2i64( %val, %val, %val, half* %base, %index, %mask, i64 %vl) @@ -17692,9 +17954,9 @@ define void @test_vsuxseg4_nxv2f16_nxv2i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg4_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17707,9 +17969,9 @@ define void @test_vsuxseg4_mask_nxv2f16_nxv2i32( %val, half* ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: 
vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17724,10 +17986,10 @@ declare void @llvm.riscv.vsuxseg4.mask.nxv2f16.nxv2i8(, %val, half* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg4_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry -; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v10, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17740,9 +18002,9 @@ define void @test_vsuxseg4_mask_nxv2f16_nxv2i8( %val, half* % ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17758,9 +18020,9 @@ define void @test_vsuxseg4_nxv2f16_nxv2i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg4_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17773,9 +18035,9 @@ define void @test_vsuxseg4_mask_nxv2f16_nxv2i16( %val, half* ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17791,9 +18053,9 @@ define void @test_vsuxseg4_nxv2f16_nxv2i64( %val, half* %base ; CHECK-LABEL: test_vsuxseg4_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei64.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -17806,9 +18068,9 @@ define void @test_vsuxseg4_mask_nxv2f16_nxv2i64( %val, half* ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg4ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -17824,10 +18086,10 @@ define void @test_vsuxseg5_nxv2f16_nxv2i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg5_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: 
vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17840,10 +18102,10 @@ define void @test_vsuxseg5_mask_nxv2f16_nxv2i32( %val, half* ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17859,10 +18121,10 @@ define void @test_vsuxseg5_nxv2f16_nxv2i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg5_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17875,10 +18137,10 @@ define void @test_vsuxseg5_mask_nxv2f16_nxv2i8( %val, half* % ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17894,10 +18156,10 @@ define void @test_vsuxseg5_nxv2f16_nxv2i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg5_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17910,10 +18172,10 @@ define void @test_vsuxseg5_mask_nxv2f16_nxv2i16( %val, half* ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -17929,10 +18191,10 @@ define void @test_vsuxseg5_nxv2f16_nxv2i64( %val, half* %base ; CHECK-LABEL: test_vsuxseg5_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei64.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -17945,10 
+18207,10 @@ define void @test_vsuxseg5_mask_nxv2f16_nxv2i64( %val, half* ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg5ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -17964,11 +18226,11 @@ define void @test_vsuxseg6_nxv2f16_nxv2i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg6_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -17981,11 +18243,11 @@ define void @test_vsuxseg6_mask_nxv2f16_nxv2i32( %val, half* ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -18001,11 +18263,11 @@ define void @test_vsuxseg6_nxv2f16_nxv2i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg6_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -18018,11 +18280,11 @@ define void @test_vsuxseg6_mask_nxv2f16_nxv2i8( %val, half* % ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -18038,11 +18300,11 @@ define void @test_vsuxseg6_nxv2f16_nxv2i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg6_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: 
vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -18055,11 +18317,11 @@ define void @test_vsuxseg6_mask_nxv2f16_nxv2i16( %val, half* ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -18075,11 +18337,11 @@ define void @test_vsuxseg6_nxv2f16_nxv2i64( %val, half* %base ; CHECK-LABEL: test_vsuxseg6_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei64.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -18092,11 +18354,11 @@ define void @test_vsuxseg6_mask_nxv2f16_nxv2i64( %val, half* ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg6ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -18112,12 +18374,12 @@ define void @test_vsuxseg7_nxv2f16_nxv2i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg7_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -18130,12 +18392,12 @@ define void @test_vsuxseg7_mask_nxv2f16_nxv2i32( %val, half* ; CHECK-LABEL: test_vsuxseg7_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -18151,12 +18413,12 @@ define void @test_vsuxseg7_nxv2f16_nxv2i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg7_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 
-; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -18169,12 +18431,12 @@ define void @test_vsuxseg7_mask_nxv2f16_nxv2i8( %val, half* % ; CHECK-LABEL: test_vsuxseg7_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -18190,12 +18452,12 @@ define void @test_vsuxseg7_nxv2f16_nxv2i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg7_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -18208,12 +18470,12 @@ define void @test_vsuxseg7_mask_nxv2f16_nxv2i16( %val, half* ; CHECK-LABEL: test_vsuxseg7_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -18229,12 +18491,12 @@ define void @test_vsuxseg7_nxv2f16_nxv2i64( %val, half* %base ; CHECK-LABEL: test_vsuxseg7_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei64.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -18247,12 +18509,12 @@ define void @test_vsuxseg7_mask_nxv2f16_nxv2i64( %val, half* ; CHECK-LABEL: test_vsuxseg7_mask_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; 
CHECK-NEXT: vmv1r.v v18, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg7ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -18268,13 +18530,13 @@ define void @test_vsuxseg8_nxv2f16_nxv2i32( %val, half* %base ; CHECK-LABEL: test_vsuxseg8_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -18287,13 +18549,13 @@ define void @test_vsuxseg8_mask_nxv2f16_nxv2i32( %val, half* ; CHECK-LABEL: test_vsuxseg8_mask_nxv2f16_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -18309,13 +18571,13 @@ define void @test_vsuxseg8_nxv2f16_nxv2i8( %val, half* %base, ; CHECK-LABEL: test_vsuxseg8_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -18328,13 +18590,13 @@ define void @test_vsuxseg8_mask_nxv2f16_nxv2i8( %val, half* % ; CHECK-LABEL: test_vsuxseg8_mask_nxv2f16_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -18350,13 +18612,13 @@ define void @test_vsuxseg8_nxv2f16_nxv2i16( %val, half* %base ; CHECK-LABEL: test_vsuxseg8_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; 
CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9 ; CHECK-NEXT: ret @@ -18369,13 +18631,13 @@ define void @test_vsuxseg8_mask_nxv2f16_nxv2i16( %val, half* ; CHECK-LABEL: test_vsuxseg8_mask_nxv2f16_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v10, v8 -; CHECK-NEXT: vmv1r.v v11, v8 -; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 +; CHECK-NEXT: vmv1r.v v11, v10 +; CHECK-NEXT: vmv1r.v v12, v10 +; CHECK-NEXT: vmv1r.v v13, v10 +; CHECK-NEXT: vmv1r.v v14, v10 +; CHECK-NEXT: vmv1r.v v15, v10 +; CHECK-NEXT: vmv1r.v v16, v10 +; CHECK-NEXT: vmv1r.v v17, v10 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t ; CHECK-NEXT: ret @@ -18391,13 +18653,13 @@ define void @test_vsuxseg8_nxv2f16_nxv2i64( %val, half* %base ; CHECK-LABEL: test_vsuxseg8_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v12, (a0), v10 ; CHECK-NEXT: ret @@ -18410,13 +18672,13 @@ define void @test_vsuxseg8_mask_nxv2f16_nxv2i64( %val, half* ; CHECK-LABEL: test_vsuxseg8_mask_nxv2f16_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vmv1r.v v12, v8 -; CHECK-NEXT: vmv1r.v v13, v8 -; CHECK-NEXT: vmv1r.v v14, v8 -; CHECK-NEXT: vmv1r.v v15, v8 -; CHECK-NEXT: vmv1r.v v16, v8 -; CHECK-NEXT: vmv1r.v v17, v8 -; CHECK-NEXT: vmv1r.v v18, v8 -; CHECK-NEXT: vmv1r.v v19, v8 +; CHECK-NEXT: vmv1r.v v13, v12 +; CHECK-NEXT: vmv1r.v v14, v12 +; CHECK-NEXT: vmv1r.v v15, v12 +; CHECK-NEXT: vmv1r.v v16, v12 +; CHECK-NEXT: vmv1r.v v17, v12 +; CHECK-NEXT: vmv1r.v v18, v12 +; CHECK-NEXT: vmv1r.v v19, v12 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu ; CHECK-NEXT: vsuxseg8ei64.v v12, (a0), v10, v0.t ; CHECK-NEXT: ret @@ -18431,6 +18693,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i32(, %val, float* %base, %index, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_nxv4f32_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -18444,6 +18707,7 @@ entry: define void @test_vsuxseg2_mask_nxv4f32_nxv4i32( %val, float* %base, %index, %mask, i64 %vl) { ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f32_nxv4i32: ; CHECK: # %bb.0: # %entry +; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2 ; CHECK-NEXT: vmv2r.v v12, v10 ; CHECK-NEXT: vmv2r.v v10, v8 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu @@ -18460,6 
+18724,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i8(<vscale x 4 x float>,<vsca
 define void @test_vsuxseg2_nxv4f32_nxv4i8(<vscale x 4 x float> %val, float* %base, <vscale x 4 x i8> %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4f32_nxv4i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT:    vmv1r.v v12, v10
 ; CHECK-NEXT:    vmv2r.v v10, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
@@ -18473,6 +18738,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4f32_nxv4i8(<vscale x 4 x float> %val, float* %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f32_nxv4i8:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT:    vmv1r.v v12, v10
 ; CHECK-NEXT:    vmv2r.v v10, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
@@ -18489,6 +18755,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i64(<vscale x 4 x float>,<vsc
 define void @test_vsuxseg2_nxv4f32_nxv4i64(<vscale x 4 x float> %val, float* %base, <vscale x 4 x i64> %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4f32_nxv4i64:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT:    vmv2r.v v10, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg2ei64.v v8, (a0), v12
@@ -18501,6 +18768,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4f32_nxv4i64(<vscale x 4 x float> %val, float* %base, <vscale x 4 x i64> %index, <vscale x 4 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f32_nxv4i64:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
 ; CHECK-NEXT:    vmv2r.v v10, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg2ei64.v v8, (a0), v12, v0.t
@@ -18516,6 +18784,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i16(<vscale x 4 x float>,<vsc
 define void @test_vsuxseg2_nxv4f32_nxv4i16(<vscale x 4 x float> %val, float* %base, <vscale x 4 x i16> %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_nxv4f32_nxv4i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT:    vmv1r.v v12, v10
 ; CHECK-NEXT:    vmv2r.v v10, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
@@ -18529,6 +18798,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv4f32_nxv4i16(<vscale x 4 x float> %val, float* %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f32_nxv4i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
 ; CHECK-NEXT:    vmv1r.v v12, v10
 ; CHECK-NEXT:    vmv2r.v v10, v8
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
@@ -18546,8 +18816,8 @@ define void @test_vsuxseg3_nxv4f32_nxv4i32(<vscale x 4 x float> %val, float* %ba
 ; CHECK-LABEL: test_vsuxseg3_nxv4f32_nxv4i32:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg3ei32.v v12, (a0), v10
 ; CHECK-NEXT:    ret
@@ -18560,8 +18830,8 @@ define void @test_vsuxseg3_mask_nxv4f32_nxv4i32(<vscale x 4 x float> %val, float
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f32_nxv4i32:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg3ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT:    ret
@@ -18577,8 +18847,8 @@ define void @test_vsuxseg3_nxv4f32_nxv4i8(<vscale x 4 x float> %val, float* %bas
 ; CHECK-LABEL: test_vsuxseg3_nxv4f32_nxv4i8:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg3ei8.v v12, (a0), v10
 ; CHECK-NEXT:    ret
@@ -18591,8 +18861,8 @@ define void @test_vsuxseg3_mask_nxv4f32_nxv4i8(<vscale x 4 x float> %val, float*
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f32_nxv4i8:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg3ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT:    ret
@@ -18607,11 +18877,11 @@ declare void @llvm.riscv.vsuxseg3.mask.nxv4f32.nxv4i64(<vscale x 4 x float>,<vsc
 define void @test_vsuxseg3_nxv4f32_nxv4i64(<vscale x 4 x float> %val, float* %base, <vscale x 4 x i64> %index, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_nxv4f32_nxv4i64:
 ; CHECK:       # %bb.0: # %entry
-; CHECK-NEXT:    vmv2r.v v10, v8
-; CHECK-NEXT:    vmv4r.v v16, v12
-; CHECK-NEXT:    vmv2r.v v12, v8
+; CHECK-NEXT:    vmv2r.v v16, v8
+; CHECK-NEXT:    vmv2r.v v18, v16
+; CHECK-NEXT:    vmv2r.v v20, v16
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT:    vsuxseg3ei64.v v8, (a0), v16
+; CHECK-NEXT:    vsuxseg3ei64.v v16, (a0), v12
 ; CHECK-NEXT:    ret
 entry:
   tail call void @llvm.riscv.vsuxseg3.nxv4f32.nxv4i64(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, float* %base, <vscale x 4 x i64> %index, i64 %vl)
@@ -18621,11 +18891,11 @@ entry:
 define void @test_vsuxseg3_mask_nxv4f32_nxv4i64(<vscale x 4 x float> %val, float* %base, <vscale x 4 x i64> %index, <vscale x 4 x i1> %mask, i64 %vl) {
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f32_nxv4i64:
 ; CHECK:       # %bb.0: # %entry
-; CHECK-NEXT:    vmv2r.v v10, v8
-; CHECK-NEXT:    vmv4r.v v16, v12
-; CHECK-NEXT:    vmv2r.v v12, v8
+; CHECK-NEXT:    vmv2r.v v16, v8
+; CHECK-NEXT:    vmv2r.v v18, v16
+; CHECK-NEXT:    vmv2r.v v20, v16
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
-; CHECK-NEXT:    vsuxseg3ei64.v v8, (a0), v16, v0.t
+; CHECK-NEXT:    vsuxseg3ei64.v v16, (a0), v12, v0.t
 ; CHECK-NEXT:    ret
 entry:
   tail call void @llvm.riscv.vsuxseg3.mask.nxv4f32.nxv4i64(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, float* %base, <vscale x 4 x i64> %index, <vscale x 4 x i1> %mask, i64 %vl)
@@ -18639,8 +18909,8 @@ define void @test_vsuxseg3_nxv4f32_nxv4i16(<vscale x 4 x float> %val, float* %ba
 ; CHECK-LABEL: test_vsuxseg3_nxv4f32_nxv4i16:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg3ei16.v v12, (a0), v10
 ; CHECK-NEXT:    ret
@@ -18653,8 +18923,8 @@ define void @test_vsuxseg3_mask_nxv4f32_nxv4i16(<vscale x 4 x float> %val, float
 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f32_nxv4i16:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg3ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT:    ret
@@ -18670,9 +18940,9 @@ define void @test_vsuxseg4_nxv4f32_nxv4i32(<vscale x 4 x float> %val, float* %ba
 ; CHECK-LABEL: test_vsuxseg4_nxv4f32_nxv4i32:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
-; CHECK-NEXT:    vmv2r.v v18, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
+; CHECK-NEXT:    vmv2r.v v18, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg4ei32.v v12, (a0), v10
 ; CHECK-NEXT:    ret
@@ -18685,9 +18955,9 @@ define void @test_vsuxseg4_mask_nxv4f32_nxv4i32(<vscale x 4 x float> %val, float
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f32_nxv4i32:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
-; CHECK-NEXT:    vmv2r.v v18, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
+; CHECK-NEXT:    vmv2r.v v18, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg4ei32.v v12, (a0), v10, v0.t
 ; CHECK-NEXT:    ret
@@ -18703,9 +18973,9 @@ define void @test_vsuxseg4_nxv4f32_nxv4i8(<vscale x 4 x float> %val, float* %bas
 ; CHECK-LABEL: test_vsuxseg4_nxv4f32_nxv4i8:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
-; CHECK-NEXT:    vmv2r.v v18, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
+; CHECK-NEXT:    vmv2r.v v18, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg4ei8.v v12, (a0), v10
 ; CHECK-NEXT:    ret
@@ -18718,9 +18988,9 @@ define void @test_vsuxseg4_mask_nxv4f32_nxv4i8(<vscale x 4 x float> %val, float*
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f32_nxv4i8:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
-; CHECK-NEXT:    vmv2r.v v18, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
+; CHECK-NEXT:    vmv2r.v v18, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg4ei8.v v12, (a0), v10, v0.t
 ; CHECK-NEXT:    ret
@@ -18736,9 +19006,9 @@ define void @test_vsuxseg4_nxv4f32_nxv4i64(<vscale x 4 x float> %val, float* %ba
 ; CHECK-LABEL: test_vsuxseg4_nxv4f32_nxv4i64:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v16, v8
-; CHECK-NEXT:    vmv2r.v v18, v8
-; CHECK-NEXT:    vmv2r.v v20, v8
-; CHECK-NEXT:    vmv2r.v v22, v8
+; CHECK-NEXT:    vmv2r.v v18, v16
+; CHECK-NEXT:    vmv2r.v v20, v16
+; CHECK-NEXT:    vmv2r.v v22, v16
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg4ei64.v v16, (a0), v12
 ; CHECK-NEXT:    ret
@@ -18751,9 +19021,9 @@ define void @test_vsuxseg4_mask_nxv4f32_nxv4i64(<vscale x 4 x float> %val, float
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f32_nxv4i64:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v16, v8
-; CHECK-NEXT:    vmv2r.v v18, v8
-; CHECK-NEXT:    vmv2r.v v20, v8
-; CHECK-NEXT:    vmv2r.v v22, v8
+; CHECK-NEXT:    vmv2r.v v18, v16
+; CHECK-NEXT:    vmv2r.v v20, v16
+; CHECK-NEXT:    vmv2r.v v22, v16
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg4ei64.v v16, (a0), v12, v0.t
 ; CHECK-NEXT:    ret
@@ -18769,9 +19039,9 @@ define void @test_vsuxseg4_nxv4f32_nxv4i16(<vscale x 4 x float> %val, float* %ba
 ; CHECK-LABEL: test_vsuxseg4_nxv4f32_nxv4i16:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
-; CHECK-NEXT:    vmv2r.v v18, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
+; CHECK-NEXT:    vmv2r.v v18, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg4ei16.v v12, (a0), v10
 ; CHECK-NEXT:    ret
@@ -18784,9 +19054,9 @@ define void @test_vsuxseg4_mask_nxv4f32_nxv4i16(<vscale x 4 x float> %val, float
 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f32_nxv4i16:
 ; CHECK:       # %bb.0: # %entry
 ; CHECK-NEXT:    vmv2r.v v12, v8
-; CHECK-NEXT:    vmv2r.v v14, v8
-; CHECK-NEXT:    vmv2r.v v16, v8
-; CHECK-NEXT:    vmv2r.v v18, v8
+; CHECK-NEXT:    vmv2r.v v14, v12
+; CHECK-NEXT:    vmv2r.v v16, v12
+; CHECK-NEXT:    vmv2r.v v18, v12
 ; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
 ; CHECK-NEXT:    vsuxseg4ei16.v v12, (a0), v10, v0.t
 ; CHECK-NEXT:    ret
diff --git a/llvm/test/CodeGen/RISCV/rvv/zvlsseg-zero-vl.ll b/llvm/test/CodeGen/RISCV/rvv/zvlsseg-zero-vl.ll
index 91b1d782440df..07998a40711e6 100644
--- a/llvm/test/CodeGen/RISCV/rvv/zvlsseg-zero-vl.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/zvlsseg-zero-vl.ll
@@ -15,6 +15,7 @@ define <vscale x 16 x i16> @test_vlseg2_mask_nxv16i16(i16* %base, <vscale x 16
 ; CHECK-NEXT:    vlseg2e16.v v4, (a0)
 ; CHECK-NEXT:    vmv4r.v v8, v4
 ; CHECK-NEXT:    vlseg2e16.v v4, (a0), v0.t
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT:    ret
 entry:
   %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlseg2.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, i16* %base, i64 0)
@@ -34,6 +35,7 @@ define <vscale x 16 x i16> @test_vlsseg2_mask_nxv16i16(i16* %base, i64 %offset,
 ; CHECK-NEXT:    vlsseg2e16.v v4, (a0), a1
 ; CHECK-NEXT:    vmv4r.v v8, v4
 ; CHECK-NEXT:    vlsseg2e16.v v4, (a0), a1, v0.t
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT:    ret
 entry:
   %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlsseg2.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, i16* %base, i64 %offset, i64 0)
@@ -92,6 +94,7 @@ define <vscale x 16 x i16> @test_vlseg2ff_nxv16i16(i16* %base, i64* %outvl) {
 ; CHECK-NEXT:    vlseg2e16ff.v v4, (a0)
 ; CHECK-NEXT:    csrr a0, vl
 ; CHECK-NEXT:    sd a0, 0(a1)
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT:    ret
 entry:
   %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, i16* %base, i64 0)
@@ -109,6 +112,7 @@ define <vscale x 16 x i16> @test_vlseg2ff_mask_nxv16i16(<vscale x 16 x i16> %val
 ; CHECK-NEXT:    vlseg2e16ff.v v4, (a0), v0.t
 ; CHECK-NEXT:    csrr a0, vl
 ; CHECK-NEXT:    sd a0, 0(a1)
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
 ; CHECK-NEXT:    ret
 entry:
   %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv16i16(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i1> %mask, i64 0, i64 1)
@@ -124,6 +128,7 @@ declare void @llvm.riscv.vsseg2.mask.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x
 define void @test_vsseg2_nxv16i16(<vscale x 16 x i16> %val, i16* %base) {
 ; CHECK-LABEL: test_vsseg2_nxv16i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, mu
 ; CHECK-NEXT:    vsseg2e16.v v8, (a0)
@@ -136,6 +141,7 @@ entry:
 define void @test_vsseg2_mask_nxv16i16(<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i1> %mask) {
 ; CHECK-LABEL: test_vsseg2_mask_nxv16i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, mu
 ; CHECK-NEXT:    vsseg2e16.v v8, (a0), v0.t
@@ -151,6 +157,7 @@ declare void @llvm.riscv.vssseg2.mask.nxv16i16(<vscale x 16 x i16>,<vscale x 16
 define void @test_vssseg2_nxv16i16(<vscale x 16 x i16> %val, i16* %base, i64 %offset) {
 ; CHECK-LABEL: test_vssseg2_nxv16i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, mu
 ; CHECK-NEXT:    vssseg2e16.v v8, (a0), a1
@@ -163,6 +170,7 @@ entry:
 define void @test_vssseg2_mask_nxv16i16(<vscale x 16 x i16> %val, i16* %base, i64 %offset, <vscale x 16 x i1> %mask) {
 ; CHECK-LABEL: test_vssseg2_mask_nxv16i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, mu
 ; CHECK-NEXT:    vssseg2e16.v v8, (a0), a1, v0.t
@@ -178,6 +186,7 @@ declare void @llvm.riscv.vsoxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vs
 define void @test_vsoxseg2_nxv16i16_nxv16i16(<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i16> %index) {
 ; CHECK-LABEL: test_vsoxseg2_nxv16i16_nxv16i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, mu
@@ -191,6 +200,7 @@ entry:
 define void @test_vsoxseg2_mask_nxv16i16_nxv16i16(<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask) {
 ; CHECK-LABEL: test_vsoxseg2_mask_nxv16i16_nxv16i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, mu
@@ -207,6 +217,7 @@ declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vs
 define void @test_vsuxseg2_nxv16i16_nxv16i16(<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i16> %index) {
 ; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, mu
@@ -220,6 +231,7 @@ entry:
 define void @test_vsuxseg2_mask_nxv16i16_nxv16i16(<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask) {
 ; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i16:
 ; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
 ; CHECK-NEXT:    vmv4r.v v16, v12
 ; CHECK-NEXT:    vmv4r.v v12, v8
 ; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, mu