diff --git a/llvm/lib/Target/RISCV/RISCVSchedAndes45.td b/llvm/lib/Target/RISCV/RISCVSchedAndes45.td
index 8cf15fa26e22d..d5f523711100a 100644
--- a/llvm/lib/Target/RISCV/RISCVSchedAndes45.td
+++ b/llvm/lib/Target/RISCV/RISCVSchedAndes45.td
@@ -8,7 +8,20 @@
 //===----------------------------------------------------------------------===//
-// FIXME: Implement sheduling model for V and other extensions.
+// The worst case LMUL is the largest LMUL.
+class Andes45IsWorstCaseMX<string mx, list<string> MxList> {
+  defvar LLMUL = LargestLMUL<MxList>.r;
+  bit c = !eq(mx, LLMUL);
+}
+
+// The worst case is the largest LMUL with the smallest SEW.
+class Andes45IsWorstCaseMXSEW<string mx, int sew, list<string> MxList,
+                              bit isF = 0> {
+  defvar LLMUL = LargestLMUL<MxList>.r;
+  defvar SSEW = SmallestSEW<mx, isF>.r;
+  bit c = !and(!eq(mx, LLMUL), !eq(sew, SSEW));
+}
+
 def Andes45Model : SchedMachineModel {
   let MicroOpBufferSize = 0; // Andes45 is in-order processor
   let IssueWidth = 2;        // 2 micro-ops dispatched per cycle
@@ -32,6 +45,15 @@ let SchedModel = Andes45Model in {
 // - Floating Point Divide / SQRT Unit (FDIV)
 // - Floating Point Move Unit (FMV)
 // - Floating Point Misc Unit (FMISC)
+//
+// Andes 45 series VPU
+// - Vector Arithmetic and Logical Unit (VALU)
+// - Vector Multiply Accumulate Unit (VMAC)
+// - Vector Divide Unit (VDIV)
+// - Vector Permutation Unit (VPERMUT)
+// - Vector Mask Unit (VMASK)
+// - Vector Floating-Point Miscellaneous Unit (VFMIS)
+// - Vector Floating-Point Divide Unit (VFDIV)
 //===----------------------------------------------------------------------===//
 let BufferSize = 0 in {
@@ -44,6 +66,15 @@ def Andes45FMAC : ProcResource<1>;
 def Andes45FDIV : ProcResource<1>;
 def Andes45FMV : ProcResource<1>;
 def Andes45FMISC : ProcResource<1>;
+
+def Andes45VALU : ProcResource<1>;
+def Andes45VMAC : ProcResource<1>;
+def Andes45VFMIS : ProcResource<1>;
+def Andes45VPERMUT : ProcResource<1>;
+def Andes45VDIV : ProcResource<1>;
+def Andes45VFDIV : ProcResource<1>;
+def Andes45VMASK : ProcResource<1>;
+def Andes45VLSU : ProcResource<1>;
 }
 // Integer arithmetic and logic
@@ -333,10 +364,526 @@ def : ReadAdvance;
 def : ReadAdvance;
 def : ReadAdvance;
+// RVV Scheduling
+
+// 6. Configuration-Setting Instructions
+def : WriteRes;
+def : WriteRes;
+def : WriteRes;
+
+// 7.
Vector Loads and Stores +foreach mx = SchedMxList in { + defvar IsWorstCase = Andes45IsWorstCaseMX.c; + + // Unit-stride loads and stores + defm "" : LMULWriteResMX<"WriteVLDE", [Andes45VLSU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVLDFF", [Andes45VLSU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSTE", [Andes45VLSU], mx, IsWorstCase>; + + // Mask loads and stores + defm "" : LMULWriteResMX<"WriteVLDM", [Andes45VLSU], mx, IsWorstCase=!eq(mx, "M1")>; + defm "" : LMULWriteResMX<"WriteVSTM", [Andes45VLSU], mx, IsWorstCase=!eq(mx, "M1")>; + + // Strided and indexed loads and stores + foreach eew = [8, 16, 32, 64] in { + defm "" : LMULWriteResMX<"WriteVLDS" # eew, [Andes45VLSU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVLDUX" # eew, [Andes45VLSU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVLDOX" # eew, [Andes45VLSU], mx, IsWorstCase>; + + defm "" : LMULWriteResMX<"WriteVSTS" # eew, [Andes45VLSU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSTUX" # eew, [Andes45VLSU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSTOX" # eew, [Andes45VLSU], mx, IsWorstCase>; + } +} + +// Segmented loads and stores +foreach mx = SchedMxList in { + foreach nf=2-8 in { + foreach eew = [8, 16, 32, 64] in { + defvar IsWorstCase = Andes45IsWorstCaseMX.c; + + // Unit-stride segmented + defm "" : LMULWriteResMX<"WriteVLSEG" # nf # "e" #eew, [Andes45VLSU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVLSEGFF" # nf # "e" #eew, [Andes45VLSU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSSEG" # nf # "e" #eew, [Andes45VLSU], mx, IsWorstCase>; + + // Strided/indexed segmented + defm "" : LMULWriteResMX<"WriteVLSSEG" # nf # "e" #eew, [Andes45VLSU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSSSEG" # nf # "e" #eew, [Andes45VLSU], mx, IsWorstCase>; + + // Indexed segmented + defm "" : LMULWriteResMX<"WriteVLOXSEG" # nf # "e" #eew, [Andes45VLSU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVLUXSEG" # nf # "e" #eew, [Andes45VLSU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSUXSEG" # nf # "e" #eew, [Andes45VLSU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSOXSEG" # nf # "e" #eew, [Andes45VLSU], mx, IsWorstCase>; + } + } +} + +// Whole register move/load/store +foreach LMul = [1, 2, 4, 8] in { + def : WriteRes("WriteVLD" # LMul # "R"), [Andes45VLSU]>; + def : WriteRes("WriteVST" # LMul # "R"), [Andes45VLSU]>; + + def : WriteRes("WriteVMov" # LMul # "V"), [Andes45VPERMUT]>; +} + +// 11. 
Vector Integer Arithmetic Instructions +foreach mx = SchedMxList in { + defvar IsWorstCase = Andes45IsWorstCaseMX.c; + + defm "" : LMULWriteResMX<"WriteVIALUV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIALUX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIALUI", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVExtV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVICALUV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVICALUX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVICALUI", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVICALUMV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVICALUMX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVICALUMI", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVICmpV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVICmpX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVICmpI", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIMinMaxV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIMinMaxX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIMergeV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIMergeX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIMergeI", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIMovV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIMovX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIMovI", [Andes45VALU], mx, IsWorstCase>; + + defm "" : LMULWriteResMX<"WriteVShiftV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVShiftX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVShiftI", [Andes45VALU], mx, IsWorstCase>; + + defm "" : LMULWriteResMX<"WriteVIMulV", [Andes45VMAC], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIMulX", [Andes45VMAC], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIMulAddV", [Andes45VMAC], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIMulAddX", [Andes45VMAC], mx, IsWorstCase>; +} + +// Widening +foreach mx = SchedMxListW in { + defvar IsWorstCase = Andes45IsWorstCaseMX.c; + + defm "" : LMULWriteResMX<"WriteVIWALUV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIWALUX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIWALUI", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIWMulV", [Andes45VMAC], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIWMulX", [Andes45VMAC], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIWMulAddV", [Andes45VMAC], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIWMulAddX", [Andes45VMAC], mx, IsWorstCase>; +} + +// Vector Integer Division and Remainder +foreach mx = SchedMxList in { + foreach sew = SchedSEWSet.val in { + defvar IsWorstCase = Andes45IsWorstCaseMXSEW.c; + + defm "" : LMULSEWWriteResMXSEW<"WriteVIDivV", [Andes45VDIV], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVIDivX", [Andes45VDIV], mx, sew, IsWorstCase>; + } +} + +// Narrowing Shift +foreach mx = SchedMxListW in { + defvar IsWorstCase = Andes45IsWorstCaseMX.c; + + defm "" : LMULWriteResMX<"WriteVNShiftV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVNShiftX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVNShiftI", [Andes45VALU], mx, 
IsWorstCase>; +} + +// 12. Vector Fixed-Point Arithmetic Instructions +foreach mx = SchedMxList in { + defvar IsWorstCase = Andes45IsWorstCaseMX.c; + + defm "" : LMULWriteResMX<"WriteVSALUV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSALUX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSALUI", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVAALUV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVAALUX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSMulV", [Andes45VMAC], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSMulX", [Andes45VMAC], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSShiftV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSShiftX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSShiftI", [Andes45VALU], mx, IsWorstCase>; +} + +// Narrowing +foreach mx = SchedMxListW in { + defvar IsWorstCase = Andes45IsWorstCaseMX.c; + + defm "" : LMULWriteResMX<"WriteVNClipV", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVNClipX", [Andes45VALU], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVNClipI", [Andes45VALU], mx, IsWorstCase>; +} + +// 13. Vector Floating-Point Instructions +foreach mx = SchedMxListF in { + foreach sew = SchedSEWSet.val in { + defvar IsWorstCase = Andes45IsWorstCaseMXSEW.c; + + defm "" : LMULSEWWriteResMXSEW<"WriteVFALUV", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFALUF", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFMulV", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFMulF", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFMulAddV", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFMulAddF", [Andes45VMAC], mx, sew, IsWorstCase>; + } +} + +foreach mx = SchedMxListF in { + foreach sew = SchedSEWSet.val in { + defvar IsWorstCase = Andes45IsWorstCaseMXSEW.c; + + defm "" : LMULSEWWriteResMXSEW<"WriteVFRecpV", [Andes45VFDIV], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFSgnjV", [Andes45VFMIS], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFSgnjF", [Andes45VFMIS], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFMinMaxV", [Andes45VFMIS], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFMinMaxF", [Andes45VFMIS], mx, sew, IsWorstCase>; + + defm "" : LMULSEWWriteResMXSEW<"WriteVFCvtIToFV", [Andes45VFMIS], mx, sew, IsWorstCase>; + } +} + +foreach mx = SchedMxList in { + defvar IsWorstCase = Andes45IsWorstCaseMX.c; + + defm "" : LMULWriteResMX<"WriteVFCmpV", [Andes45VFMIS], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVFCmpF", [Andes45VFMIS], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVFClassV", [Andes45VFMIS], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVFMergeV", [Andes45VFMIS], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVFMovV", [Andes45VFMIS], mx, IsWorstCase>; + + defm "" : LMULWriteResMX<"WriteVFCvtFToIV", [Andes45VFMIS], mx, IsWorstCase>; +} + +// Widening +foreach mx = SchedMxListW in { + foreach sew = SchedSEWSet.val in { + defvar IsWorstCase = Andes45IsWorstCaseMXSEW.c; + + defm "" : LMULSEWWriteResMXSEW<"WriteVFWCvtIToFV", [Andes45VFMIS], mx, sew, IsWorstCase>; + } +} + +foreach mx = SchedMxListFW in { + defvar IsWorstCase = Andes45IsWorstCaseMX.c; + + defm "" : LMULWriteResMX<"WriteVFWCvtFToIV", [Andes45VFMIS], mx, IsWorstCase>; +} + 
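The defm lines above never spell out WriteRes records directly; they all funnel through the shared LMUL/SEW helper multiclasses declared in llvm/lib/Target/RISCV/RISCVScheduleV.td. As a rough sketch of how the IsWorstCase bit produced by Andes45IsWorstCaseMX / Andes45IsWorstCaseMXSEW is consumed (simplified for illustration; the authoritative definitions live in RISCVScheduleV.td, not in this patch):

multiclass LMULWriteResMX<string name, list<ProcResourceKind> resources,
                          string mx, bit IsWorstCase> {
  // One WriteRes per LMUL, e.g. WriteVIALUV_M1 ... WriteVIALUV_M8.
  def : WriteRes<!cast<SchedWrite>(name # "_" # mx), resources>;
  // The largest LMUL also backs the conservative "_WorstCase" SchedWrite.
  if IsWorstCase then
    def : WriteRes<!cast<SchedWrite>(name # "_WorstCase"), resources>;
}

multiclass LMULSEWWriteResMXSEW<string name, list<ProcResourceKind> resources,
                                string mx, int sew, bit IsWorstCase> {
  // One WriteRes per (LMUL, SEW) pair, e.g. WriteVFALUV_M1_E32.
  def : WriteRes<!cast<SchedWrite>(name # "_" # mx # "_E" # sew), resources>;
  if IsWorstCase then
    def : WriteRes<!cast<SchedWrite>(name # "_WorstCase"), resources>;
}

Per-unit latencies and occupancies can then be attached at the use sites with a surrounding let Latency = ... in block, which is the usual pattern in the other RISC-V scheduling models.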
+foreach mx = SchedMxListFW in { + foreach sew = SchedSEWSet.val in { + defvar IsWorstCase = Andes45IsWorstCaseMXSEW.c; + + defm "" : LMULSEWWriteResMXSEW<"WriteVFWALUV", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFWALUF", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFWMulV", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFWMulF", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFWMulAddV", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFWMulAddF", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFWCvtFToFV", [Andes45VFMIS], mx, sew, IsWorstCase>; + } +} + +// Narrowing +foreach mx = SchedMxListW in { + defvar IsWorstCase = Andes45IsWorstCaseMX.c; + + defm "" : LMULWriteResMX<"WriteVFNCvtFToIV", [Andes45VFMIS], mx, IsWorstCase>; +} + +foreach mx = SchedMxListFW in { + foreach sew = SchedSEWSet.val in { + + defvar IsWorstCase = Andes45IsWorstCaseMXSEW.c; + defm "" : LMULSEWWriteResMXSEW<"WriteVFNCvtIToFV", [Andes45VFMIS], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFNCvtFToFV", [Andes45VFMIS], mx, sew, IsWorstCase>; + } +} + +// Vector Floating-Point Division and Square Root +foreach mx = SchedMxListF in { + foreach sew = SchedSEWSet.val in { + defvar IsWorstCase = Andes45IsWorstCaseMXSEW.c; + + defm "" : LMULSEWWriteResMXSEW<"WriteVFDivV", [Andes45VFDIV], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFDivF", [Andes45VFDIV], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFSqrtV", [Andes45VFDIV], mx, sew, IsWorstCase>; + } +} + +// 14. Vector Reduction Operations +foreach mx = SchedMxList in { + foreach sew = SchedSEWSet.val in { + defvar IsWorstCase = Andes45IsWorstCaseMXSEW.c; + + defm "" : LMULSEWWriteResMXSEW<"WriteVIRedV_From", [Andes45VALU], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVIRedMinMaxV_From", [Andes45VALU], mx, sew, IsWorstCase>; + } +} + +foreach mx = SchedMxListWRed in { + foreach sew = SchedSEWSet.val in { + defvar IsWorstCase = Andes45IsWorstCaseMXSEW.c; + + defm "" : LMULSEWWriteResMXSEW<"WriteVIWRedV_From", [Andes45VALU], mx, sew, IsWorstCase>; + } +} + +foreach mx = SchedMxListF in { + foreach sew = SchedSEWSet.val in { + defvar IsWorstCase = Andes45IsWorstCaseMXSEW.c; + + defm "" : LMULSEWWriteResMXSEW<"WriteVFRedV_From", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFRedOV_From", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFRedMinMaxV_From", [Andes45VFMIS], mx, sew, IsWorstCase>; + } +} + +foreach mx = SchedMxListFWRed in { + foreach sew = SchedSEWSet.val in { + defvar IsWorstCase = Andes45IsWorstCaseMXSEW.c; + + defm "" : LMULSEWWriteResMXSEW<"WriteVFWRedV_From", [Andes45VMAC], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVFWRedOV_From", [Andes45VMAC], mx, sew, IsWorstCase>; + } +} + +// 15. 
Vector Mask Instructions +foreach mx = SchedMxList in { + defvar IsWorstCase = Andes45IsWorstCaseMX.c; + + defm "" : LMULWriteResMX<"WriteVMALUV", [Andes45VMASK], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVMPopV", [Andes45VMASK], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVMFFSV", [Andes45VMASK], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVMSFSV", [Andes45VMASK], mx, IsWorstCase>; + + defm "" : LMULWriteResMX<"WriteVIotaV", [Andes45VMASK], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVIdxV", [Andes45VMASK], mx, IsWorstCase>; +} + +// 16. Vector Permutation Instructions +foreach mx = SchedMxList in { + defvar IsWorstCase = Andes45IsWorstCaseMX.c; + + defm "" : LMULWriteResMX<"WriteVSlideI", [Andes45VPERMUT], mx, IsWorstCase>; + + defm "" : LMULWriteResMX<"WriteVISlide1X", [Andes45VPERMUT], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVFSlide1F", [Andes45VPERMUT], mx, IsWorstCase>; + + defm "" : LMULWriteResMX<"WriteVSlideUpX", [Andes45VPERMUT], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVSlideDownX", [Andes45VPERMUT], mx, IsWorstCase>; +} + +def : WriteRes; +def : WriteRes; + +def : WriteRes; +def : WriteRes; + +// Gather and Compress +foreach mx = SchedMxList in { + foreach sew = SchedSEWSet.val in { + defvar IsWorstCase = Andes45IsWorstCaseMXSEW.c; + defm "" : LMULSEWWriteResMXSEW<"WriteVRGatherVV", [Andes45VPERMUT], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVRGatherEI16VV", [Andes45VPERMUT], mx, sew, IsWorstCase>; + defm "" : LMULSEWWriteResMXSEW<"WriteVCompressV", [Andes45VPERMUT], mx, sew, IsWorstCase>; + } +} + +foreach mx = SchedMxList in { + defvar IsWorstCase = Andes45IsWorstCaseMX.c; + + defm "" : LMULWriteResMX<"WriteVRGatherVX", [Andes45VPERMUT], mx, IsWorstCase>; + defm "" : LMULWriteResMX<"WriteVRGatherVI", [Andes45VPERMUT], mx, IsWorstCase>; +} + +// Others +def : WriteRes; + +// 6. Configuration-Setting Instructions +def : ReadAdvance; +def : ReadAdvance; + +// 7. Vector Loads and Stores +def : ReadAdvance; +def : ReadAdvance; +defm "" : LMULReadAdvance<"ReadVSTEV", 0>; +defm "" : LMULReadAdvance<"ReadVSTM", 0>; +def : ReadAdvance; +def : ReadAdvance; +defm "" : LMULReadAdvance<"ReadVSTS8V", 0>; +defm "" : LMULReadAdvance<"ReadVSTS16V", 0>; +defm "" : LMULReadAdvance<"ReadVSTS32V", 0>; +defm "" : LMULReadAdvance<"ReadVSTS64V", 0>; +defm "" : LMULReadAdvance<"ReadVLDUXV", 0>; +defm "" : LMULReadAdvance<"ReadVLDOXV", 0>; +defm "" : LMULReadAdvance<"ReadVSTUX8", 0>; +defm "" : LMULReadAdvance<"ReadVSTUX16", 0>; +defm "" : LMULReadAdvance<"ReadVSTUX32", 0>; +defm "" : LMULReadAdvance<"ReadVSTUX64", 0>; +defm "" : LMULReadAdvance<"ReadVSTUXV", 0>; +defm "" : LMULReadAdvance<"ReadVSTUX8V", 0>; +defm "" : LMULReadAdvance<"ReadVSTUX16V", 0>; +defm "" : LMULReadAdvance<"ReadVSTUX32V", 0>; +defm "" : LMULReadAdvance<"ReadVSTUX64V", 0>; +defm "" : LMULReadAdvance<"ReadVSTOX8", 0>; +defm "" : LMULReadAdvance<"ReadVSTOX16", 0>; +defm "" : LMULReadAdvance<"ReadVSTOX32", 0>; +defm "" : LMULReadAdvance<"ReadVSTOX64", 0>; +defm "" : LMULReadAdvance<"ReadVSTOXV", 0>; +defm "" : LMULReadAdvance<"ReadVSTOX8V", 0>; +defm "" : LMULReadAdvance<"ReadVSTOX16V", 0>; +defm "" : LMULReadAdvance<"ReadVSTOX32V", 0>; +defm "" : LMULReadAdvance<"ReadVSTOX64V", 0>; +// LMUL Aware +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; + +// 11. 
Vector Integer Arithmetic Instructions +defm : LMULReadAdvance<"ReadVIALUV", 0>; +defm : LMULReadAdvance<"ReadVIALUX", 0>; +defm : LMULReadAdvanceW<"ReadVIWALUV", 0>; +defm : LMULReadAdvanceW<"ReadVIWALUX", 0>; +defm : LMULReadAdvance<"ReadVExtV", 0>; +defm : LMULReadAdvance<"ReadVICALUV", 0>; +defm : LMULReadAdvance<"ReadVICALUX", 0>; +defm : LMULReadAdvance<"ReadVShiftV", 0>; +defm : LMULReadAdvance<"ReadVShiftX", 0>; +defm : LMULReadAdvanceW<"ReadVNShiftV", 0>; +defm : LMULReadAdvanceW<"ReadVNShiftX", 0>; +defm : LMULReadAdvance<"ReadVICmpV", 0>; +defm : LMULReadAdvance<"ReadVICmpX", 0>; +defm : LMULReadAdvance<"ReadVIMinMaxV", 0>; +defm : LMULReadAdvance<"ReadVIMinMaxX", 0>; +defm : LMULReadAdvance<"ReadVIMulV", 0>; +defm : LMULReadAdvance<"ReadVIMulX", 0>; +defm : LMULSEWReadAdvance<"ReadVIDivV", 0>; +defm : LMULSEWReadAdvance<"ReadVIDivX", 0>; +defm : LMULReadAdvanceW<"ReadVIWMulV", 0>; +defm : LMULReadAdvanceW<"ReadVIWMulX", 0>; +defm : LMULReadAdvance<"ReadVIMulAddV", 0>; +defm : LMULReadAdvance<"ReadVIMulAddX", 0>; +defm : LMULReadAdvanceW<"ReadVIWMulAddV", 0>; +defm : LMULReadAdvanceW<"ReadVIWMulAddX", 0>; +defm : LMULReadAdvance<"ReadVIMergeV", 0>; +defm : LMULReadAdvance<"ReadVIMergeX", 0>; +defm : LMULReadAdvance<"ReadVIMovV", 0>; +defm : LMULReadAdvance<"ReadVIMovX", 0>; + +// 12. Vector Fixed-Point Arithmetic Instructions +defm "" : LMULReadAdvance<"ReadVSALUV", 0>; +defm "" : LMULReadAdvance<"ReadVSALUX", 0>; +defm "" : LMULReadAdvance<"ReadVAALUV", 0>; +defm "" : LMULReadAdvance<"ReadVAALUX", 0>; +defm "" : LMULReadAdvance<"ReadVSMulV", 0>; +defm "" : LMULReadAdvance<"ReadVSMulX", 0>; +defm "" : LMULReadAdvance<"ReadVSShiftV", 0>; +defm "" : LMULReadAdvance<"ReadVSShiftX", 0>; +defm "" : LMULReadAdvanceW<"ReadVNClipV", 0>; +defm "" : LMULReadAdvanceW<"ReadVNClipX", 0>; + +// 13. 
Vector Floating-Point Instructions +defm "" : LMULSEWReadAdvanceF<"ReadVFALUV", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFALUF", 0>; +defm "" : LMULSEWReadAdvanceFW<"ReadVFWALUV", 0>; +defm "" : LMULSEWReadAdvanceFW<"ReadVFWALUF", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFMulV", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFMulF", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFDivV", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFDivF", 0>; +defm "" : LMULSEWReadAdvanceFW<"ReadVFWMulV", 0>; +defm "" : LMULSEWReadAdvanceFW<"ReadVFWMulF", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFMulAddV", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFMulAddF", 0>; +defm "" : LMULSEWReadAdvanceFW<"ReadVFWMulAddV", 0>; +defm "" : LMULSEWReadAdvanceFW<"ReadVFWMulAddF", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFSqrtV", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFRecpV", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFMinMaxV", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFMinMaxF", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFSgnjV", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFSgnjF", 0>; +defm "" : LMULReadAdvance<"ReadVFCmpV", 0>; +defm "" : LMULReadAdvance<"ReadVFCmpF", 0>; +defm "" : LMULReadAdvance<"ReadVFClassV", 0>; +defm "" : LMULReadAdvance<"ReadVFMergeV", 0>; +defm "" : LMULReadAdvance<"ReadVFMergeF", 0>; +defm "" : LMULReadAdvance<"ReadVFMovF", 0>; +defm "" : LMULSEWReadAdvanceF<"ReadVFCvtIToFV", 0>; +defm "" : LMULReadAdvance<"ReadVFCvtFToIV", 0>; +defm "" : LMULSEWReadAdvanceW<"ReadVFWCvtIToFV", 0>; +defm "" : LMULReadAdvanceFW<"ReadVFWCvtFToIV", 0>; +defm "" : LMULSEWReadAdvanceFW<"ReadVFWCvtFToFV", 0>; +defm "" : LMULSEWReadAdvanceFW<"ReadVFNCvtIToFV", 0>; +defm "" : LMULReadAdvanceW<"ReadVFNCvtFToIV", 0>; +defm "" : LMULSEWReadAdvanceFW<"ReadVFNCvtFToFV", 0>; + +// 14. Vector Reduction Operations +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; + +// 15. Vector Mask Instructions +defm "" : LMULReadAdvance<"ReadVMALUV", 0>; +defm "" : LMULReadAdvance<"ReadVMPopV", 0>; +defm "" : LMULReadAdvance<"ReadVMFFSV", 0>; +defm "" : LMULReadAdvance<"ReadVMSFSV", 0>; +defm "" : LMULReadAdvance<"ReadVIotaV", 0>; + +// 16. 
Vector Permutation Instructions +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +defm "" : LMULReadAdvance<"ReadVISlideV", 0>; +defm "" : LMULReadAdvance<"ReadVISlideX", 0>; +defm "" : LMULReadAdvance<"ReadVFSlideV", 0>; +defm "" : LMULReadAdvance<"ReadVFSlideF", 0>; +defm "" : LMULSEWReadAdvance<"ReadVRGatherVV_data", 0>; +defm "" : LMULSEWReadAdvance<"ReadVRGatherVV_index", 0>; +defm "" : LMULSEWReadAdvance<"ReadVRGatherEI16VV_data", 0>; +defm "" : LMULSEWReadAdvance<"ReadVRGatherEI16VV_index", 0>; +defm "" : LMULReadAdvance<"ReadVRGatherVX_data", 0>; +defm "" : LMULReadAdvance<"ReadVRGatherVX_index", 0>; +defm "" : LMULReadAdvance<"ReadVRGatherVI_data", 0>; +defm "" : LMULSEWReadAdvance<"ReadVCompressV", 0>; +// LMUL Aware +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; +def : ReadAdvance; + +// Others +def : ReadAdvance; +def : ReadAdvance; +foreach mx = SchedMxList in { + def : ReadAdvance("ReadVPassthru_" # mx), 0>; + foreach sew = SchedSEWSet.val in + def : ReadAdvance("ReadVPassthru_" # mx # "_E" # sew), 0>; +} + //===----------------------------------------------------------------------===// // Unsupported extensions defm : UnsupportedSchedQ; -defm : UnsupportedSchedV; defm : UnsupportedSchedZabha; defm : UnsupportedSchedZbkb; defm : UnsupportedSchedZbkx; diff --git a/llvm/test/tools/llvm-mca/RISCV/Andes45/fpr.s b/llvm/test/tools/llvm-mca/RISCV/Andes45/fpr.s index d1ab4b3b6a7e0..1a7812e70438d 100644 --- a/llvm/test/tools/llvm-mca/RISCV/Andes45/fpr.s +++ b/llvm/test/tools/llvm-mca/RISCV/Andes45/fpr.s @@ -67,26 +67,34 @@ fcvt.s.w ft0, a0 # CHECK-NEXT: [5] - Andes45FMV # CHECK-NEXT: [6] - Andes45LSU # CHECK-NEXT: [7] - Andes45MDU +# CHECK-NEXT: [8] - Andes45VALU +# CHECK-NEXT: [9] - Andes45VDIV +# CHECK-NEXT: [10] - Andes45VFDIV +# CHECK-NEXT: [11] - Andes45VFMIS +# CHECK-NEXT: [12] - Andes45VLSU +# CHECK-NEXT: [13] - Andes45VMAC +# CHECK-NEXT: [14] - Andes45VMASK +# CHECK-NEXT: [15] - Andes45VPERMUT # CHECK: Resource pressure per iteration: -# CHECK-NEXT: [0.0] [0.1] [1] [2] [3] [4] [5] [6] [7] -# CHECK-NEXT: - - - 56.00 4.00 4.00 2.00 - - +# CHECK-NEXT: [0.0] [0.1] [1] [2] [3] [4] [5] [6] [7] [8] [9] [10] [11] [12] [13] [14] [15] +# CHECK-NEXT: - - - 56.00 4.00 4.00 2.00 - - - - - - - - - - # CHECK: Resource pressure by instruction: -# CHECK-NEXT: [0.0] [0.1] [1] [2] [3] [4] [5] [6] [7] Instructions: -# CHECK-NEXT: - - - - 1.00 - - - - fadd.s ft0, fa0, fa1 -# CHECK-NEXT: - - - 19.00 - - - - - fdiv.s ft0, fa0, fa1 -# CHECK-NEXT: - - - - 1.00 - - - - fadd.s ft0, fa0, fa1 -# CHECK-NEXT: - - - - 1.00 - - - - fmul.s ft0, fa0, fa1 -# CHECK-NEXT: - - - - 1.00 - - - - fmadd.s ft0, fa0, fa1, fa2 -# CHECK-NEXT: - - - 19.00 - - - - - fdiv.s ft0, fa0, fa1 -# CHECK-NEXT: - - - 18.00 - - - - - fsqrt.s ft0, fa0 -# CHECK-NEXT: - - - - - - 1.00 - - fsgnj.s ft0, fa0, fa1 -# CHECK-NEXT: - - - - - - 1.00 - - fmv.x.w a0, fa0 -# CHECK-NEXT: - - - - - 1.00 - - - fmin.s ft0, fa0, fa1 -# CHECK-NEXT: - - - - - 1.00 - - - fclass.s a0, fa0 -# CHECK-NEXT: - - - - - 1.00 - - - feq.s a0, fa0, fa1 -# CHECK-NEXT: - - - - - 1.00 - - - fcvt.s.w ft0, a0 +# CHECK-NEXT: [0.0] [0.1] [1] [2] [3] [4] [5] [6] [7] [8] [9] [10] [11] [12] [13] [14] [15] Instructions: +# CHECK-NEXT: - - - - 1.00 - - - - - - - - - - - - fadd.s ft0, fa0, fa1 +# CHECK-NEXT: - - - 19.00 - - - - - - - - - - - - - fdiv.s ft0, fa0, fa1 +# CHECK-NEXT: - - - - 1.00 - - - - - - - - - - - - fadd.s ft0, fa0, fa1 +# CHECK-NEXT: - - - - 1.00 - - - - - - - - - - - - fmul.s 
ft0, fa0, fa1 +# CHECK-NEXT: - - - - 1.00 - - - - - - - - - - - - fmadd.s ft0, fa0, fa1, fa2 +# CHECK-NEXT: - - - 19.00 - - - - - - - - - - - - - fdiv.s ft0, fa0, fa1 +# CHECK-NEXT: - - - 18.00 - - - - - - - - - - - - - fsqrt.s ft0, fa0 +# CHECK-NEXT: - - - - - - 1.00 - - - - - - - - - - fsgnj.s ft0, fa0, fa1 +# CHECK-NEXT: - - - - - - 1.00 - - - - - - - - - - fmv.x.w a0, fa0 +# CHECK-NEXT: - - - - - 1.00 - - - - - - - - - - - fmin.s ft0, fa0, fa1 +# CHECK-NEXT: - - - - - 1.00 - - - - - - - - - - - fclass.s a0, fa0 +# CHECK-NEXT: - - - - - 1.00 - - - - - - - - - - - feq.s a0, fa0, fa1 +# CHECK-NEXT: - - - - - 1.00 - - - - - - - - - - - fcvt.s.w ft0, a0 # CHECK: Timeline view: # CHECK-NEXT: 0123456789 0123456789 0123456789 diff --git a/llvm/test/tools/llvm-mca/RISCV/Andes45/gpr.s b/llvm/test/tools/llvm-mca/RISCV/Andes45/gpr.s index d90dce8c5c3fc..3227ecfa4a372 100644 --- a/llvm/test/tools/llvm-mca/RISCV/Andes45/gpr.s +++ b/llvm/test/tools/llvm-mca/RISCV/Andes45/gpr.s @@ -133,55 +133,63 @@ bext a0, a0, a0 # CHECK-NEXT: [5] - Andes45FMV # CHECK-NEXT: [6] - Andes45LSU # CHECK-NEXT: [7] - Andes45MDU +# CHECK-NEXT: [8] - Andes45VALU +# CHECK-NEXT: [9] - Andes45VDIV +# CHECK-NEXT: [10] - Andes45VFDIV +# CHECK-NEXT: [11] - Andes45VFMIS +# CHECK-NEXT: [12] - Andes45VLSU +# CHECK-NEXT: [13] - Andes45VMAC +# CHECK-NEXT: [14] - Andes45VMASK +# CHECK-NEXT: [15] - Andes45VPERMUT # CHECK: Resource pressure per iteration: -# CHECK-NEXT: [0.0] [0.1] [1] [2] [3] [4] [5] [6] [7] -# CHECK-NEXT: 10.00 11.00 1.00 - - - - 16.00 80.00 +# CHECK-NEXT: [0.0] [0.1] [1] [2] [3] [4] [5] [6] [7] [8] [9] [10] [11] [12] [13] [14] [15] +# CHECK-NEXT: 10.00 11.00 1.00 - - - - 16.00 80.00 - - - - - - - - # CHECK: Resource pressure by instruction: -# CHECK-NEXT: [0.0] [0.1] [1] [2] [3] [4] [5] [6] [7] Instructions: -# CHECK-NEXT: - 1.00 - - - - - - - add a0, a0, a0 -# CHECK-NEXT: 1.00 - - - - - - - - sub a1, a1, a1 -# CHECK-NEXT: - 1.00 - - - - - - - addw a0, a0, a0 -# CHECK-NEXT: 1.00 - - - - - - - - subw a0, a0, a0 -# CHECK-NEXT: - 1.00 - - - - - - - slli a0, a0, 4 -# CHECK-NEXT: 1.00 - - - - - - - - slliw a0, a0, 4 -# CHECK-NEXT: - 1.00 - - - - - - - srl a0, a0, a0 -# CHECK-NEXT: 1.00 - - - - - - - - srlw a0, a0, a0 -# CHECK-NEXT: - - - - - - - - 1.00 mul a0, a0, a0 -# CHECK-NEXT: - - - - - - - - 1.00 mulw a0, a0, a0 -# CHECK-NEXT: - - - - - - - - 39.00 div a0, a0, a0 -# CHECK-NEXT: - - - - - - - - 39.00 divw a0, a0, a0 -# CHECK-NEXT: - - - - - - - 1.00 - lb a0, 4(a1) -# CHECK-NEXT: - - - - - - - 1.00 - lh a0, 4(a1) -# CHECK-NEXT: - - - - - - - 1.00 - lw a0, 4(a1) -# CHECK-NEXT: - - - - - - - 1.00 - ld a0, 4(a1) -# CHECK-NEXT: - - - - - - - 1.00 - flw fa0, 4(a1) -# CHECK-NEXT: - - - - - - - 1.00 - fld fa0, 4(a1) -# CHECK-NEXT: - - - - - - - 1.00 - sb a0, 4(a1) -# CHECK-NEXT: - - - - - - - 1.00 - sh a0, 4(a1) -# CHECK-NEXT: - - - - - - - 1.00 - sw a0, 4(a1) -# CHECK-NEXT: - - - - - - - 1.00 - sd a0, 4(a1) -# CHECK-NEXT: - - - - - - - 1.00 - amoswap.w a0, a1, (a0) -# CHECK-NEXT: - - - - - - - 1.00 - amoswap.d a0, a1, (a0) -# CHECK-NEXT: - - - - - - - 1.00 - lr.w a0, (a0) -# CHECK-NEXT: - - - - - - - 1.00 - lr.d a0, (a0) -# CHECK-NEXT: - - - - - - - 1.00 - sc.w a0, a1, (a0) -# CHECK-NEXT: - - - - - - - 1.00 - sc.d a0, a1, (a0) -# CHECK-NEXT: - - 1.00 - - - - - - csrrw a0, mstatus, zero -# CHECK-NEXT: - 1.00 - - - - - - - sh1add a0, a0, a0 -# CHECK-NEXT: 1.00 - - - - - - - - sh1add.uw a0, a0, a0 -# CHECK-NEXT: - 1.00 - - - - - - - rori a0, a0, 4 -# CHECK-NEXT: 1.00 - - - - - - - - roriw a0, a0, 4 -# CHECK-NEXT: - 1.00 - - - - 
- - - rol a0, a0, a0 -# CHECK-NEXT: 1.00 - - - - - - - - rolw a0, a0, a0 -# CHECK-NEXT: - 1.00 - - - - - - - clz a0, a0 -# CHECK-NEXT: 1.00 - - - - - - - - clzw a0, a0 -# CHECK-NEXT: - 1.00 - - - - - - - clmul a0, a0, a0 -# CHECK-NEXT: 1.00 - - - - - - - - bclri a0, a0, 4 -# CHECK-NEXT: - 1.00 - - - - - - - bclr a0, a0, a0 -# CHECK-NEXT: 1.00 - - - - - - - - bexti a0, a0, 4 -# CHECK-NEXT: - 1.00 - - - - - - - bext a0, a0, a0 +# CHECK-NEXT: [0.0] [0.1] [1] [2] [3] [4] [5] [6] [7] [8] [9] [10] [11] [12] [13] [14] [15] Instructions: +# CHECK-NEXT: - 1.00 - - - - - - - - - - - - - - - add a0, a0, a0 +# CHECK-NEXT: 1.00 - - - - - - - - - - - - - - - - sub a1, a1, a1 +# CHECK-NEXT: - 1.00 - - - - - - - - - - - - - - - addw a0, a0, a0 +# CHECK-NEXT: 1.00 - - - - - - - - - - - - - - - - subw a0, a0, a0 +# CHECK-NEXT: - 1.00 - - - - - - - - - - - - - - - slli a0, a0, 4 +# CHECK-NEXT: 1.00 - - - - - - - - - - - - - - - - slliw a0, a0, 4 +# CHECK-NEXT: - 1.00 - - - - - - - - - - - - - - - srl a0, a0, a0 +# CHECK-NEXT: 1.00 - - - - - - - - - - - - - - - - srlw a0, a0, a0 +# CHECK-NEXT: - - - - - - - - 1.00 - - - - - - - - mul a0, a0, a0 +# CHECK-NEXT: - - - - - - - - 1.00 - - - - - - - - mulw a0, a0, a0 +# CHECK-NEXT: - - - - - - - - 39.00 - - - - - - - - div a0, a0, a0 +# CHECK-NEXT: - - - - - - - - 39.00 - - - - - - - - divw a0, a0, a0 +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - lb a0, 4(a1) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - lh a0, 4(a1) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - lw a0, 4(a1) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - ld a0, 4(a1) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - flw fa0, 4(a1) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - fld fa0, 4(a1) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - sb a0, 4(a1) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - sh a0, 4(a1) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - sw a0, 4(a1) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - sd a0, 4(a1) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - amoswap.w a0, a1, (a0) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - amoswap.d a0, a1, (a0) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - lr.w a0, (a0) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - lr.d a0, (a0) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - sc.w a0, a1, (a0) +# CHECK-NEXT: - - - - - - - 1.00 - - - - - - - - - sc.d a0, a1, (a0) +# CHECK-NEXT: - - 1.00 - - - - - - - - - - - - - - csrrw a0, mstatus, zero +# CHECK-NEXT: - 1.00 - - - - - - - - - - - - - - - sh1add a0, a0, a0 +# CHECK-NEXT: 1.00 - - - - - - - - - - - - - - - - sh1add.uw a0, a0, a0 +# CHECK-NEXT: - 1.00 - - - - - - - - - - - - - - - rori a0, a0, 4 +# CHECK-NEXT: 1.00 - - - - - - - - - - - - - - - - roriw a0, a0, 4 +# CHECK-NEXT: - 1.00 - - - - - - - - - - - - - - - rol a0, a0, a0 +# CHECK-NEXT: 1.00 - - - - - - - - - - - - - - - - rolw a0, a0, a0 +# CHECK-NEXT: - 1.00 - - - - - - - - - - - - - - - clz a0, a0 +# CHECK-NEXT: 1.00 - - - - - - - - - - - - - - - - clzw a0, a0 +# CHECK-NEXT: - 1.00 - - - - - - - - - - - - - - - clmul a0, a0, a0 +# CHECK-NEXT: 1.00 - - - - - - - - - - - - - - - - bclri a0, a0, 4 +# CHECK-NEXT: - 1.00 - - - - - - - - - - - - - - - bclr a0, a0, a0 +# CHECK-NEXT: 1.00 - - - - - - - - - - - - - - - - bexti a0, a0, 4 +# CHECK-NEXT: - 1.00 - - - - - - - - - - - - - - - bext a0, a0, a0 # CHECK: Timeline view: # CHECK-NEXT: 0123456789 0123456789 012 diff --git a/llvm/test/tools/llvm-mca/RISCV/Andes45/rvv-arithmetic.s 
b/llvm/test/tools/llvm-mca/RISCV/Andes45/rvv-arithmetic.s new file mode 100644 index 0000000000000..dc4122ab19293 --- /dev/null +++ b/llvm/test/tools/llvm-mca/RISCV/Andes45/rvv-arithmetic.s @@ -0,0 +1,6837 @@ +# NOTE: Assertions have been autogenerated by utils/update_mca_test_checks.py +# RUN: llvm-mca -mtriple=riscv64 -mcpu=andes-ax45mpv -iterations=1 -instruction-tables=full < %s | FileCheck %s + +# Basic arithmetic operations + +vsetvli x28, x0, e8, mf2, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e8, mf4, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e8, mf8, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e8, m1, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e8, m2, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e8, m4, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e8, m8, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e16, mf2, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e16, mf4, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e16, m1, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e16, m2, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e16, m4, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e16, m8, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e32, mf2, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e32, m1, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e32, m2, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e32, m4, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e32, m8, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e64, m1, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e64, m2, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e64, m4, tu, mu +vadd.vi v8, v8, 12 +vsetvli x28, x0, e64, m8, tu, mu +vadd.vi v8, v8, 12 + +vsetvli x28, x0, e8, mf2, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e8, mf4, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e8, mf8, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e8, m1, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e8, m2, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e8, m4, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e8, m8, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e16, mf2, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e16, mf4, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e16, m1, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e16, m2, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e16, m4, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e16, m8, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e32, mf2, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e32, m1, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e32, m2, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e32, m4, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e32, m8, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e64, m1, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e64, m2, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e64, m4, tu, mu +vadd.vv v8, v8, v8 +vsetvli x28, x0, e64, m8, tu, mu +vadd.vv v8, v8, v8 + +vsetvli x28, x0, e8, mf2, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e8, m1, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e8, m2, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e8, m4, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e8, m8, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e16, m1, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e16, m2, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e16, m4, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e16, m8, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e32, mf2, 
tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e32, m1, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e32, m2, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e32, m4, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e32, m8, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e64, m1, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e64, m2, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e64, m4, tu, mu +vadd.vx v8, v8, x30 +vsetvli x28, x0, e64, m8, tu, mu +vadd.vx v8, v8, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e8, mf4, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e8, mf8, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e8, m1, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e8, m2, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e8, m4, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e8, m8, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e16, mf2, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e16, mf4, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e16, m1, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e16, m2, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e16, m4, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e16, m8, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e32, mf2, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e32, m1, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e32, m2, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e32, m4, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e32, m8, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e64, m1, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e64, m2, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e64, m4, tu, mu +vsub.vv v8, v8, v8 +vsetvli x28, x0, e64, m8, tu, mu +vsub.vv v8, v8, v8 + +vsetvli x28, x0, e8, mf2, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e8, m1, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e8, m2, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e8, m4, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e8, m8, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e16, m1, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e16, m2, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e16, m4, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e16, m8, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e32, m1, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e32, m2, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e32, m4, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e32, m8, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e64, m1, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e64, m2, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e64, m4, tu, mu +vsub.vx v8, v8, x30 +vsetvli x28, x0, e64, m8, tu, mu +vsub.vx v8, v8, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, mf4, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, mf8, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m1, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m2, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m4, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m8, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, mf2, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, mf4, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m1, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m2, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m4, tu, mu 
+vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m8, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, mf2, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m1, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m2, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m4, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m8, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m1, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m2, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m4, tu, mu +vadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m8, tu, mu +vadc.vvm v8, v8, v8, v0 + +vsetvli x28, x0, e8, mf2, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, mf4, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, mf8, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m1, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m2, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m4, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m8, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, mf2, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, mf4, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m1, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m2, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m4, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m8, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, mf2, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m1, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m2, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m4, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m8, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m1, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m2, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m4, tu, mu +vadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m8, tu, mu +vadc.vxm v8, v8, x30, v0 + +vsetvli x28, x0, e8, mf2, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e8, mf4, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e8, mf8, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e8, m1, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e8, m2, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e8, m4, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e8, m8, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e16, mf2, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e16, mf4, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e16, m1, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e16, m2, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e16, m4, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e16, m8, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e32, mf2, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e32, m1, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e32, m2, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e32, m4, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e32, m8, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e64, m1, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e64, m2, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e64, m4, tu, mu +vadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e64, m8, tu, mu +vadc.vim v8, v8, 12, v0 + +vsetvli x28, x0, e8, mf2, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, mf4, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, mf8, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m1, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m2, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, 
m4, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m8, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, mf2, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, mf4, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m1, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m2, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m4, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m8, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, mf2, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m1, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m2, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m4, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m8, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m1, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m2, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m4, tu, mu +vsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m8, tu, mu +vsbc.vvm v8, v8, v8, v0 + +vsetvli x28, x0, e8, mf2, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, mf4, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, mf8, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m1, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m2, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m4, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m8, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, mf2, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, mf4, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m1, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m2, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m4, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m8, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, mf2, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m1, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m2, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m4, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m8, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m1, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m2, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m4, tu, mu +vsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m8, tu, mu +vsbc.vxm v8, v8, x30, v0 + +vsetvli x28, x0, e8, mf2, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e8, mf4, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e8, mf8, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e8, m1, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e8, m2, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e8, m4, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e16, mf2, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e16, mf4, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e16, m1, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e16, m2, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e16, m4, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e32, mf2, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e32, m1, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e32, m2, tu, mu +vwaddu.vv v8, v16, v24 +vsetvli x28, x0, e32, m4, tu, mu +vwaddu.vv v8, v16, v24 + +vsetvli x28, x0, e8, mf2, tu, mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e8, m1, tu, mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e8, m2, tu, mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e8, m4, tu, mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e16, mf2, tu, 
mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e16, m1, tu, mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e16, m2, tu, mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e16, m4, tu, mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e32, m1, tu, mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e32, m2, tu, mu +vwaddu.vx v8, v16, x30 +vsetvli x28, x0, e32, m4, tu, mu +vwaddu.vx v8, v16, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e8, mf4, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e8, mf8, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e8, m1, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e8, m2, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e8, m4, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e16, mf2, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e16, mf4, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e16, m1, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e16, m2, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e16, m4, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e32, mf2, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e32, m1, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e32, m2, tu, mu +vwadd.vv v8, v16, v24 +vsetvli x28, x0, e32, m4, tu, mu +vwadd.vv v8, v16, v24 + +vsetvli x28, x0, e8, mf2, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e8, m1, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e8, m2, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e8, m4, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e16, m1, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e16, m2, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e16, m4, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e32, m1, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e32, m2, tu, mu +vwadd.vx v8, v16, x30 +vsetvli x28, x0, e32, m4, tu, mu +vwadd.vx v8, v16, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e8, mf4, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e8, mf8, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e8, m1, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e8, m2, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e8, m4, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e16, mf2, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e16, mf4, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e16, m1, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e16, m2, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e16, m4, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e32, mf2, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e32, m1, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e32, m2, tu, mu +vwsubu.vv v8, v16, v24 +vsetvli x28, x0, e32, m4, tu, mu +vwsubu.vv v8, v16, v24 + +vsetvli x28, x0, e8, mf2, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli x28, x0, e8, m1, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli x28, x0, e8, m2, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli x28, x0, e8, m4, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli 
x28, x0, e16, m1, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli x28, x0, e16, m2, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli x28, x0, e16, m4, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli x28, x0, e32, m1, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli x28, x0, e32, m2, tu, mu +vwsubu.vx v8, v16, x30 +vsetvli x28, x0, e32, m4, tu, mu +vwsubu.vx v8, v16, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e8, mf4, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e8, mf8, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e8, m1, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e8, m2, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e8, m4, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e16, mf2, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e16, mf4, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e16, m1, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e16, m2, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e16, m4, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e32, mf2, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e32, m1, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e32, m2, tu, mu +vwsub.vv v8, v16, v24 +vsetvli x28, x0, e32, m4, tu, mu +vwsub.vv v8, v16, v24 + +vsetvli x28, x0, e8, mf2, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e8, m1, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e8, m2, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e8, m4, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e16, m1, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e16, m2, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e16, m4, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e32, m1, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e32, m2, tu, mu +vwsub.vx v8, v16, x30 +vsetvli x28, x0, e32, m4, tu, mu +vwsub.vx v8, v16, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e8, mf4, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e8, mf8, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e8, m1, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e8, m2, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e8, m4, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e8, m8, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e16, mf2, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e16, mf4, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e16, m1, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e16, m2, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e16, m4, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e16, m8, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e32, mf2, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e32, m1, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e32, m2, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e32, m4, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e32, m8, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e64, m1, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e64, m2, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e64, m4, tu, mu +vaaddu.vv v8, v8, v8 +vsetvli x28, x0, e64, m8, tu, mu +vaaddu.vv v8, v8, v8 + +vsetvli x28, x0, e8, mf2, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e8, m1, tu, mu 
+vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e8, m2, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e8, m4, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e8, m8, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e16, m1, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e16, m2, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e16, m4, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e16, m8, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e32, m1, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e32, m2, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e32, m4, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e32, m8, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e64, m1, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e64, m2, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e64, m4, tu, mu +vaaddu.vx v8, v8, x30 +vsetvli x28, x0, e64, m8, tu, mu +vaaddu.vx v8, v8, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e8, mf4, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e8, mf8, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e8, m1, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e8, m2, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e8, m4, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e8, m8, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e16, mf2, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e16, mf4, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e16, m1, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e16, m2, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e16, m4, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e16, m8, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e32, mf2, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e32, m1, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e32, m2, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e32, m4, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e32, m8, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e64, m1, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e64, m2, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e64, m4, tu, mu +vaadd.vv v8, v8, v8 +vsetvli x28, x0, e64, m8, tu, mu +vaadd.vv v8, v8, v8 + +vsetvli x28, x0, e8, mf2, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e8, m1, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e8, m2, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e8, m4, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e8, m8, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e16, m1, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e16, m2, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e16, m4, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e16, m8, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e32, m1, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e32, m2, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e32, m4, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e32, m8, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e64, m1, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e64, m2, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e64, m4, tu, mu +vaadd.vx v8, v8, x30 +vsetvli x28, x0, e64, m8, tu, mu +vaadd.vx v8, v8, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vasubu.vv v8, v8, v8 
+vsetvli x28, x0, e8, mf4, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e8, mf8, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e8, m1, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e8, m2, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e8, m4, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e8, m8, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e16, mf2, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e16, mf4, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e16, m1, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e16, m2, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e16, m4, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e16, m8, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e32, mf2, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e32, m1, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e32, m2, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e32, m4, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e32, m8, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e64, m1, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e64, m2, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e64, m4, tu, mu +vasubu.vv v8, v8, v8 +vsetvli x28, x0, e64, m8, tu, mu +vasubu.vv v8, v8, v8 + +vsetvli x28, x0, e8, mf2, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e8, m1, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e8, m2, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e8, m4, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e8, m8, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e16, m1, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e16, m2, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e16, m4, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e16, m8, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e32, m1, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e32, m2, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e32, m4, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e32, m8, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e64, m1, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e64, m2, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e64, m4, tu, mu +vasubu.vx v8, v8, x30 +vsetvli x28, x0, e64, m8, tu, mu +vasubu.vx v8, v8, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e8, mf4, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e8, mf8, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e8, m1, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e8, m2, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e8, m4, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e8, m8, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e16, mf2, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e16, mf4, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e16, m1, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e16, m2, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e16, m4, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e16, m8, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e32, mf2, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e32, m1, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e32, m2, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e32, m4, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e32, m8, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e64, m1, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e64, m2, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e64, 
m4, tu, mu +vasub.vv v8, v8, v8 +vsetvli x28, x0, e64, m8, tu, mu +vasub.vv v8, v8, v8 + +vsetvli x28, x0, e8, mf2, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e8, m1, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e8, m2, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e8, m4, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e8, m8, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e16, m1, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e16, m2, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e16, m4, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e16, m8, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e32, m1, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e32, m2, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e32, m4, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e32, m8, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e64, m1, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e64, m2, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e64, m4, tu, mu +vasub.vx v8, v8, x30 +vsetvli x28, x0, e64, m8, tu, mu +vasub.vx v8, v8, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e8, mf4, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e8, mf8, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e8, m1, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e8, m2, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e8, m4, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e8, m8, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e16, mf2, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e16, mf4, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e16, m1, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e16, m2, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e16, m4, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e16, m8, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e32, mf2, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e32, m1, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e32, m2, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e32, m4, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e32, m8, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e64, m1, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e64, m2, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e64, m4, tu, mu +vmadc.vi v8, v8, 12 +vsetvli x28, x0, e64, m8, tu, mu +vmadc.vi v8, v8, 12 + +vsetvli x28, x0, e8, mf2, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e8, mf4, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e8, mf8, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e8, m1, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e8, m2, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e8, m4, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e8, m8, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e16, mf2, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e16, mf4, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e16, m1, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e16, m2, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e16, m4, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e16, m8, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e32, mf2, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e32, m1, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e32, m2, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e32, m4, tu, mu +vmadc.vim v8, v8, 12, v0 
+vsetvli x28, x0, e32, m8, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e64, m1, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e64, m2, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e64, m4, tu, mu +vmadc.vim v8, v8, 12, v0 +vsetvli x28, x0, e64, m8, tu, mu +vmadc.vim v8, v8, 12, v0 + +vsetvli x28, x0, e8, mf2, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e8, mf4, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e8, mf8, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e8, m1, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e8, m2, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e8, m4, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e8, m8, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e16, mf2, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e16, mf4, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e16, m1, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e16, m2, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e16, m4, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e16, m8, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e32, mf2, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e32, m1, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e32, m2, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e32, m4, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e32, m8, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e64, m1, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e64, m2, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e64, m4, tu, mu +vmadc.vv v8, v8, v8 +vsetvli x28, x0, e64, m8, tu, mu +vmadc.vv v8, v8, v8 + +vsetvli x28, x0, e8, mf2, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, mf4, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, mf8, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m1, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m2, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m4, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m8, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, mf2, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, mf4, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m1, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m2, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m4, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m8, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, mf2, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m1, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m2, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m4, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m8, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m1, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m2, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m4, tu, mu +vmadc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m8, tu, mu +vmadc.vvm v8, v8, v8, v0 + +vsetvli x28, x0, e8, mf2, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e8, m1, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e8, m2, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e8, m4, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e8, m8, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e16, m1, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e16, m2, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e16, m4, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e16, m8, tu, mu +vmadc.vx v8, 
v8, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e32, m1, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e32, m2, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e32, m4, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e32, m8, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e64, m1, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e64, m2, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e64, m4, tu, mu +vmadc.vx v8, v8, x30 +vsetvli x28, x0, e64, m8, tu, mu +vmadc.vx v8, v8, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, mf4, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, mf8, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m1, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m2, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m4, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m8, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, mf2, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, mf4, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m1, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m2, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m4, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m8, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, mf2, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m1, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m2, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m4, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m8, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m1, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m2, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m4, tu, mu +vmadc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m8, tu, mu +vmadc.vxm v8, v8, x30, v0 + +vsetvli x28, x0, e8, mf2, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e8, mf4, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e8, mf8, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e8, m1, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e8, m2, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e8, m4, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e8, m8, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e16, mf2, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e16, mf4, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e16, m1, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e16, m2, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e16, m4, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e16, m8, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e32, mf2, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e32, m1, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e32, m2, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e32, m4, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e32, m8, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e64, m1, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e64, m2, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e64, m4, tu, mu +vmsbc.vv v8, v8, v8 +vsetvli x28, x0, e64, m8, tu, mu +vmsbc.vv v8, v8, v8 + +vsetvli x28, x0, e8, mf2, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, mf4, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, mf8, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m1, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m2, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m4, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e8, m8, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, mf2, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli 
x28, x0, e16, mf4, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m1, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m2, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m4, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e16, m8, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, mf2, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m1, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m2, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m4, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e32, m8, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m1, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m2, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m4, tu, mu +vmsbc.vvm v8, v8, v8, v0 +vsetvli x28, x0, e64, m8, tu, mu +vmsbc.vvm v8, v8, v8, v0 + +vsetvli x28, x0, e8, mf2, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e8, m1, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e8, m2, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e8, m4, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e8, m8, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e16, m1, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e16, m2, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e16, m4, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e16, m8, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e32, m1, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e32, m2, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e32, m4, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e32, m8, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e64, m1, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e64, m2, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e64, m4, tu, mu +vmsbc.vx v8, v8, x30 +vsetvli x28, x0, e64, m8, tu, mu +vmsbc.vx v8, v8, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, mf4, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, mf8, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m1, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m2, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m4, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e8, m8, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, mf2, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, mf4, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m1, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m2, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m4, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e16, m8, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, mf2, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m1, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m2, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m4, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e32, m8, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m1, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m2, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m4, tu, mu +vmsbc.vxm v8, v8, x30, v0 +vsetvli x28, x0, e64, m8, tu, mu +vmsbc.vxm v8, v8, x30, v0 + +vsetvli x28, x0, e8, mf2, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e8, mf4, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e8, mf8, tu, mu 
+vrsub.vi v8, v8, 12 +vsetvli x28, x0, e8, m1, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e8, m2, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e8, m4, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e8, m8, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e16, mf2, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e16, mf4, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e16, m1, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e16, m2, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e16, m4, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e16, m8, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e32, mf2, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e32, m1, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e32, m2, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e32, m4, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e32, m8, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e64, m1, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e64, m2, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e64, m4, tu, mu +vrsub.vi v8, v8, 12 +vsetvli x28, x0, e64, m8, tu, mu +vrsub.vi v8, v8, 12 + +vsetvli x28, x0, e8, mf2, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e8, m1, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e8, m2, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e8, m4, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e8, m8, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e16, m1, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e16, m2, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e16, m4, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e16, m8, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e32, m1, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e32, m2, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e32, m4, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e32, m8, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e64, m1, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e64, m2, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e64, m4, tu, mu +vrsub.vx v8, v8, x30 +vsetvli x28, x0, e64, m8, tu, mu +vrsub.vx v8, v8, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e8, mf4, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e8, mf8, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e8, m1, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e8, m2, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e8, m4, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e8, m8, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e16, mf2, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e16, mf4, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e16, m1, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e16, m2, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e16, m4, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e16, m8, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e32, mf2, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e32, m1, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e32, m2, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e32, m4, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e32, m8, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e64, m1, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e64, m2, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e64, m4, tu, mu +vsaddu.vi v8, v8, 12 +vsetvli x28, x0, e64, m8, tu, mu +vsaddu.vi v8, v8, 12 + +vsetvli x28, x0, e8, 
mf2, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e8, mf4, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e8, mf8, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e8, m1, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e8, m2, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e8, m4, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e8, m8, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e16, mf2, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e16, mf4, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e16, m1, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e16, m2, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e16, m4, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e16, m8, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e32, mf2, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e32, m1, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e32, m2, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e32, m4, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e32, m8, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e64, m1, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e64, m2, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e64, m4, tu, mu +vsaddu.vv v8, v8, v8 +vsetvli x28, x0, e64, m8, tu, mu +vsaddu.vv v8, v8, v8 + +vsetvli x28, x0, e8, mf2, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e8, m1, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e8, m2, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e8, m4, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e8, m8, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e16, m1, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e16, m2, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e16, m4, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e16, m8, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e32, m1, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e32, m2, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e32, m4, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e32, m8, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e64, m1, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e64, m2, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e64, m4, tu, mu +vsaddu.vx v8, v8, x30 +vsetvli x28, x0, e64, m8, tu, mu +vsaddu.vx v8, v8, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e8, mf4, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e8, mf8, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e8, m1, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e8, m2, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e8, m4, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e8, m8, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e16, mf2, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e16, mf4, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e16, m1, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e16, m2, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e16, m4, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e16, m8, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e32, mf2, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e32, m1, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e32, m2, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e32, m4, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e32, m8, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e64, m1, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e64, m2, tu, mu +vsadd.vi 
v8, v8, 12 +vsetvli x28, x0, e64, m4, tu, mu +vsadd.vi v8, v8, 12 +vsetvli x28, x0, e64, m8, tu, mu +vsadd.vi v8, v8, 12 + +vsetvli x28, x0, e8, mf2, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e8, mf4, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e8, mf8, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e8, m1, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e8, m2, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e8, m4, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e8, m8, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e16, mf2, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e16, mf4, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e16, m1, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e16, m2, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e16, m4, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e16, m8, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e32, mf2, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e32, m1, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e32, m2, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e32, m4, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e32, m8, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e64, m1, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e64, m2, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e64, m4, tu, mu +vsadd.vv v8, v8, v8 +vsetvli x28, x0, e64, m8, tu, mu +vsadd.vv v8, v8, v8 + +vsetvli x28, x0, e8, mf2, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e8, m1, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e8, m2, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e8, m4, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e8, m8, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e16, m1, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e16, m2, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e16, m4, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e16, m8, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e32, m1, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e32, m2, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e32, m4, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e32, m8, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e64, m1, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e64, m2, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e64, m4, tu, mu +vsadd.vx v8, v8, x30 +vsetvli x28, x0, e64, m8, tu, mu +vsadd.vx v8, v8, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e8, mf4, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e8, mf8, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e8, m1, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e8, m2, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e8, m4, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e8, m8, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e16, mf2, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e16, mf4, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e16, m1, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e16, m2, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e16, m4, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e16, m8, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e32, mf2, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e32, m1, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e32, m2, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e32, m4, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e32, m8, tu, mu 
+vssubu.vv v8, v8, v8 +vsetvli x28, x0, e64, m1, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e64, m2, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e64, m4, tu, mu +vssubu.vv v8, v8, v8 +vsetvli x28, x0, e64, m8, tu, mu +vssubu.vv v8, v8, v8 + +vsetvli x28, x0, e8, mf2, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e8, m1, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e8, m2, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e8, m4, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e8, m8, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e16, m1, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e16, m2, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e16, m4, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e16, m8, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e32, m1, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e32, m2, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e32, m4, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e32, m8, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e64, m1, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e64, m2, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e64, m4, tu, mu +vssubu.vx v8, v8, x30 +vsetvli x28, x0, e64, m8, tu, mu +vssubu.vx v8, v8, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e8, mf4, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e8, mf8, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e8, m1, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e8, m2, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e8, m4, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e8, m8, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e16, mf2, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e16, mf4, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e16, m1, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e16, m2, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e16, m4, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e16, m8, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e32, mf2, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e32, m1, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e32, m2, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e32, m4, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e32, m8, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e64, m1, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e64, m2, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e64, m4, tu, mu +vssub.vv v8, v8, v8 +vsetvli x28, x0, e64, m8, tu, mu +vssub.vv v8, v8, v8 + +vsetvli x28, x0, e8, mf2, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e8, m1, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e8, m2, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e8, m4, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e8, m8, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e16, m1, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e16, m2, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e16, m4, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e16, m8, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e32, m1, tu, mu +vssub.vx v8, v8, x30 
+vsetvli x28, x0, e32, m2, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e32, m4, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e32, m8, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e64, m1, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e64, m2, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e64, m4, tu, mu +vssub.vx v8, v8, x30 +vsetvli x28, x0, e64, m8, tu, mu +vssub.vx v8, v8, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e8, mf4, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e8, mf8, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e8, m1, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e8, m2, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e8, m4, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e16, mf2, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e16, mf4, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e16, m1, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e16, m2, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e16, m4, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e32, mf2, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e32, m1, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e32, m2, tu, mu +vwaddu.wv v8, v16, v24 +vsetvli x28, x0, e32, m4, tu, mu +vwaddu.wv v8, v16, v24 + +vsetvli x28, x0, e8, mf2, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e8, m1, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e8, m2, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e8, m4, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e16, m1, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e16, m2, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e16, m4, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e32, m1, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e32, m2, tu, mu +vwaddu.wx v8, v16, x30 +vsetvli x28, x0, e32, m4, tu, mu +vwaddu.wx v8, v16, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e8, mf4, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e8, mf8, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e8, m1, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e8, m2, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e8, m4, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e16, mf2, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e16, mf4, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e16, m1, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e16, m2, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e16, m4, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e32, mf2, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e32, m1, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e32, m2, tu, mu +vwadd.wv v8, v16, v24 +vsetvli x28, x0, e32, m4, tu, mu +vwadd.wv v8, v16, v24 + +vsetvli x28, x0, e8, mf2, tu, mu +vwadd.wx v8, v16, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vwadd.wx v8, v16, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vwadd.wx v8, v16, x30 +vsetvli x28, x0, e8, m1, tu, mu +vwadd.wx v8, v16, x30 +vsetvli x28, x0, e8, m2, tu, mu +vwadd.wx v8, v16, x30 +vsetvli x28, x0, e8, m4, tu, mu +vwadd.wx v8, v16, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vwadd.wx v8, v16, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vwadd.wx v8, v16, x30 +vsetvli x28, x0, e16, m1, tu, mu +vwadd.wx v8, v16, x30 +vsetvli x28, x0, e16, m2, tu, mu +vwadd.wx v8, 
v16, x30 +vsetvli x28, x0, e16, m4, tu, mu +vwadd.wx v8, v16, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vwadd.wx v8, v16, x30 +vsetvli x28, x0, e32, m1, tu, mu +vwadd.wx v8, v16, x30 +vsetvli x28, x0, e32, m2, tu, mu +vwadd.wx v8, v16, x30 +vsetvli x28, x0, e32, m4, tu, mu +vwadd.wx v8, v16, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e8, mf4, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e8, mf8, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e8, m1, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e8, m2, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e8, m4, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e16, mf2, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e16, mf4, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e16, m1, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e16, m2, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e16, m4, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e32, mf2, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e32, m1, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e32, m2, tu, mu +vwsubu.wv v8, v16, v24 +vsetvli x28, x0, e32, m4, tu, mu +vwsubu.wv v8, v16, v24 + +vsetvli x28, x0, e8, mf2, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e8, m1, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e8, m2, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e8, m4, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e16, m1, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e16, m2, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e16, m4, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e32, mf2, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e32, m1, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e32, m2, tu, mu +vwsubu.wx v8, v16, x30 +vsetvli x28, x0, e32, m4, tu, mu +vwsubu.wx v8, v16, x30 + +vsetvli x28, x0, e8, mf2, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e8, mf4, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e8, mf8, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e8, m1, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e8, m2, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e8, m4, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e16, mf2, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e16, mf4, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e16, m1, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e16, m2, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e16, m4, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e32, mf2, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e32, m1, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e32, m2, tu, mu +vwsub.wv v8, v16, v24 +vsetvli x28, x0, e32, m4, tu, mu +vwsub.wv v8, v16, v24 + +vsetvli x28, x0, e8, mf2, tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e8, mf4, tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e8, mf8, tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e8, m1, tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e8, m2, tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e8, m4, tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e16, mf2, tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e16, mf4, tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e16, m1, tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e16, m2, tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e16, m4, tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e32, mf2, 
tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e32, m1, tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e32, m2, tu, mu +vwsub.wx v8, v16, x30 +vsetvli x28, x0, e32, m4, tu, mu +vwsub.wx v8, v16, x30 + +# CHECK: Resources: +# CHECK-NEXT: [0] - Andes45ALU:2 +# CHECK-NEXT: [1] - Andes45CSR:1 +# CHECK-NEXT: [2] - Andes45FDIV:1 +# CHECK-NEXT: [3] - Andes45FMAC:1 +# CHECK-NEXT: [4] - Andes45FMISC:1 +# CHECK-NEXT: [5] - Andes45FMV:1 +# CHECK-NEXT: [6] - Andes45LSU:1 +# CHECK-NEXT: [7] - Andes45MDU:1 +# CHECK-NEXT: [8] - Andes45VALU:1 +# CHECK-NEXT: [9] - Andes45VDIV:1 +# CHECK-NEXT: [10] - Andes45VFDIV:1 +# CHECK-NEXT: [11] - Andes45VFMIS:1 +# CHECK-NEXT: [12] - Andes45VLSU:1 +# CHECK-NEXT: [13] - Andes45VMAC:1 +# CHECK-NEXT: [14] - Andes45VMASK:1 +# CHECK-NEXT: [15] - Andes45VPERMUT:1 + +# CHECK: Instruction Info: +# CHECK-NEXT: [1]: #uOps +# CHECK-NEXT: [2]: Latency +# CHECK-NEXT: [3]: RThroughput +# CHECK-NEXT: [4]: MayLoad +# CHECK-NEXT: [5]: MayStore +# CHECK-NEXT: [6]: HasSideEffects (U) +# CHECK-NEXT: [7]: Bypass Latency +# CHECK-NEXT: [8]: Resources (&lt;Name&gt; | &lt;Name&gt;[&lt;ReleaseAtCycle&gt;] | &lt;Name&gt;[&lt;AcquireAtCycle&gt;,&lt;ReleaseAtCycle&gt;])
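The resource list and the eight-column instruction-info key checked above match what llvm-mca prints in its full instruction-tables mode, so the vsetvli sweeps in this test exercise the new Andes45 vector scheduling entries across each listed SEW/LMUL combination. As a minimal sketch only: a RUN line of roughly the following shape would produce output in that format; the CPU name (andes-ax45), the -mattr string, and the use of -instruction-tables=full (older llvm-mca only has the boolean -instruction-tables) are assumptions here, not taken from this patch.

# RUN: llvm-mca -mtriple=riscv64 -mcpu=andes-ax45 -mattr=+v -instruction-tables=full %s \
# RUN:   | FileCheck %s

FileCheck then matches the # CHECK / # CHECK-NEXT lines above against that output.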