diff --git a/llvm/lib/Target/X86/X86InstrAVX512.td b/llvm/lib/Target/X86/X86InstrAVX512.td
index 66646714ca774..fe7d90fbcdf70 100644
--- a/llvm/lib/Target/X86/X86InstrAVX512.td
+++ b/llvm/lib/Target/X86/X86InstrAVX512.td
@@ -448,7 +448,7 @@ multiclass vinsert_for_type<ValueType EltVT32, int Opcode128,
                                    X86VectorVTInfo< 4, EltVT64, VR256X>,
                                    null_frag, vinsert128_insert, sched>,
-                                   VEX_W1X, EVEX_V256;
+                                   EVEX_V256, REX_W;
 
   // Even with DQI we'd like to only use these instructions for masking.
   let Predicates = [HasDQI] in {
@@ -750,7 +750,7 @@ multiclass vextract_for_type<ValueType EltVT32, int Opcode128,
                                    X86VectorVTInfo< 2, EltVT64, VR128X>,
                                    null_frag, vextract128_extract, SchedRR, SchedMR>,
-                                   VEX_W1X, EVEX_V256, EVEX_CD8<64, CD8VT2>;
+                                   EVEX_V256, EVEX_CD8<64, CD8VT2>, REX_W;
 
   // Even with DQI we'd like to only use these instructions for masking.
   let Predicates = [HasDQI] in {
@@ -1161,7 +1161,7 @@ multiclass avx512_fp_broadcast_ss<bits<8> opc, string OpcodeStr,
 defm VBROADCASTSS  : avx512_fp_broadcast_ss<0x18, "vbroadcastss",
                                             avx512vl_f32_info>;
 defm VBROADCASTSD  : avx512_fp_broadcast_sd<0x19, "vbroadcastsd",
-                                            avx512vl_f64_info>, VEX_W1X;
+                                            avx512vl_f64_info>, REX_W;
 
 multiclass avx512_int_broadcast_reg<bits<8> opc, SchedWrite SchedRR,
                                     X86VectorVTInfo _, SDPatternOperator OpNode,
@@ -1267,7 +1267,7 @@ defm VPBROADCASTW  : avx512_int_broadcast_rm_vl<0x79, "vpbroadcastw",
 defm VPBROADCASTD  : avx512_int_broadcast_rm_vl<0x58, "vpbroadcastd",
                                                 avx512vl_i32_info, HasAVX512, 1>;
 defm VPBROADCASTQ  : avx512_int_broadcast_rm_vl<0x59, "vpbroadcastq",
-                                                avx512vl_i64_info, HasAVX512, 1>, VEX_W1X;
+                                                avx512vl_i64_info, HasAVX512, 1>, REX_W;
 
 multiclass avx512_subvec_broadcast_rm<bits<8> opc, string OpcodeStr,
                                       SDPatternOperator OpNode,
@@ -1460,11 +1460,11 @@ let Predicates = [HasBF16, HasVLX] in
 let Predicates = [HasVLX, HasDQI] in {
 defm VBROADCASTI64X2Z128 : avx512_subvec_broadcast_rm_dq<0x5a, "vbroadcasti64x2",
-                           X86SubVBroadcastld128, v4i64x_info, v2i64x_info>, VEX_W1X,
-                           EVEX_V256, EVEX_CD8<64, CD8VT2>;
+                           X86SubVBroadcastld128, v4i64x_info, v2i64x_info>,
+                           EVEX_V256, EVEX_CD8<64, CD8VT2>, REX_W;
 defm VBROADCASTF64X2Z128 : avx512_subvec_broadcast_rm_dq<0x1a, "vbroadcastf64x2",
-                           X86SubVBroadcastld128, v4f64x_info, v2f64x_info>, VEX_W1X,
-                           EVEX_V256, EVEX_CD8<64, CD8VT2>;
+                           X86SubVBroadcastld128, v4f64x_info, v2f64x_info>,
+                           EVEX_V256, EVEX_CD8<64, CD8VT2>, REX_W;
 
 // Patterns for selects of bitcasted operations.
 def : Pat<(vselect_mask VK4WM:$mask,
@@ -6396,7 +6396,7 @@ defm VPERMILPS : avx512_permil<"vpermilps", 0x04, 0x0C, avx512vl_f32_info,
                                avx512vl_i32_info>;
 let ExeDomain = SSEPackedDouble in
 defm VPERMILPD : avx512_permil<"vpermilpd", 0x05, 0x0D, avx512vl_f64_info,
-                               avx512vl_i64_info>, VEX_W1X;
+                               avx512vl_i64_info>, REX_W;
 
 //===----------------------------------------------------------------------===//
 // AVX-512 - VPSHUFD, VPSHUFLW, VPSHUFHW
diff --git a/llvm/lib/Target/X86/X86InstrFormats.td b/llvm/lib/Target/X86/X86InstrFormats.td
index 9f0b732445346..8798b13a17612 100644
--- a/llvm/lib/Target/X86/X86InstrFormats.td
+++ b/llvm/lib/Target/X86/X86InstrFormats.td
@@ -247,8 +247,6 @@ class X86Inst<bits<8> opcod, Format f, ImmType i, dag outs, dag ins,
   bit hasREPPrefix = 0;     // Does this inst have a REP prefix?
   bits<2> OpEncBits = OpEnc.Value;
   bit IgnoresW = 0;         // Does this inst ignore REX_W field?
-  bit EVEX_W1_VEX_W0 = 0;   // This EVEX inst with VEX.W==1 can become a VEX
-                            // instruction with VEX.W == 0.
   bit hasVEX_4V = 0;        // Does this inst require the VEX.VVVV field?
   bit hasVEX_L = 0;         // Does this inst use large (256-bit) registers?
   bit ignoresVEX_L = 0;     // Does this instruction ignore the L-bit
diff --git a/llvm/lib/Target/X86/X86InstrUtils.td b/llvm/lib/Target/X86/X86InstrUtils.td
index 9183bcd7017f9..f4ae15837fbf5 100644
--- a/llvm/lib/Target/X86/X86InstrUtils.td
+++ b/llvm/lib/Target/X86/X86InstrUtils.td
@@ -43,8 +43,6 @@ class XOP { Encoding OpEnc = EncXOP; }
 class VEX    { Encoding OpEnc = EncVEX; }
 class EVEX   { Encoding OpEnc = EncEVEX; }
 class WIG    { bit IgnoresW = 1; }
-// Special version of REX_W that can be changed to VEX.W==0 for EVEX2VEX.
-class VEX_W1X  { bit hasREX_W = 1; bit EVEX_W1_VEX_W0 = 1; }
 class VEX_L  { bit hasVEX_L = 1; }
 class VEX_LIG { bit ignoresVEX_L = 1; }
 class VVVV   { bit hasVEX_4V = 1; }
diff --git a/llvm/utils/TableGen/X86CompressEVEXTablesEmitter.cpp b/llvm/utils/TableGen/X86CompressEVEXTablesEmitter.cpp
index 3a26732cf3230..a45e87af4a3f3 100644
--- a/llvm/utils/TableGen/X86CompressEVEXTablesEmitter.cpp
+++ b/llvm/utils/TableGen/X86CompressEVEXTablesEmitter.cpp
@@ -95,34 +95,23 @@ static inline uint64_t getValueFromBitsInit(const BitsInit *B) {
   return Value;
 }
 
-// Function object - Operator() returns true if the given VEX instruction
-// matches the EVEX instruction of this object.
 class IsMatch {
-  const CodeGenInstruction *EVEXInst;
+  const CodeGenInstruction *OldInst;
 
 public:
-  IsMatch(const CodeGenInstruction *EVEXInst) : EVEXInst(EVEXInst) {}
-
-  bool operator()(const CodeGenInstruction *VEXInst) {
-    RecognizableInstrBase VEXRI(*VEXInst);
-    RecognizableInstrBase EVEXRI(*EVEXInst);
-    bool VEX_W = VEXRI.HasREX_W;
-    bool EVEX_W = EVEXRI.HasREX_W;
-    bool VEX_WIG = VEXRI.IgnoresW;
-    bool EVEX_WIG = EVEXRI.IgnoresW;
-    bool EVEX_W1_VEX_W0 = EVEXInst->TheDef->getValueAsBit("EVEX_W1_VEX_W0");
-
-    if (VEXRI.IsCodeGenOnly != EVEXRI.IsCodeGenOnly ||
-        // VEX/EVEX fields
-        VEXRI.OpPrefix != EVEXRI.OpPrefix || VEXRI.OpMap != EVEXRI.OpMap ||
-        VEXRI.HasVEX_4V != EVEXRI.HasVEX_4V ||
-        VEXRI.HasVEX_L != EVEXRI.HasVEX_L ||
-        // Match is allowed if either is VEX_WIG, or they match, or EVEX
-        // is VEX_W1X and VEX is VEX_W0.
-        (!(VEX_WIG || (!EVEX_WIG && EVEX_W == VEX_W) ||
-           (EVEX_W1_VEX_W0 && EVEX_W && !VEX_W))) ||
-        // Instruction's format
-        VEXRI.Form != EVEXRI.Form)
+  IsMatch(const CodeGenInstruction *OldInst) : OldInst(OldInst) {}
+
+  bool operator()(const CodeGenInstruction *NewInst) {
+    RecognizableInstrBase NewRI(*NewInst);
+    RecognizableInstrBase OldRI(*OldInst);
+
+    // Return false if any of the following fields does not match.
+    if (std::make_tuple(OldRI.IsCodeGenOnly, OldRI.OpMap, OldRI.OpPrefix,
+                        OldRI.HasVEX_4V, OldRI.HasVEX_L, OldRI.HasREX_W,
+                        OldRI.Form) !=
+        std::make_tuple(NewRI.IsCodeGenOnly, NewRI.OpMap, NewRI.OpPrefix,
+                        NewRI.HasVEX_4V, NewRI.HasVEX_L, NewRI.HasREX_W,
+                        NewRI.Form))
       return false;
 
     // This is needed for instructions with intrinsic version (_Int).
@@ -131,9 +120,9 @@ class IsMatch {
     // Also for instructions that their EVEX version was upgraded to work with
     // k-registers. For example VPCMPEQBrm (xmm output register) and
     // VPCMPEQBZ128rm (k register output register).
-    for (unsigned i = 0, e = EVEXInst->Operands.size(); i < e; i++) {
-      Record *OpRec1 = EVEXInst->Operands[i].Rec;
-      Record *OpRec2 = VEXInst->Operands[i].Rec;
+    for (unsigned i = 0, e = OldInst->Operands.size(); i < e; i++) {
+      Record *OpRec1 = OldInst->Operands[i].Rec;
+      Record *OpRec2 = NewInst->Operands[i].Rec;
 
       if (OpRec1 == OpRec2)
         continue;
diff --git a/llvm/utils/TableGen/X86FoldTablesEmitter.cpp b/llvm/utils/TableGen/X86FoldTablesEmitter.cpp
index 101b75e2f087e..8a860d0945bb1 100644
--- a/llvm/utils/TableGen/X86FoldTablesEmitter.cpp
+++ b/llvm/utils/TableGen/X86FoldTablesEmitter.cpp
@@ -374,8 +374,7 @@ class IsMatch {
                         RegRI.HasEVEX_L2, RegRI.HasEVEX_NF,
                         RegRec->getValueAsBit("hasEVEX_RC"),
                         RegRec->getValueAsBit("hasLockPrefix"),
-                        RegRec->getValueAsBit("hasNoTrackPrefix"),
-                        RegRec->getValueAsBit("EVEX_W1_VEX_W0")) !=
+                        RegRec->getValueAsBit("hasNoTrackPrefix")) !=
         std::make_tuple(MemRI.Encoding, MemRI.Opcode, MemRI.OpPrefix,
                         MemRI.OpMap, MemRI.OpSize, MemRI.AdSize, MemRI.HasREX_W,
                         MemRI.HasVEX_4V, MemRI.HasVEX_L, MemRI.IgnoresVEX_L,
@@ -383,8 +382,7 @@ class IsMatch {
                         MemRI.HasEVEX_L2, MemRI.HasEVEX_NF,
                         MemRec->getValueAsBit("hasEVEX_RC"),
                         MemRec->getValueAsBit("hasLockPrefix"),
-                        MemRec->getValueAsBit("hasNoTrackPrefix"),
-                        MemRec->getValueAsBit("EVEX_W1_VEX_W0")))
+                        MemRec->getValueAsBit("hasNoTrackPrefix")))
       return false;
 
     // Make sure the sizes of the operands of both instructions suit each other.
diff --git a/llvm/utils/TableGen/X86ManualCompressEVEXTables.def b/llvm/utils/TableGen/X86ManualCompressEVEXTables.def
index 0da32f92502cc..58ca10e9e10f8 100644
--- a/llvm/utils/TableGen/X86ManualCompressEVEXTables.def
+++ b/llvm/utils/TableGen/X86ManualCompressEVEXTables.def
@@ -85,4 +85,247 @@ ENTRY(VSHUFI32X4Z256rmi, VPERM2I128rm)
 ENTRY(VSHUFI32X4Z256rri, VPERM2I128rr)
 ENTRY(VSHUFI64X2Z256rmi, VPERM2I128rm)
 ENTRY(VSHUFI64X2Z256rri, VPERM2I128rr)
+// W bit does not match
+ENTRY(VADDPDZ128rm, VADDPDrm)
+ENTRY(VADDPDZ128rr, VADDPDrr)
+ENTRY(VADDSDZrm, VADDSDrm)
+ENTRY(VADDSDZrm_Int, VADDSDrm_Int)
+ENTRY(VADDSDZrr, VADDSDrr)
+ENTRY(VADDSDZrr_Int, VADDSDrr_Int)
+ENTRY(VANDNPDZ128rm, VANDNPDrm)
+ENTRY(VANDNPDZ128rr, VANDNPDrr)
+ENTRY(VANDPDZ128rm, VANDPDrm)
+ENTRY(VANDPDZ128rr, VANDPDrr)
+ENTRY(VCOMISDZrm, VCOMISDrm)
+ENTRY(VCOMISDZrm_Int, VCOMISDrm_Int)
+ENTRY(VCOMISDZrr, VCOMISDrr)
+ENTRY(VCOMISDZrr_Int, VCOMISDrr_Int)
+ENTRY(VCVTPD2DQZ128rm, VCVTPD2DQrm)
+ENTRY(VCVTPD2DQZ128rr, VCVTPD2DQrr)
+ENTRY(VCVTPD2PSZ128rm, VCVTPD2PSrm)
+ENTRY(VCVTPD2PSZ128rr, VCVTPD2PSrr)
+ENTRY(VCVTSD2SSZrm, VCVTSD2SSrm)
+ENTRY(VCVTSD2SSZrm_Int, VCVTSD2SSrm_Int)
+ENTRY(VCVTSD2SSZrr, VCVTSD2SSrr)
+ENTRY(VCVTSD2SSZrr_Int, VCVTSD2SSrr_Int)
+ENTRY(VCVTTPD2DQZ128rm, VCVTTPD2DQrm)
+ENTRY(VCVTTPD2DQZ128rr, VCVTTPD2DQrr)
+ENTRY(VDIVPDZ128rm, VDIVPDrm)
+ENTRY(VDIVPDZ128rr, VDIVPDrr)
+ENTRY(VDIVSDZrm, VDIVSDrm)
+ENTRY(VDIVSDZrm_Int, VDIVSDrm_Int)
+ENTRY(VDIVSDZrr, VDIVSDrr)
+ENTRY(VDIVSDZrr_Int, VDIVSDrr_Int)
+ENTRY(VMAXCPDZ128rm, VMAXCPDrm)
+ENTRY(VMAXCPDZ128rr, VMAXCPDrr)
+ENTRY(VMAXCSDZrm, VMAXCSDrm)
+ENTRY(VMAXCSDZrr, VMAXCSDrr)
+ENTRY(VMAXPDZ128rm, VMAXPDrm)
+ENTRY(VMAXPDZ128rr, VMAXPDrr)
+ENTRY(VMAXSDZrm_Int, VMAXSDrm_Int)
+ENTRY(VMAXSDZrr_Int, VMAXSDrr_Int)
+ENTRY(VMINCPDZ128rm, VMINCPDrm)
+ENTRY(VMINCPDZ128rr, VMINCPDrr)
+ENTRY(VMINCSDZrm, VMINCSDrm)
+ENTRY(VMINCSDZrr, VMINCSDrr)
+ENTRY(VMINPDZ128rm, VMINPDrm)
+ENTRY(VMINPDZ128rr, VMINPDrr)
+ENTRY(VMINSDZrm_Int, VMINSDrm_Int)
+ENTRY(VMINSDZrr_Int, VMINSDrr_Int)
+ENTRY(VMOVAPDZ128mr, VMOVAPDmr)
+ENTRY(VMOVAPDZ128rm, VMOVAPDrm)
+ENTRY(VMOVAPDZ128rr, VMOVAPDrr)
+ENTRY(VMOVDDUPZ128rm, VMOVDDUPrm)
+ENTRY(VMOVDDUPZ128rr, VMOVDDUPrr)
+ENTRY(VMOVDQA64Z128mr, VMOVDQAmr)
+ENTRY(VMOVDQA64Z128rm, VMOVDQArm)
+ENTRY(VMOVDQA64Z128rr, VMOVDQArr)
+ENTRY(VMOVDQU64Z128mr, VMOVDQUmr)
+ENTRY(VMOVDQU64Z128rm, VMOVDQUrm)
+ENTRY(VMOVDQU64Z128rr, VMOVDQUrr)
+ENTRY(VMOVHPDZ128mr, VMOVHPDmr)
+ENTRY(VMOVHPDZ128rm, VMOVHPDrm)
+ENTRY(VMOVLPDZ128mr, VMOVLPDmr)
+ENTRY(VMOVLPDZ128rm, VMOVLPDrm)
+ENTRY(VMOVNTPDZ128mr, VMOVNTPDmr)
+ENTRY(VMOVPQI2QIZmr, VMOVPQI2QImr)
+ENTRY(VMOVPQI2QIZrr, VMOVPQI2QIrr)
+ENTRY(VMOVQI2PQIZrm, VMOVQI2PQIrm)
+ENTRY(VMOVSDZmr, VMOVSDmr)
+ENTRY(VMOVSDZrm, VMOVSDrm)
+ENTRY(VMOVSDZrm_alt, VMOVSDrm_alt)
+ENTRY(VMOVSDZrr, VMOVSDrr)
+ENTRY(VMOVUPDZ128mr, VMOVUPDmr)
+ENTRY(VMOVUPDZ128rm, VMOVUPDrm)
+ENTRY(VMOVUPDZ128rr, VMOVUPDrr)
+ENTRY(VMOVZPQILo2PQIZrr, VMOVZPQILo2PQIrr)
+ENTRY(VMULPDZ128rm, VMULPDrm)
+ENTRY(VMULPDZ128rr, VMULPDrr)
+ENTRY(VMULSDZrm, VMULSDrm)
+ENTRY(VMULSDZrm_Int, VMULSDrm_Int)
+ENTRY(VMULSDZrr, VMULSDrr)
+ENTRY(VMULSDZrr_Int, VMULSDrr_Int)
+ENTRY(VORPDZ128rm, VORPDrm)
+ENTRY(VORPDZ128rr, VORPDrr)
+ENTRY(VPADDQZ128rm, VPADDQrm)
+ENTRY(VPADDQZ128rr, VPADDQrr)
+ENTRY(VPANDNQZ128rm, VPANDNrm)
+ENTRY(VPANDNQZ128rr, VPANDNrr)
+ENTRY(VPANDQZ128rm, VPANDrm)
+ENTRY(VPANDQZ128rr, VPANDrr)
+ENTRY(VPERMILPDZ128mi, VPERMILPDmi)
+ENTRY(VPERMILPDZ128ri, VPERMILPDri)
+ENTRY(VPERMILPDZ128rm, VPERMILPDrm)
+ENTRY(VPERMILPDZ128rr, VPERMILPDrr)
+ENTRY(VPMULDQZ128rm, VPMULDQrm)
+ENTRY(VPMULDQZ128rr, VPMULDQrr)
+ENTRY(VPMULUDQZ128rm, VPMULUDQrm)
+ENTRY(VPMULUDQZ128rr, VPMULUDQrr)
+ENTRY(VPORQZ128rm, VPORrm)
+ENTRY(VPORQZ128rr, VPORrr)
+ENTRY(VPSLLQZ128ri, VPSLLQri)
+ENTRY(VPSLLQZ128rm, VPSLLQrm)
+ENTRY(VPSLLQZ128rr, VPSLLQrr)
+ENTRY(VPSRLQZ128ri, VPSRLQri)
+ENTRY(VPSRLQZ128rm, VPSRLQrm)
+ENTRY(VPSRLQZ128rr, VPSRLQrr)
+ENTRY(VPSUBQZ128rm, VPSUBQrm)
+ENTRY(VPSUBQZ128rr, VPSUBQrr)
+ENTRY(VPUNPCKHQDQZ128rm, VPUNPCKHQDQrm)
+ENTRY(VPUNPCKHQDQZ128rr, VPUNPCKHQDQrr)
+ENTRY(VPUNPCKLQDQZ128rm, VPUNPCKLQDQrm)
+ENTRY(VPUNPCKLQDQZ128rr, VPUNPCKLQDQrr)
+ENTRY(VPXORQZ128rm, VPXORrm)
+ENTRY(VPXORQZ128rr, VPXORrr)
+ENTRY(VRNDSCALEPDZ128rmi, VROUNDPDm)
+ENTRY(VRNDSCALEPDZ128rri, VROUNDPDr)
+ENTRY(VRNDSCALESDZm, VROUNDSDm)
+ENTRY(VRNDSCALESDZm_Int, VROUNDSDm_Int)
+ENTRY(VRNDSCALESDZr, VROUNDSDr)
+ENTRY(VRNDSCALESDZr_Int, VROUNDSDr_Int)
+ENTRY(VSHUFPDZ128rmi, VSHUFPDrmi)
+ENTRY(VSHUFPDZ128rri, VSHUFPDrri)
+ENTRY(VSQRTPDZ128m, VSQRTPDm)
+ENTRY(VSQRTPDZ128r, VSQRTPDr)
+ENTRY(VSQRTSDZm, VSQRTSDm)
+ENTRY(VSQRTSDZm_Int, VSQRTSDm_Int)
+ENTRY(VSQRTSDZr, VSQRTSDr)
+ENTRY(VSQRTSDZr_Int, VSQRTSDr_Int)
+ENTRY(VSUBPDZ128rm, VSUBPDrm)
+ENTRY(VSUBPDZ128rr, VSUBPDrr)
+ENTRY(VSUBSDZrm, VSUBSDrm)
+ENTRY(VSUBSDZrm_Int, VSUBSDrm_Int)
+ENTRY(VSUBSDZrr, VSUBSDrr)
+ENTRY(VSUBSDZrr_Int, VSUBSDrr_Int)
+ENTRY(VUCOMISDZrm, VUCOMISDrm)
+ENTRY(VUCOMISDZrm_Int, VUCOMISDrm_Int)
+ENTRY(VUCOMISDZrr, VUCOMISDrr)
+ENTRY(VUCOMISDZrr_Int, VUCOMISDrr_Int)
+ENTRY(VUNPCKHPDZ128rm, VUNPCKHPDrm)
+ENTRY(VUNPCKHPDZ128rr, VUNPCKHPDrr)
+ENTRY(VUNPCKLPDZ128rm, VUNPCKLPDrm)
+ENTRY(VUNPCKLPDZ128rr, VUNPCKLPDrr)
+ENTRY(VXORPDZ128rm, VXORPDrm)
+ENTRY(VXORPDZ128rr, VXORPDrr)
+ENTRY(VADDPDZ256rm, VADDPDYrm)
+ENTRY(VADDPDZ256rr, VADDPDYrr)
+ENTRY(VANDNPDZ256rm, VANDNPDYrm)
+ENTRY(VANDNPDZ256rr, VANDNPDYrr)
+ENTRY(VANDPDZ256rm, VANDPDYrm)
+ENTRY(VANDPDZ256rr, VANDPDYrr)
+ENTRY(VCVTPD2DQZ256rm, VCVTPD2DQYrm)
+ENTRY(VCVTPD2DQZ256rr, VCVTPD2DQYrr)
+ENTRY(VCVTPD2PSZ256rm, VCVTPD2PSYrm)
+ENTRY(VCVTPD2PSZ256rr, VCVTPD2PSYrr)
+ENTRY(VCVTTPD2DQZ256rm, VCVTTPD2DQYrm)
+ENTRY(VCVTTPD2DQZ256rr, VCVTTPD2DQYrr)
+ENTRY(VDIVPDZ256rm, VDIVPDYrm)
+ENTRY(VDIVPDZ256rr, VDIVPDYrr)
+ENTRY(VEXTRACTF64x2Z256mr, VEXTRACTF128mr)
+ENTRY(VEXTRACTF64x2Z256rr, VEXTRACTF128rr)
+ENTRY(VEXTRACTI64x2Z256mr, VEXTRACTI128mr)
+ENTRY(VEXTRACTI64x2Z256rr, VEXTRACTI128rr)
+ENTRY(VINSERTF64x2Z256rm, VINSERTF128rm)
+ENTRY(VINSERTF64x2Z256rr, VINSERTF128rr)
+ENTRY(VINSERTI64x2Z256rm, VINSERTI128rm)
+ENTRY(VINSERTI64x2Z256rr, VINSERTI128rr)
+ENTRY(VMAXCPDZ256rm, VMAXCPDYrm)
+ENTRY(VMAXCPDZ256rr, VMAXCPDYrr)
+ENTRY(VMAXPDZ256rm, VMAXPDYrm)
+ENTRY(VMAXPDZ256rr, VMAXPDYrr)
+ENTRY(VMINCPDZ256rm, VMINCPDYrm)
+ENTRY(VMINCPDZ256rr, VMINCPDYrr)
+ENTRY(VMINPDZ256rm, VMINPDYrm)
+ENTRY(VMINPDZ256rr, VMINPDYrr)
+ENTRY(VMOVAPDZ256mr, VMOVAPDYmr)
+ENTRY(VMOVAPDZ256rm, VMOVAPDYrm)
+ENTRY(VMOVAPDZ256rr, VMOVAPDYrr)
+ENTRY(VMOVDDUPZ256rm, VMOVDDUPYrm)
+ENTRY(VMOVDDUPZ256rr, VMOVDDUPYrr)
+ENTRY(VMOVDQA64Z256mr, VMOVDQAYmr)
+ENTRY(VMOVDQA64Z256rm, VMOVDQAYrm)
+ENTRY(VMOVDQA64Z256rr, VMOVDQAYrr)
+ENTRY(VMOVDQU64Z256mr, VMOVDQUYmr)
+ENTRY(VMOVDQU64Z256rm, VMOVDQUYrm)
+ENTRY(VMOVDQU64Z256rr, VMOVDQUYrr)
+ENTRY(VMOVNTPDZ256mr, VMOVNTPDYmr)
+ENTRY(VMOVUPDZ256mr, VMOVUPDYmr)
+ENTRY(VMOVUPDZ256rm, VMOVUPDYrm)
+ENTRY(VMOVUPDZ256rr, VMOVUPDYrr)
+ENTRY(VMULPDZ256rm, VMULPDYrm)
+ENTRY(VMULPDZ256rr, VMULPDYrr)
+ENTRY(VORPDZ256rm, VORPDYrm)
+ENTRY(VORPDZ256rr, VORPDYrr)
+ENTRY(VPADDQZ256rm, VPADDQYrm)
+ENTRY(VPADDQZ256rr, VPADDQYrr)
+ENTRY(VPANDNQZ256rm, VPANDNYrm)
+ENTRY(VPANDNQZ256rr, VPANDNYrr)
+ENTRY(VPANDQZ256rm, VPANDYrm)
+ENTRY(VPANDQZ256rr, VPANDYrr)
+ENTRY(VPERMILPDZ256mi, VPERMILPDYmi)
+ENTRY(VPERMILPDZ256ri, VPERMILPDYri)
+ENTRY(VPERMILPDZ256rm, VPERMILPDYrm)
+ENTRY(VPERMILPDZ256rr, VPERMILPDYrr)
+ENTRY(VPMULDQZ256rm, VPMULDQYrm)
+ENTRY(VPMULDQZ256rr, VPMULDQYrr)
+ENTRY(VPMULUDQZ256rm, VPMULUDQYrm)
+ENTRY(VPMULUDQZ256rr, VPMULUDQYrr)
+ENTRY(VPORQZ256rm, VPORYrm)
+ENTRY(VPORQZ256rr, VPORYrr)
+ENTRY(VPSLLQZ256ri, VPSLLQYri)
+ENTRY(VPSLLQZ256rm, VPSLLQYrm)
+ENTRY(VPSLLQZ256rr, VPSLLQYrr)
+ENTRY(VPSRLQZ256ri, VPSRLQYri)
+ENTRY(VPSRLQZ256rm, VPSRLQYrm)
+ENTRY(VPSRLQZ256rr, VPSRLQYrr)
+ENTRY(VPSUBQZ256rm, VPSUBQYrm)
+ENTRY(VPSUBQZ256rr, VPSUBQYrr)
+ENTRY(VPUNPCKHQDQZ256rm, VPUNPCKHQDQYrm)
+ENTRY(VPUNPCKHQDQZ256rr, VPUNPCKHQDQYrr)
+ENTRY(VPUNPCKLQDQZ256rm, VPUNPCKLQDQYrm)
+ENTRY(VPUNPCKLQDQZ256rr, VPUNPCKLQDQYrr)
+ENTRY(VPXORQZ256rm, VPXORYrm)
+ENTRY(VPXORQZ256rr, VPXORYrr)
+ENTRY(VRNDSCALEPDZ256rmi, VROUNDPDYm)
+ENTRY(VRNDSCALEPDZ256rri, VROUNDPDYr)
+ENTRY(VSHUFPDZ256rmi, VSHUFPDYrmi)
+ENTRY(VSHUFPDZ256rri, VSHUFPDYrri)
+ENTRY(VSQRTPDZ256m, VSQRTPDYm)
+ENTRY(VSQRTPDZ256r, VSQRTPDYr)
+ENTRY(VSUBPDZ256rm, VSUBPDYrm)
+ENTRY(VSUBPDZ256rr, VSUBPDYrr)
+ENTRY(VUNPCKHPDZ256rm, VUNPCKHPDYrm)
+ENTRY(VUNPCKHPDZ256rr, VUNPCKHPDYrr)
+ENTRY(VUNPCKLPDZ256rm, VUNPCKLPDYrm)
+ENTRY(VUNPCKLPDZ256rr, VUNPCKLPDYrr)
+ENTRY(VXORPDZ256rm, VXORPDYrm)
+ENTRY(VXORPDZ256rr, VXORPDYrr)
+ENTRY(VPBROADCASTQZ128rm, VPBROADCASTQrm)
+ENTRY(VPBROADCASTQZ128rr, VPBROADCASTQrr)
+ENTRY(VBROADCASTF64X2Z128rm, VBROADCASTF128rm)
+ENTRY(VBROADCASTI64X2Z128rm, VBROADCASTI128rm)
+ENTRY(VBROADCASTSDZ256rm, VBROADCASTSDYrm)
+ENTRY(VBROADCASTSDZ256rr, VBROADCASTSDYrr)
+ENTRY(VPBROADCASTQZ256rm, VPBROADCASTQYrm)
+ENTRY(VPBROADCASTQZ256rr, VPBROADCASTQYrr)
 #undef ENTRY
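The manual entries above exist because the automatic matcher is now stricter: with the `EVEX_W1_VEX_W0` escape hatch gone, `IsMatch` requires `HasREX_W` to be equal on both sides, so an EVEX.W=1 instruction whose VEX counterpart does not set the W bit (the former `VEX_W1X` definitions, e.g. `VADDPDZ128rr` compressing to `VADDPDrr`) can no longer be paired automatically and is listed by hand under `// W bit does not match`.

For readers unfamiliar with `.def` tables: they are consumed via the X-macro pattern, where the including file defines `ENTRY`, includes the table, and each row expands in place. Below is a minimal, self-contained sketch of that pattern; the enum values, the `ManualCompressTable` name, and the `compress()` helper are illustrative assumptions for this sketch, not the emitter's actual generated interface.

```cpp
#include <utility>

// Stand-ins for the generated opcode enum; in LLVM these would be
// X86::VADDPDZ128rr etc. from X86GenInstrInfo.inc. (Hypothetical names.)
enum Opcode { VADDPDZ128rr, VADDPDrr, VPANDQZ128rm, VPANDrm };

// Expand the manual table into {old (EVEX) opcode, new (compressed) opcode}
// pairs. Each ENTRY(Old, New) row becomes one array initializer.
static const std::pair<Opcode, Opcode> ManualCompressTable[] = {
#define ENTRY(OldInst, NewInst) {OldInst, NewInst},
    // In real use this would be:
    //   #include "X86ManualCompressEVEXTables.def"
    // Two rows are inlined here so the sketch stands alone.
    ENTRY(VADDPDZ128rr, VADDPDrr)
    ENTRY(VPANDQZ128rm, VPANDrm)
#undef ENTRY
};

// Linear scan is enough for a sketch; a real consumer would merge these rows
// with the automatically matched pairs and sort by the old opcode for lookup.
inline Opcode compress(Opcode Op) {
  for (const auto &Row : ManualCompressTable)
    if (Row.first == Op)
      return Row.second;
  return Op; // No compressed form known.
}
```

The `#undef ENTRY` at the end of the `.def` file is part of the same convention: it keeps the macro from leaking into whatever the next include compiles.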