@@ -3830,6 +3830,14 @@ void Assembler::evpermi2q(XMMRegister dst, XMMRegister nds, XMMRegister src, int
38303830 emit_int16 (0x76 , (0xC0 | encode));
38313831}
38323832
3833+ void Assembler::evpermt2b (XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3834+ assert (VM_Version::supports_avx512_vbmi (), " " );
3835+ InstructionAttr attributes (vector_len, /* vex_w */ false , /* legacy_mode */ false , /* no_mask_reg */ true , /* uses_vl */ true );
3836+ attributes.set_is_evex_instruction ();
3837+ int encode = vex_prefix_and_encode (dst->encoding (), nds->encoding (), src->encoding (), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3838+ emit_int16 (0x7D , (0xC0 | encode));
3839+ }
3840+
// Emit PAUSE (F3 90) — spin-wait hint (per the x86 ISA manuals).
void Assembler::pause() {
  emit_int16((unsigned char)0xF3, (unsigned char)0x90);
}
@@ -4549,6 +4557,15 @@ void Assembler::vpmaddwd(XMMRegister dst, XMMRegister nds, XMMRegister src, int
45494557 emit_int16 ((unsigned char )0xF5 , (0xC0 | encode));
45504558}
45514559
4560+ void Assembler::vpmaddubsw (XMMRegister dst, XMMRegister src1, XMMRegister src2, int vector_len) {
4561+ assert (vector_len == AVX_128bit? VM_Version::supports_avx () :
4562+ vector_len == AVX_256bit? VM_Version::supports_avx2 () :
4563+ vector_len == AVX_512bit? VM_Version::supports_avx512bw () : 0 , " " );
4564+ InstructionAttr attributes (vector_len, /* rex_w */ false , /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true , /* uses_vl */ true );
4565+ int encode = simd_prefix_and_encode (dst, src1, src2, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4566+ emit_int16 (0x04 , (0xC0 | encode));
4567+ }
4568+
45524569void Assembler::evpdpwssd (XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
45534570 assert (VM_Version::supports_evex (), " " );
45544571 assert (VM_Version::supports_avx512_vnni (), " must support vnni" );
@@ -4857,6 +4874,15 @@ void Assembler::vptest(XMMRegister dst, XMMRegister src, int vector_len) {
48574874 emit_int16 (0x17 , (0xC0 | encode));
48584875}
48594876
// Emit VPTESTMB: byte-granular test of nds against src, producing a result
// in opmask register dst.  Requires AVX-512 VL+BW.
void Assembler::evptestmb(KRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx512vlbw(), "");
  // Encoding: EVEX.NDS.XXX.66.0F38.W0 26 /r
  // (the code below selects the 0F 38 opcode map and emits opcode byte 0x26)
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
  attributes.set_is_evex_instruction();
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
  emit_int16((unsigned char)0x26, (0xC0 | encode));
}
4885+
48604886void Assembler::punpcklbw (XMMRegister dst, Address src) {
48614887 NOT_LP64 (assert (VM_Version::supports_sse2 (), " " ));
48624888 assert ((UseAVX > 0 ), " SSE mode requires address alignment 16 bytes" );
@@ -9410,6 +9436,13 @@ void Assembler::shlxq(Register dst, Register src1, Register src2) {
94109436 emit_int16 ((unsigned char )0xF7 , (0xC0 | encode));
94119437}
94129438
9439+ void Assembler::shrxl (Register dst, Register src1, Register src2) {
9440+ assert (VM_Version::supports_bmi2 (), " " );
9441+ InstructionAttr attributes (AVX_128bit, /* vex_w */ false , /* legacy_mode */ true , /* no_mask_reg */ true , /* uses_vl */ true );
9442+ int encode = vex_prefix_and_encode (dst->encoding (), src2->encoding (), src1->encoding (), VEX_SIMD_F2, VEX_OPCODE_0F_38, &attributes);
9443+ emit_int16 ((unsigned char )0xF7 , (0xC0 | encode));
9444+ }
9445+
94139446void Assembler::shrxq (Register dst, Register src1, Register src2) {
94149447 assert (VM_Version::supports_bmi2 (), " " );
94159448 InstructionAttr attributes (AVX_128bit, /* vex_w */ true , /* legacy_mode */ true , /* no_mask_reg */ true , /* uses_vl */ true );
0 commit comments