@@ -3758,6 +3758,15 @@ void Assembler::vpermb(XMMRegister dst, XMMRegister nds, XMMRegister src, int ve
3758
3758
emit_int16 ((unsigned char )0x8D , (0xC0 | encode));
3759
3759
}
3760
3760
3761
+ void Assembler::vpermb (XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
3762
+ assert (VM_Version::supports_avx512_vbmi (), " " );
3763
+ InstructionAttr attributes (vector_len, /* rex_w */ false , /* legacy_mode */ false , /* no_mask_reg */ true , /* uses_vl */ true );
3764
+ attributes.set_is_evex_instruction ();
3765
+ vex_prefix (src, nds->encoding (), dst->encoding (), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3766
+ emit_int8 ((unsigned char )0x8D );
3767
+ emit_operand (dst, src);
3768
+ }
3769
+
3761
3770
void Assembler::vpermw (XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3762
3771
assert (vector_len == AVX_128bit ? VM_Version::supports_avx512vlbw () :
3763
3772
vector_len == AVX_256bit ? VM_Version::supports_avx512vlbw () :
@@ -3838,6 +3847,14 @@ void Assembler::evpermt2b(XMMRegister dst, XMMRegister nds, XMMRegister src, int
3838
3847
emit_int16 (0x7D , (0xC0 | encode));
3839
3848
}
3840
3849
3850
// VPMULTISHIFTQB (EVEX.66.0F38.W1 0x83): per the Intel SDM, selects unaligned
// 8-bit fields out of each source qword of `src`, with the bit offsets taken
// from the corresponding control bytes in `ctl`.  Register-to-register form
// only; requires AVX512_VBMI.
void Assembler::evpmultishiftqb(XMMRegister dst, XMMRegister ctl, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx512_vbmi(), "");
  // vex_w = true selects the W1 (qword-granular) encoding of the opcode.
  InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
  attributes.set_is_evex_instruction();
  int encode = vex_prefix_and_encode(dst->encoding(), ctl->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
  emit_int16((unsigned char)0x83, (unsigned char)(0xC0 | encode));  // opcode + register-direct ModRM
}
3857
+
3841
3858
// Emit PAUSE (encoding 0xF3 0x90, i.e. REP NOP): a spin-wait hint to the
// processor, typically used inside busy-wait loops.
void Assembler::pause() {
  emit_int16((unsigned char)0xF3, (unsigned char)0x90);
}
@@ -4136,6 +4153,15 @@ void Assembler::vpmovmskb(Register dst, XMMRegister src, int vec_enc) {
4136
4153
emit_int16 ((unsigned char )0xD7 , (0xC0 | encode));
4137
4154
}
4138
4155
4156
// VPMASKMOVD (VEX.66.0F38.W0 0x8C): conditional dword load from memory `src`
// into `dst` under the per-element mask in `nds`; per the Intel SDM, masked-off
// elements are zeroed and their memory locations are not accessed.  Only the
// AVX2 256-bit form is accepted here (see the assert).
void Assembler::vpmaskmovd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert((VM_Version::supports_avx2() && vector_len == AVX_256bit), "");
  // Memory-operand form: mark the instruction start so relocations recorded by
  // emit_operand() reference the whole instruction.
  InstructionMark im(this);
  // legacy_mode = true: this instruction has no EVEX form, so force VEX.
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ true);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
  emit_int8((unsigned char)0x8C);  // opcode: VPMASKMOVD (load form)
  emit_operand(dst, src);          // ModRM + SIB/disp for the memory operand
}
4164
+
4139
4165
void Assembler::pextrd (Register dst, XMMRegister src, int imm8) {
4140
4166
assert (VM_Version::supports_sse4_1 (), " " );
4141
4167
InstructionAttr attributes (AVX_128bit, /* rex_w */ false , /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ true , /* uses_vl */ false );
@@ -6565,6 +6591,13 @@ void Assembler::psubq(XMMRegister dst, XMMRegister src) {
6565
6591
emit_int8 ((0xC0 | encode));
6566
6592
}
6567
6593
6594
// VPSUBUSB (66 0F 0xD8): packed subtract of unsigned bytes with unsigned
// saturation (per the SDM, results clamp at 0): dst = saturate(nds - src).
// Register-to-register form; mirrors the sibling vpsubb below.
void Assembler::vpsubusb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  // _legacy_mode_bw: presumably gates EVEX encoding on AVX512BW, matching the
  // other byte/word emitters in this file — confirm against InstructionAttr.
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int16((unsigned char)0xD8, (0xC0 | encode));  // opcode + register-direct ModRM
}
6600
+
6568
6601
void Assembler::vpsubb (XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
6569
6602
assert (UseAVX > 0 , " requires some form of AVX" );
6570
6603
InstructionAttr attributes (vector_len, /* vex_w */ false , /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true , /* uses_vl */ true );
@@ -6656,6 +6689,15 @@ void Assembler::pmuludq(XMMRegister dst, XMMRegister src) {
6656
6689
emit_int16 ((unsigned char )0xF4 , (0xC0 | encode));
6657
6690
}
6658
6691
6692
// VPMULHUW (66 0F 0xE4): packed multiply of unsigned words; per the SDM each
// destination word receives the high 16 bits of the 32-bit product.
// The assert spells out the exact CPU feature required per vector length:
// AVX for 128-bit, AVX2 for 256-bit, AVX512BW for 512-bit.
void Assembler::vpmulhuw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert((vector_len == AVX_128bit && VM_Version::supports_avx()) ||
         (vector_len == AVX_256bit && VM_Version::supports_avx2()) ||
         (vector_len == AVX_512bit && VM_Version::supports_avx512bw()), "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int16((unsigned char)0xE4, (0xC0 | encode));  // opcode + register-direct ModRM
}
6700
+
6659
6701
void Assembler::vpmullw (XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
6660
6702
assert (UseAVX > 0 , " requires some form of AVX" );
6661
6703
InstructionAttr attributes (vector_len, /* vex_w */ false , /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true , /* uses_vl */ true );
0 commit comments