4161 assert(VM_Version::supports_evex(), "requires EVEX support");
4162 assert(vector_len == Assembler::AVX_256bit || vector_len == Assembler::AVX_512bit, "");
4163 InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
4164 attributes.set_is_evex_instruction();
4165 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
4166 emit_int8(0x43);
4167 emit_int8((unsigned char)(0xC0 | encode));
4168 emit_int8(imm8 & 0xFF);
4169 }
4170
void Assembler::psrldq(XMMRegister dst, int shift) {
  // Shift right 128 bit value in dst XMMRegister by shift number of bytes.
  // (PSRLDQ is a byte-wise logical RIGHT shift; the old comment said "left",
  // copied from pslldq.)
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  // XMM3 is for /3 encoding: 66 0F 73 /3 ib — xmm3 supplies the opcode-extension digit.
  int encode = simd_prefix_and_encode(xmm3, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x73);
  emit_int8((unsigned char)(0xC0 | encode));
  emit_int8(shift);
}
4180
4181 void Assembler::pslldq(XMMRegister dst, int shift) {
4182 // Shift left 128 bit value in dst XMMRegister by shift number of bytes.
4183 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4184 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
4185 // XMM7 is for /7 encoding: 66 0F 73 /7 ib
4186 int encode = simd_prefix_and_encode(xmm7, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4187 emit_int8(0x73);
4188 emit_int8((unsigned char)(0xC0 | encode));
4189 emit_int8(shift);
4190 }
4191
void Assembler::ptest(XMMRegister dst, Address src) {
  // PTEST with a memory source operand (opcode 0x17 in the 66 0F 38 map).
  assert(VM_Version::supports_sse4_1(), "");
  // Memory form is only allowed with AVX enabled: per the assert message, the
  // legacy SSE encoding would demand a 16-byte-aligned operand.
  assert((UseAVX > 0), "SSE mode requires address alignment 16 bytes");
  InstructionMark im(this);  // marks instruction start before emitting prefix/operand
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
  simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
  emit_int8(0x17);
  emit_operand(dst, src);
}
4201
4202 void Assembler::ptest(XMMRegister dst, XMMRegister src) {
4203 assert(VM_Version::supports_sse4_1(), "");
4204 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
4205 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4206 emit_int8(0x17);
4207 emit_int8((unsigned char)(0xC0 | encode));
4208 }
4209
4210 void Assembler::vptest(XMMRegister dst, Address src) {
4211 assert(VM_Version::supports_avx(), "");
4212 InstructionMark im(this);
4213 InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
4214 assert(dst != xnoreg, "sanity");
4215 // swap src<->dst for encoding
4216 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4217 emit_int8(0x17);
4218 emit_operand(dst, src);
4219 }
4220
4221 void Assembler::vptest(XMMRegister dst, XMMRegister src) {
4222 assert(VM_Version::supports_avx(), "");
4223 InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
|
4161 assert(VM_Version::supports_evex(), "requires EVEX support");
4162 assert(vector_len == Assembler::AVX_256bit || vector_len == Assembler::AVX_512bit, "");
4163 InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
4164 attributes.set_is_evex_instruction();
4165 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
4166 emit_int8(0x43);
4167 emit_int8((unsigned char)(0xC0 | encode));
4168 emit_int8(imm8 & 0xFF);
4169 }
4170
void Assembler::psrldq(XMMRegister dst, int shift) {
  // Shift right 128 bit value in dst XMMRegister by shift number of bytes.
  // (PSRLDQ is a byte-wise logical RIGHT shift; the old comment said "left",
  // copied from pslldq.)
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  // XMM3 is for /3 encoding: 66 0F 73 /3 ib — xmm3 supplies the opcode-extension digit.
  int encode = simd_prefix_and_encode(xmm3, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x73);
  emit_int8((unsigned char)(0xC0 | encode));
  emit_int8(shift);
}
4180
void Assembler::vpsrldq(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
  // Shift src right by 'shift' bytes into dst (VPSRLDQ, per 128-bit lane for
  // the wider vector lengths). Each width needs a progressively newer ISA:
  // AVX for 128-bit, AVX2 for 256-bit, AVX-512BW for 512-bit.
  assert(vector_len == AVX_128bit ? VM_Version::supports_avx() :
  vector_len == AVX_256bit ? VM_Version::supports_avx2() :
  vector_len == AVX_512bit ? VM_Version::supports_avx512bw() : 0, "");
  InstructionAttr attributes(vector_len, /*vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  // XMM3 is for /3 encoding: 66 0F 73 /3 ib (mirrors pslldq's /7 comment).
  int encode = vex_prefix_and_encode(xmm3->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x73);
  emit_int8((unsigned char)(0xC0 | encode));
  emit_int8(shift & 0xFF);
}
4191
4192 void Assembler::pslldq(XMMRegister dst, int shift) {
4193 // Shift left 128 bit value in dst XMMRegister by shift number of bytes.
4194 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4195 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
4196 // XMM7 is for /7 encoding: 66 0F 73 /7 ib
4197 int encode = simd_prefix_and_encode(xmm7, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4198 emit_int8(0x73);
4199 emit_int8((unsigned char)(0xC0 | encode));
4200 emit_int8(shift);
4201 }
4202
void Assembler::vpslldq(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
  // Shift src left by 'shift' bytes into dst (VPSLLDQ, per 128-bit lane for
  // the wider vector lengths). Each width needs a progressively newer ISA:
  // AVX for 128-bit, AVX2 for 256-bit, AVX-512BW for 512-bit.
  assert(vector_len == AVX_128bit ? VM_Version::supports_avx() :
  vector_len == AVX_256bit ? VM_Version::supports_avx2() :
  vector_len == AVX_512bit ? VM_Version::supports_avx512bw() : 0, "");
  InstructionAttr attributes(vector_len, /*vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  // XMM7 is for /7 encoding: 66 0F 73 /7 ib — xmm7 supplies the opcode-extension digit.
  int encode = vex_prefix_and_encode(xmm7->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x73);
  emit_int8((unsigned char)(0xC0 | encode));
  emit_int8(shift & 0xFF);
}
4213
void Assembler::ptest(XMMRegister dst, Address src) {
  // PTEST with a memory source operand (opcode 0x17 in the 66 0F 38 map).
  assert(VM_Version::supports_sse4_1(), "");
  // Memory form is only allowed with AVX enabled: per the assert message, the
  // legacy SSE encoding would demand a 16-byte-aligned operand.
  assert((UseAVX > 0), "SSE mode requires address alignment 16 bytes");
  InstructionMark im(this);  // marks instruction start before emitting prefix/operand
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
  simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
  emit_int8(0x17);
  emit_operand(dst, src);
}
4223
4224 void Assembler::ptest(XMMRegister dst, XMMRegister src) {
4225 assert(VM_Version::supports_sse4_1() || VM_Version::supports_avx(), "");
4226 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
4227 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4228 emit_int8(0x17);
4229 emit_int8((unsigned char)(0xC0 | encode));
4230 }
4231
4232 void Assembler::vptest(XMMRegister dst, Address src) {
4233 assert(VM_Version::supports_avx(), "");
4234 InstructionMark im(this);
4235 InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
4236 assert(dst != xnoreg, "sanity");
4237 // swap src<->dst for encoding
4238 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4239 emit_int8(0x17);
4240 emit_operand(dst, src);
4241 }
4242
4243 void Assembler::vptest(XMMRegister dst, XMMRegister src) {
4244 assert(VM_Version::supports_avx(), "");
4245 InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
|