// vsqrtpd dst, src -- packed double-precision square root (66 0F 51 /r),
// register-to-register form, VEX/EVEX-encoded for the requested vector_len.
5186 void Assembler::vsqrtpd(XMMRegister dst, XMMRegister src, int vector_len) {
5187 assert(VM_Version::supports_avx(), "");
// vex_w is only raised when EVEX is available: EVEX.W=1 selects 64-bit elements.
5188 InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
// NOTE(review): marks the W bit as revertible for the plain VEX encoding --
// presumably because VEX sqrtpd ignores W; confirm against InstructionAttr.
5189 attributes.set_rex_vex_w_reverted();
5190 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5191 emit_int8(0x51);
// ModRM byte: mod=11 (register-direct) OR'd with the reg/rm fields from the encoder.
5192 emit_int8((unsigned char)(0xC0 | encode));
5193 }
5194
// vsqrtpd dst, [src] -- packed double-precision square root (66 0F 51 /r),
// memory-source form, VEX/EVEX-encoded for the requested vector_len.
5195 void Assembler::vsqrtpd(XMMRegister dst, Address src, int vector_len) {
5196 assert(VM_Version::supports_avx(), "");
// InstructionMark brackets the instruction so relocations/offsets attach correctly.
5197 InstructionMark im(this);
5198 InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
// Full-vector tuple with 64-bit elements -- drives EVEX disp8*N compressed
// displacement scaling for the memory operand.
5199 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
// NOTE(review): W marked revertible for the VEX form -- presumably VEX sqrtpd
// ignores W; confirm against InstructionAttr.
5200 attributes.set_rex_vex_w_reverted();
5201 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5202 emit_int8(0x51);
// Emit the ModRM/SIB/displacement bytes for the memory operand.
5203 emit_operand(dst, src);
5204 }
5205
// andpd dst, src -- bitwise AND of packed doubles (66 0F 54 /r), 128-bit form;
// dst = dst &amp; src.
5206 void Assembler::andpd(XMMRegister dst, XMMRegister src) {
5207 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
// _legacy_mode_dq gates the encoding: rex_w/legacy flags differ depending on
// whether AVX-512DQ-style encoding is in effect.
5208 InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
// NOTE(review): W marked revertible for the non-EVEX form -- confirm against
// InstructionAttr.
5209 attributes.set_rex_vex_w_reverted();
// dst doubles as the first source operand (non-destructive encoding reuses it).
5210 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5211 emit_int8(0x54);
// ModRM byte: mod=11 (register-direct) OR'd with the reg/rm fields from the encoder.
5212 emit_int8((unsigned char)(0xC0 | encode));
5213 }
5214
// andps dst, src -- bitwise AND of packed singles (0F 54 /r, no SIMD prefix),
// 128-bit form; dst = dst &amp; src.
5215 void Assembler::andps(XMMRegister dst, XMMRegister src) {
5216 NOT_LP64(assert(VM_Version::supports_sse(), ""));
// legacy_mode follows _legacy_mode_dq (AVX-512DQ availability); W stays 0 for ps.
5217 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
// dst doubles as the first source operand.
5218 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
5219 emit_int8(0x54);
// ModRM byte: mod=11 (register-direct) OR'd with the reg/rm fields from the encoder.
5220 emit_int8((unsigned char)(0xC0 | encode));
5221 }
5222
5223 void Assembler::andps(XMMRegister dst, Address src) {
5224 NOT_LP64(assert(VM_Version::supports_sse(), ""));
5225 InstructionMark im(this);
|
// vsqrtpd dst, src -- packed double-precision square root (66 0F 51 /r),
// register-to-register form, VEX/EVEX-encoded for the requested vector_len.
// NOTE(review): this definition repeats the vsqrtpd earlier in this capture --
// looks like overlapping file chunks; a single TU with both copies would not
// compile. Confirm against the original file.
5186 void Assembler::vsqrtpd(XMMRegister dst, XMMRegister src, int vector_len) {
5187 assert(VM_Version::supports_avx(), "");
// vex_w is only raised when EVEX is available: EVEX.W=1 selects 64-bit elements.
5188 InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5189 attributes.set_rex_vex_w_reverted();
5190 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5191 emit_int8(0x51);
// ModRM byte: mod=11 (register-direct) OR'd with the reg/rm fields from the encoder.
5192 emit_int8((unsigned char)(0xC0 | encode));
5193 }
5194
// vsqrtpd dst, [src] -- packed double-precision square root (66 0F 51 /r),
// memory-source form, VEX/EVEX-encoded for the requested vector_len.
// NOTE(review): duplicates the earlier vsqrtpd(Address) definition -- apparent
// chunk overlap in this capture; confirm against the original file.
5195 void Assembler::vsqrtpd(XMMRegister dst, Address src, int vector_len) {
5196 assert(VM_Version::supports_avx(), "");
// InstructionMark brackets the instruction so relocations/offsets attach correctly.
5197 InstructionMark im(this);
5198 InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
// Full-vector tuple, 64-bit elements -- EVEX disp8*N scaling for the memory operand.
5199 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
5200 attributes.set_rex_vex_w_reverted();
5201 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5202 emit_int8(0x51);
// Emit the ModRM/SIB/displacement bytes for the memory operand.
5203 emit_operand(dst, src);
5204 }
5205
// vsqrtps dst, src -- packed single-precision square root (0F 51 /r, no SIMD
// prefix), register-to-register form, VEX/EVEX-encoded for the requested vector_len.
5206 void Assembler::vsqrtps(XMMRegister dst, XMMRegister src, int vector_len) {
5207 assert(VM_Version::supports_avx(), "");
// W stays 0: single-precision elements, so no 64-bit element select needed.
5208 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5209 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
5210 emit_int8(0x51);
// ModRM byte: mod=11 (register-direct) OR'd with the reg/rm fields from the encoder.
5211 emit_int8((unsigned char)(0xC0 | encode));
5212 }
5213
// vsqrtps dst, [src] -- packed single-precision square root (0F 51 /r, no SIMD
// prefix), memory-source form, VEX/EVEX-encoded for the requested vector_len.
5214 void Assembler::vsqrtps(XMMRegister dst, Address src, int vector_len) {
5215 assert(VM_Version::supports_avx(), "");
// InstructionMark brackets the instruction so relocations/offsets attach correctly.
5216 InstructionMark im(this);
// W stays 0: single-precision elements, so no 64-bit element select needed.
5217 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
// Fix: elements of a ps instruction are 32-bit, not 64-bit. EVEX_64bit here
// mis-describes the operand element size used for EVEX disp8*N compressed-
// displacement scaling (Intel SDM Vol. 2, EVEX tuple types); mainline OpenJDK
// uses EVEX_32bit for vsqrtps, matching the EVEX_64bit used by vsqrtpd above.
5218 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5219 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
5220 emit_int8(0x51);
// Emit the ModRM/SIB/displacement bytes for the memory operand.
5221 emit_operand(dst, src);
5222 }
5223
// andpd dst, src -- bitwise AND of packed doubles (66 0F 54 /r), 128-bit form;
// dst = dst &amp; src.
// NOTE(review): duplicates the earlier andpd definition -- apparent chunk
// overlap in this capture; confirm against the original file.
5224 void Assembler::andpd(XMMRegister dst, XMMRegister src) {
5225 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
// _legacy_mode_dq gates the encoding: rex_w/legacy flags differ depending on
// whether AVX-512DQ-style encoding is in effect.
5226 InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
5227 attributes.set_rex_vex_w_reverted();
// dst doubles as the first source operand.
5228 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5229 emit_int8(0x54);
// ModRM byte: mod=11 (register-direct) OR'd with the reg/rm fields from the encoder.
5230 emit_int8((unsigned char)(0xC0 | encode));
5231 }
5232
// andps dst, src -- bitwise AND of packed singles (0F 54 /r, no SIMD prefix),
// 128-bit form; dst = dst &amp; src.
// NOTE(review): duplicates the earlier andps definition -- apparent chunk
// overlap in this capture; confirm against the original file.
5233 void Assembler::andps(XMMRegister dst, XMMRegister src) {
5234 NOT_LP64(assert(VM_Version::supports_sse(), ""));
// legacy_mode follows _legacy_mode_dq (AVX-512DQ availability); W stays 0 for ps.
5235 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
// dst doubles as the first source operand.
5236 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
5237 emit_int8(0x54);
// ModRM byte: mod=11 (register-direct) OR'd with the reg/rm fields from the encoder.
5238 emit_int8((unsigned char)(0xC0 | encode));
5239 }
5240
5241 void Assembler::andps(XMMRegister dst, Address src) {
5242 NOT_LP64(assert(VM_Version::supports_sse(), ""));
5243 InstructionMark im(this);
|