3130 assert((UseAVX > 0), "SSE mode requires address alignment 16 bytes");
3131 InstructionMark im(this);
3132 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3133 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
3134 simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3135 emit_int8(0x67);
3136 emit_operand(dst, src);
3137 }
3138
// PACKUSWB xmm, xmm (66 0F 67): pack the signed words of dst and src into
// bytes with unsigned saturation. SSE2 register-register form.
void Assembler::packuswb(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  // legacy_mode follows _legacy_mode_bw — presumably gates the EVEX form on AVX512BW; confirm.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x67);                            // PACKUSWB opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
3146
// VPACKUSWB: AVX/AVX2/EVEX three-operand form of PACKUSWB (dst = pack(nds, src)).
void Assembler::vpackuswb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "some form of AVX must be enabled");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  // An invalid nds register is encoded as register 0.
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x67);                            // PACKUSWB opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
3155
3156 void Assembler::vpermq(XMMRegister dst, XMMRegister src, int imm8, int vector_len) {
3157 assert(VM_Version::supports_avx2(), "");
3158 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3159 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3160 emit_int8(0x00);
3161 emit_int8(0xC0 | encode);
3162 emit_int8(imm8);
3163 }
3164
// PAUSE (F3 90): spin-wait hint; encoded as an F3-prefixed NOP.
void Assembler::pause() {
  emit_int8((unsigned char)0xF3);
  emit_int8((unsigned char)0x90);
}
3169
// PCMPESTRI xmm, m128, imm8 (66 0F3A 61): packed compare explicit-length
// strings, result index in ECX. SSE4.2; memory-operand form.
void Assembler::pcmpestri(XMMRegister dst, Address src, int imm8) {
  assert(VM_Version::supports_sse4_2(), "");
  InstructionMark im(this);
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
  emit_int8(0x61);          // PCMPESTRI opcode byte
  emit_operand(dst, src);   // ModRM/SIB/displacement for the memory operand
  emit_int8(imm8);          // comparison-control immediate
}
3179
3182 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3183 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3184 emit_int8(0x61);
3185 emit_int8((unsigned char)(0xC0 | encode));
3186 emit_int8(imm8);
3187 }
3188
// In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
// PCMPEQB xmm, xmm (66 0F 74): byte-wise equality compare. SSE2 form.
void Assembler::pcmpeqb(XMMRegister dst, XMMRegister src) {
  assert(VM_Version::supports_sse2(), "");
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x74);                            // PCMPEQB opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
3197
3198 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
3199 void Assembler::vpcmpeqb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3200 assert(VM_Version::supports_avx(), "");
3201 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3202 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
3203 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3204 emit_int8(0x74);
3205 emit_int8((unsigned char)(0xC0 | encode));
3206 }
3207
// In this context, kdst is written the mask used to process the equal components
// EVPCMPEQB k, xmm, xmm: AVX-512BW byte compare producing an opmask register.
void Assembler::evpcmpeqb(KRegister kdst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx512bw(), "");
  InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
  attributes.set_is_evex_instruction();   // must use the EVEX encoding (kdst destination)
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  int encode = vex_prefix_and_encode(kdst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x74);                            // PCMPEQB opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
3218
// EVPCMPEQB k, xmm, m: memory-operand form of the AVX-512BW byte compare.
void Assembler::evpcmpeqb(KRegister kdst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx512bw(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
  attributes.set_is_evex_instruction();
  // Full-vector-memory tuple; no fixed element input size for disp8 scaling.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  int dst_enc = kdst->encoding();
  vex_prefix(src, nds_enc, dst_enc, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x74);                       // PCMPEQB opcode byte
  // emit_operand wants a Register; reuse the k-register encoding for the reg field.
  emit_operand(as_Register(dst_enc), src);
}
3231
// In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
// PCMPEQW xmm, xmm (66 0F 75): word-wise equality compare. SSE2 form.
void Assembler::pcmpeqw(XMMRegister dst, XMMRegister src) {
  assert(VM_Version::supports_sse2(), "");
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x75);                            // PCMPEQW opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
3240
3241 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
3242 void Assembler::vpcmpeqw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3243 assert(VM_Version::supports_avx(), "");
3244 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3245 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
3246 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3247 emit_int8(0x75);
3248 emit_int8((unsigned char)(0xC0 | encode));
3249 }
3250
// In this context, kdst is written the mask used to process the equal components
// EVPCMPEQW k, xmm, xmm: AVX-512BW word compare producing an opmask register.
void Assembler::evpcmpeqw(KRegister kdst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx512bw(), "");
  InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
  attributes.set_is_evex_instruction();   // k-register destination requires EVEX
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  int encode = vex_prefix_and_encode(kdst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x75);                            // PCMPEQW opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
3261
// EVPCMPEQW k, xmm, m: memory-operand form of the AVX-512BW word compare.
void Assembler::evpcmpeqw(KRegister kdst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx512bw(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
  // Full-vector-memory tuple; no fixed element input size for disp8 scaling.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
  attributes.set_is_evex_instruction();
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  int dst_enc = kdst->encoding();
  vex_prefix(src, nds_enc, dst_enc, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x75);                       // PCMPEQW opcode byte
  emit_operand(as_Register(dst_enc), src);
}
3274
// In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
// PCMPEQD xmm, xmm (66 0F 76): dword-wise equality compare. SSE2 form.
void Assembler::pcmpeqd(XMMRegister dst, XMMRegister src) {
  assert(VM_Version::supports_sse2(), "");
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x76);                            // PCMPEQD opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
3283
3284 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
3285 void Assembler::vpcmpeqd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3286 assert(VM_Version::supports_avx(), "");
3287 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3288 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
3289 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3290 emit_int8(0x76);
3291 emit_int8((unsigned char)(0xC0 | encode));
3292 }
3293
// In this context, kdst is written the mask used to process the equal components
// EVPCMPEQD k, xmm, xmm: AVX-512F dword compare producing an opmask register.
void Assembler::evpcmpeqd(KRegister kdst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_evex(), "");
  InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_is_evex_instruction();   // k-register destination requires EVEX
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  int encode = vex_prefix_and_encode(kdst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x76);                            // PCMPEQD opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
3304
// EVPCMPEQD k, xmm, m: memory-operand form of the AVX-512F dword compare.
void Assembler::evpcmpeqd(KRegister kdst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_evex(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // Full-vector tuple with 32-bit elements for EVEX disp8 compression.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  attributes.set_is_evex_instruction();
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  int dst_enc = kdst->encoding();
  vex_prefix(src, nds_enc, dst_enc, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x76);                       // PCMPEQD opcode byte
  emit_operand(as_Register(dst_enc), src);
}
3317
// In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
// PCMPEQQ xmm, xmm (66 0F38 29): qword-wise equality compare. SSE4.1 form.
void Assembler::pcmpeqq(XMMRegister dst, XMMRegister src) {
  assert(VM_Version::supports_sse4_1(), "");
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
  emit_int8(0x29);                            // PCMPEQQ opcode byte (0F 38 map)
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
3326
3327 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
3328 void Assembler::vpcmpeqq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3329 assert(VM_Version::supports_avx(), "");
3330 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3331 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
3332 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3333 emit_int8(0x29);
3334 emit_int8((unsigned char)(0xC0 | encode));
3335 }
3336
// In this context, kdst is written the mask used to process the equal components
// EVPCMPEQQ k, xmm, xmm: AVX-512F qword compare producing an opmask register.
void Assembler::evpcmpeqq(KRegister kdst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_evex(), "");
  // rex_w set: 64-bit element width.
  InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_is_evex_instruction();
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  int encode = vex_prefix_and_encode(kdst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
  emit_int8(0x29);                            // PCMPEQQ opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
3347
// In this context, kdst is written the mask used to process the equal components
// EVPCMPEQQ k, xmm, m: memory-operand form of the AVX-512F qword compare.
void Assembler::evpcmpeqq(KRegister kdst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_evex(), "");
  InstructionMark im(this);
  // rex_w set: 64-bit element width.
  InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_is_evex_instruction();
  // Full-vector tuple with 64-bit elements for EVEX disp8 compression.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  int dst_enc = kdst->encoding();
  vex_prefix(src, nds_enc, dst_enc, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
  emit_int8(0x29);                       // PCMPEQQ opcode byte
  emit_operand(as_Register(dst_enc), src);
}
3361
// PMOVMSKB r32, xmm (66 0F D7): gather the sign bits of each byte of src
// into the low bits of the general-purpose register dst. SSE2.
void Assembler::pmovmskb(Register dst, XMMRegister src) {
  assert(VM_Version::supports_sse2(), "");
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  // The GP destination is passed through the XMM-encoding path; only its encoding matters.
  int encode = simd_prefix_and_encode(as_XMMRegister(dst->encoding()), xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xD7);             // PMOVMSKB opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
3369
// VPMOVMSKB r32, ymm: 256-bit AVX2 form of PMOVMSKB (vector length fixed at 256).
void Assembler::vpmovmskb(Register dst, XMMRegister src) {
  assert(VM_Version::supports_avx2(), "");
  InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xD7);             // PMOVMSKB opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
3377
// SETcc r8 (0F 9x): set the low byte of dst to 1 if condition cc holds, else 0.
void Assembler::setb(Condition cc, Register dst) {
  assert(0 <= cc && cc < 16, "illegal cc");
  // Second argument true — presumably forces a REX prefix so all byte registers
  // are addressable; confirm against prefix_and_encode's declaration.
  int encode = prefix_and_encode(dst->encoding(), true);
  emit_int8(0x0F);                            // two-byte opcode escape
  emit_int8((unsigned char)0x90 | cc);        // 0x90 + condition code selects SETcc variant
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
3978
// PALIGNR xmm, xmm, imm8 (66 0F3A 0F): concatenate dst:src and extract a
// byte-aligned window starting at imm8. SSSE3.
void Assembler::palignr(XMMRegister dst, XMMRegister src, int imm8) {
  assert(VM_Version::supports_ssse3(), "");
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
  emit_int8((unsigned char)0x0F);             // PALIGNR opcode byte (0F 3A map)
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
  emit_int8(imm8);                            // byte-shift immediate
}
3987
// PBLENDW xmm, xmm, imm8 (66 0F3A 0E): blend words from src into dst under
// the bit mask in imm8. SSE4.1.
void Assembler::pblendw(XMMRegister dst, XMMRegister src, int imm8) {
  assert(VM_Version::supports_sse4_1(), "");
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
  emit_int8((unsigned char)0x0E);             // PBLENDW opcode byte (0F 3A map)
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
  emit_int8(imm8);                            // per-word selection mask
}
3996
// SHA1RNDS4 xmm, xmm, imm8 (0F3A CC, no SIMD prefix): perform four SHA-1
// rounds; imm8 selects the round function/constant group. Requires SHA extensions.
void Assembler::sha1rnds4(XMMRegister dst, XMMRegister src, int imm8) {
  assert(VM_Version::supports_sha(), "");
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F_3A, &attributes);
  emit_int8((unsigned char)0xCC);             // SHA1RNDS4 opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
  emit_int8((unsigned char)imm8);             // round-selector immediate
}
4005
4006 void Assembler::sha1nexte(XMMRegister dst, XMMRegister src) {
4007 assert(VM_Version::supports_sha(), "");
4008 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
4009 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F_38, &attributes);
4010 emit_int8((unsigned char)0xC8);
4011 emit_int8((unsigned char)(0xC0 | encode));
4378
// XORL: dst ^= src, 32-bit register-register form (opcode 0x33 /r via emit_arith).
void Assembler::xorl(Register dst, Register src) {
  (void) prefix_and_encode(dst->encoding(), src->encoding());
  emit_arith(0x33, 0xC0, dst, src);
}
4383
// XORB: dst ^= [src], byte form with a memory source (opcode 0x32 /r).
void Assembler::xorb(Register dst, Address src) {
  InstructionMark im(this);
  prefix(src, dst);
  emit_int8(0x32);          // XOR r8, r/m8 opcode byte
  emit_operand(dst, src);   // ModRM/SIB/displacement for the memory operand
}
4390
4391 // AVX 3-operands scalar float-point arithmetic instructions
4392
4393 void Assembler::vaddsd(XMMRegister dst, XMMRegister nds, Address src) {
4394 assert(VM_Version::supports_avx(), "");
4395 InstructionMark im(this);
4396 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4397 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4398 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4399 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4400 emit_int8(0x58);
4401 emit_operand(dst, src);
4402 }
4403
4404 void Assembler::vaddsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4405 assert(VM_Version::supports_avx(), "");
4406 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4407 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4408 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4409 emit_int8(0x58);
4410 emit_int8((unsigned char)(0xC0 | encode));
4411 }
4412
4413 void Assembler::vaddss(XMMRegister dst, XMMRegister nds, Address src) {
4414 assert(VM_Version::supports_avx(), "");
4415 InstructionMark im(this);
4416 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4417 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4418 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4419 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4420 emit_int8(0x58);
4421 emit_operand(dst, src);
4422 }
4423
4424 void Assembler::vaddss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4425 assert(VM_Version::supports_avx(), "");
4426 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4427 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4428 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4429 emit_int8(0x58);
4430 emit_int8((unsigned char)(0xC0 | encode));
4431 }
4432
4433 void Assembler::vdivsd(XMMRegister dst, XMMRegister nds, Address src) {
4434 assert(VM_Version::supports_avx(), "");
4435 InstructionMark im(this);
4436 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4437 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4438 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4439 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4440 emit_int8(0x5E);
4441 emit_operand(dst, src);
4442 }
4443
4444 void Assembler::vdivsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4445 assert(VM_Version::supports_avx(), "");
4446 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4447 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4448 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4449 emit_int8(0x5E);
4450 emit_int8((unsigned char)(0xC0 | encode));
4451 }
4452
4453 void Assembler::vdivss(XMMRegister dst, XMMRegister nds, Address src) {
4454 assert(VM_Version::supports_avx(), "");
4455 InstructionMark im(this);
4456 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4457 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4458 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4459 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4460 emit_int8(0x5E);
4461 emit_operand(dst, src);
4462 }
4463
4464 void Assembler::vdivss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4465 assert(VM_Version::supports_avx(), "");
4466 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4467 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4468 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4469 emit_int8(0x5E);
4470 emit_int8((unsigned char)(0xC0 | encode));
4471 }
4472
4473 void Assembler::vmulsd(XMMRegister dst, XMMRegister nds, Address src) {
4474 assert(VM_Version::supports_avx(), "");
4475 InstructionMark im(this);
4476 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4477 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4478 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4479 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4480 emit_int8(0x59);
4481 emit_operand(dst, src);
4482 }
4483
4484 void Assembler::vmulsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4485 assert(VM_Version::supports_avx(), "");
4486 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4487 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4488 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4489 emit_int8(0x59);
4490 emit_int8((unsigned char)(0xC0 | encode));
4491 }
4492
4493 void Assembler::vmulss(XMMRegister dst, XMMRegister nds, Address src) {
4494 assert(VM_Version::supports_avx(), "");
4495 InstructionMark im(this);
4496 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4497 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4498 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4499 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4500 emit_int8(0x59);
4501 emit_operand(dst, src);
4502 }
4503
4504 void Assembler::vmulss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4505 assert(VM_Version::supports_avx(), "");
4506 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4507 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4508 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4509 emit_int8(0x59);
4510 emit_int8((unsigned char)(0xC0 | encode));
4511 }
4512
4513 void Assembler::vsubsd(XMMRegister dst, XMMRegister nds, Address src) {
4514 assert(VM_Version::supports_avx(), "");
4515 InstructionMark im(this);
4516 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4517 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4518 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4519 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4520 emit_int8(0x5C);
4521 emit_operand(dst, src);
4522 }
4523
4524 void Assembler::vsubsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4525 assert(VM_Version::supports_avx(), "");
4526 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4527 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4528 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4529 emit_int8(0x5C);
4530 emit_int8((unsigned char)(0xC0 | encode));
4531 }
4532
4533 void Assembler::vsubss(XMMRegister dst, XMMRegister nds, Address src) {
4534 assert(VM_Version::supports_avx(), "");
4535 InstructionMark im(this);
4536 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4537 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4538 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4539 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4540 emit_int8(0x5C);
4541 emit_operand(dst, src);
4542 }
4543
4544 void Assembler::vsubss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4545 assert(VM_Version::supports_avx(), "");
4546 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4547 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4548 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4549 emit_int8(0x5C);
4550 emit_int8((unsigned char)(0xC0 | encode));
4551 }
4552
4553 //====================VECTOR ARITHMETIC=====================================
4554
4555 // Float-point vector arithmetic
4556
// ADDPD xmm, xmm (66 0F 58): packed double add. SSE2 register form.
void Assembler::addpd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x58);                            // ADDPD opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
4564
// ADDPD xmm, m128: memory-operand form of the packed double add.
void Assembler::addpd(XMMRegister dst, Address src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionMark im(this);
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // Full-vector tuple with 64-bit elements for EVEX disp8 compression.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x58);          // ADDPD opcode byte
  emit_operand(dst, src);   // memory ModRM/SIB/displacement
}
4574
4575
// ADDPS xmm, xmm (0F 58, no SIMD prefix): packed single add.
// NOTE(review): the guard checks sse2 although ADDPS is an SSE1 instruction;
// this matches the sibling subps encoder — confirm whether intentional.
void Assembler::addps(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x58);                            // ADDPS opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
4583
4584 void Assembler::vaddpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4585 assert(VM_Version::supports_avx(), "");
4586 InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4587 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4588 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4589 emit_int8(0x58);
4590 emit_int8((unsigned char)(0xC0 | encode));
4591 }
4592
4593 void Assembler::vaddps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4594 assert(VM_Version::supports_avx(), "");
4595 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4596 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4597 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4598 emit_int8(0x58);
4599 emit_int8((unsigned char)(0xC0 | encode));
4600 }
4601
4602 void Assembler::vaddpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4603 assert(VM_Version::supports_avx(), "");
4604 InstructionMark im(this);
4605 InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4606 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4607 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4608 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4609 emit_int8(0x58);
4610 emit_operand(dst, src);
4611 }
4612
4613 void Assembler::vaddps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4614 assert(VM_Version::supports_avx(), "");
4615 InstructionMark im(this);
4616 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4617 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4618 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4619 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4620 emit_int8(0x58);
4621 emit_operand(dst, src);
4622 }
4623
// SUBPD xmm, xmm (66 0F 5C): packed double subtract. SSE2 register form.
void Assembler::subpd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);                            // SUBPD opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
4631
// SUBPS xmm, xmm (0F 5C, no SIMD prefix): packed single subtract.
// NOTE(review): guard checks sse2 though SUBPS is SSE1 — matches addps; confirm.
void Assembler::subps(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);                            // SUBPS opcode byte
  emit_int8((unsigned char)(0xC0 | encode));  // register-direct ModRM
}
4639
4640 void Assembler::vsubpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4641 assert(VM_Version::supports_avx(), "");
4642 InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4643 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4644 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4645 emit_int8(0x5C);
4646 emit_int8((unsigned char)(0xC0 | encode));
4647 }
4648
4649 void Assembler::vsubps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4650 assert(VM_Version::supports_avx(), "");
4651 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4652 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4653 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4654 emit_int8(0x5C);
4655 emit_int8((unsigned char)(0xC0 | encode));
4656 }
4657
// vsubpd: AVX/EVEX packed double subtract with memory source, dst = nds - [src].
void Assembler::vsubpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // Full-vector tuple with 64-bit elements drives EVEX disp8*N displacement compression.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);
  emit_operand(dst, src);
}
4668
// vsubps: AVX/EVEX packed float subtract with memory source, dst = nds - [src].
void Assembler::vsubps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // Full-vector tuple with 32-bit elements drives EVEX disp8*N displacement compression.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);
  emit_operand(dst, src);
}
4679
// mulpd: packed double-precision FP multiply, dst *= src (66 0F 59 /r).
void Assembler::mulpd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  // rex_w raised only when EVEX is available: W marks the 64-bit element size.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);
  emit_int8((unsigned char)(0xC0 | encode));
}
4687
// mulpd: packed double-precision FP multiply with memory source, dst *= [src].
void Assembler::mulpd(XMMRegister dst, Address src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionMark im(this);
  InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // Full-vector tuple, 64-bit elements: needed for EVEX disp8*N compression.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);
  emit_operand(dst, src);
}
4697
4698 void Assembler::mulps(XMMRegister dst, XMMRegister src) {
4699 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4700 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4701 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4702 emit_int8(0x59);
4703 emit_int8((unsigned char)(0xC0 | encode));
4704 }
4705
// vmulpd: AVX/EVEX packed double multiply, dst = nds * src.
void Assembler::vmulpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);
  emit_int8((unsigned char)(0xC0 | encode));
}
4714
// vmulps: AVX/EVEX packed float multiply, dst = nds * src.
void Assembler::vmulps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);
  emit_int8((unsigned char)(0xC0 | encode));
}
4723
// vmulpd: AVX/EVEX packed double multiply with memory source, dst = nds * [src].
void Assembler::vmulpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // Full-vector tuple, 64-bit elements: needed for EVEX disp8*N compression.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);
  emit_operand(dst, src);
}
4734
// vmulps: AVX/EVEX packed float multiply with memory source, dst = nds * [src].
void Assembler::vmulps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // Full-vector tuple, 32-bit elements: needed for EVEX disp8*N compression.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);
  emit_operand(dst, src);
}
4745
// divpd: packed double-precision FP divide, dst /= src (66 0F 5E /r).
void Assembler::divpd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  // rex_w raised only when EVEX is available: W marks the 64-bit element size.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5E);
  emit_int8((unsigned char)(0xC0 | encode));
}
4753
4754 void Assembler::divps(XMMRegister dst, XMMRegister src) {
4755 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4756 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4757 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4758 emit_int8(0x5E);
4759 emit_int8((unsigned char)(0xC0 | encode));
4760 }
4761
// vdivpd: AVX/EVEX packed double divide, dst = nds / src.
void Assembler::vdivpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5E);
  emit_int8((unsigned char)(0xC0 | encode));
}
4770
// vdivps: AVX/EVEX packed float divide, dst = nds / src.
void Assembler::vdivps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5E);
  emit_int8((unsigned char)(0xC0 | encode));
}
4779
// vdivpd: AVX/EVEX packed double divide with memory source, dst = nds / [src].
void Assembler::vdivpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // Full-vector tuple, 64-bit elements: needed for EVEX disp8*N compression.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5E);
  emit_operand(dst, src);
}
4790
// vdivps: AVX/EVEX packed float divide with memory source, dst = nds / [src].
void Assembler::vdivps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // Full-vector tuple, 32-bit elements: needed for EVEX disp8*N compression.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5E);
  emit_operand(dst, src);
}
4801
// vsqrtpd: AVX/EVEX packed double square root, dst = sqrt(src).
void Assembler::vsqrtpd(XMMRegister dst, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // Unary op: no first-source register, so vvvv is encoded as "unused" (0).
  int nds_enc = 0;
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x51);
  emit_int8((unsigned char)(0xC0 | encode));
}
4810
// vsqrtpd: AVX/EVEX packed double square root with memory source, dst = sqrt([src]).
void Assembler::vsqrtpd(XMMRegister dst, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // Full-vector tuple, 64-bit elements: needed for EVEX disp8*N compression.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);  // unary: vvvv unused
  emit_int8(0x51);
  emit_operand(dst, src);
}
4820
4821 void Assembler::andpd(XMMRegister dst, XMMRegister src) {
4822 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4823 InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4824 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4825 emit_int8(0x54);
4826 emit_int8((unsigned char)(0xC0 | encode));
4840 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4841 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4842 simd_prefix(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4843 emit_int8(0x54);
4844 emit_operand(dst, src);
4845 }
4846
// andpd: bitwise AND of packed doubles with memory source, dst &= [src] (66 0F 54 /r).
void Assembler::andpd(XMMRegister dst, Address src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionMark im(this);
  // The EVEX form of andpd needs AVX-512DQ; with only legacy EVEX available we stay in
  // legacy (VEX/SSE) mode, and W is raised only when the DQ form can be used.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x54);
  emit_operand(dst, src);
}
4856
// vandpd: AVX bitwise AND of packed doubles, dst = nds & src.
void Assembler::vandpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  // EVEX vandpd requires AVX-512DQ; otherwise force the legacy VEX encoding.
  InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x54);
  emit_int8((unsigned char)(0xC0 | encode));
}
4865
// vandps: AVX bitwise AND of packed floats, dst = nds & src.
void Assembler::vandps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  // EVEX vandps requires AVX-512DQ; otherwise force the legacy VEX encoding.
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x54);
  emit_int8((unsigned char)(0xC0 | encode));
}
4874
// vandpd: AVX bitwise AND of packed doubles with memory source, dst = nds & [src].
void Assembler::vandpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  // EVEX vandpd requires AVX-512DQ; otherwise force the legacy VEX encoding.
  InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x54);
  emit_operand(dst, src);
}
4885
// vandps: AVX bitwise AND of packed floats with memory source, dst = nds & [src].
void Assembler::vandps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  // EVEX vandps requires AVX-512DQ; otherwise force the legacy VEX encoding.
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x54);
  emit_operand(dst, src);
}
4896
// unpckhpd: interleave high packed doubles of dst and src (66 0F 15 /r).
void Assembler::unpckhpd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  // rex_w raised only when EVEX is available: W marks the 64-bit element size.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x15);
  emit_int8((unsigned char)(0xC0 | encode));
}
4904
// unpcklpd: interleave low packed doubles of dst and src (66 0F 14 /r).
void Assembler::unpcklpd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  // rex_w raised only when EVEX is available: W marks the 64-bit element size.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x14);
  emit_int8((unsigned char)(0xC0 | encode));
}
4912
4932 InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4933 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4934 simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4935 emit_int8(0x57);
4936 emit_operand(dst, src);
4937 }
4938
// xorps: bitwise XOR of packed floats with memory source, dst ^= [src] (NP 0F 57 /r).
void Assembler::xorps(XMMRegister dst, Address src) {
  NOT_LP64(assert(VM_Version::supports_sse(), ""));
  InstructionMark im(this);
  // EVEX xorps requires AVX-512DQ; otherwise force the legacy VEX/SSE encoding.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  simd_prefix(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x57);
  emit_operand(dst, src);
}
4948
// vxorpd: AVX bitwise XOR of packed doubles, dst = nds ^ src.
void Assembler::vxorpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  // EVEX vxorpd requires AVX-512DQ; otherwise force the legacy VEX encoding.
  InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x57);
  emit_int8((unsigned char)(0xC0 | encode));
}
4957
// vxorps: AVX bitwise XOR of packed floats, dst = nds ^ src.
void Assembler::vxorps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  // EVEX vxorps requires AVX-512DQ; otherwise force the legacy VEX encoding.
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x57);
  emit_int8((unsigned char)(0xC0 | encode));
}
4966
// vxorpd: AVX bitwise XOR of packed doubles with memory source, dst = nds ^ [src].
void Assembler::vxorpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  // EVEX vxorpd requires AVX-512DQ; otherwise force the legacy VEX encoding.
  InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x57);
  emit_operand(dst, src);
}
4977
// vxorps: AVX bitwise XOR of packed floats with memory source, dst = nds ^ [src].
void Assembler::vxorps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  // EVEX vxorps requires AVX-512DQ; otherwise force the legacy VEX encoding.
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x57);
  emit_operand(dst, src);
}
4988
4989 // Integer vector arithmetic
4990 void Assembler::vphaddw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4991 assert(VM_Version::supports_avx() && (vector_len == 0) ||
4992 VM_Version::supports_avx2(), "256 bit integer vectors requires AVX2");
4993 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
4994 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
4995 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4996 emit_int8(0x01);
4997 emit_int8((unsigned char)(0xC0 | encode));
4998 }
4999
5000 void Assembler::vphaddd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5001 assert(VM_Version::supports_avx() && (vector_len == 0) ||
5002 VM_Version::supports_avx2(), "256 bit integer vectors requires AVX2");
5003 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
5004 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5005 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5006 emit_int8(0x02);
5007 emit_int8((unsigned char)(0xC0 | encode));
5008 }
5009
// paddb: packed 8-bit integer add, dst += src (66 0F FC /r).
void Assembler::paddb(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  // Byte-element EVEX form needs AVX-512BW; fall back to legacy encoding without it.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFC);
  emit_int8((unsigned char)(0xC0 | encode));
}
5017
// paddw: packed 16-bit integer add, dst += src (66 0F FD /r).
void Assembler::paddw(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  // Word-element EVEX form needs AVX-512BW; fall back to legacy encoding without it.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFD);
  emit_int8((unsigned char)(0xC0 | encode));
}
5025
// paddd: packed 32-bit integer add, dst += src (66 0F FE /r).
void Assembler::paddd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFE);
  emit_int8((unsigned char)(0xC0 | encode));
}
5033
5034 void Assembler::paddd(XMMRegister dst, Address src) {
5035 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5036 InstructionMark im(this);
5037 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5038 simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5039 emit_int8((unsigned char)0xFE);
5040 emit_operand(dst, src);
5041 }
5042
// paddq: packed 64-bit integer add, dst += src (66 0F D4 /r).
void Assembler::paddq(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  // rex_w raised only when EVEX is available: W marks the 64-bit element size.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xD4);
  emit_int8((unsigned char)(0xC0 | encode));
}
5050
// phaddw: horizontal add of adjacent 16-bit elements (SSSE3, 66 0F 38 01 /r).
void Assembler::phaddw(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse3(), ""));
  // No EVEX form exists for phaddw, so the legacy encoding is forced.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
  emit_int8(0x01);
  emit_int8((unsigned char)(0xC0 | encode));
}
5058
// phaddd: horizontal add of adjacent 32-bit elements (SSSE3, 66 0F 38 02 /r).
void Assembler::phaddd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse3(), ""));
  // No EVEX form exists for phaddd, so the legacy encoding is forced.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
  emit_int8(0x02);
  emit_int8((unsigned char)(0xC0 | encode));
}
5066
// vpaddb: AVX/EVEX packed 8-bit integer add, dst = nds + src.
void Assembler::vpaddb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  // Byte-element EVEX form needs AVX-512BW; fall back to VEX encoding without it.
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFC);
  emit_int8((unsigned char)(0xC0 | encode));
}
5075
// vpaddw: AVX/EVEX packed 16-bit integer add, dst = nds + src.
void Assembler::vpaddw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  // Word-element EVEX form needs AVX-512BW; fall back to VEX encoding without it.
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFD);
  emit_int8((unsigned char)(0xC0 | encode));
}
5084
// vpaddd: AVX/EVEX packed 32-bit integer add, dst = nds + src.
void Assembler::vpaddd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFE);
  emit_int8((unsigned char)(0xC0 | encode));
}
5093
// vpaddq: AVX/EVEX packed 64-bit integer add, dst = nds + src.
void Assembler::vpaddq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  // vex_w set under EVEX: W marks the 64-bit element size for the EVEX form.
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xD4);
  emit_int8((unsigned char)(0xC0 | encode));
}
5102
// vpaddb: AVX/EVEX packed 8-bit integer add with memory source, dst = nds + [src].
void Assembler::vpaddb(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  // Full-vector-mem tuple with no fixed element input size (byte elements).
  attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFC);
  emit_operand(dst, src);
}
5113
// vpaddw: AVX/EVEX packed 16-bit integer add with memory source, dst = nds + [src].
void Assembler::vpaddw(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  // Full-vector-mem tuple with no fixed element input size (word elements).
  attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFD);
  emit_operand(dst, src);
}
5124
// vpaddd: AVX/EVEX packed 32-bit integer add with memory source, dst = nds + [src].
void Assembler::vpaddd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // Full-vector tuple, 32-bit elements: needed for EVEX disp8*N compression.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFE);
  emit_operand(dst, src);
}
5135
// vpaddq: AVX/EVEX packed 64-bit integer add with memory source, dst = nds + [src].
void Assembler::vpaddq(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // Full-vector tuple, 64-bit elements: needed for EVEX disp8*N compression.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xD4);
  emit_operand(dst, src);
}
5146
// psubb: packed 8-bit integer subtract, dst -= src (66 0F F8 /r).
void Assembler::psubb(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  // Byte-element EVEX form needs AVX-512BW; fall back to legacy encoding without it.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xF8);
  emit_int8((unsigned char)(0xC0 | encode));
}
5154
// psubw: packed 16-bit integer subtract, dst -= src (66 0F F9 /r).
void Assembler::psubw(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  // Word-element EVEX form needs AVX-512BW; fall back to legacy encoding without it.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xF9);
  emit_int8((unsigned char)(0xC0 | encode));
}
5162
5163 void Assembler::psubd(XMMRegister dst, XMMRegister src) {
5164 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5165 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5166 emit_int8((unsigned char)0xFA);
5167 emit_int8((unsigned char)(0xC0 | encode));
5168 }
5169
// psubq: packed 64-bit integer subtract, dst -= src (66 0F FB /r).
void Assembler::psubq(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  // rex_w raised only when EVEX is available: W marks the 64-bit element size.
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFB);
  emit_int8((unsigned char)(0xC0 | encode));
}
5177
// vpsubb: AVX/EVEX packed 8-bit integer subtract, dst = nds - src.
void Assembler::vpsubb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  // Byte-element EVEX form needs AVX-512BW; fall back to VEX encoding without it.
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xF8);
  emit_int8((unsigned char)(0xC0 | encode));
}
5186
// vpsubw: AVX/EVEX packed 16-bit integer subtract, dst = nds - src.
void Assembler::vpsubw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  // Word-element EVEX form needs AVX-512BW; fall back to VEX encoding without it.
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xF9);
  emit_int8((unsigned char)(0xC0 | encode));
}
5195
// vpsubd: AVX/EVEX packed 32-bit integer subtract, dst = nds - src.
void Assembler::vpsubd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFA);
  emit_int8((unsigned char)(0xC0 | encode));
}
5204
// vpsubq: AVX/EVEX packed 64-bit integer subtract, dst = nds - src.
void Assembler::vpsubq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  // vex_w set under EVEX: W marks the 64-bit element size (comment label fixed
  // from "rex_w" to "vex_w" to match the constructor parameter and siblings).
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv = first source register
  int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFB);
  emit_int8((unsigned char)(0xC0 | encode));
}
5213
// vpsubb: AVX/EVEX packed 8-bit integer subtract with memory source, dst = nds - [src].
void Assembler::vpsubb(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  // Full-vector-mem tuple with no fixed element input size (byte elements).
  attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xF8);
  emit_operand(dst, src);
}
5224
// vpsubw: AVX/EVEX packed 16-bit integer subtract with memory source, dst = nds - [src].
void Assembler::vpsubw(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  // Full-vector-mem tuple with no fixed element input size (word elements).
  attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
  int nds_enc = nds->is_valid() ? nds->encoding() : 0;
  vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xF9);
  emit_operand(dst, src);
}
5235
// vpsubd: packed 32-bit integer subtract, dst = nds - src, memory-source form.
// Encoding: VEX/EVEX.66.0F FA /r; EVEX tuple FV with 32-bit element input size.
5236 void Assembler::vpsubd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5237 assert(UseAVX > 0, "requires some form of AVX");
5238 InstructionMark im(this);
5239 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5240 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5241 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5242 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5243 emit_int8((unsigned char)0xFA);
5244 emit_operand(dst, src);
5245 }
5246
// vpsubq: packed 64-bit integer subtract, dst = nds - src, memory-source form.
// Encoding: VEX/EVEX.66.0F FB /r; W set when EVEX is available (64-bit elements),
// EVEX tuple FV with 64-bit element input size.
5247 void Assembler::vpsubq(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5248 assert(UseAVX > 0, "requires some form of AVX");
5249 InstructionMark im(this);
5250 InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5251 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
5252 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5253 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5254 emit_int8((unsigned char)0xFB);
5255 emit_operand(dst, src);
5256 }
5257
// pmullw: SSE2 packed 16-bit multiply keeping the low word of each product,
// dst *= src.  Encoding: 66 0F D5 /r (reg-reg).
5258 void Assembler::pmullw(XMMRegister dst, XMMRegister src) {
5259 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5260 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5261 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5262 emit_int8((unsigned char)0xD5);
5263 emit_int8((unsigned char)(0xC0 | encode));
5264 }
5265
// pmulld: SSE4.1 packed 32-bit multiply keeping the low dword of each product,
// dst *= src.  Encoding: 66 0F 38 40 /r (reg-reg).
5266 void Assembler::pmulld(XMMRegister dst, XMMRegister src) {
5267 assert(VM_Version::supports_sse4_1(), "");
5268 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5269 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5270 emit_int8(0x40);
5271 emit_int8((unsigned char)(0xC0 | encode));
5272 }
5273
// vpmullw: AVX three-operand packed 16-bit multiply (low word of each product),
// dst = nds * src.  Encoding: VEX/EVEX.66.0F D5 /r (reg-reg).
5274 void Assembler::vpmullw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5275 assert(UseAVX > 0, "requires some form of AVX");
5276 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5277 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5278 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5279 emit_int8((unsigned char)0xD5);
5280 emit_int8((unsigned char)(0xC0 | encode));
5281 }
5282
// vpmulld: AVX three-operand packed 32-bit multiply (low dword of each product),
// dst = nds * src.  Encoding: VEX/EVEX.66.0F38 40 /r (reg-reg).
5283 void Assembler::vpmulld(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5284 assert(UseAVX > 0, "requires some form of AVX");
5285 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5286 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5287 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5288 emit_int8(0x40);
5289 emit_int8((unsigned char)(0xC0 | encode));
5290 }
5291
// vpmullq: packed 64-bit multiply keeping the low qword of each product,
// dst = nds * src (reg-reg).  AVX-512DQ instruction, hence UseAVX > 2 and
// rex_w = true (EVEX.W1).  Encoding: EVEX.66.0F38.W1 40 /r.
5292 void Assembler::vpmullq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5293 assert(UseAVX > 2, "requires some form of AVX");
5294 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
5295 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5296 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5297 emit_int8(0x40);
5298 emit_int8((unsigned char)(0xC0 | encode));
5299 }
5300
// vpmullw: packed 16-bit multiply (low word kept), dst = nds * src,
// memory-source form.  Encoding: VEX/EVEX.66.0F D5 /r; EVEX tuple FVM.
5301 void Assembler::vpmullw(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5302 assert(UseAVX > 0, "requires some form of AVX");
5303 InstructionMark im(this);
5304 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5305 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5306 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5307 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5308 emit_int8((unsigned char)0xD5);
5309 emit_operand(dst, src);
5310 }
5311
// vpmulld: packed 32-bit multiply (low dword kept), dst = nds * src,
// memory-source form.  Encoding: VEX/EVEX.66.0F38 40 /r; EVEX tuple FV/32-bit.
5312 void Assembler::vpmulld(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5313 assert(UseAVX > 0, "requires some form of AVX");
5314 InstructionMark im(this);
5315 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5316 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5317 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5318 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5319 emit_int8(0x40);
5320 emit_operand(dst, src);
5321 }
5322
// vpmullq: packed 64-bit multiply keeping the low qword of each product,
// dst = nds * src, memory-source form.  Encoding: EVEX.66.0F38.W1 40 /r,
// EVEX tuple FV with 64-bit element input size.
// VPMULLQ is an AVX-512DQ instruction, so require the same AVX level as the
// reg-reg form above; the previous "UseAVX > 0" assert was too weak and would
// let plain-AVX configurations reach an EVEX-only encoding.
5323 void Assembler::vpmullq(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5324 assert(UseAVX > 2, "requires some form of AVX");
5325 InstructionMark im(this);
5326 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
5327 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
5328 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5329 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5330 emit_int8(0x40);
5331 emit_operand(dst, src);
5332 }
5333
5334 // Shift packed integers left by specified number of bits.
// psllw: SSE2 shift packed 16-bit elements left by an immediate count,
// dst <<= shift.  Encoding: 66 0F 71 /6 ib (xmm6 supplies the /6 reg field).
5335 void Assembler::psllw(XMMRegister dst, int shift) {
5336 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5337 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5338 // XMM6 is for /6 encoding: 66 0F 71 /6 ib
5339 int encode = simd_prefix_and_encode(xmm6, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5340 emit_int8(0x71);
5341 emit_int8((unsigned char)(0xC0 | encode));
5342 emit_int8(shift & 0xFF);  // 8-bit immediate shift count
5343 }
5344
5345 void Assembler::pslld(XMMRegister dst, int shift) {
5346 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5347 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5348 // XMM6 is for /6 encoding: 66 0F 72 /6 ib
5349 int encode = simd_prefix_and_encode(xmm6, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5621 assert(UseAVX > 0, "requires some form of AVX");
5622 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5623 int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5624 emit_int8((unsigned char)0xE2);
5625 emit_int8((unsigned char)(0xC0 | encode));
5626 }
5627
5628
5629 // logical operations packed integers
// pand: SSE2 bitwise AND, dst &= src.  Encoding: 66 0F DB /r (reg-reg).
5630 void Assembler::pand(XMMRegister dst, XMMRegister src) {
5631 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5632 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5633 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5634 emit_int8((unsigned char)0xDB);
5635 emit_int8((unsigned char)(0xC0 | encode));
5636 }
5637
// vpand: AVX three-operand bitwise AND, dst = nds & src (reg-reg).
// Encoding: VEX/EVEX.66.0F DB /r.
5638 void Assembler::vpand(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5639 assert(UseAVX > 0, "requires some form of AVX");
5640 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5641 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5642 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5643 emit_int8((unsigned char)0xDB);
5644 emit_int8((unsigned char)(0xC0 | encode));
5645 }
5646
// vpand: AVX three-operand bitwise AND, dst = nds & src, memory-source form.
// Encoding: VEX/EVEX.66.0F DB /r; EVEX tuple FV with 32-bit element input size.
5647 void Assembler::vpand(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5648 assert(UseAVX > 0, "requires some form of AVX");
5649 InstructionMark im(this);
5650 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5651 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5652 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5653 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5654 emit_int8((unsigned char)0xDB);
5655 emit_operand(dst, src);
5656 }
5657
// pandn: SSE2 bitwise AND-NOT, dst = ~dst & src.  Encoding: 66 0F DF /r.
// NOTE(review): unlike pand/por/pxor this sets vex_w when EVEX is present,
// presumably to pick the 64-bit-element EVEX form -- confirm this is intended.
5658 void Assembler::pandn(XMMRegister dst, XMMRegister src) {
5659 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5660 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5661 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5662 emit_int8((unsigned char)0xDF);
5663 emit_int8((unsigned char)(0xC0 | encode));
5664 }
5665
// por: SSE2 bitwise OR, dst |= src.  Encoding: 66 0F EB /r (reg-reg).
5666 void Assembler::por(XMMRegister dst, XMMRegister src) {
5667 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5668 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5669 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5670 emit_int8((unsigned char)0xEB);
5671 emit_int8((unsigned char)(0xC0 | encode));
5672 }
5673
// vpor: AVX three-operand bitwise OR, dst = nds | src (reg-reg).
// Encoding: VEX/EVEX.66.0F EB /r.
5674 void Assembler::vpor(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5675 assert(UseAVX > 0, "requires some form of AVX");
5676 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5677 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5678 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5679 emit_int8((unsigned char)0xEB);
5680 emit_int8((unsigned char)(0xC0 | encode));
5681 }
5682
// vpor: AVX three-operand bitwise OR, dst = nds | src, memory-source form.
// Encoding: VEX/EVEX.66.0F EB /r; EVEX tuple FV with 32-bit element input size.
5683 void Assembler::vpor(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5684 assert(UseAVX > 0, "requires some form of AVX");
5685 InstructionMark im(this);
5686 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5687 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5688 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5689 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5690 emit_int8((unsigned char)0xEB);
5691 emit_operand(dst, src);
5692 }
5693
// pxor: SSE2 bitwise XOR, dst ^= src.  Encoding: 66 0F EF /r (reg-reg).
5694 void Assembler::pxor(XMMRegister dst, XMMRegister src) {
5695 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5696 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5697 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5698 emit_int8((unsigned char)0xEF);
5699 emit_int8((unsigned char)(0xC0 | encode));
5700 }
5701
// vpxor: AVX three-operand bitwise XOR, dst = nds ^ src (reg-reg).
// Encoding: VEX/EVEX.66.0F EF /r.
5702 void Assembler::vpxor(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5703 assert(UseAVX > 0, "requires some form of AVX");
5704 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5705 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5706 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5707 emit_int8((unsigned char)0xEF);
5708 emit_int8((unsigned char)(0xC0 | encode));
5709 }
5710
// vpxor: AVX three-operand bitwise XOR, dst = nds ^ src, memory-source form.
// Encoding: VEX/EVEX.66.0F EF /r; EVEX tuple FV with 32-bit element input size.
5711 void Assembler::vpxor(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5712 assert(UseAVX > 0, "requires some form of AVX");
5713 InstructionMark im(this);
5714 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5715 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5716 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5717 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5718 emit_int8((unsigned char)0xEF);
5719 emit_operand(dst, src);
5720 }
5721
5722
// vinsertf128: insert a 128-bit lane from src into the lower/upper half of
// nds, result in dst (reg-reg).  Encoding: VEX.256.66.0F3A 18 /r ib.
// On AVX-512 without VL the operation is widened to 512-bit vector length.
5723 void Assembler::vinsertf128(XMMRegister dst, XMMRegister nds, XMMRegister src, uint8_t imm8) {
5724 assert(VM_Version::supports_avx(), "");
5725 assert(imm8 <= 0x01, "imm8: %u", imm8);
5726 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5727 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5728 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5729 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5730 emit_int8(0x18);
5731 emit_int8((unsigned char)(0xC0 | encode));
5732 // 0x00 - insert into lower 128 bits
5733 // 0x01 - insert into upper 128 bits
5734 emit_int8(imm8 & 0x01);
5735 }
5736
// vinsertf64x4: AVX-512 insert a 256-bit lane from src into nds, result in
// dst (reg-reg).  Encoding: EVEX.512.66.0F3A.W1 1A /r ib.
5737 void Assembler::vinsertf64x4(XMMRegister dst, XMMRegister nds, XMMRegister src, uint8_t imm8) {
5738 assert(VM_Version::supports_evex(), "");
5739 assert(imm8 <= 0x01, "imm8: %u", imm8);
5740 InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5741 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5742 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5743 emit_int8(0x1A);
5744 emit_int8((unsigned char)(0xC0 | encode));
5745 // 0x00 - insert into lower 256 bits
5746 // 0x01 - insert into upper 256 bits
5747 emit_int8(imm8 & 0x01);
5748 }
5749
// vinsertf64x4: AVX-512 insert a 256-bit lane loaded from memory into nds,
// result in dst.  Encoding: EVEX.512.66.0F3A.W1 1A /r ib, tuple T4/64-bit.
5750 void Assembler::vinsertf64x4(XMMRegister dst, XMMRegister nds, Address src, uint8_t imm8) {
5751 assert(VM_Version::supports_evex(), "");
5752 assert(dst != xnoreg, "sanity");
5753 assert(imm8 <= 0x01, "imm8: %u", imm8);
5754 InstructionMark im(this);
5755 InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5756 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5757 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_64bit);
5758 // swap src<->dst for encoding
5759 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5760 emit_int8(0x1A);
5761 emit_operand(dst, src);
5762 // 0x00 - insert into lower 256 bits
5763 // 0x01 - insert into upper 256 bits
5764 emit_int8(imm8 & 0x01);
5765 }
5766
// vinsertf32x4: AVX-512 insert a 128-bit lane (one of four) from src into
// nds, result in dst (reg-reg).  Encoding: EVEX.512.66.0F3A.W0 18 /r ib.
5767 void Assembler::vinsertf32x4(XMMRegister dst, XMMRegister nds, XMMRegister src, uint8_t imm8) {
5768 assert(VM_Version::supports_evex(), "");
5769 assert(imm8 <= 0x03, "imm8: %u", imm8);
5770 InstructionAttr attributes(AVX_512bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5771 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5772 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5773 emit_int8(0x18);
5774 emit_int8((unsigned char)(0xC0 | encode));
5775 // 0x00 - insert into q0 128 bits (0..127)
5776 // 0x01 - insert into q1 128 bits (128..255)
5777 // 0x02 - insert into q2 128 bits (256..383)
5778 // 0x03 - insert into q3 128 bits (384..511)
5779 emit_int8(imm8 & 0x03);
5780 }
5781
// vinsertf32x4: insert a 128-bit lane loaded from memory into nds, result in
// dst.  Opcode 0F3A 18 /r ib; tuple T4 with 32-bit element input size.
// NOTE(review): this asserts supports_avx() while the reg-reg form asserts
// supports_evex(); without EVEX the same opcode encodes as VEX vinsertf128,
// which only accepts a 1-bit immediate -- verify callers pass imm8 <= 1 on
// plain-AVX targets.
5782 void Assembler::vinsertf32x4(XMMRegister dst, XMMRegister nds, Address src, uint8_t imm8) {
5783 assert(VM_Version::supports_avx(), "");
5784 assert(dst != xnoreg, "sanity");
5785 assert(imm8 <= 0x03, "imm8: %u", imm8);
5786 int vector_len = VM_Version::supports_evex() ? AVX_512bit : AVX_256bit;
5787 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5788 InstructionMark im(this);
5789 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5790 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5791 // swap src<->dst for encoding
5792 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5793 emit_int8(0x18);
5794 emit_operand(dst, src);
5795 // 0x00 - insert into q0 128 bits (0..127)
5796 // 0x01 - insert into q1 128 bits (128..255)
5797 // 0x02 - insert into q2 128 bits (256..383)
5798 // 0x03 - insert into q3 128 bits (384..511)
5799 emit_int8(imm8 & 0x03);
5800 }
5801
// vinsertf128: insert a 128-bit lane loaded from memory into nds, result in
// dst.  Encoding: VEX.256.66.0F3A 18 /r ib; widened to 512-bit length on
// AVX-512 without VL.
5802 void Assembler::vinsertf128(XMMRegister dst, XMMRegister nds, Address src, uint8_t imm8) {
5803 assert(VM_Version::supports_avx(), "");
5804 assert(dst != xnoreg, "sanity");
5805 assert(imm8 <= 0x01, "imm8: %u", imm8);
5806 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5807 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5808 InstructionMark im(this);
5809 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5810 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5811 // swap src<->dst for encoding
5812 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5813 emit_int8(0x18);
5814 emit_operand(dst, src);
5815 // 0x00 - insert into lower 128 bits
5816 // 0x01 - insert into upper 128 bits
5817 emit_int8(imm8 & 0x01);
5818 }
5819
// vextractf128: extract a 128-bit lane from a 256-bit src into dst (reg-reg).
// Encoding: VEX.256.66.0F3A 19 /r ib; note src/dst are swapped for encoding
// (the extract destination goes in the r/m field).
5820 void Assembler::vextractf128(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5821 assert(VM_Version::supports_avx(), "");
5822 assert(imm8 <= 0x01, "imm8: %u", imm8);
5823 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5824 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5825 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5826 emit_int8(0x19);
5827 emit_int8((unsigned char)(0xC0 | encode));
5828 // 0x00 - extract from lower 128 bits
5829 // 0x01 - extract from upper 128 bits
5830 emit_int8(imm8 & 0x01);
5831 }
5832
// vextractf128: extract a 128-bit lane from src and store it to memory.
// Encoding: VEX.256.66.0F3A 19 /r ib, memory-destination form.
5833 void Assembler::vextractf128(Address dst, XMMRegister src, uint8_t imm8) {
5834 assert(VM_Version::supports_avx(), "");
5835 assert(src != xnoreg, "sanity");
5836 assert(imm8 <= 0x01, "imm8: %u", imm8);
5837 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5838 InstructionMark im(this);
5839 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5840 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5841 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5842 emit_int8(0x19);
5843 emit_operand(src, dst);
5844 // 0x00 - extract from lower 128 bits
5845 // 0x01 - extract from upper 128 bits
5846 emit_int8(imm8 & 0x01);
5847 }
5848
// vinserti128: AVX2 insert a 128-bit integer lane from src into nds, result
// in dst (reg-reg).  Encoding: VEX.256.66.0F3A 38 /r ib.
5849 void Assembler::vinserti128(XMMRegister dst, XMMRegister nds, XMMRegister src, uint8_t imm8) {
5850 assert(VM_Version::supports_avx2(), "");
5851 assert(imm8 <= 0x01, "imm8: %u", imm8);
5852 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5853 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5854 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5855 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5856 emit_int8(0x38);
5857 emit_int8((unsigned char)(0xC0 | encode));
5858 // 0x00 - insert into lower 128 bits
5859 // 0x01 - insert into upper 128 bits
5860 emit_int8(imm8 & 0x01);
5861 }
5862
// vinserti64x4: AVX-512 insert a 256-bit integer lane from src into nds,
// result in dst (reg-reg).  Encoding: EVEX.512.66.0F3A.W1 38 /r ib.
5863 void Assembler::vinserti64x4(XMMRegister dst, XMMRegister nds, XMMRegister src, uint8_t imm8) {
5864 assert(VM_Version::supports_evex(), "");
5865 assert(imm8 <= 0x01, "imm8: %u", imm8);
5866 InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5867 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5868 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5869 emit_int8(0x38);
5870 emit_int8((unsigned char)(0xC0 | encode));
5871 // 0x00 - insert into lower 256 bits
5872 // 0x01 - insert into upper 256 bits
5873 emit_int8(imm8 & 0x01);
5874 }
5875
// vinserti128: AVX2 insert a 128-bit integer lane loaded from memory into
// nds, result in dst.  Encoding: VEX.256.66.0F3A 38 /r ib.
5876 void Assembler::vinserti128(XMMRegister dst, XMMRegister nds, Address src, uint8_t imm8) {
5877 assert(VM_Version::supports_avx2(), "");
5878 assert(dst != xnoreg, "sanity");
5879 assert(imm8 <= 0x01, "imm8: %u", imm8);
5880 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5881 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
5882 InstructionMark im(this);
5883 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5884 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5885 // swap src<->dst for encoding
5886 vex_prefix(src, nds_enc, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5887 emit_int8(0x38);
5888 emit_operand(dst, src);
5889 // 0x00 - insert into lower 128 bits
5890 // 0x01 - insert into upper 128 bits
5891 emit_int8(imm8 & 0x01);
5892 }
5893
// vextracti128: extract a 128-bit integer lane from a 256-bit src into dst
// (reg-reg).  Encoding: VEX.256.66.0F3A 39 /r ib; src/dst are swapped for
// encoding (the extract destination goes in the r/m field).
// VEXTRACTI128 is an AVX2 instruction, so assert supports_avx2() to match the
// memory-destination overload below and vinserti128 above; the previous
// supports_avx() check was too weak.
5894 void Assembler::vextracti128(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5895 assert(VM_Version::supports_avx2(), "");
5896 assert(imm8 <= 0x01, "imm8: %u", imm8);
5897 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5898 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5899 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5900 emit_int8(0x39);
5901 emit_int8((unsigned char)(0xC0 | encode));
5902 // 0x00 - extract from lower 128 bits
5903 // 0x01 - extract from upper 128 bits
5904 emit_int8(imm8 & 0x01);
5905 }
5906
// vextracti128: AVX2 extract a 128-bit integer lane from src and store it to
// memory.  Encoding: VEX.256.66.0F3A 39 /r ib, memory-destination form.
5907 void Assembler::vextracti128(Address dst, XMMRegister src, uint8_t imm8) {
5908 assert(VM_Version::supports_avx2(), "");
5909 assert(src != xnoreg, "sanity");
5910 assert(imm8 <= 0x01, "imm8: %u", imm8);
5911 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5912 InstructionMark im(this);
5913 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5914 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5915 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5916 emit_int8(0x39);
5917 emit_operand(src, dst);
5918 // 0x00 - extract from lower 128 bits
5919 // 0x01 - extract from upper 128 bits
5920 emit_int8(imm8 & 0x01);
5921 }
5922
// vextracti64x4: AVX-512 extract a 256-bit integer lane from a 512-bit src
// into dst (reg-reg).  Encoding: EVEX.512.66.0F3A.W1 3B /r ib.
5923 void Assembler::vextracti64x4(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5924 assert(VM_Version::supports_evex(), "");
5925 assert(imm8 <= 0x01, "imm8: %u", imm8);
5926 InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5927 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5928 emit_int8(0x3B);
5929 emit_int8((unsigned char)(0xC0 | encode));
5930 // 0x00 - extract from lower 256 bits
5931 // 0x01 - extract from upper 256 bits
5932 emit_int8(imm8 & 0x01);
5933 }
5934
// vextracti64x2: AVX-512 extract one of four 128-bit integer lanes from a
// 512-bit src into dst (reg-reg).  Opcode 0F3A 39 /r ib; W selects the
// 64x2 form and is suppressed when AVX512DQ is unavailable (legacy dq mode).
5935 void Assembler::vextracti64x2(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5936 assert(VM_Version::supports_evex(), "");
5937 assert(imm8 <= 0x03, "imm8: %u", imm8);
5938 InstructionAttr attributes(AVX_512bit, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5939 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5940 emit_int8(0x39);
5941 emit_int8((unsigned char)(0xC0 | encode));
5942 // 0x00 - extract from bits 127:0
5943 // 0x01 - extract from bits 255:128
5944 // 0x02 - extract from bits 383:256
5945 // 0x03 - extract from bits 511:384
5946 emit_int8(imm8 & 0x03);
5947 }
5948
// vextractf64x4: AVX-512 extract a 256-bit FP lane from a 512-bit src into
// dst (reg-reg).  Encoding: EVEX.512.66.0F3A.W1 1B /r ib.
5949 void Assembler::vextractf64x4(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5950 assert(VM_Version::supports_evex(), "");
5951 assert(imm8 <= 0x01, "imm8: %u", imm8);
5952 InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5953 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5954 emit_int8(0x1B);
5955 emit_int8((unsigned char)(0xC0 | encode));
5956 // 0x00 - extract from lower 256 bits
5957 // 0x01 - extract from upper 256 bits
5958 emit_int8(imm8 & 0x01);
5959 }
5960
// vextractf64x4: AVX-512 extract a 256-bit FP lane from src and store it to
// memory.  Encoding: EVEX.512.66.0F3A.W1 1B /r ib, tuple T4/64-bit.
5961 void Assembler::vextractf64x4(Address dst, XMMRegister src, uint8_t imm8) {
5962 assert(VM_Version::supports_evex(), "");
5963 assert(src != xnoreg, "sanity");
5964 assert(imm8 <= 0x01, "imm8: %u", imm8);
5965 InstructionMark im(this);
5966 InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5967 attributes.set_address_attributes(/* tuple_type */ EVEX_T4,/* input_size_in_bits */ EVEX_64bit);
5968 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5969 emit_int8(0x1B);
5970 emit_operand(src, dst);
5971 // 0x00 - extract from lower 256 bits
5972 // 0x01 - extract from upper 256 bits
5973 emit_int8(imm8 & 0x01);
5974 }
5975
// vextractf32x4: extract one 128-bit FP lane from src into dst (reg-reg).
// Opcode 0F3A 19 /r ib.  NOTE(review): asserts only supports_avx(); without
// EVEX this encodes as VEX vextractf128, which accepts a 1-bit immediate --
// verify callers pass imm8 <= 1 on plain-AVX targets.
5976 void Assembler::vextractf32x4(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5977 assert(VM_Version::supports_avx(), "");
5978 assert(imm8 <= 0x03, "imm8: %u", imm8);
5979 int vector_len = VM_Version::supports_evex() ? AVX_512bit : AVX_256bit;
5980 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5981 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5982 emit_int8(0x19);
5983 emit_int8((unsigned char)(0xC0 | encode));
5984 // 0x00 - extract from bits 127:0
5985 // 0x01 - extract from bits 255:128
5986 // 0x02 - extract from bits 383:256
5987 // 0x03 - extract from bits 511:384
5988 emit_int8(imm8 & 0x03);
5989 }
5990
5991 void Assembler::vextractf32x4(Address dst, XMMRegister src, uint8_t imm8) {
5992 assert(VM_Version::supports_evex(), "");
6002 // 0x01 - extract from bits 255:128
6003 // 0x02 - extract from bits 383:256
6004 // 0x03 - extract from bits 511:384
6005 emit_int8(imm8 & 0x03);
6006 }
6007
// vextractf64x2: AVX-512 extract one of four 128-bit FP lanes from a 512-bit
// src into dst (reg-reg).  Opcode 0F3A 19 /r ib; W selects the 64x2 form and
// is suppressed when AVX512DQ is unavailable (legacy dq mode).
6008 void Assembler::vextractf64x2(XMMRegister dst, XMMRegister src, uint8_t imm8) {
6009 assert(VM_Version::supports_evex(), "");
6010 assert(imm8 <= 0x03, "imm8: %u", imm8);
6011 InstructionAttr attributes(AVX_512bit, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
6012 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6013 emit_int8(0x19);
6014 emit_int8((unsigned char)(0xC0 | encode));
6015 // 0x00 - extract from bits 127:0
6016 // 0x01 - extract from bits 255:128
6017 // 0x02 - extract from bits 383:256
6018 // 0x03 - extract from bits 511:384
6019 emit_int8(imm8 & 0x03);
6020 }
6021
6022 // vpbroadcastd: AVX2 duplicate the low 4-byte integer of src into all 8 dword
6023 // locations of the 256-bit dest.  Encoding: VEX.256.66.0F38 58 /r.
void Assembler::vpbroadcastd(XMMRegister dst, XMMRegister src) {
6024 assert(VM_Version::supports_avx2(), "");
6025 InstructionAttr attributes(AVX_256bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6026 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6027 emit_int8(0x58);
6028 emit_int8((unsigned char)(0xC0 | encode));
6029 }
6030
6031 // vpbroadcastw: AVX2 duplicate the low 2-byte integer of src into all 16 word
6032 // locations of the 256-bit dest.  Encoding: VEX.256.66.0F38 79 /r.
void Assembler::vpbroadcastw(XMMRegister dst, XMMRegister src) {
6033 assert(VM_Version::supports_avx2(), "");
6034 InstructionAttr attributes(AVX_256bit, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6035 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6036 emit_int8(0x79);
6037 emit_int8((unsigned char)(0xC0 | encode));
6038 }
6039
6040 // evpbroadcastb: duplicate the low byte of src into 16|32|64 locations of dest
// depending on vector_len; requires AVX512BW (and AVX512VL for sub-512 lengths).
6041 void Assembler::evpbroadcastb(XMMRegister dst, XMMRegister src, int vector_len) {
6042 assert(VM_Version::supports_evex(), "");
6043 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6044 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6045 emit_int8(0x78);
6046 emit_int8((unsigned char)(0xC0 | encode));
6047 }
6048
// evpbroadcastb: broadcast one byte loaded from memory into every byte lane
// of dest.  Encoding: EVEX.66.0F38 78 /r, tuple T1S with 8-bit element size.
6049 void Assembler::evpbroadcastb(XMMRegister dst, Address src, int vector_len) {
6050 assert(VM_Version::supports_evex(), "");
6051 assert(dst != xnoreg, "sanity");
6052 InstructionMark im(this);
6053 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6054 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_8bit);
6055 // swap src<->dst for encoding
6056 vex_prefix(src, dst->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6057 emit_int8(0x78);
6058 emit_operand(dst, src);
6059 }
6060
6061 // evpbroadcastw: duplicate the low word of src into 8|16|32 locations of dest
// depending on vector_len; requires AVX512BW (and AVX512VL for sub-512 lengths).
6062 void Assembler::evpbroadcastw(XMMRegister dst, XMMRegister src, int vector_len) {
6063 assert(VM_Version::supports_evex(), "");
6064 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6065 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6066 emit_int8(0x79);
6067 emit_int8((unsigned char)(0xC0 | encode));
6068 }
6069
// evpbroadcastw: broadcast one word loaded from memory into every word lane
// of dest.  Encoding: EVEX.66.0F38 79 /r, tuple T1S with 16-bit element size.
6070 void Assembler::evpbroadcastw(XMMRegister dst, Address src, int vector_len) {
6071 assert(VM_Version::supports_evex(), "");
6072 assert(dst != xnoreg, "sanity");
6073 InstructionMark im(this);
6074 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6075 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_16bit);
6076 // swap src<->dst for encoding
6077 vex_prefix(src, dst->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6078 emit_int8(0x79);
6079 emit_operand(dst, src);
6080 }
6081
6082 // evpbroadcastd: duplicate the low dword of src into 4|8|16 locations of dest
// depending on vector_len; requires AVX512VL for sub-512-bit lengths.
6083 void Assembler::evpbroadcastd(XMMRegister dst, XMMRegister src, int vector_len) {
6084 assert(VM_Version::supports_evex(), "");
6085 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6086 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6087 emit_int8(0x58);
6088 emit_int8((unsigned char)(0xC0 | encode));
6089 }
6090
// evpbroadcastd: broadcast one dword loaded from memory into every dword lane
// of dest.  Encoding: EVEX.66.0F38 58 /r, tuple T1S with 32-bit element size.
6091 void Assembler::evpbroadcastd(XMMRegister dst, Address src, int vector_len) {
6092 assert(VM_Version::supports_evex(), "");
6093 assert(dst != xnoreg, "sanity");
6094 InstructionMark im(this);
6095 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6096 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
6097 // swap src<->dst for encoding
6098 vex_prefix(src, dst->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6099 emit_int8(0x58);
6100 emit_operand(dst, src);
6101 }
6102
6103 // duplicate 8-byte integer data from src into 4|8|16 locations in dest : requires AVX512VL
// Register form: vex_w=true selects the 64-bit element width; opcode 0x59 (66 0F 38 59).
6104 void Assembler::evpbroadcastq(XMMRegister dst, XMMRegister src, int vector_len) {
6105 assert(VM_Version::supports_evex(), "");
6106 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6107 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6108 emit_int8(0x59);
6109 emit_int8((unsigned char)(0xC0 | encode));
6110 }
6111
// Memory form: broadcasts a 64-bit element loaded from src (tuple T1S, 64-bit input).
6112 void Assembler::evpbroadcastq(XMMRegister dst, Address src, int vector_len) {
6113 assert(VM_Version::supports_evex(), "");
6114 assert(dst != xnoreg, "sanity");
6115 InstructionMark im(this);
6116 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6117 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
6118 // swap src<->dst for encoding
6119 vex_prefix(src, dst->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6120 emit_int8(0x59);
6121 emit_operand(dst, src);
6122 }
6123
6124 // duplicate single precision fp from src into 4|8|16 locations in dest : requires AVX512VL
// Register form: emits opcode 0x18 (66 0F 38 18) with register-direct ModRM.
6125 void Assembler::evpbroadcastss(XMMRegister dst, XMMRegister src, int vector_len) {
6126 assert(VM_Version::supports_evex(), "");
6127 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6128 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6129 emit_int8(0x18);
6130 emit_int8((unsigned char)(0xC0 | encode));
6131 }
6132
// Memory form: broadcasts a 32-bit fp element from src. Note this form passes 0 as
// the nds slot (unlike the integer broadcast mem forms, which pass dst->encoding()).
6133 void Assembler::evpbroadcastss(XMMRegister dst, Address src, int vector_len) {
6134 assert(VM_Version::supports_evex(), "");
6135 assert(dst != xnoreg, "sanity");
6136 InstructionMark im(this);
6137 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6138 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
6139 // swap src<->dst for encoding
6140 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6141 emit_int8(0x18);
6142 emit_operand(dst, src);
6143 }
6144
6145 // duplicate double precision fp from src into 2|4|8 locations in dest : requires AVX512VL
// Register form: vex_w=true for 64-bit elements; opcode 0x19 (66 0F 38 19).
6146 void Assembler::evpbroadcastsd(XMMRegister dst, XMMRegister src, int vector_len) {
6147 assert(VM_Version::supports_evex(), "");
6148 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6149 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6150 emit_int8(0x19);
6151 emit_int8((unsigned char)(0xC0 | encode));
6152 }
6153
// Memory form: broadcasts a 64-bit fp element from src (tuple T1S, 64-bit input).
6154 void Assembler::evpbroadcastsd(XMMRegister dst, Address src, int vector_len) {
6155 assert(VM_Version::supports_evex(), "");
6156 assert(dst != xnoreg, "sanity");
6157 InstructionMark im(this);
6158 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6159 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
6160 // swap src<->dst for encoding
6161 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6162 emit_int8(0x19);
6163 emit_operand(dst, src);
6164 }
6165
6166 // duplicate 1-byte integer data from src into 16|32|64 locations in dest : requires AVX512BW and AVX512VL
// GPR-source form: if the prefix encoder chose an EVEX encoding, the GPR-source
// opcode 0x7A is used; otherwise it falls back to opcode 0x78.
6167 void Assembler::evpbroadcastb(XMMRegister dst, Register src, int vector_len) {
6168 assert(VM_Version::supports_evex(), "");
6169 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6170 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6171 if (attributes.is_evex_instruction()) {
6172 emit_int8(0x7A);
6173 } else {
6174 emit_int8(0x78);
6175 }
6176 emit_int8((unsigned char)(0xC0 | encode));
6177 }
6178
6179 // duplicate 2-byte integer data from src into 8|16|32 locations in dest : requires AVX512BW and AVX512VL
// GPR-source form: EVEX encoding uses the GPR-source opcode 0x7B, else falls back to 0x79.
6180 void Assembler::evpbroadcastw(XMMRegister dst, Register src, int vector_len) {
6181 assert(VM_Version::supports_evex(), "");
6182 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6183 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6184 if (attributes.is_evex_instruction()) {
6185 emit_int8(0x7B);
6186 } else {
6187 emit_int8(0x79);
6188 }
6189 emit_int8((unsigned char)(0xC0 | encode));
6190 }
6191
6192 // duplicate 4-byte integer data from src into 4|8|16 locations in dest : requires AVX512VL
// GPR-source form: EVEX encoding uses the GPR-source opcode 0x7C, else falls back to 0x58.
6193 void Assembler::evpbroadcastd(XMMRegister dst, Register src, int vector_len) {
6194 assert(VM_Version::supports_evex(), "");
6195 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6196 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6197 if (attributes.is_evex_instruction()) {
6198 emit_int8(0x7C);
6199 } else {
6200 emit_int8(0x58);
6201 }
6202 emit_int8((unsigned char)(0xC0 | encode));
6203 }
6204
6205 // duplicate 8-byte integer data from src into 4|8|16 locations in dest : requires AVX512VL
// GPR-source form: same 0x7C opcode as the dword variant, distinguished by vex_w=true
// (64-bit element width); non-EVEX fallback uses opcode 0x59.
6206 void Assembler::evpbroadcastq(XMMRegister dst, Register src, int vector_len) {
6207 assert(VM_Version::supports_evex(), "");
6208 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6209 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6210 if (attributes.is_evex_instruction()) {
6211 emit_int8(0x7C);
6212 } else {
6213 emit_int8(0x59);
6214 }
6215 emit_int8((unsigned char)(0xC0 | encode));
6216 }
6217
6218 // Carry-Less Multiplication Quadword
// Legacy-encoded PCLMULQDQ (66 0F 3A 44 /r ib); mask selects which quadwords
// of dst/src are multiplied, and is emitted as the trailing imm8.
6219 void Assembler::pclmulqdq(XMMRegister dst, XMMRegister src, int mask) {
6220 assert(VM_Version::supports_clmul(), "");
6221 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6222 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6223 emit_int8(0x44);
6224 emit_int8((unsigned char)(0xC0 | encode));
6225 emit_int8((unsigned char)mask);
6226 }
6227
6228 // Carry-Less Multiplication Quadword
// VEX three-operand form; nds may be xnoreg, in which case 0 is encoded in the vvvv slot.
6229 void Assembler::vpclmulqdq(XMMRegister dst, XMMRegister nds, XMMRegister src, int mask) {
6230 assert(VM_Version::supports_avx() && VM_Version::supports_clmul(), "");
6231 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6232 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
6233 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6234 emit_int8(0x44);
6235 emit_int8((unsigned char)(0xC0 | encode));
6236 emit_int8((unsigned char)mask);
6237 }
6238
// Emits VZEROUPPER (VEX 0F 77). The prefix helper is called only for its side
// effect of emitting the VEX prefix; the returned encode value is unused.
6239 void Assembler::vzeroupper() {
6240 assert(VM_Version::supports_avx(), "");
6241 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6242 (void)vex_prefix_and_encode(0, 0, 0, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
6243 emit_int8(0x77);
6244 }
6245
6246
6247 #ifndef _LP64
6248 // 32bit only pieces of the assembler
6249
6250 void Assembler::cmp_literal32(Register src1, int32_t imm32, RelocationHolder const& rspec) {
6251 // NO PREFIX AS NEVER 64BIT
6252 InstructionMark im(this);
6253 emit_int8((unsigned char)0x81);
6955 } else {
6956 assert((nds == dst) || (nds == src) || (nds == xnoreg), "wrong sse encoding");
6957 return rex_prefix_and_encode(dst_enc, src_enc, pre, opc, attributes->is_rex_vex_w());
6958 }
6959 }
6960
// Emits CMPPD (66 0F C2 /r ib) with the comparison predicate cop in the low
// nibble of the trailing imm8. The !supports_evex() assert pins this emitter
// to the legacy/VEX encoding path only.
6961 void Assembler::cmppd(XMMRegister dst, XMMRegister nds, XMMRegister src, int cop, int vector_len) {
6962 assert(VM_Version::supports_avx(), "");
6963 assert(!VM_Version::supports_evex(), "");
6964 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6965 int encode = simd_prefix_and_encode(dst, nds, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
6966 emit_int8((unsigned char)0xC2);
6967 emit_int8((unsigned char)(0xC0 | encode));
6968 emit_int8((unsigned char)(0xF & cop));
6969 }
6970
// Four-operand VEX blend: src2 is encoded in the upper nibble of a trailing
// imm8 ("is4" encoding) rather than in ModRM.
// NOTE(review): the emitted opcode is 0x4B (a blendv-style 0F 3A opcode taking an
// is4 fourth register), while the name suggests VPBLENDD (0x02, imm8 control) —
// confirm against callers that 0x4B is the intended instruction.
6971 void Assembler::vpblendd(XMMRegister dst, XMMRegister nds, XMMRegister src1, XMMRegister src2, int vector_len) {
6972 assert(VM_Version::supports_avx(), "");
6973 assert(!VM_Version::supports_evex(), "");
6974 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6975 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
6976 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src1->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6977 emit_int8((unsigned char)0x4B);
6978 emit_int8((unsigned char)(0xC0 | encode));
6979 int src2_enc = src2->encoding();
6980 emit_int8((unsigned char)(0xF0 & src2_enc<<4));
6981 }
6982
6983
6984 #ifndef _LP64
6985
// 32-bit-only single-byte INC r32 (0x40 + reg); this opcode range is the REX
// prefix space on x86-64, hence the enclosing #ifndef _LP64 guard.
6986 void Assembler::incl(Register dst) {
6987 // Don't use it directly. Use MacroAssembler::incrementl() instead.
6988 emit_int8(0x40 | dst->encoding());
6989 }
6990
// 32-bit lea is just the 32-bit leal form.
6991 void Assembler::lea(Register dst, Address src) {
6992 leal(dst, src);
6993 }
6994
6995 void Assembler::mov_literal32(Address dst, int32_t imm32, RelocationHolder const& rspec) {
6996 InstructionMark im(this);
|
3130 assert((UseAVX > 0), "SSE mode requires address alignment 16 bytes");
3131 InstructionMark im(this);
3132 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3133 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
3134 simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3135 emit_int8(0x67);
3136 emit_operand(dst, src);
3137 }
3138
// Emits PACKUSWB (66 0F 67 /r), register-register form; dst doubles as the
// first source operand (dst passed twice to the prefix encoder).
3139 void Assembler::packuswb(XMMRegister dst, XMMRegister src) {
3140 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3141 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3142 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3143 emit_int8(0x67);
3144 emit_int8((unsigned char)(0xC0 | encode));
3145 }
3146
// Emits the VEX/EVEX three-operand PACKUSWB (66 0F 67 /r).
// dst receives the result, nds is the first source (vvvv operand), src the second.
// nds may be xnoreg; guard with is_valid() before reading its encoding, matching
// the convention used by the other vex-encoded emitters in this file (e.g. vpclmulqdq).
3147 void Assembler::vpackuswb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3148 assert(UseAVX > 0, "some form of AVX must be enabled");
3149 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3150 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
3151 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3152 emit_int8(0x67);
3153 emit_int8((unsigned char)(0xC0 | encode));
3154 }
3154
// Emits VPERMQ (VEX.W1 66 0F 3A 00 /r ib); imm8 is the qword permutation control.
// The ModRM byte is cast to unsigned char before emission, consistent with every
// other emit_int8((unsigned char)(0xC0 | encode)) site in this file — passing the
// raw int relies on implementation-defined narrowing for values >= 0x80.
3155 void Assembler::vpermq(XMMRegister dst, XMMRegister src, int imm8, int vector_len) {
3156 assert(VM_Version::supports_avx2(), "");
3157 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3158 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3159 emit_int8(0x00);
3160 emit_int8((unsigned char)(0xC0 | encode));
3161 emit_int8(imm8);
3162 }
3163
// Emits PAUSE (F3 90), the spin-loop hint.
3164 void Assembler::pause() {
3165 emit_int8((unsigned char)0xF3);
3166 emit_int8((unsigned char)0x90);
3167 }
3168
// Emits PCMPESTRI (66 0F 3A 61 /r ib) with a memory operand; imm8 is the
// string-compare control byte, emitted after the operand bytes.
3169 void Assembler::pcmpestri(XMMRegister dst, Address src, int imm8) {
3170 assert(VM_Version::supports_sse4_2(), "");
3171 InstructionMark im(this);
3172 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3173 simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3174 emit_int8(0x61);
3175 emit_operand(dst, src);
3176 emit_int8(imm8);
3177 }
3178
3181 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3182 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3183 emit_int8(0x61);
3184 emit_int8((unsigned char)(0xC0 | encode));
3185 emit_int8(imm8);
3186 }
3187
3188 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
// Legacy PCMPEQB (66 0F 74 /r); dst is both destination and first source.
3189 void Assembler::pcmpeqb(XMMRegister dst, XMMRegister src) {
3190 assert(VM_Version::supports_sse2(), "");
3191 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3192 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3193 emit_int8(0x74);
3194 emit_int8((unsigned char)(0xC0 | encode));
3195 }
3196
3197 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
// VEX three-operand VPCMPEQB (same 0x74 opcode, nds as first source).
3198 void Assembler::vpcmpeqb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3199 assert(VM_Version::supports_avx(), "");
3200 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3201 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3202 emit_int8(0x74);
3203 emit_int8((unsigned char)(0xC0 | encode));
3204 }
3205
3206 // In this context, kdst is written the mask used to process the equal components
// EVEX VPCMPEQB writing an opmask register kdst (reg-reg form). Forcing the EVEX
// encoding via set_is_evex_instruction() is what makes the 0x74 result land in a
// k-register instead of an XMM destination.
3207 void Assembler::evpcmpeqb(KRegister kdst, XMMRegister nds, XMMRegister src, int vector_len) {
3208 assert(VM_Version::supports_avx512bw(), "");
3209 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
3210 attributes.set_is_evex_instruction();
3211 int encode = vex_prefix_and_encode(kdst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3212 emit_int8(0x74);
3213 emit_int8((unsigned char)(0xC0 | encode));
3214 }
3215
// Memory-source form; the k-register encoding is reused as a Register for
// emit_operand's reg-field plumbing (as_Register(dst_enc)).
3216 void Assembler::evpcmpeqb(KRegister kdst, XMMRegister nds, Address src, int vector_len) {
3217 assert(VM_Version::supports_avx512bw(), "");
3218 InstructionMark im(this);
3219 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
3220 attributes.set_is_evex_instruction();
3221 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
3222 int dst_enc = kdst->encoding();
3223 vex_prefix(src, nds->encoding(), dst_enc, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3224 emit_int8(0x74);
3225 emit_operand(as_Register(dst_enc), src);
3226 }
3227
3228 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
// Legacy PCMPEQW (66 0F 75 /r).
3229 void Assembler::pcmpeqw(XMMRegister dst, XMMRegister src) {
3230 assert(VM_Version::supports_sse2(), "");
3231 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3232 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3233 emit_int8(0x75);
3234 emit_int8((unsigned char)(0xC0 | encode));
3235 }
3236
3237 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
// VEX three-operand VPCMPEQW (same 0x75 opcode).
3238 void Assembler::vpcmpeqw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3239 assert(VM_Version::supports_avx(), "");
3240 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3241 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3242 emit_int8(0x75);
3243 emit_int8((unsigned char)(0xC0 | encode));
3244 }
3245
3246 // In this context, kdst is written the mask used to process the equal components
// EVEX VPCMPEQW into opmask kdst (reg-reg form); EVEX encoding is forced explicitly.
3247 void Assembler::evpcmpeqw(KRegister kdst, XMMRegister nds, XMMRegister src, int vector_len) {
3248 assert(VM_Version::supports_avx512bw(), "");
3249 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
3250 attributes.set_is_evex_instruction();
3251 int encode = vex_prefix_and_encode(kdst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3252 emit_int8(0x75);
3253 emit_int8((unsigned char)(0xC0 | encode));
3254 }
3255
// Memory-source form (tuple FVM, no fixed input size).
3256 void Assembler::evpcmpeqw(KRegister kdst, XMMRegister nds, Address src, int vector_len) {
3257 assert(VM_Version::supports_avx512bw(), "");
3258 InstructionMark im(this);
3259 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
3260 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
3261 attributes.set_is_evex_instruction();
3262 int dst_enc = kdst->encoding();
3263 vex_prefix(src, nds->encoding(), dst_enc, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3264 emit_int8(0x75);
3265 emit_operand(as_Register(dst_enc), src);
3266 }
3267
3268 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
// Legacy PCMPEQD (66 0F 76 /r).
3269 void Assembler::pcmpeqd(XMMRegister dst, XMMRegister src) {
3270 assert(VM_Version::supports_sse2(), "");
3271 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3272 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3273 emit_int8(0x76);
3274 emit_int8((unsigned char)(0xC0 | encode));
3275 }
3276
3277 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
// VEX three-operand VPCMPEQD (same 0x76 opcode).
3278 void Assembler::vpcmpeqd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3279 assert(VM_Version::supports_avx(), "");
3280 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3281 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3282 emit_int8(0x76);
3283 emit_int8((unsigned char)(0xC0 | encode));
3284 }
3285
3286 // In this context, kdst is written the mask used to process the equal components
// EVEX VPCMPEQD into opmask kdst (reg-reg form); only requires base AVX-512 (supports_evex).
3287 void Assembler::evpcmpeqd(KRegister kdst, XMMRegister nds, XMMRegister src, int vector_len) {
3288 assert(VM_Version::supports_evex(), "");
3289 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3290 attributes.set_is_evex_instruction();
3291 int encode = vex_prefix_and_encode(kdst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3292 emit_int8(0x76);
3293 emit_int8((unsigned char)(0xC0 | encode));
3294 }
3295
// Memory-source form (tuple FV, 32-bit input size for disp handling).
3296 void Assembler::evpcmpeqd(KRegister kdst, XMMRegister nds, Address src, int vector_len) {
3297 assert(VM_Version::supports_evex(), "");
3298 InstructionMark im(this);
3299 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3300 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
3301 attributes.set_is_evex_instruction();
3302 int dst_enc = kdst->encoding();
3303 vex_prefix(src, nds->encoding(), dst_enc, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3304 emit_int8(0x76);
3305 emit_operand(as_Register(dst_enc), src);
3306 }
3307
3308 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
// Legacy PCMPEQQ (66 0F 38 29 /r) — note the 0F 38 map, unlike the b/w/d compares.
3309 void Assembler::pcmpeqq(XMMRegister dst, XMMRegister src) {
3310 assert(VM_Version::supports_sse4_1(), "");
3311 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3312 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3313 emit_int8(0x29);
3314 emit_int8((unsigned char)(0xC0 | encode));
3315 }
3316
3317 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
// VEX three-operand VPCMPEQQ (same 0x29 opcode in the 0F 38 map).
3318 void Assembler::vpcmpeqq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3319 assert(VM_Version::supports_avx(), "");
3320 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3321 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3322 emit_int8(0x29);
3323 emit_int8((unsigned char)(0xC0 | encode));
3324 }
3325
3326 // In this context, kdst is written the mask used to process the equal components
// EVEX VPCMPEQQ into opmask kdst (reg-reg form); rex_w=true selects 64-bit elements.
3327 void Assembler::evpcmpeqq(KRegister kdst, XMMRegister nds, XMMRegister src, int vector_len) {
3328 assert(VM_Version::supports_evex(), "");
3329 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3330 attributes.set_is_evex_instruction();
3331 int encode = vex_prefix_and_encode(kdst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3332 emit_int8(0x29);
3333 emit_int8((unsigned char)(0xC0 | encode));
3334 }
3335
3336 // In this context, kdst is written the mask used to process the equal components
// Memory-source form (tuple FV, 64-bit input size).
3337 void Assembler::evpcmpeqq(KRegister kdst, XMMRegister nds, Address src, int vector_len) {
3338 assert(VM_Version::supports_evex(), "");
3339 InstructionMark im(this);
3340 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3341 attributes.set_is_evex_instruction();
3342 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
3343 int dst_enc = kdst->encoding();
3344 vex_prefix(src, nds->encoding(), dst_enc, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3345 emit_int8(0x29);
3346 emit_operand(as_Register(dst_enc), src);
3347 }
3348
// Emits PMOVMSKB (66 0F D7 /r): GPR dst receives the byte-sign mask of src.
// The GPR encoding is wrapped in as_XMMRegister() to satisfy the XMM-typed
// prefix helper; only the raw encoding number matters here.
3349 void Assembler::pmovmskb(Register dst, XMMRegister src) {
3350 assert(VM_Version::supports_sse2(), "");
3351 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3352 int encode = simd_prefix_and_encode(as_XMMRegister(dst->encoding()), xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3353 emit_int8((unsigned char)0xD7);
3354 emit_int8((unsigned char)(0xC0 | encode));
3355 }
3356
// 256-bit VEX form (AVX2), same 0xD7 opcode.
3357 void Assembler::vpmovmskb(Register dst, XMMRegister src) {
3358 assert(VM_Version::supports_avx2(), "");
3359 InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3360 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3361 emit_int8((unsigned char)0xD7);
3362 emit_int8((unsigned char)(0xC0 | encode));
3363 }
3364
// Emits SETcc r8 (0F 90+cc /0): the condition code is added into the 0x90
// opcode byte. (The cast binds to 0x90 only; the OR is then done as int.)
3958 void Assembler::setb(Condition cc, Register dst) {
3959 assert(0 <= cc && cc < 16, "illegal cc");
3960 int encode = prefix_and_encode(dst->encoding(), true);
3961 emit_int8(0x0F);
3962 emit_int8((unsigned char)0x90 | cc);
3963 emit_int8((unsigned char)(0xC0 | encode));
3964 }
3965
// Emits PALIGNR (66 0F 3A 0F /r ib); imm8 is the byte-shift amount.
3966 void Assembler::palignr(XMMRegister dst, XMMRegister src, int imm8) {
3967 assert(VM_Version::supports_ssse3(), "");
3968 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ false);
3969 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3970 emit_int8((unsigned char)0x0F);
3971 emit_int8((unsigned char)(0xC0 | encode));
3972 emit_int8(imm8);
3973 }
3974
// Emits PBLENDW (66 0F 3A 0E /r ib); imm8 selects which words come from src.
3975 void Assembler::pblendw(XMMRegister dst, XMMRegister src, int imm8) {
3976 assert(VM_Version::supports_sse4_1(), "");
3977 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3978 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3979 emit_int8((unsigned char)0x0E);
3980 emit_int8((unsigned char)(0xC0 | encode));
3981 emit_int8(imm8);
3982 }
3983
// Emits SHA1RNDS4 (NP 0F 3A CC /r ib); imm8 selects the SHA1 round function.
3984 void Assembler::sha1rnds4(XMMRegister dst, XMMRegister src, int imm8) {
3985 assert(VM_Version::supports_sha(), "");
3986 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3987 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F_3A, &attributes);
3988 emit_int8((unsigned char)0xCC);
3989 emit_int8((unsigned char)(0xC0 | encode));
3990 emit_int8((unsigned char)imm8);
3991 }
3992
3993 void Assembler::sha1nexte(XMMRegister dst, XMMRegister src) {
3994 assert(VM_Version::supports_sha(), "");
3995 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3996 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F_38, &attributes);
3997 emit_int8((unsigned char)0xC8);
3998 emit_int8((unsigned char)(0xC0 | encode));
4365
// XOR r32, r/m32 via the shared arith emitter (opcode 0x33, reg-reg form 0xC0).
4366 void Assembler::xorl(Register dst, Register src) {
4367 (void) prefix_and_encode(dst->encoding(), src->encoding());
4368 emit_arith(0x33, 0xC0, dst, src);
4369 }
4370
// XOR r8, m8 (opcode 0x32) with a memory source operand.
4371 void Assembler::xorb(Register dst, Address src) {
4372 InstructionMark im(this);
4373 prefix(src, dst);
4374 emit_int8(0x32);
4375 emit_operand(dst, src);
4376 }
4377
4378 // AVX 3-operand scalar floating-point arithmetic instructions
4379
// VADDSD with memory source (F2 0F 58). vex_w tracks supports_evex() so the
// EVEX form carries W1 for the 64-bit scalar; address tuple T1S, 64-bit input.
4380 void Assembler::vaddsd(XMMRegister dst, XMMRegister nds, Address src) {
4381 assert(VM_Version::supports_avx(), "");
4382 InstructionMark im(this);
4383 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4384 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4385 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4386 emit_int8(0x58);
4387 emit_operand(dst, src);
4388 }
4389
// VADDSD register-register form.
4390 void Assembler::vaddsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4391 assert(VM_Version::supports_avx(), "");
4392 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4393 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4394 emit_int8(0x58);
4395 emit_int8((unsigned char)(0xC0 | encode));
4396 }
4397
// VADDSS with memory source (F3 0F 58); 32-bit scalar, so vex_w stays false.
4398 void Assembler::vaddss(XMMRegister dst, XMMRegister nds, Address src) {
4399 assert(VM_Version::supports_avx(), "");
4400 InstructionMark im(this);
4401 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4402 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4403 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4404 emit_int8(0x58);
4405 emit_operand(dst, src);
4406 }
4407
// VADDSS register-register form.
4408 void Assembler::vaddss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4409 assert(VM_Version::supports_avx(), "");
4410 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4411 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4412 emit_int8(0x58);
4413 emit_int8((unsigned char)(0xC0 | encode));
4414 }
4415
// VDIVSD with memory source (F2 0F 5E); same vex_w/EVEX-tracking pattern as vaddsd.
4416 void Assembler::vdivsd(XMMRegister dst, XMMRegister nds, Address src) {
4417 assert(VM_Version::supports_avx(), "");
4418 InstructionMark im(this);
4419 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4420 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4421 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4422 emit_int8(0x5E);
4423 emit_operand(dst, src);
4424 }
4425
// VDIVSD register-register form.
4426 void Assembler::vdivsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4427 assert(VM_Version::supports_avx(), "");
4428 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4429 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4430 emit_int8(0x5E);
4431 emit_int8((unsigned char)(0xC0 | encode));
4432 }
4433
// VDIVSS with memory source (F3 0F 5E); 32-bit scalar.
4434 void Assembler::vdivss(XMMRegister dst, XMMRegister nds, Address src) {
4435 assert(VM_Version::supports_avx(), "");
4436 InstructionMark im(this);
4437 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4438 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4439 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4440 emit_int8(0x5E);
4441 emit_operand(dst, src);
4442 }
4443
// VDIVSS register-register form.
4444 void Assembler::vdivss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4445 assert(VM_Version::supports_avx(), "");
4446 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4447 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4448 emit_int8(0x5E);
4449 emit_int8((unsigned char)(0xC0 | encode));
4450 }
4451
4452 void Assembler::vmulsd(XMMRegister dst, XMMRegister nds, Address src) {
4453 assert(VM_Version::supports_avx(), "");
4454 InstructionMark im(this);
4455 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4456 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4457 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4458 emit_int8(0x59);
4459 emit_operand(dst, src);
4460 }
4461
4462 void Assembler::vmulsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4463 assert(VM_Version::supports_avx(), "");
4464 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4465 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4466 emit_int8(0x59);
4467 emit_int8((unsigned char)(0xC0 | encode));
4468 }
4469
// VMULSS dst, nds, mem32: AVX three-operand scalar single multiply from memory.
void Assembler::vmulss(XMMRegister dst, XMMRegister nds, Address src) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
  // EVEX memory-operand attributes: tuple-1-scalar, 32-bit input element.
  attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);           // opcode: MULSS
  emit_operand(dst, src);
}
4479
// VMULSS dst, nds, src: AVX three-operand scalar single multiply, register form.
void Assembler::vmulss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);                            // opcode: MULSS
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4487
// VSUBSD dst, nds, mem64: AVX three-operand scalar double subtract from memory.
void Assembler::vsubsd(XMMRegister dst, XMMRegister nds, Address src) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
  attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);           // opcode: SUBSD
  emit_operand(dst, src);
}
4497
// VSUBSD dst, nds, src: AVX three-operand scalar double subtract, register form.
void Assembler::vsubsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);                            // opcode: SUBSD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4505
// VSUBSS dst, nds, mem32: AVX three-operand scalar single subtract from memory.
void Assembler::vsubss(XMMRegister dst, XMMRegister nds, Address src) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
  attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);           // opcode: SUBSS
  emit_operand(dst, src);
}
4515
// VSUBSS dst, nds, src: AVX three-operand scalar single subtract, register form.
void Assembler::vsubss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);                            // opcode: SUBSS
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4523
4524 //====================VECTOR ARITHMETIC=====================================
4525
4526 // Float-point vector arithmetic
4527
// ADDPD dst, src: packed double-precision add (SSE2 encoding; re-encoded as
// VEX/EVEX by simd_prefix_and_encode when those modes are active).
void Assembler::addpd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x58);                            // opcode: ADDPD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4535
// ADDPD dst, mem128: packed double-precision add with a memory source.
void Assembler::addpd(XMMRegister dst, Address src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionMark im(this);
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  // EVEX memory-operand attributes: full-vector tuple, 64-bit elements.
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x58);           // opcode: ADDPD
  emit_operand(dst, src);
}
4545
4546
// ADDPS dst, src: packed single-precision add.
// NOTE(review): ADDPS is an SSE1 instruction; the assert checks SSE2, which is
// stricter than architecturally required — confirm whether this is intentional
// (xorps below asserts supports_sse()).
void Assembler::addps(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x58);                            // opcode: ADDPS
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4554
// VADDPD dst, nds, src: AVX three-operand packed double add at the given
// vector length (128/256/512-bit).
void Assembler::vaddpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x58);                            // opcode: ADDPD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4562
// VADDPS dst, nds, src: AVX three-operand packed single add.
void Assembler::vaddps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x58);                            // opcode: ADDPS
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4570
// VADDPD dst, nds, mem: AVX three-operand packed double add, memory source.
void Assembler::vaddpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x58);           // opcode: ADDPD
  emit_operand(dst, src);
}
4580
// VADDPS dst, nds, mem: AVX three-operand packed single add, memory source.
void Assembler::vaddps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x58);           // opcode: ADDPS
  emit_operand(dst, src);
}
4590
// SUBPD dst, src: packed double-precision subtract.
void Assembler::subpd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);                            // opcode: SUBPD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4598
// SUBPS dst, src: packed single-precision subtract.
// NOTE(review): SUBPS is SSE1; the SSE2 assert is stricter than required — confirm.
void Assembler::subps(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);                            // opcode: SUBPS
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4606
// VSUBPD dst, nds, src: AVX three-operand packed double subtract.
void Assembler::vsubpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);                            // opcode: SUBPD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4614
// VSUBPS dst, nds, src: AVX three-operand packed single subtract.
void Assembler::vsubps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);                            // opcode: SUBPS
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4622
// VSUBPD dst, nds, mem: AVX three-operand packed double subtract, memory source.
void Assembler::vsubpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);           // opcode: SUBPD
  emit_operand(dst, src);
}
4632
// VSUBPS dst, nds, mem: AVX three-operand packed single subtract, memory source.
void Assembler::vsubps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5C);           // opcode: SUBPS
  emit_operand(dst, src);
}
4642
// MULPD dst, src: packed double-precision multiply.
void Assembler::mulpd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);                            // opcode: MULPD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4650
// MULPD dst, mem128: packed double-precision multiply with a memory source.
void Assembler::mulpd(XMMRegister dst, Address src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionMark im(this);
  InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);           // opcode: MULPD
  emit_operand(dst, src);
}
4660
// MULPS dst, src: packed single-precision multiply.
// NOTE(review): MULPS is SSE1; the SSE2 assert is stricter than required — confirm.
void Assembler::mulps(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);                            // opcode: MULPS
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4668
// VMULPD dst, nds, src: AVX three-operand packed double multiply.
void Assembler::vmulpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);                            // opcode: MULPD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4676
// VMULPS dst, nds, src: AVX three-operand packed single multiply.
void Assembler::vmulps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);                            // opcode: MULPS
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4684
// VMULPD dst, nds, mem: AVX three-operand packed double multiply, memory source.
void Assembler::vmulpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);           // opcode: MULPD
  emit_operand(dst, src);
}
4694
// VMULPS dst, nds, mem: AVX three-operand packed single multiply, memory source.
void Assembler::vmulps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x59);           // opcode: MULPS
  emit_operand(dst, src);
}
4704
// DIVPD dst, src: packed double-precision divide.
void Assembler::divpd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5E);                            // opcode: DIVPD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4712
// DIVPS dst, src: packed single-precision divide.
// NOTE(review): DIVPS is SSE1; the SSE2 assert is stricter than required — confirm.
void Assembler::divps(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5E);                            // opcode: DIVPS
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4720
// VDIVPD dst, nds, src: AVX three-operand packed double divide.
void Assembler::vdivpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5E);                            // opcode: DIVPD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4728
// VDIVPS dst, nds, src: AVX three-operand packed single divide.
void Assembler::vdivps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5E);                            // opcode: DIVPS
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4736
// VDIVPD dst, nds, mem: AVX three-operand packed double divide, memory source.
void Assembler::vdivpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5E);           // opcode: DIVPD
  emit_operand(dst, src);
}
4746
// VDIVPS dst, nds, mem: AVX three-operand packed single divide, memory source.
void Assembler::vdivps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5E);           // opcode: DIVPS
  emit_operand(dst, src);
}
4756
// VSQRTPD dst, src: packed double-precision square root (two-operand, so the
// VEX nds field is passed as 0).
void Assembler::vsqrtpd(XMMRegister dst, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x51);                            // opcode: SQRTPD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4764
// VSQRTPD dst, mem: packed double-precision square root, memory source.
void Assembler::vsqrtpd(XMMRegister dst, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x51);           // opcode: SQRTPD
  emit_operand(dst, src);
}
4774
4775 void Assembler::andpd(XMMRegister dst, XMMRegister src) {
4776 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4777 InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4778 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4779 emit_int8(0x54);
4780 emit_int8((unsigned char)(0xC0 | encode));
4794 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4795 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4796 simd_prefix(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4797 emit_int8(0x54);
4798 emit_operand(dst, src);
4799 }
4800
// ANDPD dst, mem128: bitwise AND of packed doubles with a memory source.
// DQ legacy-mode flags: the EVEX form of ANDPD requires AVX-512DQ, so this
// falls back to VEX/legacy encoding when DQ is unavailable.
void Assembler::andpd(XMMRegister dst, Address src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionMark im(this);
  InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x54);           // opcode: ANDPD
  emit_operand(dst, src);
}
4810
// VANDPD dst, nds, src: AVX three-operand bitwise AND of packed doubles.
void Assembler::vandpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x54);                            // opcode: ANDPD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4818
// VANDPS dst, nds, src: AVX three-operand bitwise AND of packed singles.
void Assembler::vandps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x54);                            // opcode: ANDPS
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4826
// VANDPD dst, nds, mem: AVX three-operand packed-double AND, memory source.
void Assembler::vandpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x54);           // opcode: ANDPD
  emit_operand(dst, src);
}
4836
// VANDPS dst, nds, mem: AVX three-operand packed-single AND, memory source.
void Assembler::vandps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x54);           // opcode: ANDPS
  emit_operand(dst, src);
}
4846
// UNPCKHPD dst, src: interleave the high double-precision elements of dst/src.
void Assembler::unpckhpd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x15);                            // opcode: UNPCKHPD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4854
// UNPCKLPD dst, src: interleave the low double-precision elements of dst/src.
void Assembler::unpcklpd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x14);                            // opcode: UNPCKLPD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4862
4882 InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4883 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4884 simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4885 emit_int8(0x57);
4886 emit_operand(dst, src);
4887 }
4888
// XORPS dst, mem128: bitwise XOR of packed singles with a memory source
// (SSE1 instruction, hence the supports_sse() assert).
void Assembler::xorps(XMMRegister dst, Address src) {
  NOT_LP64(assert(VM_Version::supports_sse(), ""));
  InstructionMark im(this);
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  simd_prefix(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x57);           // opcode: XORPS
  emit_operand(dst, src);
}
4898
// VXORPD dst, nds, src: AVX three-operand bitwise XOR of packed doubles.
void Assembler::vxorpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x57);                            // opcode: XORPD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4906
// VXORPS dst, nds, src: AVX three-operand bitwise XOR of packed singles.
void Assembler::vxorps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x57);                            // opcode: XORPS
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4914
// VXORPD dst, nds, mem: AVX three-operand packed-double XOR, memory source.
void Assembler::vxorpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8(0x57);           // opcode: XORPD
  emit_operand(dst, src);
}
4924
// VXORPS dst, nds, mem: AVX three-operand packed-single XOR, memory source.
void Assembler::vxorps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x57);           // opcode: XORPS
  emit_operand(dst, src);
}
4934
4935 // Integer vector arithmetic
4936 void Assembler::vphaddw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4937 assert(VM_Version::supports_avx() && (vector_len == 0) ||
4938 VM_Version::supports_avx2(), "256 bit integer vectors requires AVX2");
4939 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
4940 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4941 emit_int8(0x01);
4942 emit_int8((unsigned char)(0xC0 | encode));
4943 }
4944
4945 void Assembler::vphaddd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4946 assert(VM_Version::supports_avx() && (vector_len == 0) ||
4947 VM_Version::supports_avx2(), "256 bit integer vectors requires AVX2");
4948 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
4949 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4950 emit_int8(0x02);
4951 emit_int8((unsigned char)(0xC0 | encode));
4952 }
4953
// PADDB dst, src: packed 8-bit integer add (BW legacy-mode: EVEX byte ops
// need AVX-512BW).
void Assembler::paddb(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFC);             // opcode: PADDB
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4961
// PADDW dst, src: packed 16-bit integer add (BW legacy-mode like paddb).
void Assembler::paddw(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFD);             // opcode: PADDW
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4969
// PADDD dst, src: packed 32-bit integer add.
void Assembler::paddd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFE);             // opcode: PADDD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4977
4978 void Assembler::paddd(XMMRegister dst, Address src) {
4979 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4980 InstructionMark im(this);
4981 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4982 simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4983 emit_int8((unsigned char)0xFE);
4984 emit_operand(dst, src);
4985 }
4986
// PADDQ dst, src: packed 64-bit integer add.
void Assembler::paddq(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse2(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xD4);             // opcode: PADDQ
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
4994
// PHADDW dst, src: packed horizontal add of 16-bit elements.
// NOTE(review): PHADDW (0F 38 map) is architecturally an SSSE3 instruction;
// the assert checks supports_sse3() — confirm whether supports_ssse3() was meant.
void Assembler::phaddw(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse3(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
  emit_int8(0x01);                            // opcode: PHADDW
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
5002
// PHADDD dst, src: packed horizontal add of 32-bit elements.
// NOTE(review): PHADDD is SSSE3; the assert checks supports_sse3() — confirm
// whether supports_ssse3() was meant (same as phaddw above).
void Assembler::phaddd(XMMRegister dst, XMMRegister src) {
  NOT_LP64(assert(VM_Version::supports_sse3(), ""));
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
  emit_int8(0x02);                            // opcode: PHADDD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
5010
// VPADDB dst, nds, src: AVX three-operand packed 8-bit integer add.
void Assembler::vpaddb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFC);             // opcode: PADDB
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
5018
// VPADDW dst, nds, src: AVX three-operand packed 16-bit integer add.
void Assembler::vpaddw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFD);             // opcode: PADDW
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
5026
// VPADDD dst, nds, src: AVX three-operand packed 32-bit integer add.
void Assembler::vpaddd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFE);             // opcode: PADDD
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
5034
// VPADDQ dst, nds, src: AVX three-operand packed 64-bit integer add.
void Assembler::vpaddq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xD4);             // opcode: PADDQ
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
5042
// VPADDB dst, nds, mem: AVX three-operand packed 8-bit add, memory source.
// Byte ops use the full-vector-mem tuple with no fixed element input size.
void Assembler::vpaddb(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
  assert(UseAVX > 0, "requires some form of AVX");
  InstructionMark im(this);
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
  attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
  vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xFC);  // opcode: PADDB
  emit_operand(dst, src);
}
5052
5053 void Assembler::vpaddw(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5054 assert(UseAVX > 0, "requires some form of AVX");
5055 InstructionMark im(this);
5056 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5057 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5058 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5059 emit_int8((unsigned char)0xFD);
5060 emit_operand(dst, src);
5061 }
5062
5063 void Assembler::vpaddd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5064 assert(UseAVX > 0, "requires some form of AVX");
5065 InstructionMark im(this);
5066 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5067 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5068 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5069 emit_int8((unsigned char)0xFE);
5070 emit_operand(dst, src);
5071 }
5072
5073 void Assembler::vpaddq(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5074 assert(UseAVX > 0, "requires some form of AVX");
5075 InstructionMark im(this);
5076 InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5077 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
5078 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5079 emit_int8((unsigned char)0xD4);
5080 emit_operand(dst, src);
5081 }
5082
5083 void Assembler::psubb(XMMRegister dst, XMMRegister src) {
5084 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5085 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5086 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5087 emit_int8((unsigned char)0xF8);
5088 emit_int8((unsigned char)(0xC0 | encode));
5089 }
5090
5091 void Assembler::psubw(XMMRegister dst, XMMRegister src) {
5092 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5093 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5094 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5095 emit_int8((unsigned char)0xF9);
5096 emit_int8((unsigned char)(0xC0 | encode));
5097 }
5098
5099 void Assembler::psubd(XMMRegister dst, XMMRegister src) {
5100 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5101 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5102 emit_int8((unsigned char)0xFA);
5103 emit_int8((unsigned char)(0xC0 | encode));
5104 }
5105
5106 void Assembler::psubq(XMMRegister dst, XMMRegister src) {
5107 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5108 InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5109 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5110 emit_int8((unsigned char)0xFB);
5111 emit_int8((unsigned char)(0xC0 | encode));
5112 }
5113
5114 void Assembler::vpsubb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5115 assert(UseAVX > 0, "requires some form of AVX");
5116 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5117 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5118 emit_int8((unsigned char)0xF8);
5119 emit_int8((unsigned char)(0xC0 | encode));
5120 }
5121
5122 void Assembler::vpsubw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5123 assert(UseAVX > 0, "requires some form of AVX");
5124 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5125 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5126 emit_int8((unsigned char)0xF9);
5127 emit_int8((unsigned char)(0xC0 | encode));
5128 }
5129
5130 void Assembler::vpsubd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5131 assert(UseAVX > 0, "requires some form of AVX");
5132 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5133 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5134 emit_int8((unsigned char)0xFA);
5135 emit_int8((unsigned char)(0xC0 | encode));
5136 }
5137
5138 void Assembler::vpsubq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5139 assert(UseAVX > 0, "requires some form of AVX");
5140 InstructionAttr attributes(vector_len, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5141 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5142 emit_int8((unsigned char)0xFB);
5143 emit_int8((unsigned char)(0xC0 | encode));
5144 }
5145
5146 void Assembler::vpsubb(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5147 assert(UseAVX > 0, "requires some form of AVX");
5148 InstructionMark im(this);
5149 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5150 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5151 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5152 emit_int8((unsigned char)0xF8);
5153 emit_operand(dst, src);
5154 }
5155
5156 void Assembler::vpsubw(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5157 assert(UseAVX > 0, "requires some form of AVX");
5158 InstructionMark im(this);
5159 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5160 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5161 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5162 emit_int8((unsigned char)0xF9);
5163 emit_operand(dst, src);
5164 }
5165
5166 void Assembler::vpsubd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5167 assert(UseAVX > 0, "requires some form of AVX");
5168 InstructionMark im(this);
5169 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5170 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5171 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5172 emit_int8((unsigned char)0xFA);
5173 emit_operand(dst, src);
5174 }
5175
5176 void Assembler::vpsubq(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5177 assert(UseAVX > 0, "requires some form of AVX");
5178 InstructionMark im(this);
5179 InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5180 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
5181 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5182 emit_int8((unsigned char)0xFB);
5183 emit_operand(dst, src);
5184 }
5185
5186 void Assembler::pmullw(XMMRegister dst, XMMRegister src) {
5187 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5188 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5189 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5190 emit_int8((unsigned char)0xD5);
5191 emit_int8((unsigned char)(0xC0 | encode));
5192 }
5193
5194 void Assembler::pmulld(XMMRegister dst, XMMRegister src) {
5195 assert(VM_Version::supports_sse4_1(), "");
5196 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5197 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5198 emit_int8(0x40);
5199 emit_int8((unsigned char)(0xC0 | encode));
5200 }
5201
5202 void Assembler::vpmullw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5203 assert(UseAVX > 0, "requires some form of AVX");
5204 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5205 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5206 emit_int8((unsigned char)0xD5);
5207 emit_int8((unsigned char)(0xC0 | encode));
5208 }
5209
5210 void Assembler::vpmulld(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5211 assert(UseAVX > 0, "requires some form of AVX");
5212 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5213 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5214 emit_int8(0x40);
5215 emit_int8((unsigned char)(0xC0 | encode));
5216 }
5217
5218 void Assembler::vpmullq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5219 assert(UseAVX > 2, "requires some form of AVX");
5220 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
5221 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5222 emit_int8(0x40);
5223 emit_int8((unsigned char)(0xC0 | encode));
5224 }
5225
5226 void Assembler::vpmullw(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5227 assert(UseAVX > 0, "requires some form of AVX");
5228 InstructionMark im(this);
5229 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5230 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5231 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5232 emit_int8((unsigned char)0xD5);
5233 emit_operand(dst, src);
5234 }
5235
5236 void Assembler::vpmulld(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5237 assert(UseAVX > 0, "requires some form of AVX");
5238 InstructionMark im(this);
5239 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5240 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5241 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5242 emit_int8(0x40);
5243 emit_operand(dst, src);
5244 }
5245
5246 void Assembler::vpmullq(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5247 assert(UseAVX > 0, "requires some form of AVX");
5248 InstructionMark im(this);
5249 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
5250 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
5251 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5252 emit_int8(0x40);
5253 emit_operand(dst, src);
5254 }
5255
5256 // Shift packed integers left by specified number of bits.
5257 void Assembler::psllw(XMMRegister dst, int shift) {
5258 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5259 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5260 // XMM6 is for /6 encoding: 66 0F 71 /6 ib
5261 int encode = simd_prefix_and_encode(xmm6, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5262 emit_int8(0x71);
5263 emit_int8((unsigned char)(0xC0 | encode));
5264 emit_int8(shift & 0xFF);
5265 }
5266
5267 void Assembler::pslld(XMMRegister dst, int shift) {
5268 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5269 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5270 // XMM6 is for /6 encoding: 66 0F 72 /6 ib
5271 int encode = simd_prefix_and_encode(xmm6, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5543 assert(UseAVX > 0, "requires some form of AVX");
5544 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5545 int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5546 emit_int8((unsigned char)0xE2);
5547 emit_int8((unsigned char)(0xC0 | encode));
5548 }
5549
5550
5551 // logical operations packed integers
5552 void Assembler::pand(XMMRegister dst, XMMRegister src) {
5553 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5554 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5555 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5556 emit_int8((unsigned char)0xDB);
5557 emit_int8((unsigned char)(0xC0 | encode));
5558 }
5559
5560 void Assembler::vpand(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5561 assert(UseAVX > 0, "requires some form of AVX");
5562 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5563 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5564 emit_int8((unsigned char)0xDB);
5565 emit_int8((unsigned char)(0xC0 | encode));
5566 }
5567
5568 void Assembler::vpand(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5569 assert(UseAVX > 0, "requires some form of AVX");
5570 InstructionMark im(this);
5571 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5572 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5573 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5574 emit_int8((unsigned char)0xDB);
5575 emit_operand(dst, src);
5576 }
5577
5578 void Assembler::pandn(XMMRegister dst, XMMRegister src) {
5579 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5580 InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5581 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5582 emit_int8((unsigned char)0xDF);
5583 emit_int8((unsigned char)(0xC0 | encode));
5584 }
5585
5586 void Assembler::por(XMMRegister dst, XMMRegister src) {
5587 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5588 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5589 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5590 emit_int8((unsigned char)0xEB);
5591 emit_int8((unsigned char)(0xC0 | encode));
5592 }
5593
5594 void Assembler::vpor(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5595 assert(UseAVX > 0, "requires some form of AVX");
5596 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5597 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5598 emit_int8((unsigned char)0xEB);
5599 emit_int8((unsigned char)(0xC0 | encode));
5600 }
5601
5602 void Assembler::vpor(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5603 assert(UseAVX > 0, "requires some form of AVX");
5604 InstructionMark im(this);
5605 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5606 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5607 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5608 emit_int8((unsigned char)0xEB);
5609 emit_operand(dst, src);
5610 }
5611
5612 void Assembler::pxor(XMMRegister dst, XMMRegister src) {
5613 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5614 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5615 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5616 emit_int8((unsigned char)0xEF);
5617 emit_int8((unsigned char)(0xC0 | encode));
5618 }
5619
5620 void Assembler::vpxor(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5621 assert(UseAVX > 0, "requires some form of AVX");
5622 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5623 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5624 emit_int8((unsigned char)0xEF);
5625 emit_int8((unsigned char)(0xC0 | encode));
5626 }
5627
5628 void Assembler::vpxor(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5629 assert(UseAVX > 0, "requires some form of AVX");
5630 InstructionMark im(this);
5631 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5632 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5633 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5634 emit_int8((unsigned char)0xEF);
5635 emit_operand(dst, src);
5636 }
5637
5638
5639 // vinserti forms
5640
5641 void Assembler::vinserti128(XMMRegister dst, XMMRegister nds, XMMRegister src, uint8_t imm8) {
5642 assert(VM_Version::supports_avx2(), "");
5643 assert(imm8 <= 0x01, "imm8: %u", imm8);
5644 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5645 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5646 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5647 emit_int8(0x38);
5648 emit_int8((unsigned char)(0xC0 | encode));
5649 // 0x00 - insert into lower 128 bits
5650 // 0x01 - insert into upper 128 bits
5651 emit_int8(imm8 & 0x01);
5652 }
5653
5654 void Assembler::vinserti128(XMMRegister dst, XMMRegister nds, Address src, uint8_t imm8) {
5655 assert(VM_Version::supports_avx2(), "");
5656 assert(dst != xnoreg, "sanity");
5657 assert(imm8 <= 0x01, "imm8: %u", imm8);
5658 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5659 InstructionMark im(this);
5660 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5661 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5662 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5663 emit_int8(0x38);
5664 emit_operand(dst, src);
5665 // 0x00 - insert into lower 128 bits
5666 // 0x01 - insert into upper 128 bits
5667 emit_int8(imm8 & 0x01);
5668 }
5669
5670 void Assembler::vinserti32x4(XMMRegister dst, XMMRegister nds, XMMRegister src, uint8_t imm8) {
5671 assert(VM_Version::supports_evex(), "");
5672 assert(imm8 <= 0x03, "imm8: %u", imm8);
5673 InstructionAttr attributes(AVX_512bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5674 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5675 emit_int8(0x38);
5676 emit_int8((unsigned char)(0xC0 | encode));
5677 // 0x00 - insert into q0 128 bits (0..127)
5678 // 0x01 - insert into q1 128 bits (128..255)
5679 // 0x02 - insert into q2 128 bits (256..383)
5680 // 0x03 - insert into q3 128 bits (384..511)
5681 emit_int8(imm8 & 0x03);
5682 }
5683
5684 void Assembler::vinserti32x4(XMMRegister dst, XMMRegister nds, Address src, uint8_t imm8) {
5685 assert(VM_Version::supports_avx(), "");
5686 assert(dst != xnoreg, "sanity");
5687 assert(imm8 <= 0x03, "imm8: %u", imm8);
5688 int vector_len = VM_Version::supports_evex() ? AVX_512bit : AVX_256bit;
5689 InstructionMark im(this);
5690 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5691 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5692 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5693 emit_int8(0x18);
5694 emit_operand(dst, src);
5695 // 0x00 - insert into q0 128 bits (0..127)
5696 // 0x01 - insert into q1 128 bits (128..255)
5697 // 0x02 - insert into q2 128 bits (256..383)
5698 // 0x03 - insert into q3 128 bits (384..511)
5699 emit_int8(imm8 & 0x03);
5700 }
5701
5702 void Assembler::vinserti64x4(XMMRegister dst, XMMRegister nds, XMMRegister src, uint8_t imm8) {
5703 assert(VM_Version::supports_evex(), "");
5704 assert(imm8 <= 0x01, "imm8: %u", imm8);
5705 InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5706 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5707 emit_int8(0x38);
5708 emit_int8((unsigned char)(0xC0 | encode));
5709 // 0x00 - insert into lower 256 bits
5710 // 0x01 - insert into upper 256 bits
5711 emit_int8(imm8 & 0x01);
5712 }
5713
5714
5715 // vinsertf forms
5716
5717 void Assembler::vinsertf128(XMMRegister dst, XMMRegister nds, XMMRegister src, uint8_t imm8) {
5718 assert(VM_Version::supports_avx(), "");
5719 assert(imm8 <= 0x01, "imm8: %u", imm8);
5720 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5721 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5722 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5723 emit_int8(0x18);
5724 emit_int8((unsigned char)(0xC0 | encode));
5725 // 0x00 - insert into lower 128 bits
5726 // 0x01 - insert into upper 128 bits
5727 emit_int8(imm8 & 0x01);
5728 }
5729
5730 void Assembler::vinsertf128(XMMRegister dst, XMMRegister nds, Address src, uint8_t imm8) {
5731 assert(VM_Version::supports_avx(), "");
5732 assert(dst != xnoreg, "sanity");
5733 assert(imm8 <= 0x01, "imm8: %u", imm8);
5734 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5735 InstructionMark im(this);
5736 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5737 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5738 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5739 emit_int8(0x18);
5740 emit_operand(dst, src);
5741 // 0x00 - insert into lower 128 bits
5742 // 0x01 - insert into upper 128 bits
5743 emit_int8(imm8 & 0x01);
5744 }
5745
5746 void Assembler::vinsertf32x4(XMMRegister dst, XMMRegister nds, XMMRegister src, uint8_t imm8) {
5747 assert(VM_Version::supports_evex(), "");
5748 assert(imm8 <= 0x03, "imm8: %u", imm8);
5749 InstructionAttr attributes(AVX_512bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5750 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5751 emit_int8(0x18);
5752 emit_int8((unsigned char)(0xC0 | encode));
5753 // 0x00 - insert into q0 128 bits (0..127)
5754 // 0x01 - insert into q1 128 bits (128..255)
5755 // 0x02 - insert into q2 128 bits (256..383)
5756 // 0x03 - insert into q3 128 bits (384..511)
5757 emit_int8(imm8 & 0x03);
5758 }
5759
5760 void Assembler::vinsertf32x4(XMMRegister dst, XMMRegister nds, Address src, uint8_t imm8) {
5761 assert(VM_Version::supports_avx(), "");
5762 assert(dst != xnoreg, "sanity");
5763 assert(imm8 <= 0x03, "imm8: %u", imm8);
5764 int vector_len = VM_Version::supports_evex() ? AVX_512bit : AVX_256bit;
5765 InstructionMark im(this);
5766 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5767 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5768 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5769 emit_int8(0x18);
5770 emit_operand(dst, src);
5771 // 0x00 - insert into q0 128 bits (0..127)
5772 // 0x01 - insert into q1 128 bits (128..255)
5773 // 0x02 - insert into q2 128 bits (256..383)
5774 // 0x03 - insert into q3 128 bits (384..511)
5775 emit_int8(imm8 & 0x03);
5776 }
5777
5778 void Assembler::vinsertf64x4(XMMRegister dst, XMMRegister nds, XMMRegister src, uint8_t imm8) {
5779 assert(VM_Version::supports_evex(), "");
5780 assert(imm8 <= 0x01, "imm8: %u", imm8);
5781 InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5782 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5783 emit_int8(0x1A);
5784 emit_int8((unsigned char)(0xC0 | encode));
5785 // 0x00 - insert into lower 256 bits
5786 // 0x01 - insert into upper 256 bits
5787 emit_int8(imm8 & 0x01);
5788 }
5789
5790 void Assembler::vinsertf64x4(XMMRegister dst, XMMRegister nds, Address src, uint8_t imm8) {
5791 assert(VM_Version::supports_evex(), "");
5792 assert(dst != xnoreg, "sanity");
5793 assert(imm8 <= 0x01, "imm8: %u", imm8);
5794 InstructionMark im(this);
5795 InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5796 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_64bit);
5797 vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5798 emit_int8(0x1A);
5799 emit_operand(dst, src);
5800 // 0x00 - insert into lower 256 bits
5801 // 0x01 - insert into upper 256 bits
5802 emit_int8(imm8 & 0x01);
5803 }
5804
5805
5806 // vextracti forms
5807
5808 void Assembler::vextracti128(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5809 assert(VM_Version::supports_avx(), "");
5810 assert(imm8 <= 0x01, "imm8: %u", imm8);
5811 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5812 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5813 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5814 emit_int8(0x39);
5815 emit_int8((unsigned char)(0xC0 | encode));
5816 // 0x00 - extract from lower 128 bits
5817 // 0x01 - extract from upper 128 bits
5818 emit_int8(imm8 & 0x01);
5819 }
5820
5821 void Assembler::vextracti128(Address dst, XMMRegister src, uint8_t imm8) {
5822 assert(VM_Version::supports_avx2(), "");
5823 assert(src != xnoreg, "sanity");
5824 assert(imm8 <= 0x01, "imm8: %u", imm8);
5825 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5826 InstructionMark im(this);
5827 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5828 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5829 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5830 emit_int8(0x39);
5831 emit_operand(src, dst);
5832 // 0x00 - extract from lower 128 bits
5833 // 0x01 - extract from upper 128 bits
5834 emit_int8(imm8 & 0x01);
5835 }
5836
5837 void Assembler::vextracti32x4(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5838 assert(VM_Version::supports_avx(), "");
5839 assert(imm8 <= 0x03, "imm8: %u", imm8);
5840 int vector_len = VM_Version::supports_evex() ? AVX_512bit : AVX_256bit;
5841 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5842 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5843 emit_int8(0x39);
5844 emit_int8((unsigned char)(0xC0 | encode));
5845 // 0x00 - extract from bits 127:0
5846 // 0x01 - extract from bits 255:128
5847 // 0x02 - extract from bits 383:256
5848 // 0x03 - extract from bits 511:384
5849 emit_int8(imm8 & 0x03);
5850 }
5851
5852 void Assembler::vextracti32x4(Address dst, XMMRegister src, uint8_t imm8) {
5853 assert(VM_Version::supports_evex(), "");
5854 assert(src != xnoreg, "sanity");
5855 assert(imm8 <= 0x03, "imm8: %u", imm8);
5856 InstructionMark im(this);
5857 InstructionAttr attributes(AVX_512bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5858 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5859 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5860 emit_int8(0x39);
5861 emit_operand(src, dst);
5862 // 0x00 - extract from bits 127:0
5863 // 0x01 - extract from bits 255:128
5864 // 0x02 - extract from bits 383:256
5865 // 0x03 - extract from bits 511:384
5866 emit_int8(imm8 & 0x03);
5867 }
5868
5869 void Assembler::vextracti64x2(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5870 assert(VM_Version::supports_evex(), "");
5871 assert(imm8 <= 0x03, "imm8: %u", imm8);
5872 InstructionAttr attributes(AVX_512bit, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5873 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5874 emit_int8(0x39);
5875 emit_int8((unsigned char)(0xC0 | encode));
5876 // 0x00 - extract from bits 127:0
5877 // 0x01 - extract from bits 255:128
5878 // 0x02 - extract from bits 383:256
5879 // 0x03 - extract from bits 511:384
5880 emit_int8(imm8 & 0x03);
5881 }
5882
// VEXTRACTI64X4 ymm, zmm, imm8 (EVEX.512.66.0F3A.W1 3B /r ib):
// extract the lower or upper 256-bit integer half of src into dst.
5883 void Assembler::vextracti64x4(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5884 assert(VM_Version::supports_evex(), "");
5885 assert(imm8 <= 0x01, "imm8: %u", imm8);
5886 InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
// Extract semantics: src is ModRM.reg, dst is ModRM.rm, hence the swapped order.
5887 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5888 emit_int8(0x3B);
5889 emit_int8((unsigned char)(0xC0 | encode));
5890 // 0x00 - extract from lower 256 bits
5891 // 0x01 - extract from upper 256 bits
5892 emit_int8(imm8 & 0x01);
5893 }
5894
5895
5896 // vextractf forms
5897
// VEXTRACTF128 xmm, ymm, imm8 (VEX.256.66.0F3A.W0 19 /r ib):
// extract the lower or upper 128-bit float lane of src into dst.
5898 void Assembler::vextractf128(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5899 assert(VM_Version::supports_avx(), "");
5900 assert(imm8 <= 0x01, "imm8: %u", imm8);
// On AVX512 without VL, widen the attribute to 512-bit — presumably so the
// EVEX encoder can be used on such CPUs; confirm against the prefix emitter.
5901 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5902 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
// Extract semantics: src is ModRM.reg, dst is ModRM.rm, hence the swapped order.
5903 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5904 emit_int8(0x19);
5905 emit_int8((unsigned char)(0xC0 | encode));
5906 // 0x00 - extract from lower 128 bits
5907 // 0x01 - extract from upper 128 bits
5908 emit_int8(imm8 & 0x01);
5909 }
5910
// VEXTRACTF128 m128, ymm, imm8: store the selected 128-bit lane of src
// to memory at dst. src occupies ModRM.reg (see reg form above).
5911 void Assembler::vextractf128(Address dst, XMMRegister src, uint8_t imm8) {
5912 assert(VM_Version::supports_avx(), "");
5913 assert(src != xnoreg, "sanity");
5914 assert(imm8 <= 0x01, "imm8: %u", imm8);
// Widen to 512-bit attribute on AVX512-without-VL CPUs (as in the reg form).
5915 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_256bit;
5916 InstructionMark im(this);
5917 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
// Tuple T4 / 32-bit elements: EVEX disp8 compression parameters for the store.
5918 attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5919 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5920 emit_int8(0x19);
5921 emit_operand(src, dst);
5922 // 0x00 - extract from lower 128 bits
5923 // 0x01 - extract from upper 128 bits
5924 emit_int8(imm8 & 0x01);
5925 }
5926
// VEXTRACTF32X4 xmm, zmm, imm8: extract one of four 128-bit float lanes of
// src into dst. With EVEX the full 4-lane range is valid; without EVEX the
// attribute drops to 256-bit (VEXTRACTF128-style, only imm8 0/1 meaningful).
5927 void Assembler::vextractf32x4(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5928 assert(VM_Version::supports_avx(), "");
5929 assert(imm8 <= 0x03, "imm8: %u", imm8);
5930 int vector_len = VM_Version::supports_evex() ? AVX_512bit : AVX_256bit;
5931 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
// Extract semantics: src is ModRM.reg, dst is ModRM.rm, hence the swapped order.
5932 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5933 emit_int8(0x19);
5934 emit_int8((unsigned char)(0xC0 | encode));
5935 // 0x00 - extract from bits 127:0
5936 // 0x01 - extract from bits 255:128
5937 // 0x02 - extract from bits 383:256
5938 // 0x03 - extract from bits 511:384
5939 emit_int8(imm8 & 0x03);
5940 }
5941
5942 void Assembler::vextractf32x4(Address dst, XMMRegister src, uint8_t imm8) {
5943 assert(VM_Version::supports_evex(), "");
5953 // 0x01 - extract from bits 255:128
5954 // 0x02 - extract from bits 383:256
5955 // 0x03 - extract from bits 511:384
5956 emit_int8(imm8 & 0x03);
5957 }
5958
// VEXTRACTF64X2 xmm, zmm, imm8: extract one of four 128-bit lanes (as 2
// doubles) of src into dst. W is set only when DQ is not in legacy mode,
// since the 64x2 form is the AVX512DQ encoding (W0 is the 32x4 flavor).
5959 void Assembler::vextractf64x2(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5960 assert(VM_Version::supports_evex(), "");
5961 assert(imm8 <= 0x03, "imm8: %u", imm8);
5962 InstructionAttr attributes(AVX_512bit, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
// Extract semantics: src is ModRM.reg, dst is ModRM.rm, hence the swapped order.
5963 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5964 emit_int8(0x19);
5965 emit_int8((unsigned char)(0xC0 | encode));
5966 // 0x00 - extract from bits 127:0
5967 // 0x01 - extract from bits 255:128
5968 // 0x02 - extract from bits 383:256
5969 // 0x03 - extract from bits 511:384
5970 emit_int8(imm8 & 0x03);
5971 }
5972
// VEXTRACTF64X4 ymm, zmm, imm8 (EVEX.512.66.0F3A.W1 1B /r ib):
// extract the lower or upper 256-bit float half of src into dst.
5973 void Assembler::vextractf64x4(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5974 assert(VM_Version::supports_evex(), "");
5975 assert(imm8 <= 0x01, "imm8: %u", imm8);
5976 InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
// Extract semantics: src is ModRM.reg, dst is ModRM.rm, hence the swapped order.
5977 int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5978 emit_int8(0x1B);
5979 emit_int8((unsigned char)(0xC0 | encode));
5980 // 0x00 - extract from lower 256 bits
5981 // 0x01 - extract from upper 256 bits
5982 emit_int8(imm8 & 0x01);
5983 }
5984
// VEXTRACTF64X4 m256, zmm, imm8: store the selected 256-bit half of src to
// memory at dst. src occupies ModRM.reg (see reg form above).
5985 void Assembler::vextractf64x4(Address dst, XMMRegister src, uint8_t imm8) {
5986 assert(VM_Version::supports_evex(), "");
5987 assert(src != xnoreg, "sanity");
5988 assert(imm8 <= 0x01, "imm8: %u", imm8);
5989 InstructionMark im(this);
5990 InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
// Tuple T4 / 64-bit elements: EVEX disp8 compression parameters for the store.
5991 attributes.set_address_attributes(/* tuple_type */ EVEX_T4,/* input_size_in_bits */ EVEX_64bit);
5992 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5993 emit_int8(0x1B);
5994 emit_operand(src, dst);
5995 // 0x00 - extract from lower 256 bits
5996 // 0x01 - extract from upper 256 bits
5997 emit_int8(imm8 & 0x01);
5998 }
5999
6000
6001 // legacy word/dword replicate
// VPBROADCASTW ymm, xmm (VEX.256.66.0F38.W0 79 /r): replicate the low word
// of src into every 16-bit element of dst. AVX2 form; BW legacy-mode gated.
6002 void Assembler::vpbroadcastw(XMMRegister dst, XMMRegister src) {
6003 assert(VM_Version::supports_avx2(), "");
6004 InstructionAttr attributes(AVX_256bit, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6005 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6006 emit_int8(0x79);
6007 emit_int8((unsigned char)(0xC0 | encode));
6008 }
6009
// VPBROADCASTD ymm, xmm (VEX.256.66.0F38.W0 58 /r): replicate the low dword
// of src into every 32-bit element of dst. AVX2 form.
6010 void Assembler::vpbroadcastd(XMMRegister dst, XMMRegister src) {
6011 assert(VM_Version::supports_avx2(), "");
6012 InstructionAttr attributes(AVX_256bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6013 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6014 emit_int8(0x58);
6015 emit_int8((unsigned char)(0xC0 | encode));
6016 }
6017
6018
6019 // xmm/mem sourced byte/word/dword/qword replicate
6020
6021 // duplicate 1-byte integer data from src into programmed locations in dest : requires AVX512BW and AVX512VL
// VPBROADCASTB (opcode 0x78), xmm source: replicate the low byte of src into
// every byte element of dst at the given vector length.
6022 void Assembler::evpbroadcastb(XMMRegister dst, XMMRegister src, int vector_len) {
6023 assert(VM_Version::supports_evex(), "");
6024 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6025 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6026 emit_int8(0x78);
6027 emit_int8((unsigned char)(0xC0 | encode));
6028 }
6029
// VPBROADCASTB, memory source: replicate the byte at src into every byte
// element of dst. Broadcast loads put dst in ModRM.reg and the memory
// operand in rm — hence the "swap" in the prefix/operand calls.
6030 void Assembler::evpbroadcastb(XMMRegister dst, Address src, int vector_len) {
6031 assert(VM_Version::supports_evex(), "");
6032 assert(dst != xnoreg, "sanity");
6033 InstructionMark im(this);
6034 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
// T1S tuple with 8-bit element: EVEX disp8*N compression for a 1-byte load.
6035 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_8bit);
6036 // swap src<->dst for encoding
6037 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6038 emit_int8(0x78);
6039 emit_operand(dst, src);
6040 }
6041
6042 // duplicate 2-byte integer data from src into programmed locations in dest : requires AVX512BW and AVX512VL
// VPBROADCASTW (opcode 0x79), xmm source: replicate the low word of src into
// every 16-bit element of dst at the given vector length.
6043 void Assembler::evpbroadcastw(XMMRegister dst, XMMRegister src, int vector_len) {
6044 assert(VM_Version::supports_evex(), "");
6045 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6046 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6047 emit_int8(0x79);
6048 emit_int8((unsigned char)(0xC0 | encode));
6049 }
6050
// VPBROADCASTW, memory source: replicate the word at src into every 16-bit
// element of dst. dst sits in ModRM.reg for broadcast loads (see "swap").
6051 void Assembler::evpbroadcastw(XMMRegister dst, Address src, int vector_len) {
6052 assert(VM_Version::supports_evex(), "");
6053 assert(dst != xnoreg, "sanity");
6054 InstructionMark im(this);
6055 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
// T1S tuple with 16-bit element: EVEX disp8*N compression for a 2-byte load.
6056 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_16bit);
6057 // swap src<->dst for encoding
6058 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6059 emit_int8(0x79);
6060 emit_operand(dst, src);
6061 }
6062
6063 // duplicate 4-byte integer data from src into programmed locations in dest : requires AVX512VL
// VPBROADCASTD (opcode 0x58), xmm source: replicate the low dword of src into
// every 32-bit element of dst at the given vector length.
6064 void Assembler::evpbroadcastd(XMMRegister dst, XMMRegister src, int vector_len) {
6065 assert(VM_Version::supports_evex(), "");
6066 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6067 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6068 emit_int8(0x58);
6069 emit_int8((unsigned char)(0xC0 | encode));
6070 }
6071
// VPBROADCASTD, memory source: replicate the dword at src into every 32-bit
// element of dst. dst sits in ModRM.reg for broadcast loads (see "swap").
6072 void Assembler::evpbroadcastd(XMMRegister dst, Address src, int vector_len) {
6073 assert(VM_Version::supports_evex(), "");
6074 assert(dst != xnoreg, "sanity");
6075 InstructionMark im(this);
6076 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
// T1S tuple with 32-bit element: EVEX disp8*N compression for a 4-byte load.
6077 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
6078 // swap src<->dst for encoding
6079 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6080 emit_int8(0x58);
6081 emit_operand(dst, src);
6082 }
6083
6084 // duplicate 8-byte integer data from src into programmed locations in dest : requires AVX512VL
// VPBROADCASTQ (opcode 0x59, W1), xmm source: replicate the low qword of src
// into every 64-bit element of dst at the given vector length.
6085 void Assembler::evpbroadcastq(XMMRegister dst, XMMRegister src, int vector_len) {
6086 assert(VM_Version::supports_evex(), "");
6087 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6088 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6089 emit_int8(0x59);
6090 emit_int8((unsigned char)(0xC0 | encode));
6091 }
6092
// VPBROADCASTQ, memory source: replicate the qword at src into every 64-bit
// element of dst. dst sits in ModRM.reg for broadcast loads (see "swap").
6093 void Assembler::evpbroadcastq(XMMRegister dst, Address src, int vector_len) {
6094 assert(VM_Version::supports_evex(), "");
6095 assert(dst != xnoreg, "sanity");
6096 InstructionMark im(this);
6097 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
// T1S tuple with 64-bit element: EVEX disp8*N compression for an 8-byte load.
6098 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
6099 // swap src<->dst for encoding
6100 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6101 emit_int8(0x59);
6102 emit_operand(dst, src);
6103 }
6104
6105
6106 // scalar single/double precision replicate
6107
6108 // duplicate single precision data from src into programmed locations in dest : requires AVX512VL
// VBROADCASTSS (opcode 0x18), xmm source: replicate the low single-precision
// element of src into every 32-bit lane of dst.
6109 void Assembler::evpbroadcastss(XMMRegister dst, XMMRegister src, int vector_len) {
6110 assert(VM_Version::supports_evex(), "");
6111 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6112 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6113 emit_int8(0x18);
6114 emit_int8((unsigned char)(0xC0 | encode));
6115 }
6116
// VBROADCASTSS, memory source: replicate the single at src into every 32-bit
// lane of dst. dst sits in ModRM.reg for broadcast loads (see "swap").
6117 void Assembler::evpbroadcastss(XMMRegister dst, Address src, int vector_len) {
6118 assert(VM_Version::supports_evex(), "");
6119 assert(dst != xnoreg, "sanity");
6120 InstructionMark im(this);
6121 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
// T1S tuple with 32-bit element: EVEX disp8*N compression for a 4-byte load.
6122 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
6123 // swap src<->dst for encoding
6124 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6125 emit_int8(0x18);
6126 emit_operand(dst, src);
6127 }
6128
6129 // duplicate double precision data from src into programmed locations in dest : requires AVX512VL
// VBROADCASTSD (opcode 0x19, W1), xmm source: replicate the low double-
// precision element of src into every 64-bit lane of dst.
6130 void Assembler::evpbroadcastsd(XMMRegister dst, XMMRegister src, int vector_len) {
6131 assert(VM_Version::supports_evex(), "");
6132 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6133 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6134 emit_int8(0x19);
6135 emit_int8((unsigned char)(0xC0 | encode));
6136 }
6137
// VBROADCASTSD, memory source: replicate the double at src into every 64-bit
// lane of dst. dst sits in ModRM.reg for broadcast loads (see "swap").
6138 void Assembler::evpbroadcastsd(XMMRegister dst, Address src, int vector_len) {
6139 assert(VM_Version::supports_evex(), "");
6140 assert(dst != xnoreg, "sanity");
6141 InstructionMark im(this);
6142 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
// T1S tuple with 64-bit element: EVEX disp8*N compression for an 8-byte load.
6143 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
6144 // swap src<->dst for encoding
6145 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6146 emit_int8(0x19);
6147 emit_operand(dst, src);
6148 }
6149
6150
6151 // gpr source broadcast forms
6152
6153 // duplicate 1-byte integer data from src into programmed locations in dest : requires AVX512BW and AVX512VL
// VPBROADCASTB from a general-purpose register: replicate the low byte of
// src into every byte element of dst. The EVEX GPR-source form uses opcode
// 0x7A; when the prefix emitter fell back to VEX, the xmm-source opcode 0x78
// is emitted instead (the GPR encoding still selects the register via rm).
6154 void Assembler::evpbroadcastb(XMMRegister dst, Register src, int vector_len) {
6155 assert(VM_Version::supports_evex(), "");
6156 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6157 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6158 if (attributes.is_evex_instruction()) {
6159 emit_int8(0x7A);
6160 } else {
6161 emit_int8(0x78);
6162 }
6163 emit_int8((unsigned char)(0xC0 | encode));
6164 }
6165
6166 // duplicate 2-byte integer data from src into programmed locations in dest : requires AVX512BW and AVX512VL
// VPBROADCASTW from a general-purpose register: replicate the low word of
// src into every 16-bit element of dst. EVEX GPR-source opcode is 0x7B;
// VEX fallback emits the xmm-source opcode 0x79.
6167 void Assembler::evpbroadcastw(XMMRegister dst, Register src, int vector_len) {
6168 assert(VM_Version::supports_evex(), "");
6169 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6170 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6171 if (attributes.is_evex_instruction()) {
6172 emit_int8(0x7B);
6173 } else {
6174 emit_int8(0x79);
6175 }
6176 emit_int8((unsigned char)(0xC0 | encode));
6177 }
6178
6179 // duplicate 4-byte integer data from src into programmed locations in dest : requires AVX512VL
// VPBROADCASTD from a general-purpose register: replicate the low dword of
// src into every 32-bit element of dst. EVEX GPR-source opcode is 0x7C
// (W0 = dword); VEX fallback emits the xmm-source opcode 0x58.
6180 void Assembler::evpbroadcastd(XMMRegister dst, Register src, int vector_len) {
6181 assert(VM_Version::supports_evex(), "");
6182 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6183 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6184 if (attributes.is_evex_instruction()) {
6185 emit_int8(0x7C);
6186 } else {
6187 emit_int8(0x58);
6188 }
6189 emit_int8((unsigned char)(0xC0 | encode));
6190 }
6191
6192 // duplicate 8-byte integer data from src into programmed locations in dest : requires AVX512VL
// VPBROADCASTQ from a general-purpose register: replicate src into every
// 64-bit element of dst. Same GPR-source opcode 0x7C as the dword form —
// vex_w=true (W1) selects the quadword variant. VEX fallback emits the
// xmm-source opcode 0x59.
6193 void Assembler::evpbroadcastq(XMMRegister dst, Register src, int vector_len) {
6194 assert(VM_Version::supports_evex(), "");
6195 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6196 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6197 if (attributes.is_evex_instruction()) {
6198 emit_int8(0x7C);
6199 } else {
6200 emit_int8(0x59);
6201 }
6202 emit_int8((unsigned char)(0xC0 | encode));
6203 }
6204
6205
6206 // Carry-Less Multiplication Quadword
// PCLMULQDQ xmm, xmm, imm8 (66 0F 3A 44 /r ib): carry-less multiply of one
// qword from dst and one from src; imm8 bits 0 and 4 select which qword of
// each operand. Result overwrites dst (dst doubles as first source).
6207 void Assembler::pclmulqdq(XMMRegister dst, XMMRegister src, int mask) {
6208 assert(VM_Version::supports_clmul(), "");
6209 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6210 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6211 emit_int8(0x44);
6212 emit_int8((unsigned char)(0xC0 | encode));
6213 emit_int8((unsigned char)mask);
6214 }
6215
6216 // Carry-Less Multiplication Quadword
// VPCLMULQDQ xmm, xmm, xmm, imm8: three-operand VEX form of PCLMULQDQ —
// carry-less multiply of a qword of nds with a qword of src into dst;
// the mask immediate selects the source qwords (bits 0 and 4).
6217 void Assembler::vpclmulqdq(XMMRegister dst, XMMRegister nds, XMMRegister src, int mask) {
6218 assert(VM_Version::supports_avx() && VM_Version::supports_clmul(), "");
6219 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6220 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6221 emit_int8(0x44);
6222 emit_int8((unsigned char)(0xC0 | encode));
6223 emit_int8((unsigned char)mask);
6224 }
6225
// VZEROUPPER (VEX.128.0F 77): zero the upper bits of all ymm registers,
// avoiding the AVX/SSE transition penalty. The encode result is discarded —
// only the VEX prefix bytes it emits matter; there is no ModRM operand.
6226 void Assembler::vzeroupper() {
6227 assert(VM_Version::supports_avx(), "");
6228 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6229 (void)vex_prefix_and_encode(0, 0, 0, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
6230 emit_int8(0x77);
6231 }
6232
6233
6234 #ifndef _LP64
6235 // 32bit only pieces of the assembler
6236
6237 void Assembler::cmp_literal32(Register src1, int32_t imm32, RelocationHolder const& rspec) {
6238 // NO PREFIX AS NEVER 64BIT
6239 InstructionMark im(this);
6240 emit_int8((unsigned char)0x81);
6942 } else {
6943 assert((nds == dst) || (nds == src) || (nds == xnoreg), "wrong sse encoding");
6944 return rex_prefix_and_encode(dst_enc, src_enc, pre, opc, attributes->is_rex_vex_w());
6945 }
6946 }
6947
// VCMPPD (66 0F C2 /r ib): packed-double compare of nds and src into dst;
// cop is the comparison predicate, masked to its low 4 bits. Restricted to
// plain AVX encodings — the EVEX form is asserted away (it targets a mask
// register instead of an xmm destination).
6948 void Assembler::cmppd(XMMRegister dst, XMMRegister nds, XMMRegister src, int cop, int vector_len) {
6949 assert(VM_Version::supports_avx(), "");
6950 assert(!VM_Version::supports_evex(), "");
6951 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6952 int encode = simd_prefix_and_encode(dst, nds, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
6953 emit_int8((unsigned char)0xC2);
6954 emit_int8((unsigned char)(0xC0 | encode));
6955 emit_int8((unsigned char)(0xF & cop));
6956 }
6957
// Variable blend: selects elements from nds or src1 into dst under the
// control of src2, whose register number is carried in the high nibble of
// the trailing immediate (is4 encoding).
// NOTE(review): opcode 0x4B under 66 0F 3A is VBLENDVPD per the Intel SDM;
// VPBLENDD is opcode 0x02. The mnemonic here looks misnamed — confirm
// against callers before relying on dword-blend semantics.
6958 void Assembler::vpblendd(XMMRegister dst, XMMRegister nds, XMMRegister src1, XMMRegister src2, int vector_len) {
6959 assert(VM_Version::supports_avx(), "");
6960 assert(!VM_Version::supports_evex(), "");
6961 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6962 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src1->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6963 emit_int8((unsigned char)0x4B);
6964 emit_int8((unsigned char)(0xC0 | encode));
// imm8[7:4] = src2 register number (fourth operand); imm8[3:0] left zero.
6965 int src2_enc = src2->encoding();
6966 emit_int8((unsigned char)(0xF0 & src2_enc<<4));
6967 }
6968
6969
6970 #ifndef _LP64
6971
// 32-bit-only INC r32 short form (0x40 + rd). In 64-bit mode these opcodes
// are REX prefixes, which is why this lives under #ifndef _LP64.
6972 void Assembler::incl(Register dst) {
6973 // Don't use it directly. Use MacroAssembler::incrementl() instead.
6974 emit_int8(0x40 | dst->encoding());
6975 }
6976
// On 32-bit, the generic lea is just the 32-bit form.
6977 void Assembler::lea(Register dst, Address src) {
6978 leal(dst, src);
6979 }
6980
6981 void Assembler::mov_literal32(Address dst, int32_t imm32, RelocationHolder const& rspec) {
6982 InstructionMark im(this);
|