src/cpu/x86/vm/assembler_x86.cpp
rev 10837 : SHA256-AVX2

2352 
2353 void Assembler::movdqa(XMMRegister dst, XMMRegister src) {
2354   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2355   int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_128bit;
2356   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2357   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2358   emit_int8(0x6F);
2359   emit_int8((unsigned char)(0xC0 | encode));
2360 }
2361 
2362 void Assembler::movdqa(XMMRegister dst, Address src) {
2363   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2364   InstructionMark im(this);
2365   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2366   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2367   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2368   emit_int8(0x6F);
2369   emit_operand(dst, src);
2370 }
2371 
2372 void Assembler::movdqu(XMMRegister dst, Address src) {
2373   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2374   InstructionMark im(this);
2375   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2376   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2377   simd_prefix(dst, xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2378   emit_int8(0x6F);
2379   emit_operand(dst, src);
2380 }
2381 
2382 void Assembler::movdqu(XMMRegister dst, XMMRegister src) {
2383   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2384   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2385   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2386   emit_int8(0x6F);
2387   emit_int8((unsigned char)(0xC0 | encode));
2388 }
2389 
2390 void Assembler::movdqu(Address dst, XMMRegister src) {
2391   NOT_LP64(assert(VM_Version::supports_sse2(), ""));


3153   emit_int8((unsigned char)(0xC0 | encode));
3154 }
3155 
3156 void Assembler::vpackuswb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3157   assert(UseAVX > 0, "some form of AVX must be enabled");
3158   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3159   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3160   emit_int8(0x67);
3161   emit_int8((unsigned char)(0xC0 | encode));
3162 }
3163 
3164 void Assembler::vpermq(XMMRegister dst, XMMRegister src, int imm8, int vector_len) {
3165   assert(VM_Version::supports_avx2(), "");
3166   InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3167   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3168   emit_int8(0x00);
3169   emit_int8((unsigned char)(0xC0 | encode));
3170   emit_int8(imm8);
3171 }
3172 
3173 void Assembler::pause() {
3174   emit_int8((unsigned char)0xF3);
3175   emit_int8((unsigned char)0x90);
3176 }
3177 
3178 void Assembler::pcmpestri(XMMRegister dst, Address src, int imm8) {
3179   assert(VM_Version::supports_sse4_2(), "");
3180   InstructionMark im(this);
3181   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3182   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3183   emit_int8(0x61);
3184   emit_operand(dst, src);
3185   emit_int8(imm8);
3186 }
3187 
3188 void Assembler::pcmpestri(XMMRegister dst, XMMRegister src, int imm8) {
3189   assert(VM_Version::supports_sse4_2(), "");
3190   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3191   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3192   emit_int8(0x61);


3621 void Assembler::prefetchw(Address src) {
3622   assert(VM_Version::supports_3dnow_prefetch(), "must support");
3623   InstructionMark im(this);
3624   prefetch_prefix(src);
3625   emit_int8(0x0D);
3626   emit_operand(rcx, src); // rcx supplies the /1 opcode extension (prefetchw is 0F 0D /1)
3627 }
3628 
3629 void Assembler::prefix(Prefix p) {
3630   emit_int8(p);
3631 }
3632 
3633 void Assembler::pshufb(XMMRegister dst, XMMRegister src) {
3634   assert(VM_Version::supports_ssse3(), "");
3635   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
3636   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3637   emit_int8(0x00);
3638   emit_int8((unsigned char)(0xC0 | encode));
3639 }
3640 
3641 void Assembler::pshufb(XMMRegister dst, Address src) {
3642   assert(VM_Version::supports_ssse3(), "");
3643   InstructionMark im(this);
3644   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
3645   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
3646   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3647   emit_int8(0x00);
3648   emit_operand(dst, src);
3649 }
3650 
3651 void Assembler::pshufd(XMMRegister dst, XMMRegister src, int mode) {
3652   assert(isByte(mode), "invalid value");
3653   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3654   int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_128bit;
3655   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3656   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3657   emit_int8(0x70);
3658   emit_int8((unsigned char)(0xC0 | encode));
3659   emit_int8(mode & 0xFF);
3660 }
3661 
3662 void Assembler::pshufd(XMMRegister dst, Address src, int mode) {
3663   assert(isByte(mode), "invalid value");
3664   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3665   assert((UseAVX > 0), "SSE mode requires address alignment 16 bytes");
3666   InstructionMark im(this);
3667   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3668   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
3669   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3670   emit_int8(0x70);
3671   emit_operand(dst, src);
3672   emit_int8(mode & 0xFF);
3673 }
3674 


3682   emit_int8(mode & 0xFF);
3683 }
3684 
3685 void Assembler::pshuflw(XMMRegister dst, Address src, int mode) {
3686   assert(isByte(mode), "invalid value");
3687   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3688   assert((UseAVX > 0), "SSE mode requires address alignment 16 bytes");
3689   InstructionMark im(this);
3690   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
3691   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
3692   simd_prefix(dst, xnoreg, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
3693   emit_int8(0x70);
3694   emit_operand(dst, src);
3695   emit_int8(mode & 0xFF);
3696 }
3697 
3698 void Assembler::psrldq(XMMRegister dst, int shift) {
3699   // Shift right 128 bit value in dst XMMRegister by shift number of bytes.
3700   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3701   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
3702   // XMM3 is for /3 encoding: 66 0F 73 /3 ib
3703   int encode = simd_prefix_and_encode(xmm3, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3704   emit_int8(0x73);
3705   emit_int8((unsigned char)(0xC0 | encode));
3706   emit_int8(shift);
3707 }
3708 
3709 void Assembler::pslldq(XMMRegister dst, int shift) {
3710   // Shift left 128 bit value in dst XMMRegister by shift number of bytes.
3711   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3712   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
3713   // XMM7 is for /7 encoding: 66 0F 73 /7 ib
3714   int encode = simd_prefix_and_encode(xmm7, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3715   emit_int8(0x73);
3716   emit_int8((unsigned char)(0xC0 | encode));
3717   emit_int8(shift);
3718 }
3719 
3720 void Assembler::ptest(XMMRegister dst, Address src) {
3721   assert(VM_Version::supports_sse4_1(), "");
3722   assert((UseAVX > 0), "SSE mode requires address alignment 16 bytes");
3723   InstructionMark im(this);
3724   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3725   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3726   emit_int8(0x17);
3727   emit_operand(dst, src);
3728 }
3729 
3730 void Assembler::ptest(XMMRegister dst, XMMRegister src) {
3731   assert(VM_Version::supports_sse4_1(), "");
3732   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3733   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3734   emit_int8(0x17);
3735   emit_int8((unsigned char)(0xC0 | encode));
3736 }
3737 
3738 void Assembler::vptest(XMMRegister dst, Address src) {
3739   assert(VM_Version::supports_avx(), "");


3964   emit_arith(0x1B, 0xC0, dst, src);
3965 }
3966 
3967 void Assembler::setb(Condition cc, Register dst) {
3968   assert(0 <= cc && cc < 16, "illegal cc");
3969   int encode = prefix_and_encode(dst->encoding(), true);
3970   emit_int8(0x0F);
3971   emit_int8((unsigned char)(0x90 | cc));
3972   emit_int8((unsigned char)(0xC0 | encode));
3973 }
3974 
3975 void Assembler::palignr(XMMRegister dst, XMMRegister src, int imm8) {
3976   assert(VM_Version::supports_ssse3(), "");
3977   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ false);
3978   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3979   emit_int8((unsigned char)0x0F);
3980   emit_int8((unsigned char)(0xC0 | encode));
3981   emit_int8(imm8);
3982 }
3983 
3984 void Assembler::pblendw(XMMRegister dst, XMMRegister src, int imm8) {
3985   assert(VM_Version::supports_sse4_1(), "");
3986   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3987   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3988   emit_int8((unsigned char)0x0E);
3989   emit_int8((unsigned char)(0xC0 | encode));
3990   emit_int8(imm8);
3991 }
3992 
3993 void Assembler::sha1rnds4(XMMRegister dst, XMMRegister src, int imm8) {
3994   assert(VM_Version::supports_sha(), "");
3995   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3996   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F_3A, &attributes);
3997   emit_int8((unsigned char)0xCC);
3998   emit_int8((unsigned char)(0xC0 | encode));
3999   emit_int8((unsigned char)imm8);
4000 }
4001 
4002 void Assembler::sha1nexte(XMMRegister dst, XMMRegister src) {
4003   assert(VM_Version::supports_sha(), "");


8171     emit_int8(imm8);
8172   }
8173 }
8174 
8175 void Assembler::rorq(Register dst, int imm8) {
8176   assert(isShiftCount(imm8 >> 1), "illegal shift count");
8177   int encode = prefixq_and_encode(dst->encoding());
8178   if (imm8 == 1) {
8179     emit_int8((unsigned char)0xD1);
8180     emit_int8((unsigned char)(0xC8 | encode));
8181   } else {
8182     emit_int8((unsigned char)0xC1);
8183     emit_int8((unsigned char)(0xC8 | encode));
8184     emit_int8(imm8);
8185   }
8186 }
8187 
8188 void Assembler::rorxq(Register dst, Register src, int imm8) {
8189   assert(VM_Version::supports_bmi2(), "bit manipulation instructions not supported");
8190   InstructionAttr attributes(AVX_128bit, /* vex_w */ true, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
8191   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F_3A, &attributes);
8192   emit_int8((unsigned char)0xF0);
8193   emit_int8((unsigned char)(0xC0 | encode));
8194   emit_int8(imm8);
8195 }
8196 
8197 void Assembler::sarq(Register dst, int imm8) {
8198   assert(isShiftCount(imm8 >> 1), "illegal shift count");
8199   int encode = prefixq_and_encode(dst->encoding());
8200   if (imm8 == 1) {
8201     emit_int8((unsigned char)0xD1);
8202     emit_int8((unsigned char)(0xF8 | encode));
8203   } else {
8204     emit_int8((unsigned char)0xC1);
8205     emit_int8((unsigned char)(0xF8 | encode));
8206     emit_int8(imm8);
8207   }
8208 }
8209 
8210 void Assembler::sarq(Register dst) {


2352 
2353 void Assembler::movdqa(XMMRegister dst, XMMRegister src) {
2354   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2355   int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_128bit;
2356   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2357   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2358   emit_int8(0x6F);
2359   emit_int8((unsigned char)(0xC0 | encode));
2360 }
2361 
2362 void Assembler::movdqa(XMMRegister dst, Address src) {
2363   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2364   InstructionMark im(this);
2365   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2366   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2367   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2368   emit_int8(0x6F);
2369   emit_operand(dst, src);
2370 }
2371 
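       // movdqa m128, xmm (store form): 66 0F 7F /r; reg/rm roles swap, so src is
       // passed where the load form passes dst.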
2372 void Assembler::movdqa(Address dst, XMMRegister src) {
2373   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2374   InstructionMark im(this);
2375   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2376   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2377   simd_prefix(src, xnoreg, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2378   emit_int8(0x7F);
2379   emit_operand(src, dst);
2380 }
2381 
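       // vmovdqa ymm, m256: VEX.256.66.0F.WIG 6F /r, aligned 256-bit load.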
2382 void Assembler::vmovdqa(XMMRegister dst, Address src) {
2383   assert(UseAVX > 0, "");
2384   InstructionMark im(this);
2385   InstructionAttr attributes(AVX_256bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2386   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2387   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2388   emit_int8(0x6F);
2389   emit_operand(dst, src);
2390 }
2391 
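       // vmovdqa m256, ymm: VEX.256.66.0F.WIG 7F /r, aligned 256-bit store.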
2392 void Assembler::vmovdqa(Address dst, XMMRegister src) {
2393   assert(UseAVX > 0, "");
2394   InstructionMark im(this);
2395   InstructionAttr attributes(AVX_256bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2396   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2397   // swap src<->dst for encoding
2398   assert(src != xnoreg, "sanity");
2399   vex_prefix(dst, 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2400   emit_int8(0x7F);
2401   emit_operand(src, dst);
2402 }
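       // A hypothetical usage sketch (not part of this change), assuming the usual
       // '#define __ _masm->' stub convention, copying 32 aligned bytes:
       //
       //   __ vmovdqa(xmm0, Address(src, 0));   // aligned 32-byte load
       //   __ vmovdqa(Address(dst, 0), xmm0);   // aligned 32-byte store
       //
       // Both forms fault if the address is not 32-byte aligned; vmovdqu is the
       // unaligned alternative.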
2403 
2404 
2405 void Assembler::movdqu(XMMRegister dst, Address src) {
2406   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2407   InstructionMark im(this);
2408   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2409   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2410   simd_prefix(dst, xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2411   emit_int8(0x6F);
2412   emit_operand(dst, src);
2413 }
2414 
2415 void Assembler::movdqu(XMMRegister dst, XMMRegister src) {
2416   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2417   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2418   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2419   emit_int8(0x6F);
2420   emit_int8((unsigned char)(0xC0 | encode));
2421 }
2422 
2423 void Assembler::movdqu(Address dst, XMMRegister src) {
2424   NOT_LP64(assert(VM_Version::supports_sse2(), ""));


3186   emit_int8((unsigned char)(0xC0 | encode));
3187 }
3188 
3189 void Assembler::vpackuswb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3190   assert(UseAVX > 0, "some form of AVX must be enabled");
3191   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3192   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3193   emit_int8(0x67);
3194   emit_int8((unsigned char)(0xC0 | encode));
3195 }
3196 
3197 void Assembler::vpermq(XMMRegister dst, XMMRegister src, int imm8, int vector_len) {
3198   assert(VM_Version::supports_avx2(), "");
3199   InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3200   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3201   emit_int8(0x00);
3202   emit_int8((unsigned char)(0xC0 | encode));
3203   emit_int8(imm8);
3204 }
3205 
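       // vperm2i128 ymm1, ymm2, ymm3/m256, imm8: VEX.256.66.0F3A.W0 46 /r ib.
       // Each imm8 nibble selects one 128-bit lane of the concatenated sources for
       // the corresponding destination lane; setting bit 3 of a nibble zeroes it.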
3206 void Assembler::vperm2i128(XMMRegister dst,  XMMRegister nds, XMMRegister src, int imm8) {
3207   assert(VM_Version::supports_avx2(), "");
3208   InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3209   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3210   emit_int8(0x46);
3211   emit_int8((unsigned char)(0xC0 | encode));
3212   emit_int8(imm8);
3213 }
3214 
3215 
3216 void Assembler::pause() {
3217   emit_int8((unsigned char)0xF3);
3218   emit_int8((unsigned char)0x90);
3219 }
3220 
3221 void Assembler::pcmpestri(XMMRegister dst, Address src, int imm8) {
3222   assert(VM_Version::supports_sse4_2(), "");
3223   InstructionMark im(this);
3224   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3225   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3226   emit_int8(0x61);
3227   emit_operand(dst, src);
3228   emit_int8(imm8);
3229 }
3230 
3231 void Assembler::pcmpestri(XMMRegister dst, XMMRegister src, int imm8) {
3232   assert(VM_Version::supports_sse4_2(), "");
3233   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3234   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3235   emit_int8(0x61);


3664 void Assembler::prefetchw(Address src) {
3665   assert(VM_Version::supports_3dnow_prefetch(), "must support");
3666   InstructionMark im(this);
3667   prefetch_prefix(src);
3668   emit_int8(0x0D);
3669   emit_operand(rcx, src); // rcx supplies the /1 opcode extension (prefetchw is 0F 0D /1)
3670 }
3671 
3672 void Assembler::prefix(Prefix p) {
3673   emit_int8(p);
3674 }
3675 
3676 void Assembler::pshufb(XMMRegister dst, XMMRegister src) {
3677   assert(VM_Version::supports_ssse3(), "");
3678   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
3679   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3680   emit_int8(0x00);
3681   emit_int8((unsigned char)(0xC0 | encode));
3682 }
3683 
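       // vpshufb: VEX.66.0F38.WIG 00 /r; shuffles the bytes of nds using the byte
       // selectors in src (a set selector bit 7 zeroes the result byte).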
3684 void Assembler::vpshufb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3685   assert(vector_len == AVX_128bit ? VM_Version::supports_avx() : vector_len == AVX_256bit ? VM_Version::supports_avx2() : false, "128-bit form requires AVX, 256-bit form requires AVX2");
3686   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3687   int encode = simd_prefix_and_encode(dst, nds, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3688   emit_int8(0x00);
3689   emit_int8((unsigned char)(0xC0 | encode));
3690 }
3691 
3692 void Assembler::pshufb(XMMRegister dst, Address src) {
3693   assert(VM_Version::supports_ssse3(), "");
3694   InstructionMark im(this);
3695   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
3696   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
3697   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3698   emit_int8(0x00);
3699   emit_operand(dst, src);
3700 }
3701 
3702 void Assembler::pshufd(XMMRegister dst, XMMRegister src, int mode) {
3703   assert(isByte(mode), "invalid value");
3704   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3705   int vector_len = VM_Version::supports_evex() ? AVX_512bit : AVX_128bit;
3706   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3707   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3708   emit_int8(0x70);
3709   emit_int8((unsigned char)(0xC0 | encode));
3710   emit_int8(mode & 0xFF);
3711 }
3712 
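       // vpshufd: VEX.66.0F.WIG 70 /r ib; shuffles dwords within each 128-bit lane
       // according to the two-bit fields of mode.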
3713 void Assembler::vpshufd(XMMRegister dst, XMMRegister src, int mode, int vector_len) {
3714   assert(isByte(mode), "invalid value");
3715   assert(vector_len == AVX_128bit ? VM_Version::supports_avx() : vector_len == AVX_256bit ? VM_Version::supports_avx2() : false, "128-bit form requires AVX, 256-bit form requires AVX2");
3716   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3717   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3718   emit_int8(0x70);
3719   emit_int8((unsigned char)(0xC0 | encode));
3720   emit_int8(mode & 0xFF);
3721 }
3722 
3723 void Assembler::pshufd(XMMRegister dst, Address src, int mode) {
3724   assert(isByte(mode), "invalid value");
3725   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3726   assert((UseAVX > 0), "SSE mode requires address alignment 16 bytes");
3727   InstructionMark im(this);
3728   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3729   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
3730   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3731   emit_int8(0x70);
3732   emit_operand(dst, src);
3733   emit_int8(mode & 0xFF);
3734 }
3735 


3743   emit_int8(mode & 0xFF);
3744 }
3745 
3746 void Assembler::pshuflw(XMMRegister dst, Address src, int mode) {
3747   assert(isByte(mode), "invalid value");
3748   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3749   assert((UseAVX > 0), "SSE mode requires address alignment 16 bytes");
3750   InstructionMark im(this);
3751   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
3752   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
3753   simd_prefix(dst, xnoreg, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
3754   emit_int8(0x70);
3755   emit_operand(dst, src);
3756   emit_int8(mode & 0xFF);
3757 }
3758 
3759 void Assembler::psrldq(XMMRegister dst, int shift) {
3760   // Shift right 128 bit value in dst XMMRegister by shift number of bytes.
3761   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3762   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
       // XMM3 is for /3 encoding: 66 0F 73 /3 ib
3763   int encode = simd_prefix_and_encode(xmm3, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3764   emit_int8(0x73);
3765   emit_int8((unsigned char)(0xC0 | encode));
3766   emit_int8(shift);
3767 }
3768 
3769 void Assembler::vpsrldq(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
3770   // Shift right 128 bit value in src XMMRegister by shift number of bytes; result goes to dst.
3771   assert(vector_len == AVX_128bit ? VM_Version::supports_avx() : vector_len == AVX_256bit ? VM_Version::supports_avx2() : false, "128-bit form requires AVX, 256-bit form requires AVX2");
3772   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3773   // XMM3 is for /3 encoding: 66 0F 73 /3 ib
3774   int encode = simd_prefix_and_encode(xmm3, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3775   emit_int8(0x73);
3776   emit_int8((unsigned char)(0xC0 | encode));
3777   emit_int8(shift);
3778 }
3779 
3780 void Assembler::pslldq(XMMRegister dst, int shift) {
3781   // Shift left 128 bit value in dst XMMRegister by shift number of bytes.
3782   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3783   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
3784   // XMM7 is for /7 encoding: 66 0F 73 /7 ib
3785   int encode = simd_prefix_and_encode(xmm7, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3786   emit_int8(0x73);
3787   emit_int8((unsigned char)(0xC0 | encode));
3788   emit_int8(shift);
3789 }
3790 
3791 void Assembler::vpslldq(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
3792   // Shift left 128 bit value in src XMMRegister by shift number of bytes; result goes to dst.
3793   assert(vector_len == AVX_128bit ? VM_Version::supports_avx() : vector_len == AVX_256bit ? VM_Version::supports_avx2() : false, "128-bit form requires AVX, 256-bit form requires AVX2");
3794   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3795   // XMM7 is for /7 encoding: 66 0F 73 /7 ib
3796   int encode = simd_prefix_and_encode(xmm7, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3797   emit_int8(0x73);
3798   emit_int8((unsigned char)(0xC0 | encode));
3799   emit_int8(shift);
3800 }
3801 
3802 void Assembler::ptest(XMMRegister dst, Address src) {
3803   assert(VM_Version::supports_sse4_1(), "");
3804   assert((UseAVX > 0), "SSE mode requires address alignment 16 bytes");
3805   InstructionMark im(this);
3806   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3807   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3808   emit_int8(0x17);
3809   emit_operand(dst, src);
3810 }
3811 
3812 void Assembler::ptest(XMMRegister dst, XMMRegister src) {
3813   assert(VM_Version::supports_sse4_1(), "");
3814   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3815   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3816   emit_int8(0x17);
3817   emit_int8((unsigned char)(0xC0 | encode));
3818 }
3819 
3820 void Assembler::vptest(XMMRegister dst, Address src) {
3821   assert(VM_Version::supports_avx(), "");


4046   emit_arith(0x1B, 0xC0, dst, src);
4047 }
4048 
4049 void Assembler::setb(Condition cc, Register dst) {
4050   assert(0 <= cc && cc < 16, "illegal cc");
4051   int encode = prefix_and_encode(dst->encoding(), true);
4052   emit_int8(0x0F);
4053   emit_int8((unsigned char)(0x90 | cc));
4054   emit_int8((unsigned char)(0xC0 | encode));
4055 }
4056 
4057 void Assembler::palignr(XMMRegister dst, XMMRegister src, int imm8) {
4058   assert(VM_Version::supports_ssse3(), "");
4059   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ false);
4060   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
4061   emit_int8((unsigned char)0x0F);
4062   emit_int8((unsigned char)(0xC0 | encode));
4063   emit_int8(imm8);
4064 }
4065 
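       // vpalignr: VEX.66.0F3A.WIG 0F /r ib; concatenates nds:src within each
       // 128-bit lane and extracts a byte window shifted right by imm8.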
4066 void Assembler::vpalignr(XMMRegister dst, XMMRegister nds, XMMRegister src, int imm8, int vector_len) {
4067   assert(vector_len == AVX_128bit ? VM_Version::supports_avx() : vector_len == AVX_256bit ? VM_Version::supports_avx2() : false, "128-bit form requires AVX, 256-bit form requires AVX2");
4068   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ true);
4069   int encode = simd_prefix_and_encode(dst, nds, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
4070   emit_int8((unsigned char)0x0F);
4071   emit_int8((unsigned char)(0xC0 | encode));
4072   emit_int8(imm8);
4073 }
4074 
4075 void Assembler::pblendw(XMMRegister dst, XMMRegister src, int imm8) {
4076   assert(VM_Version::supports_sse4_1(), "");
4077   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
4078   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
4079   emit_int8((unsigned char)0x0E);
4080   emit_int8((unsigned char)(0xC0 | encode));
4081   emit_int8(imm8);
4082 }
4083 
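       // sha1rnds4 xmm1, xmm2/m128, imm8: NP 0F 3A CC /r ib; performs four SHA-1
       // rounds, with imm8[1:0] selecting the round group (logic function and K).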
4084 void Assembler::sha1rnds4(XMMRegister dst, XMMRegister src, int imm8) {
4085   assert(VM_Version::supports_sha(), "");
4086   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
4087   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F_3A, &attributes);
4088   emit_int8((unsigned char)0xCC);
4089   emit_int8((unsigned char)(0xC0 | encode));
4090   emit_int8((unsigned char)imm8);
4091 }
4092 
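       // sha1nexte xmm1, xmm2/m128: NP 0F 38 C8 /r; derives the next SHA-1 state
       // variable E from xmm1 and adds it to the scheduled dwords in xmm2.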
4093 void Assembler::sha1nexte(XMMRegister dst, XMMRegister src) {
4094   assert(VM_Version::supports_sha(), "");


8262     emit_int8(imm8);
8263   }
8264 }
8265 
8266 void Assembler::rorq(Register dst, int imm8) {
8267   assert(isShiftCount(imm8 >> 1), "illegal shift count");
8268   int encode = prefixq_and_encode(dst->encoding());
8269   if (imm8 == 1) {
8270     emit_int8((unsigned char)0xD1);
8271     emit_int8((unsigned char)(0xC8 | encode));
8272   } else {
8273     emit_int8((unsigned char)0xC1);
8274     emit_int8((unsigned char)(0xC8 | encode));
8275     emit_int8(imm8);
8276   }
8277 }
8278 
8279 void Assembler::rorxq(Register dst, Register src, int imm8) {
8280   assert(VM_Version::supports_bmi2(), "bit manipulation instructions not supported");
8281   InstructionAttr attributes(AVX_128bit, /* vex_w */ true, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
8282   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F_3A, &attributes);
8283   emit_int8((unsigned char)0xF0);
8284   emit_int8((unsigned char)(0xC0 | encode));
8285   emit_int8(imm8);
8286 }
8287 
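       // rorxd dst, src, imm8: VEX.LZ.F2.0F3A.W0 F0 /r ib (BMI2); 32-bit rotate
       // right that leaves RFLAGS untouched, e.g. dst = (src >> 8) | (src << 24)
       // for imm8 == 8.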
8288 void Assembler::rorxd(Register dst, Register src, int imm8) {
8289   assert(VM_Version::supports_bmi2(), "bit manipulation instructions not supported");
8290   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
8291   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F_3A, &attributes);
8292   emit_int8((unsigned char)0xF0);
8293   emit_int8((unsigned char)(0xC0 | encode));
8294   emit_int8(imm8);
8295 }
8296 
8297 void Assembler::sarq(Register dst, int imm8) {
8298   assert(isShiftCount(imm8 >> 1), "illegal shift count");
8299   int encode = prefixq_and_encode(dst->encoding());
8300   if (imm8 == 1) {
8301     emit_int8((unsigned char)0xD1);
8302     emit_int8((unsigned char)(0xF8 | encode));
8303   } else {
8304     emit_int8((unsigned char)0xC1);
8305     emit_int8((unsigned char)(0xF8 | encode));
8306     emit_int8(imm8);
8307   }
8308 }
8309 
8310 void Assembler::sarq(Register dst) {
