1877 emit_int8((unsigned char)(0xC0 | encode));
1878 }
1879
// CVTTSS2SI r32, xmm — truncating scalar float->int32 conversion (F3 0F 2C /r).
1880 void Assembler::cvttss2sil(Register dst, XMMRegister src) {
1881 NOT_LP64(assert(VM_Version::supports_sse(), ""));
1882 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1883 int encode = simd_prefix_and_encode(as_XMMRegister(dst->encoding()), xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes); // GPR dst is routed through the XMM prefix helper by reusing its encoding number
1884 emit_int8(0x2C);
1885 emit_int8((unsigned char)(0xC0 | encode)); // ModRM: register-direct form
1886 }
1887
// CVTTPD2DQ xmm, xmm — truncating packed double->int32 conversion (66 0F E6 /r).
1888 void Assembler::cvttpd2dq(XMMRegister dst, XMMRegister src) {
1889 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1890 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_128bit; // widen to 512-bit form when EVEX is available but VL is not
1891 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
1892 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
1893 emit_int8((unsigned char)0xE6);
1894 emit_int8((unsigned char)(0xC0 | encode));
1895 }
1896
// DECL m32 — 32-bit memory decrement (FF /1).
1897 void Assembler::decl(Address dst) {
1898 // Don't use it directly. Use MacroAssembler::decrement() instead.
1899 InstructionMark im(this);
1900 prefix(dst);
1901 emit_int8((unsigned char)0xFF);
1902 emit_operand(rcx, dst); // rcx encodes the /1 opcode-extension field of FF, not an operand
1903 }
1904
// DIVSD xmm, m64 — scalar double divide from memory (F2 0F 5E /r).
1905 void Assembler::divsd(XMMRegister dst, Address src) {
1906 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1907 InstructionMark im(this);
1908 InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1909 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit); // EVEX disp8 scaling: tuple-1-scalar, 64-bit element
1910 attributes.set_rex_vex_w_reverted();
1911 simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes); // dst doubles as the nds (first source) register
1912 emit_int8(0x5E);
1913 emit_operand(dst, src);
1914 }
1915
1916 void Assembler::divsd(XMMRegister dst, XMMRegister src) {
3399 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3400 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3401 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3402 emit_int8(0x67);
3403 emit_int8((unsigned char)(0xC0 | encode));
3404 }
3405
// VPACKUSWB — pack words to unsigned-saturated bytes, AVX 3-operand form (66 0F 67 /r).
3406 void Assembler::vpackuswb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3407 assert(UseAVX > 0, "some form of AVX must be enabled");
3408 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true); // byte/word op: needs AVX512BW for the EVEX form
3409 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3410 emit_int8(0x67);
3411 emit_int8((unsigned char)(0xC0 | encode));
3412 }
3413
// VPERMQ ymm, ymm, imm8 (AVX2): VEX.256.66.0F3A.W1 00 /r ib — qword permute by immediate.
3414 void Assembler::vpermq(XMMRegister dst, XMMRegister src, int imm8, int vector_len) {
3415 assert(VM_Version::supports_avx2(), "");
3416 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
3417 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3418 emit_int8(0x00);
3419 emit_int8((unsigned char)(0xC0 | encode)); // cast before narrowing: 0xC0|encode > 127, matches every other emitter in this file
3420 emit_int8(imm8);
3421 }
3422
// VPERM2I128 ymm, ymm, ymm, imm8 (AVX2): VEX.256.66.0F3A 46 /r ib — 128-bit lane permute/select.
3423 void Assembler::vperm2i128(XMMRegister dst, XMMRegister nds, XMMRegister src, int imm8) {
3424 assert(VM_Version::supports_avx2(), "");
3425 InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false); // VEX-only instruction, no EVEX form
3426 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3427 emit_int8(0x46);
3428 emit_int8((unsigned char)(0xC0 | encode)); // cast before narrowing: 0xC0|encode > 127, matches surrounding emitters
3429 emit_int8(imm8);
3430 }
3431
// VPERM2F128 ymm, ymm, ymm, imm8 (AVX): VEX.256.66.0F3A 06 /r ib — 128-bit lane permute/select (FP domain).
3432 void Assembler::vperm2f128(XMMRegister dst, XMMRegister nds, XMMRegister src, int imm8) {
3433 assert(VM_Version::supports_avx(), "");
3434 InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false); // VEX-only instruction, no EVEX form
3435 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3436 emit_int8(0x06);
3437 emit_int8((unsigned char)(0xC0 | encode)); // cast before narrowing: 0xC0|encode > 127, matches surrounding emitters
3438 emit_int8(imm8);
3439 }
3440
3441 void Assembler::evpermi2q(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3442 assert(VM_Version::supports_evex(), "");
3867 }
3868
// PMOVZXBW xmm, m64 — zero-extend 8 bytes to 8 words (66 0F 38 30 /r).
3869 void Assembler::pmovzxbw(XMMRegister dst, Address src) {
3870 assert(VM_Version::supports_sse4_1(), "");
3871 InstructionMark im(this);
3872 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3873 attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit); // half-vector memory tuple for disp8 scaling
3874 simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3875 emit_int8(0x30);
3876 emit_operand(dst, src);
3877 }
3878
// PMOVZXBW xmm, xmm — zero-extend low 8 bytes to 8 words (66 0F 38 30 /r).
3879 void Assembler::pmovzxbw(XMMRegister dst, XMMRegister src) {
3880 assert(VM_Version::supports_sse4_1(), "");
3881 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3882 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3883 emit_int8(0x30);
3884 emit_int8((unsigned char)(0xC0 | encode));
3885 }
3886
// VPMOVZXBW — zero-extend bytes from memory to words, VEX/EVEX encoded (66 0F 38 30 /r).
3887 void Assembler::vpmovzxbw(XMMRegister dst, Address src, int vector_len) {
3888 assert(VM_Version::supports_avx(), "");
3889 InstructionMark im(this);
3890 assert(dst != xnoreg, "sanity");
3891 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3892 attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit); // half-vector memory tuple for disp8 scaling
3893 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3894 emit_int8(0x30);
3895 emit_operand(dst, src);
3896 }
3897
// VPMOVZXBW — zero-extend bytes to words, register form; CPU requirement scales with vector_len.
3898 void Assembler::vpmovzxbw(XMMRegister dst, XMMRegister src, int vector_len) {
3899 assert(vector_len == AVX_128bit? VM_Version::supports_avx() :
3900 vector_len == AVX_256bit? VM_Version::supports_avx2() :
3901 vector_len == AVX_512bit? VM_Version::supports_avx512bw() : 0, "");
3902 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3903 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3904 emit_int8(0x30);
3905 emit_int8((unsigned char) (0xC0 | encode));
3906 }
3907
3908
// EVPMOVZXBW with opmask — EVEX-encoded zero-extend bytes->words under mask register k.
3909 void Assembler::evpmovzxbw(XMMRegister dst, KRegister mask, Address src, int vector_len) {
3910 assert(VM_Version::supports_avx512vlbw(), "");
3911 assert(dst != xnoreg, "sanity");
3912 InstructionMark im(this);
3913 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true); // no_mask_reg=false: an opmask is supplied
3914 attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit);
3915 attributes.set_embedded_opmask_register_specifier(mask); // mask goes into the EVEX aaa field
3916 attributes.set_is_evex_instruction();
3917 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3918 emit_int8(0x30);
3919 emit_operand(dst, src);
3920 }
3921 void Assembler::evpmovwb(Address dst, XMMRegister src, int vector_len) {
3922 assert(VM_Version::supports_avx512vlbw(), "");
3923 assert(src != xnoreg, "sanity");
3924 InstructionMark im(this);
3925 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
3926 attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit);
3927 attributes.set_is_evex_instruction();
6260 emit_int8((unsigned char)(0xC0 | encode));
6261 emit_int8(shift & 0xFF);
6262 }
6263
// VPSRAW dst, src, xmm-shift — arithmetic right shift of words by count in XMM (66 0F E1 /r).
6264 void Assembler::vpsraw(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
6265 assert(UseAVX > 0, "requires some form of AVX");
6266 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6267 int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes); // src occupies the vvvv (nds) slot
6268 emit_int8((unsigned char)0xE1);
6269 emit_int8((unsigned char)(0xC0 | encode));
6270 }
6271
// VPSRAD dst, src, xmm-shift — arithmetic right shift of dwords by count in XMM (66 0F E2 /r).
6272 void Assembler::vpsrad(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
6273 assert(UseAVX > 0, "requires some form of AVX");
6274 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
6275 int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes); // src occupies the vvvv (nds) slot
6276 emit_int8((unsigned char)0xE2);
6277 emit_int8((unsigned char)(0xC0 | encode));
6278 }
6279
6280
6281 // logical operations packed integers
// PAND xmm, xmm — bitwise AND of packed integers (66 0F DB /r).
6282 void Assembler::pand(XMMRegister dst, XMMRegister src) {
6283 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
6284 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
6285 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes); // dst doubles as first source
6286 emit_int8((unsigned char)0xDB);
6287 emit_int8((unsigned char)(0xC0 | encode));
6288 }
6289
// VPAND dst, nds, src — 3-operand bitwise AND (66 0F DB /r).
6290 void Assembler::vpand(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
6291 assert(UseAVX > 0, "requires some form of AVX");
6292 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
6293 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
6294 emit_int8((unsigned char)0xDB);
6295 emit_int8((unsigned char)(0xC0 | encode));
6296 }
6297
6298 void Assembler::vpand(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
6299 assert(UseAVX > 0, "requires some form of AVX");
|
1877 emit_int8((unsigned char)(0xC0 | encode));
1878 }
1879
// CVTTSS2SI r32, xmm — truncating scalar float->int32 conversion (F3 0F 2C /r).
1880 void Assembler::cvttss2sil(Register dst, XMMRegister src) {
1881 NOT_LP64(assert(VM_Version::supports_sse(), ""));
1882 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1883 int encode = simd_prefix_and_encode(as_XMMRegister(dst->encoding()), xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes); // GPR dst routed through the XMM prefix helper via its encoding number
1884 emit_int8(0x2C);
1885 emit_int8((unsigned char)(0xC0 | encode));
1886 }
1887
// CVTTPD2DQ xmm, xmm — truncating packed double->int32 conversion (66 0F E6 /r).
1888 void Assembler::cvttpd2dq(XMMRegister dst, XMMRegister src) {
1889 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1890 int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_128bit; // widen to 512-bit form when EVEX lacks VL
1891 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
1892 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
1893 emit_int8((unsigned char)0xE6);
1894 emit_int8((unsigned char)(0xC0 | encode));
1895 }
1896
// PABSB xmm, xmm — packed absolute value of signed bytes (SSSE3, 66 0F 38 1C /r).
1897 void Assembler::pabsb(XMMRegister dst, XMMRegister src) {
1898 assert(VM_Version::supports_ssse3(), "");
1899 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
1900 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
1901 emit_int8(0x1C);
1902 emit_int8((unsigned char)(0xC0 | encode));
1903 }
1904
// PABSW xmm, xmm — packed absolute value of signed words (SSSE3, 66 0F 38 1D /r).
1905 void Assembler::pabsw(XMMRegister dst, XMMRegister src) {
1906 assert(VM_Version::supports_ssse3(), "");
1907 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
1908 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
1909 emit_int8(0x1D);
1910 emit_int8((unsigned char)(0xC0 | encode));
1911 }
1912
// PABSD xmm, xmm — packed absolute value of signed dwords (SSSE3, 66 0F 38 1E /r).
1913 void Assembler::pabsd(XMMRegister dst, XMMRegister src) {
1914 assert(VM_Version::supports_ssse3(), "");
1915 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
1916 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
1917 emit_int8(0x1E);
1918 emit_int8((unsigned char)(0xC0 | encode));
1919 }
1920
// VPABSB — packed absolute value of bytes; CPU requirement scales with vector_len (512-bit needs AVX512BW).
1921 void Assembler::vpabsb(XMMRegister dst, XMMRegister src, int vector_len) {
1922 assert(vector_len == AVX_128bit? VM_Version::supports_avx() :
1923 vector_len == AVX_256bit? VM_Version::supports_avx2() :
1924 vector_len == AVX_512bit? VM_Version::supports_avx512bw() : 0, "");
1925 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
1926 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
1927 emit_int8((unsigned char)0x1C);
1928 emit_int8((unsigned char)(0xC0 | encode));
1929 }
1930
// VPABSW — packed absolute value of words; CPU requirement scales with vector_len (512-bit needs AVX512BW).
1931 void Assembler::vpabsw(XMMRegister dst, XMMRegister src, int vector_len) {
1932 assert(vector_len == AVX_128bit? VM_Version::supports_avx() :
1933 vector_len == AVX_256bit? VM_Version::supports_avx2() :
1934 vector_len == AVX_512bit? VM_Version::supports_avx512bw() : 0, "");
1935 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
1936 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
1937 emit_int8((unsigned char)0x1D);
1938 emit_int8((unsigned char)(0xC0 | encode));
1939 }
1940
// VPABSD — packed absolute value of dwords; 512-bit form only needs base EVEX (not BW).
1941 void Assembler::vpabsd(XMMRegister dst, XMMRegister src, int vector_len) {
1942 assert(vector_len == AVX_128bit? VM_Version::supports_avx() :
1943 vector_len == AVX_256bit? VM_Version::supports_avx2() :
1944 vector_len == AVX_512bit? VM_Version::supports_evex() : 0, "");
1945 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
1946 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
1947 emit_int8((unsigned char)0x1E);
1948 emit_int8((unsigned char)(0xC0 | encode));
1949 }
1950
// EVPABSQ — packed absolute value of qwords, EVEX-only (W1 0F 38 1F /r), requires AVX-512.
1951 void Assembler::evpabsq(XMMRegister dst, XMMRegister src, int vector_len) {
1952 assert(UseAVX > 2, "");
1953 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
1954 attributes.set_is_evex_instruction(); // qword form has no VEX encoding
1955 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
1956 emit_int8((unsigned char)0x1F);
1957 emit_int8((unsigned char)(0xC0 | encode));
1958 }
1959
// DECL m32 — 32-bit memory decrement (FF /1).
1960 void Assembler::decl(Address dst) {
1961 // Don't use it directly. Use MacroAssembler::decrement() instead.
1962 InstructionMark im(this);
1963 prefix(dst);
1964 emit_int8((unsigned char)0xFF);
1965 emit_operand(rcx, dst); // rcx encodes the /1 opcode-extension field of FF, not an operand
1966 }
1967
// DIVSD xmm, m64 — scalar double divide from memory (F2 0F 5E /r).
1968 void Assembler::divsd(XMMRegister dst, Address src) {
1969 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1970 InstructionMark im(this);
1971 InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1972 attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit); // tuple-1-scalar, 64-bit element for disp8 scaling
1973 attributes.set_rex_vex_w_reverted();
1974 simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes); // dst doubles as the nds (first source) register
1975 emit_int8(0x5E);
1976 emit_operand(dst, src);
1977 }
1978
1979 void Assembler::divsd(XMMRegister dst, XMMRegister src) {
3462 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3463 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3464 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3465 emit_int8(0x67);
3466 emit_int8((unsigned char)(0xC0 | encode));
3467 }
3468
// VPACKUSWB — pack words to unsigned-saturated bytes, AVX 3-operand form (66 0F 67 /r).
3469 void Assembler::vpackuswb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3470 assert(UseAVX > 0, "some form of AVX must be enabled");
3471 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true); // byte/word op: EVEX form needs AVX512BW
3472 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3473 emit_int8(0x67);
3474 emit_int8((unsigned char)(0xC0 | encode));
3475 }
3476
// VPERMQ ymm, ymm, imm8 (AVX2): VEX.256.66.0F3A.W1 00 /r ib — qword permute by immediate.
3477 void Assembler::vpermq(XMMRegister dst, XMMRegister src, int imm8, int vector_len) {
3478 assert(VM_Version::supports_avx2(), "");
3479 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
3480 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3481 emit_int8(0x00);
3482 emit_int8((unsigned char)(0xC0 | encode));
3483 emit_int8(imm8);
3484 }
3485
// VPERMQ dst, nds, src — variable qword permute, EVEX-only form (W1 0F 38 36 /r), requires AVX-512F.
3486 void Assembler::vpermq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3487 assert(UseAVX > 2, "requires AVX512F");
3488 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
3489 attributes.set_is_evex_instruction();
3490 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3491 emit_int8((unsigned char)0x36);
3492 emit_int8((unsigned char)(0xC0 | encode));
3493 }
3494
// VPERM2I128 ymm, ymm, ymm, imm8 (AVX2): VEX.256.66.0F3A 46 /r ib — 128-bit lane permute/select.
3495 void Assembler::vperm2i128(XMMRegister dst, XMMRegister nds, XMMRegister src, int imm8) {
3496 assert(VM_Version::supports_avx2(), "");
3497 InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false); // VEX-only instruction, no EVEX form
3498 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3499 emit_int8(0x46);
3500 emit_int8((unsigned char)(0xC0 | encode)); // cast before narrowing: 0xC0|encode > 127, matches surrounding emitters
3501 emit_int8(imm8);
3502 }
3503
// VPERM2F128 ymm, ymm, ymm, imm8 (AVX): VEX.256.66.0F3A 06 /r ib — 128-bit lane permute/select (FP domain).
3504 void Assembler::vperm2f128(XMMRegister dst, XMMRegister nds, XMMRegister src, int imm8) {
3505 assert(VM_Version::supports_avx(), "");
3506 InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false); // VEX-only instruction, no EVEX form
3507 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3508 emit_int8(0x06);
3509 emit_int8((unsigned char)(0xC0 | encode)); // cast before narrowing: 0xC0|encode > 127, matches surrounding emitters
3510 emit_int8(imm8);
3511 }
3512
3513 void Assembler::evpermi2q(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3514 assert(VM_Version::supports_evex(), "");
3939 }
3940
// PMOVZXBW xmm, m64 — zero-extend 8 bytes to 8 words (66 0F 38 30 /r).
3941 void Assembler::pmovzxbw(XMMRegister dst, Address src) {
3942 assert(VM_Version::supports_sse4_1(), "");
3943 InstructionMark im(this);
3944 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3945 attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit); // half-vector memory tuple for disp8 scaling
3946 simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3947 emit_int8(0x30);
3948 emit_operand(dst, src);
3949 }
3950
// PMOVZXBW xmm, xmm — zero-extend low 8 bytes to 8 words (66 0F 38 30 /r).
3951 void Assembler::pmovzxbw(XMMRegister dst, XMMRegister src) {
3952 assert(VM_Version::supports_sse4_1(), "");
3953 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3954 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3955 emit_int8(0x30);
3956 emit_int8((unsigned char)(0xC0 | encode));
3957 }
3958
// PMOVSXBW xmm, xmm — sign-extend low 8 bytes to 8 words (66 0F 38 20 /r).
3959 void Assembler::pmovsxbw(XMMRegister dst, XMMRegister src) {
3960 assert(VM_Version::supports_sse4_1(), "");
3961 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3962 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3963 emit_int8(0x20);
3964 emit_int8((unsigned char)(0xC0 | encode));
3965 }
3966
// VPMOVZXBW — zero-extend bytes from memory to words, VEX/EVEX encoded (66 0F 38 30 /r).
3967 void Assembler::vpmovzxbw(XMMRegister dst, Address src, int vector_len) {
3968 assert(VM_Version::supports_avx(), "");
3969 InstructionMark im(this);
3970 assert(dst != xnoreg, "sanity");
3971 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3972 attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit); // half-vector memory tuple for disp8 scaling
3973 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3974 emit_int8(0x30);
3975 emit_operand(dst, src);
3976 }
3977
// VPMOVZXBW — zero-extend bytes to words, register form; CPU requirement scales with vector_len.
3978 void Assembler::vpmovzxbw(XMMRegister dst, XMMRegister src, int vector_len) {
3979 assert(vector_len == AVX_128bit? VM_Version::supports_avx() :
3980 vector_len == AVX_256bit? VM_Version::supports_avx2() :
3981 vector_len == AVX_512bit? VM_Version::supports_avx512bw() : 0, "");
3982 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3983 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3984 emit_int8(0x30);
3985 emit_int8((unsigned char) (0xC0 | encode));
3986 }
3987
// VPMOVSXBW — sign-extend bytes to words, register form (66 0F 38 20 /r).
3988 void Assembler::vpmovsxbw(XMMRegister dst, XMMRegister src, int vector_len) {
3989 assert(vector_len == AVX_128bit? VM_Version::supports_avx() :
3990 vector_len == AVX_256bit? VM_Version::supports_avx2() :
3991 vector_len == AVX_512bit? VM_Version::supports_avx512bw() : 0, "");
3992 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
3993 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes); // NOTE(review): uses simd_prefix_and_encode while vpmovzxbw uses vex_prefix_and_encode — presumably equivalent for register operands; confirm
3994 emit_int8(0x20);
3995 emit_int8((unsigned char)(0xC0 | encode));
3996 }
3997
// EVPMOVZXBW with opmask — EVEX-encoded zero-extend bytes->words under mask register k.
3998 void Assembler::evpmovzxbw(XMMRegister dst, KRegister mask, Address src, int vector_len) {
3999 assert(VM_Version::supports_avx512vlbw(), "");
4000 assert(dst != xnoreg, "sanity");
4001 InstructionMark im(this);
4002 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true); // no_mask_reg=false: an opmask is supplied
4003 attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit);
4004 attributes.set_embedded_opmask_register_specifier(mask); // mask goes into the EVEX aaa field
4005 attributes.set_is_evex_instruction();
4006 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4007 emit_int8(0x30);
4008 emit_operand(dst, src);
4009 }
4010 void Assembler::evpmovwb(Address dst, XMMRegister src, int vector_len) {
4011 assert(VM_Version::supports_avx512vlbw(), "");
4012 assert(src != xnoreg, "sanity");
4013 InstructionMark im(this);
4014 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
4015 attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit);
4016 attributes.set_is_evex_instruction();
6349 emit_int8((unsigned char)(0xC0 | encode));
6350 emit_int8(shift & 0xFF);
6351 }
6352
// VPSRAW dst, src, xmm-shift — arithmetic right shift of words by count in XMM (66 0F E1 /r).
6353 void Assembler::vpsraw(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
6354 assert(UseAVX > 0, "requires some form of AVX");
6355 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6356 int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes); // src occupies the vvvv (nds) slot
6357 emit_int8((unsigned char)0xE1);
6358 emit_int8((unsigned char)(0xC0 | encode));
6359 }
6360
// VPSRAD dst, src, xmm-shift — arithmetic right shift of dwords by count in XMM (66 0F E2 /r).
6361 void Assembler::vpsrad(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
6362 assert(UseAVX > 0, "requires some form of AVX");
6363 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
6364 int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes); // src occupies the vvvv (nds) slot
6365 emit_int8((unsigned char)0xE2);
6366 emit_int8((unsigned char)(0xC0 | encode));
6367 }
6368
// EVPSRAQ dst, src, imm8 — arithmetic right shift of qwords by immediate, EVEX-only (W1 0F 72 /4 ib).
6369 void Assembler::evpsraq(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
6370 assert(UseAVX > 2, "requires AVX512");
6371 assert ((VM_Version::supports_avx512vl() || vector_len == 2), "requires AVX512vl");
6372 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
6373 attributes.set_is_evex_instruction();
6374 int encode = vex_prefix_and_encode(xmm4->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes); // xmm4 supplies the /4 opcode-extension in the ModRM reg field; dst goes in vvvv
6375 emit_int8((unsigned char)0x72);
6376 emit_int8((unsigned char)(0xC0 | encode));
6377 emit_int8(shift & 0xFF);
6378 }
6379
// EVPSRAQ dst, src, xmm-shift — arithmetic right shift of qwords by count in XMM, EVEX-only (W1 0F E2 /r).
6380 void Assembler::evpsraq(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
6381 assert(UseAVX > 2, "requires AVX512");
6382 assert ((VM_Version::supports_avx512vl() || vector_len == 2), "requires AVX512vl");
6383 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
6384 attributes.set_is_evex_instruction();
6385 int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes); // src occupies the vvvv (nds) slot
6386 emit_int8((unsigned char)0xE2);
6387 emit_int8((unsigned char)(0xC0 | encode));
6388 }
6389
6390 // logical operations packed integers
// PAND xmm, xmm — bitwise AND of packed integers (66 0F DB /r).
6391 void Assembler::pand(XMMRegister dst, XMMRegister src) {
6392 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
6393 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
6394 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes); // dst doubles as first source
6395 emit_int8((unsigned char)0xDB);
6396 emit_int8((unsigned char)(0xC0 | encode));
6397 }
6398
// VPAND dst, nds, src — 3-operand bitwise AND (66 0F DB /r).
6399 void Assembler::vpand(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
6400 assert(UseAVX > 0, "requires some form of AVX");
6401 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
6402 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
6403 emit_int8((unsigned char)0xDB);
6404 emit_int8((unsigned char)(0xC0 | encode));
6405 }
6406
6407 void Assembler::vpand(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
6408 assert(UseAVX > 0, "requires some form of AVX");
|