< prev index next >

src/hotspot/cpu/x86/assembler_x86.cpp

Print this page
rev 50140 : Vector cast support


1745     break;
1746   default:
1747     assert(0, "Unsupported value for a sizeInBytes argument");
1748     break;
1749   }
1750   LP64_ONLY(prefix(crc, adr, p);)
1751   emit_int8((int8_t)0x0F);
1752   emit_int8(0x38);
1753   emit_int8((int8_t)(0xF0 | w));
1754   emit_operand(crc, adr);
1755 }
1756 
// CVTDQ2PD xmm, xmm (F3 0F E6 /r): convert 2 packed signed dwords to 2 packed doubles.
1757 void Assembler::cvtdq2pd(XMMRegister dst, XMMRegister src) {
1758   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1759   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
1760   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1761   emit_int8((unsigned char)0xE6);
1762   emit_int8((unsigned char)(0xC0 | encode)); // ModRM byte, register-direct form
1763 }
1764 
// CVTDQ2PS xmm, xmm (0F 5B /r): convert 4 packed signed dwords to 4 packed floats.
1765 void Assembler::cvtdq2ps(XMMRegister dst, XMMRegister src) {
1766   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1767   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
1768   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1769   emit_int8(0x5B);
1770   emit_int8((unsigned char)(0xC0 | encode));
1771 }
1772 
// CVTSD2SS xmm, xmm (F2 0F 5A /r): convert scalar double to scalar float.
// rex_w is reverted so the EVEX form matches the VEX/legacy encoding.
1773 void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
1774   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1775   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1776   attributes.set_rex_vex_w_reverted();
1777   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1778   emit_int8(0x5A);
1779   emit_int8((unsigned char)(0xC0 | encode));
1780 }
1781 
// CVTSD2SS xmm, m64: memory-operand form; EVEX tuple T1S with 64-bit input
// drives displacement compression for the address below.
1782 void Assembler::cvtsd2ss(XMMRegister dst, Address src) {
1783   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1784   InstructionMark im(this);
1785   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1786   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
1787   attributes.set_rex_vex_w_reverted();
1788   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1789   emit_int8(0x5A);
1790   emit_operand(dst, src);
1791 }
1792 


1867   int encode = simd_prefix_and_encode(as_XMMRegister(dst->encoding()), xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1868   emit_int8(0x2C);
1869   emit_int8((unsigned char)(0xC0 | encode));
1870 }
1871 
// CVTTPD2DQ xmm, xmm (66 0F E6 /r): truncating convert packed doubles to packed dwords.
// NOTE(review): on AVX-512 without VL the width is forced to 512-bit so the
// EVEX encoding is legal — presumably intentional; confirm against avx512novl docs.
1872 void Assembler::cvttpd2dq(XMMRegister dst, XMMRegister src) {
1873   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1874   int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_128bit;
1875   InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
1876   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
1877   emit_int8((unsigned char)0xE6);
1878   emit_int8((unsigned char)(0xC0 | encode)); // ModRM byte, register-direct form
1879 }
1880 
// VCVTPS2PD (VEX.0F 5A /r): widen packed floats to packed doubles; VEX-only
// (assert restricts to 128/256-bit), see evcvtps2pd below for the EVEX form.
1881 void Assembler::vcvtps2pd(XMMRegister dst, XMMRegister src, int vector_len) {
1882   assert(UseAVX > 0 && (vector_len == AVX_128bit || vector_len == AVX_256bit), "");
1883   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1884   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1885   emit_int8((unsigned char)0x5A);
1886   emit_int8((unsigned char)(0xC0 | encode));
1887 
1888 }
1889 
// VCVTPS2PD, EVEX-encoded form (AVX-512): same opcode, evex bit forced on.
1890 void Assembler::evcvtps2pd(XMMRegister dst, XMMRegister src, int vector_len) {
1891   assert(UseAVX > 2, "");
1892   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
1893   attributes.set_is_evex_instruction();
1894   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1895   emit_int8((unsigned char)0x5A);
1896   emit_int8((unsigned char)(0xC0 | encode));
1897 }
1898 
// PABSB xmm, xmm (66 0F 38 1C /r): packed absolute value of bytes (SSSE3).
1899 void Assembler::pabsb(XMMRegister dst, XMMRegister src) {
1900   assert(VM_Version::supports_ssse3(), "");
1901   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1902   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
1903   emit_int8(0x1C);
1904   emit_int8((unsigned char)(0xC0 | encode));
1905 }
1906 
// PABSW xmm, xmm (66 0F 38 1D /r): packed absolute value of words (SSSE3).
1907 void Assembler::pabsw(XMMRegister dst, XMMRegister src) {
1908   assert(VM_Version::supports_ssse3(), "");
1909   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1910   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
1911   emit_int8(0x1D);
1912   emit_int8((unsigned char)(0xC0 | encode));
1913 }
1914 

























































































// PABSD xmm, xmm (66 0F 38 1E /r): packed absolute value of dwords (SSSE3).
1915 void Assembler::pabsd(XMMRegister dst, XMMRegister src) {
1916   assert(VM_Version::supports_ssse3(), "");
1917   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1918   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
1919   emit_int8(0x1E);
1920   emit_int8((unsigned char)(0xC0 | encode)); // ModRM byte, register-direct form
1921 }
1922 
// VPABSB (VEX.66.0F38 1C /r): AVX form of pabsb; assert restricts to 128/256-bit.
1923 void Assembler::vpabsb(XMMRegister dst, XMMRegister src, int vector_len) {
1924   assert(UseAVX > 0 && (vector_len == AVX_128bit || vector_len == AVX_256bit), "");
1925   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1926   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
1927   emit_int8((unsigned char)0x1C);
1928   emit_int8((unsigned char)(0xC0 | encode));
1929 }
1930 
1931 void Assembler::vpabsw(XMMRegister dst, XMMRegister src, int vector_len) {
1932   assert(UseAVX > 0 && (vector_len == AVX_128bit || vector_len == AVX_256bit), "");
1933   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1934   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);


3620 }
3621 
3622 void Assembler::vperm2i128(XMMRegister dst,  XMMRegister nds, XMMRegister src, int imm8) {
3623   assert(VM_Version::supports_avx2(), "");
3624   InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3625   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3626   emit_int8(0x46);
3627   emit_int8(0xC0 | encode);
3628   emit_int8(imm8);
3629 }
3630 
3631 void Assembler::vperm2f128(XMMRegister dst, XMMRegister nds, XMMRegister src, int imm8) {
3632   assert(VM_Version::supports_avx(), "");
3633   InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3634   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3635   emit_int8(0x06);
3636   emit_int8(0xC0 | encode);
3637   emit_int8(imm8);
3638 }
3639 

















3640 
// PAUSE (F3 90): spin-loop hint.
3641 void Assembler::pause() {
3642   emit_int8((unsigned char)0xF3);
3643   emit_int8((unsigned char)0x90);
3644 }
3645 
// UD2 (0F 0B): guaranteed-undefined instruction, raises #UD.
3646 void Assembler::ud2() {
3647   emit_int8(0x0F);
3648   emit_int8(0x0B);
3649 }
3650 
// PCMPESTRI xmm, m128, imm8 (66 0F 3A 61 /r ib): packed compare with
// explicit lengths, result index in ECX (SSE4.2).
3651 void Assembler::pcmpestri(XMMRegister dst, Address src, int imm8) {
3652   assert(VM_Version::supports_sse4_2(), "");
3653   InstructionMark im(this);
3654   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3655   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3656   emit_int8(0x61);
3657   emit_operand(dst, src);
3658   emit_int8(imm8);
3659 }


4164   emit_int8((unsigned char)(0xC0 | encode));
4165 }
4166 
// VPMOVZXBD (66 0F 38 31 /r): zero-extend packed bytes to dwords.
// NOTE(review): for > 128-bit this gates on AVX2 only, even though 512-bit
// would need EVEX — confirm callers never pass AVX_512bit here.
4167 void Assembler::vpmovzxbd(XMMRegister dst, XMMRegister src, int vector_len) {
4168   assert(vector_len > AVX_128bit ? VM_Version::supports_avx2() : VM_Version::supports_avx(), "");
4169   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
4170   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4171   emit_int8(0x31);
4172   emit_int8((unsigned char)(0xC0 | encode)); // ModRM byte, register-direct form
4173 }
4174 
// VPMOVZXBQ (66 0F 38 32 /r): zero-extend packed bytes to qwords.
4175 void Assembler::vpmovzxbq(XMMRegister dst, XMMRegister src, int vector_len) {
4176   assert(vector_len > AVX_128bit ? VM_Version::supports_avx2() : VM_Version::supports_avx(), "");
4177   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
4178   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4179   emit_int8(0x32);
4180   emit_int8((unsigned char)(0xC0 | encode));
4181 }
4182 
// VPMOVSXBD (66 0F 38 21 /r): sign-extend packed bytes to dwords.
4183 void Assembler::vpmovsxbd(XMMRegister dst, XMMRegister src, int vector_len) {
4184   assert(vector_len > AVX_128bit ? VM_Version::supports_avx2() : VM_Version::supports_avx(), "");
4185   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
4186   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4187   emit_int8(0x21);
4188   emit_int8((unsigned char)(0xC0 | encode));
4189 }
4190 
// VPMOVSXBQ (66 0F 38 22 /r): sign-extend packed bytes to qwords.
4191 void Assembler::vpmovsxbq(XMMRegister dst, XMMRegister src, int vector_len) {
4192   assert(vector_len > AVX_128bit ? VM_Version::supports_avx2() : VM_Version::supports_avx(), "");
4193   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
4194   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4195   emit_int8(0x22);
4196   emit_int8((unsigned char)(0xC0 | encode));
4197 }
4198 
// VPMOVSXBW (66 0F 38 20 /r): sign-extend packed bytes to words.
4199 void Assembler::vpmovsxbw(XMMRegister dst, XMMRegister src, int vector_len) {
4200   assert(vector_len > AVX_128bit ? VM_Version::supports_avx2() : VM_Version::supports_avx(), "");
4201   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
4202   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4203   emit_int8(0x20);
4204   emit_int8((unsigned char)(0xC0 | encode));
4205 }
4206 
// VPMOVWB m, zmm/ymm/xmm (EVEX.F3.0F38 30 /r): narrow packed words to bytes
// and store to memory; HVM tuple (destination is half the source width).
4207 void Assembler::evpmovwb(Address dst, XMMRegister src, int vector_len) {
4208   assert(VM_Version::supports_avx512vlbw(), "");
4209   assert(src != xnoreg, "sanity");
4210   InstructionMark im(this);
4211   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
4212   attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit);
4213   attributes.set_is_evex_instruction();
4214   vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F_38, &attributes);
4215   emit_int8(0x30);
4216   emit_operand(src, dst);
4217 }
4218 
4219 void Assembler::evpmovwb(Address dst, KRegister mask, XMMRegister src, int vector_len) {
4220   assert(is_vector_masking(), "");
4221   assert(VM_Version::supports_avx512vlbw(), "");
4222   assert(src != xnoreg, "sanity");
4223   InstructionMark im(this);




1745     break;
1746   default:
1747     assert(0, "Unsupported value for a sizeInBytes argument");
1748     break;
1749   }
1750   LP64_ONLY(prefix(crc, adr, p);)
1751   emit_int8((int8_t)0x0F);
1752   emit_int8(0x38);
1753   emit_int8((int8_t)(0xF0 | w));
1754   emit_operand(crc, adr);
1755 }
1756 
// CVTDQ2PD xmm, xmm (F3 0F E6 /r): convert 2 packed signed dwords to 2 packed doubles.
1757 void Assembler::cvtdq2pd(XMMRegister dst, XMMRegister src) {
1758   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1759   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
1760   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1761   emit_int8((unsigned char)0xE6);
1762   emit_int8((unsigned char)(0xC0 | encode)); // ModRM byte, register-direct form
1763 }
1764 
// VCVTDQ2PD (VEX/EVEX F3 0F E6 /r): vector form, widens dwords to doubles.
// NOTE(review): legacy_mode is true yet the assert admits 512-bit via EVEX —
// confirm the EVEX encoding is actually reachable with this attribute setting.
1765 void Assembler::vcvtdq2pd(XMMRegister dst, XMMRegister src, int vector_len) {
1766   assert(vector_len <= AVX_256bit ? VM_Version::supports_avx() : VM_Version::supports_evex(), "");
1767   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
1768   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1769   emit_int8((unsigned char)0xE6);
1770   emit_int8((unsigned char)(0xC0 | encode));
1771 }
1772 
// CVTDQ2PS xmm, xmm (0F 5B /r): convert 4 packed signed dwords to 4 packed floats.
1773 void Assembler::cvtdq2ps(XMMRegister dst, XMMRegister src) {
1774   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1775   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
1776   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1777   emit_int8(0x5B);
1778   emit_int8((unsigned char)(0xC0 | encode));
1779 }
1780 
// VCVTDQ2PS (VEX/EVEX 0F 5B /r): vector form of cvtdq2ps.
1781 void Assembler::vcvtdq2ps(XMMRegister dst, XMMRegister src, int vector_len) {
1782   assert(vector_len <= AVX_256bit ? VM_Version::supports_avx() : VM_Version::supports_evex(), "");
1783   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
1784   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1785   emit_int8(0x5B);
1786   emit_int8((unsigned char)(0xC0 | encode));
1787 }
1788 
// CVTSD2SS xmm, xmm (F2 0F 5A /r): convert scalar double to scalar float.
// rex_w is reverted so the EVEX form matches the VEX/legacy encoding.
1789 void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
1790   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1791   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1792   attributes.set_rex_vex_w_reverted();
1793   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1794   emit_int8(0x5A);
1795   emit_int8((unsigned char)(0xC0 | encode));
1796 }
1797 
// CVTSD2SS xmm, m64: memory-operand form; EVEX tuple T1S with 64-bit input
// drives displacement compression for the address below.
1798 void Assembler::cvtsd2ss(XMMRegister dst, Address src) {
1799   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1800   InstructionMark im(this);
1801   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1802   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
1803   attributes.set_rex_vex_w_reverted();
1804   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1805   emit_int8(0x5A);
1806   emit_operand(dst, src);
1807 }
1808 


1883   int encode = simd_prefix_and_encode(as_XMMRegister(dst->encoding()), xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1884   emit_int8(0x2C);
1885   emit_int8((unsigned char)(0xC0 | encode));
1886 }
1887 
// CVTTPD2DQ xmm, xmm (66 0F E6 /r): truncating convert packed doubles to dwords.
// NOTE(review): width is forced to 512-bit on AVX-512-without-VL so the EVEX
// encoding is legal — presumably intentional; confirm against avx512novl docs.
1888 void Assembler::cvttpd2dq(XMMRegister dst, XMMRegister src) {
1889   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1890   int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_128bit;
1891   InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
1892   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
1893   emit_int8((unsigned char)0xE6);
1894   emit_int8((unsigned char)(0xC0 | encode)); // ModRM byte, register-direct form
1895 }
1896 
// VCVTPS2PD (VEX.0F 5A /r): widen packed floats to doubles; VEX-only
// (128/256-bit), see evcvtps2pd below for the EVEX form.
1897 void Assembler::vcvtps2pd(XMMRegister dst, XMMRegister src, int vector_len) {
1898   assert(UseAVX > 0 && (vector_len == AVX_128bit || vector_len == AVX_256bit), "");
1899   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1900   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1901   emit_int8((unsigned char)0x5A);
1902   emit_int8((unsigned char)(0xC0 | encode));

1903 }
1904 
// VCVTPS2PD, EVEX-encoded form (AVX-512).
1905 void Assembler::evcvtps2pd(XMMRegister dst, XMMRegister src, int vector_len) {
1906   assert(UseAVX > 2, "");
1907   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
1908   attributes.set_is_evex_instruction();
1909   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1910   emit_int8((unsigned char)0x5A);
1911   emit_int8((unsigned char)(0xC0 | encode));
1912 }
1913 
// PABSB xmm, xmm (66 0F 38 1C /r): packed absolute value of bytes (SSSE3).
1914 void Assembler::pabsb(XMMRegister dst, XMMRegister src) {
1915   assert(VM_Version::supports_ssse3(), "");
1916   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1917   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
1918   emit_int8(0x1C);
1919   emit_int8((unsigned char)(0xC0 | encode));
1920 }
1921 
// PABSW xmm, xmm (66 0F 38 1D /r): packed absolute value of words (SSSE3).
1922 void Assembler::pabsw(XMMRegister dst, XMMRegister src) {
1923   assert(VM_Version::supports_ssse3(), "");
1924   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1925   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
1926   emit_int8(0x1D);
1927   emit_int8((unsigned char)(0xC0 | encode));
1928 }
1929 
// VCVTPD2PS (VEX.66.0F 5A /r): narrow packed doubles to floats; VEX-only
// (128/256-bit), see evcvtpd2ps below for the EVEX form.
1930 void Assembler::vcvtpd2ps(XMMRegister dst, XMMRegister src, int vector_len) {
1931   assert(UseAVX > 0 && (vector_len == AVX_128bit || vector_len == AVX_256bit), "");
1932   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1933   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
1934   emit_int8((unsigned char)0x5A);
1935   emit_int8((unsigned char)(0xC0 | encode));
1936 }
1937 
// VCVTPD2PS, EVEX-encoded form: rex_w true (W1) selects the 64-bit-element source.
1938 void Assembler::evcvtpd2ps(XMMRegister dst, XMMRegister src, int vector_len) {
1939   assert(UseAVX > 2, "");
1940   InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
1941   attributes.set_is_evex_instruction();
1942   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
1943   emit_int8((unsigned char)0x5A);
1944   emit_int8((unsigned char)(0xC0 | encode));
1945 }
1946 
// VCVTQQ2PS (EVEX.0F.W1 5B /r): convert packed signed qwords to floats (AVX-512DQ).
1947 void Assembler::evcvtqq2ps(XMMRegister dst, XMMRegister src, int vector_len) {
1948   assert(UseAVX > 2 && VM_Version::supports_avx512dq(), "");
1949   InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
1950   attributes.set_is_evex_instruction();
1951   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1952   emit_int8((unsigned char)0x5B);
1953   emit_int8((unsigned char)(0xC0 | encode));
1954 }
1955 
// VCVTQQ2PD (EVEX.F3.0F.W1 E6 /r): convert packed signed qwords to doubles (AVX-512DQ).
1956 void Assembler::evcvtqq2pd(XMMRegister dst, XMMRegister src, int vector_len) {
1957   assert(UseAVX > 2 && VM_Version::supports_avx512dq(), "");
1958   InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
1959   attributes.set_is_evex_instruction();
1960   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1961   emit_int8((unsigned char)0xE6);
1962   emit_int8((unsigned char)(0xC0 | encode)); // ModRM byte, register-direct form
1963 }
1964 
// VPMOVWB (EVEX.F3.0F38 30 /r): narrow packed words to bytes, register form.
1965 void Assembler::evpmovwb(XMMRegister dst, XMMRegister src, int vector_len) {
1966   assert(UseAVX > 2, "");
1967   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
1968   attributes.set_is_evex_instruction();
1969   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F_38, &attributes);
1970   emit_int8((unsigned char)0x30);
1971   emit_int8((unsigned char)(0xC0 | encode));
1972 }
1973 
// VPMOVDW (EVEX.F3.0F38 33 /r): narrow packed dwords to words.
1974 void Assembler::evpmovdw(XMMRegister dst, XMMRegister src, int vector_len) {
1975   assert(UseAVX > 2, "");
1976   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
1977   attributes.set_is_evex_instruction();
1978   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F_38, &attributes);
1979   emit_int8((unsigned char)0x33);
1980   emit_int8((unsigned char)(0xC0 | encode));
1981 }
1982 
// VPMOVDB (EVEX.F3.0F38 31 /r): narrow packed dwords to bytes.
1983 void Assembler::evpmovdb(XMMRegister dst, XMMRegister src, int vector_len) {
1984   assert(UseAVX > 2, "");
1985   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
1986   attributes.set_is_evex_instruction();
1987   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F_38, &attributes);
1988   emit_int8((unsigned char)0x31);
1989   emit_int8((unsigned char)(0xC0 | encode));
1990 }
1991 
// VPMOVQD (EVEX.F3.0F38 35 /r): narrow packed qwords to dwords.
1992 void Assembler::evpmovqd(XMMRegister dst, XMMRegister src, int vector_len) {
1993   assert(UseAVX > 2, "");
1994   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
1995   attributes.set_is_evex_instruction();
1996   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F_38, &attributes);
1997   emit_int8((unsigned char)0x35);
1998   emit_int8((unsigned char)(0xC0 | encode));
1999 }
2000 
// VPMOVQB (EVEX.F3.0F38 32 /r): narrow packed qwords to bytes.
2001 void Assembler::evpmovqb(XMMRegister dst, XMMRegister src, int vector_len) {
2002   assert(UseAVX > 2, "");
2003   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
2004   attributes.set_is_evex_instruction();
2005   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F_38, &attributes);
2006   emit_int8((unsigned char)0x32);
2007   emit_int8((unsigned char)(0xC0 | encode));
2008 }
2009 
// VPMOVQW (EVEX.F3.0F38 34 /r): narrow packed qwords to words.
2010 void Assembler::evpmovqw(XMMRegister dst, XMMRegister src, int vector_len) {
2011   assert(UseAVX > 2, "");
2012   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
2013   attributes.set_is_evex_instruction();
2014   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F_38, &attributes);
2015   emit_int8((unsigned char)0x34);
2016   emit_int8((unsigned char)(0xC0 | encode));
2017 }
2018 
// PABSD xmm, xmm (66 0F 38 1E /r): packed absolute value of dwords (SSSE3).
2019 void Assembler::pabsd(XMMRegister dst, XMMRegister src) {
2020   assert(VM_Version::supports_ssse3(), "");
2021   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
2022   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
2023   emit_int8(0x1E);
2024   emit_int8((unsigned char)(0xC0 | encode));
2025 }
2026 
// VPABSB (VEX.66.0F38 1C /r): AVX form of pabsb; 128/256-bit only.
2027 void Assembler::vpabsb(XMMRegister dst, XMMRegister src, int vector_len) {
2028   assert(UseAVX > 0 && (vector_len == AVX_128bit || vector_len == AVX_256bit), "");
2029   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
2030   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
2031   emit_int8((unsigned char)0x1C);
2032   emit_int8((unsigned char)(0xC0 | encode));
2033 }
2034 
2035 void Assembler::vpabsw(XMMRegister dst, XMMRegister src, int vector_len) {
2036   assert(UseAVX > 0 && (vector_len == AVX_128bit || vector_len == AVX_256bit), "");
2037   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
2038   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);


3724 }
3725 
3726 void Assembler::vperm2i128(XMMRegister dst,  XMMRegister nds, XMMRegister src, int imm8) {
3727   assert(VM_Version::supports_avx2(), "");
3728   InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3729   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3730   emit_int8(0x46);
3731   emit_int8(0xC0 | encode);
3732   emit_int8(imm8);
3733 }
3734 
3735 void Assembler::vperm2f128(XMMRegister dst, XMMRegister nds, XMMRegister src, int imm8) {
3736   assert(VM_Version::supports_avx(), "");
3737   InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3738   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3739   emit_int8(0x06);
3740   emit_int8(0xC0 | encode);
3741   emit_int8(imm8);
3742 }
3743 
3744 void Assembler::vpermilps(XMMRegister dst, XMMRegister src, int imm8, int vector_len) {
3745   assert(VM_Version::supports_avx(), "");
3746   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
3747   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3748   emit_int8(0x04);
3749   emit_int8(0xC0 | encode);
3750   emit_int8(imm8);
3751 }
3752 
3753 void Assembler::vpermpd(XMMRegister dst, XMMRegister src, int imm8, int vector_len) {
3754   assert(VM_Version::supports_avx2(), "");
3755   InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
3756   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3757   emit_int8(0x01);
3758   emit_int8(0xC0 | encode);
3759   emit_int8(imm8);
3760 }
3761 
// PAUSE (F3 90): spin-loop hint.
3762 void Assembler::pause() {
3763   emit_int8((unsigned char)0xF3);
3764   emit_int8((unsigned char)0x90);
3765 }
3766 
// UD2 (0F 0B): guaranteed-undefined instruction, raises #UD.
3767 void Assembler::ud2() {
3768   emit_int8(0x0F);
3769   emit_int8(0x0B);
3770 }
3771 
// PCMPESTRI xmm, m128, imm8 (66 0F 3A 61 /r ib): packed compare with
// explicit lengths, result index in ECX (SSE4.2).
3772 void Assembler::pcmpestri(XMMRegister dst, Address src, int imm8) {
3773   assert(VM_Version::supports_sse4_2(), "");
3774   InstructionMark im(this);
3775   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3776   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3777   emit_int8(0x61);
3778   emit_operand(dst, src);
3779   emit_int8(imm8);
3780 }


4285   emit_int8((unsigned char)(0xC0 | encode));
4286 }
4287 
// VPMOVZXBD (66 0F 38 31 /r): zero-extend packed bytes to dwords.
// NOTE(review): > 128-bit gates on AVX2 only here, unlike the vpmovsx* group
// below which also admits EVEX for 512-bit — confirm 512-bit is never passed.
4288 void Assembler::vpmovzxbd(XMMRegister dst, XMMRegister src, int vector_len) {
4289   assert(vector_len > AVX_128bit ? VM_Version::supports_avx2() : VM_Version::supports_avx(), "");
4290   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
4291   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4292   emit_int8(0x31);
4293   emit_int8((unsigned char)(0xC0 | encode)); // ModRM byte, register-direct form
4294 }
4295 
// VPMOVZXBQ (66 0F 38 32 /r): zero-extend packed bytes to qwords.
4296 void Assembler::vpmovzxbq(XMMRegister dst, XMMRegister src, int vector_len) {
4297   assert(vector_len > AVX_128bit ? VM_Version::supports_avx2() : VM_Version::supports_avx(), "");
4298   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
4299   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4300   emit_int8(0x32);
4301   emit_int8((unsigned char)(0xC0 | encode));
4302 }
4303 
// VPMOVSXBD (66 0F 38 21 /r): sign-extend packed bytes to dwords;
// 128-bit needs AVX, 256-bit AVX2, 512-bit EVEX.
4304 void Assembler::vpmovsxbd(XMMRegister dst, XMMRegister src, int vector_len) {
4305   assert(vector_len == AVX_128bit ? VM_Version::supports_avx() :
4306          vector_len == AVX_256bit ? VM_Version::supports_avx2() :
4307              VM_Version::supports_evex(), "");
4308   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
4309   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4310   emit_int8(0x21);
4311   emit_int8((unsigned char)(0xC0 | encode));
4312 }
4313 
// VPMOVSXBQ (66 0F 38 22 /r): sign-extend packed bytes to qwords.
4314 void Assembler::vpmovsxbq(XMMRegister dst, XMMRegister src, int vector_len) {
4315   assert(vector_len == AVX_128bit ? VM_Version::supports_avx() :
4316          vector_len == AVX_256bit ? VM_Version::supports_avx2() :
4317              VM_Version::supports_evex(), "");
4318   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
4319   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4320   emit_int8(0x22);
4321   emit_int8((unsigned char)(0xC0 | encode));
4322 }
4323 
// VPMOVSXBW (66 0F 38 20 /r): sign-extend packed bytes to words.
4324 void Assembler::vpmovsxbw(XMMRegister dst, XMMRegister src, int vector_len) {
4325   assert(vector_len == AVX_128bit ? VM_Version::supports_avx() :
4326          vector_len == AVX_256bit ? VM_Version::supports_avx2() :
4327              VM_Version::supports_evex(), "");
4328   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
4329   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4330   emit_int8(0x20);
4331   emit_int8((unsigned char)(0xC0 | encode));
4332 }
4333 
// VPMOVSXWD (66 0F 38 23 /r): sign-extend packed words to dwords.
4334 void Assembler::vpmovsxwd(XMMRegister dst, XMMRegister src, int vector_len) {
4335   assert(vector_len == AVX_128bit ? VM_Version::supports_avx() :
4336          vector_len == AVX_256bit ? VM_Version::supports_avx2() :
4337              VM_Version::supports_evex(), "");
4338   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
4339   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4340   emit_int8(0x23);
4341   emit_int8((unsigned char)(0xC0 | encode));
4342 }
4343 
// VPMOVSXWQ (66 0F 38 24 /r): sign-extend packed words to qwords.
4344 void Assembler::vpmovsxwq(XMMRegister dst, XMMRegister src, int vector_len) {
4345   assert(vector_len == AVX_128bit ? VM_Version::supports_avx() :
4346          vector_len == AVX_256bit ? VM_Version::supports_avx2() :
4347              VM_Version::supports_evex(), "");
4348   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
4349   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4350   emit_int8(0x24);
4351   emit_int8((unsigned char)(0xC0 | encode));
4352 }
4353 
// VPMOVSXDQ (66 0F 38 25 /r): sign-extend packed dwords to qwords.
4354 void Assembler::vpmovsxdq(XMMRegister dst, XMMRegister src, int vector_len) {
4355   assert(vector_len == AVX_128bit ? VM_Version::supports_avx() :
4356          vector_len == AVX_256bit ? VM_Version::supports_avx2() :
4357              VM_Version::supports_evex(), "");
4358   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
4359   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4360   emit_int8(0x25);
4361   emit_int8((unsigned char)(0xC0 | encode));
4362 }
4363 
// VPMOVWB m, zmm/ymm/xmm (EVEX.F3.0F38 30 /r): narrow packed words to bytes
// and store to memory; HVM tuple (destination is half the source width).
4364 void Assembler::evpmovwb(Address dst, XMMRegister src, int vector_len) {
4365   assert(VM_Version::supports_avx512vlbw(), "");
4366   assert(src != xnoreg, "sanity");
4367   InstructionMark im(this);
4368   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
4369   attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit);
4370   attributes.set_is_evex_instruction();
4371   vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F_38, &attributes);
4372   emit_int8(0x30);
4373   emit_operand(src, dst);
4374 }
4375 
4376 void Assembler::evpmovwb(Address dst, KRegister mask, XMMRegister src, int vector_len) {
4377   assert(is_vector_masking(), "");
4378   assert(VM_Version::supports_avx512vlbw(), "");
4379   assert(src != xnoreg, "sanity");
4380   InstructionMark im(this);


< prev index next >