src/cpu/x86/vm/assembler_x86.cpp

1223   emit_int8(0x1F);
1224   emit_int8((unsigned char)0x80);
1225                    // emit_rm(cbuf, 0x2, EAX_enc, EAX_enc);
1226   emit_int32(0);   // 32-bit offset (4 bytes)
1227 }
1228 
1229 void Assembler::addr_nop_8() {
1230   assert(UseAddressNop, "no CPU support");
1231   // 8 bytes: NOP DWORD PTR [EAX+EAX*0+0] 32-bit offset
1232   emit_int8(0x0F);
1233   emit_int8(0x1F);
1234   emit_int8((unsigned char)0x84);
1235                    // emit_rm(cbuf, 0x2, EAX_enc, 0x4);
1236   emit_int8(0x00); // emit_rm(cbuf, 0x0, EAX_enc, EAX_enc);
1237   emit_int32(0);   // 32-bit offset (4 bytes)
1238 }
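// For reference, addr_nop_8() emits the byte sequence
//   0F 1F 84 00 00 00 00 00    nopl 0x0(%eax,%eax,1)
// i.e. a single 8-byte instruction that architecturally does nothing.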
1239 
1240 void Assembler::addsd(XMMRegister dst, XMMRegister src) {
1241   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1242   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1243   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1244   emit_int8(0x58);
1245   emit_int8((unsigned char)(0xC0 | encode));
1246 }
1247 
1248 void Assembler::addsd(XMMRegister dst, Address src) {
1249   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1250   InstructionMark im(this);
1251   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1252   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
1253   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1254   emit_int8(0x58);
1255   emit_operand(dst, src);
1256 }
1257 
1258 void Assembler::addss(XMMRegister dst, XMMRegister src) {
1259   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1260   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1261   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1262   emit_int8(0x58);
1263   emit_int8((unsigned char)(0xC0 | encode));
1264 }
1265 
1266 void Assembler::addss(XMMRegister dst, Address src) {
1267   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1268   InstructionMark im(this);
1269   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1270   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
1271   simd_prefix(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1272   emit_int8(0x58);


1582 }
1583 
1584 // The 8-bit cmpxchg compares the value at adr with the contents of rax;
1585 // if they are equal, reg is stored into adr, otherwise the value at adr is loaded into rax.
1586 // The ZF is set if the compared values were equal, and cleared otherwise.
1587 void Assembler::cmpxchgb(Register reg, Address adr) { // cmpxchg
1588   InstructionMark im(this);
1589   prefix(adr, reg, true);
1590   emit_int8(0x0F);
1591   emit_int8((unsigned char)0xB0);
1592   emit_operand(reg, adr);
1593 }
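// Usage sketch (hypothetical caller, not part of this file): a byte-sized CAS
// built from this primitive, assuming the expected value is already in rax,
// the replacement byte in new_val, and obj holding the target address:
//
//   __ lock();                               // make the exchange atomic
//   __ cmpxchgb(new_val, Address(obj, 0));   // ZF set on success
//   __ jcc(Assembler::notEqual, retry_label);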
1594 
1595 void Assembler::comisd(XMMRegister dst, Address src) {
1596   // NOTE: dbx seems to decode this as comiss even though the
1597   // 0x66 is there. Strangely, ucomisd comes out correct.
1598   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1599   InstructionMark im(this);
1600   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1601   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
1602   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
1603   emit_int8(0x2F);
1604   emit_operand(dst, src);
1605 }
1606 
1607 void Assembler::comisd(XMMRegister dst, XMMRegister src) {
1608   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1609   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1610   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
1611   emit_int8(0x2F);
1612   emit_int8((unsigned char)(0xC0 | encode));
1613 }
1614 
1615 void Assembler::comiss(XMMRegister dst, Address src) {
1616   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1617   InstructionMark im(this);
1618   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1619   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
1620   simd_prefix(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1621   emit_int8(0x2F);
1622   emit_operand(dst, src);
1623 }
1624 
1625 void Assembler::comiss(XMMRegister dst, XMMRegister src) {
1626   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1627   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1628   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1629   emit_int8(0x2F);


1716 
1717 void Assembler::cvtdq2pd(XMMRegister dst, XMMRegister src) {
1718   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1719   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
1720   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1721   emit_int8((unsigned char)0xE6);
1722   emit_int8((unsigned char)(0xC0 | encode));
1723 }
1724 
1725 void Assembler::cvtdq2ps(XMMRegister dst, XMMRegister src) {
1726   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1727   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
1728   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1729   emit_int8(0x5B);
1730   emit_int8((unsigned char)(0xC0 | encode));
1731 }
1732 
1733 void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
1734   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1735   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1736   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1737   emit_int8(0x5A);
1738   emit_int8((unsigned char)(0xC0 | encode));
1739 }
1740 
1741 void Assembler::cvtsd2ss(XMMRegister dst, Address src) {
1742   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1743   InstructionMark im(this);
1744   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1745   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
1746   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1747   emit_int8(0x5A);
1748   emit_operand(dst, src);
1749 }
1750 
1751 void Assembler::cvtsi2sdl(XMMRegister dst, Register src) {
1752   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1753   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1754   int encode = simd_prefix_and_encode(dst, dst, as_XMMRegister(src->encoding()), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1755   emit_int8(0x2A);
1756   emit_int8((unsigned char)(0xC0 | encode));
1757 }
1758 
1759 void Assembler::cvtsi2sdl(XMMRegister dst, Address src) {
1760   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1761   InstructionMark im(this);
1762   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1763   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
1764   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1765   emit_int8(0x2A);


1823   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1824   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1825   int encode = simd_prefix_and_encode(as_XMMRegister(dst->encoding()), xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1826   emit_int8(0x2C);
1827   emit_int8((unsigned char)(0xC0 | encode));
1828 }
1829 
1830 void Assembler::decl(Address dst) {
1831   // Don't use it directly. Use MacroAssembler::decrement() instead.
1832   InstructionMark im(this);
1833   prefix(dst);
1834   emit_int8((unsigned char)0xFF);
1835   emit_operand(rcx, dst);
1836 }
1837 
1838 void Assembler::divsd(XMMRegister dst, Address src) {
1839   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1840   InstructionMark im(this);
1841   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1842   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
1843   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1844   emit_int8(0x5E);
1845   emit_operand(dst, src);
1846 }
1847 
1848 void Assembler::divsd(XMMRegister dst, XMMRegister src) {
1849   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1850   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1851   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1852   emit_int8(0x5E);
1853   emit_int8((unsigned char)(0xC0 | encode));
1854 }
1855 
1856 void Assembler::divss(XMMRegister dst, Address src) {
1857   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1858   InstructionMark im(this);
1859   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1860   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
1861   simd_prefix(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1862   emit_int8(0x5E);
1863   emit_operand(dst, src);
1864 }
1865 
1866 void Assembler::divss(XMMRegister dst, XMMRegister src) {
1867   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1868   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1869   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1870   emit_int8(0x5E);


2105   emit_int8((unsigned char)0xBD);
2106   emit_int8((unsigned char)(0xC0 | encode));
2107 }
2108 
2109 // Emit mfence instruction
2110 void Assembler::mfence() {
2111   NOT_LP64(assert(VM_Version::supports_sse2(), "unsupported");)
2112   emit_int8(0x0F);
2113   emit_int8((unsigned char)0xAE);
2114   emit_int8((unsigned char)0xF0);
2115 }
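// Usage sketch: mfence is the full StoreLoad barrier, ordering a store
// before a subsequent load, e.g.
//   __ movl(Address(rdx, 0), rax);   // store
//   __ mfence();                     // drain the store buffer
//   __ movl(rcx, Address(rsi, 0));   // load, now ordered after the store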
2116 
2117 void Assembler::mov(Register dst, Register src) {
2118   LP64_ONLY(movq(dst, src)) NOT_LP64(movl(dst, src));
2119 }
2120 
2121 void Assembler::movapd(XMMRegister dst, XMMRegister src) {
2122   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
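  // Without AVX512VL, EVEX encodings exist only at 512-bit width, so pick
  // AVX_512bit on such targets; otherwise the plain 128-bit form is used.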
2123   int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_128bit;
2124   InstructionAttr attributes(vector_len, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2125   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2126   emit_int8(0x28);
2127   emit_int8((unsigned char)(0xC0 | encode));
2128 }
2129 
2130 void Assembler::movaps(XMMRegister dst, XMMRegister src) {
2131   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2132   int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_128bit;
2133   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2134   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2135   emit_int8(0x28);
2136   emit_int8((unsigned char)(0xC0 | encode));
2137 }
2138 
2139 void Assembler::movlhps(XMMRegister dst, XMMRegister src) {
2140   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2141   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
2142   int encode = simd_prefix_and_encode(dst, src, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2143   emit_int8(0x16);
2144   emit_int8((unsigned char)(0xC0 | encode));
2145 }
2146 
2147 void Assembler::movb(Register dst, Address src) {
2148   NOT_LP64(assert(dst->has_byte_register(), "must have byte register"));
2149   InstructionMark im(this);
2150   prefix(src, dst, true);
2151   emit_int8((unsigned char)0x8A);
2152   emit_operand(dst, src);
2153 }
2154 
2155 void Assembler::movddup(XMMRegister dst, XMMRegister src) {
2156   NOT_LP64(assert(VM_Version::supports_sse3(), ""));
2157   int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_128bit;
2158   InstructionAttr attributes(vector_len, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2159   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2160   emit_int8(0x12);
2161   emit_int8((unsigned char)(0xC0 | encode));
2162 }
2163 
2164 void Assembler::kmovbl(KRegister dst, Register src) {
2165   assert(VM_Version::supports_avx512dq(), "");
2166   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2167   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2168   emit_int8((unsigned char)0x92);
2169   emit_int8((unsigned char)(0xC0 | encode));
2170 }
2171 
2172 void Assembler::kmovbl(Register dst, KRegister src) {
2173   assert(VM_Version::supports_avx512dq(), "");
2174   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2175   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2176   emit_int8((unsigned char)0x93);
2177   emit_int8((unsigned char)(0xC0 | encode));
2178 }
2179 
2180 void Assembler::kmovwl(KRegister dst, Register src) {
2181   assert(VM_Version::supports_evex(), "");
2182   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2183   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2184   emit_int8((unsigned char)0x92);
2185   emit_int8((unsigned char)(0xC0 | encode));
2186 }
2187 
2188 void Assembler::kmovwl(Register dst, KRegister src) {
2189   assert(VM_Version::supports_evex(), "");
2190   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2191   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2192   emit_int8((unsigned char)0x93);
2193   emit_int8((unsigned char)(0xC0 | encode));
2194 }
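// Usage sketch: the kmov family shuttles mask bits between general registers
// and the AVX-512 k-registers, e.g. for a 16-bit mask computed in rax:
//   __ kmovwl(k1, rax);   // k1  <- low 16 bits of rax
//   __ kmovwl(rbx, k1);   // rbx <- the mask, zero-extended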
2195 
2196 void Assembler::kmovdl(KRegister dst, Register src) {
2197   assert(VM_Version::supports_avx512bw(), "");
2198   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2199   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2200   emit_int8((unsigned char)0x92);
2201   emit_int8((unsigned char)(0xC0 | encode));
2202 }
2203 
2204 void Assembler::kmovdl(Register dst, KRegister src) {
2205   assert(VM_Version::supports_avx512bw(), "");
2206   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2207   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2208   emit_int8((unsigned char)0x93);
2209   emit_int8((unsigned char)(0xC0 | encode));
2210 }
2211 
2212 void Assembler::kmovql(KRegister dst, KRegister src) {
2213   assert(VM_Version::supports_avx512bw(), "");
2214   InstructionAttr attributes(AVX_128bit, /* rex_w */ true, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2215   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);


2406   emit_int8(0x6F);
2407   emit_operand(dst, src);
2408 }
2409 
2410 void Assembler::vmovdqu(Address dst, XMMRegister src) {
2411   assert(UseAVX > 0, "");
2412   InstructionMark im(this);
2413   InstructionAttr attributes(AVX_256bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2414   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2415   // swap src<->dst for encoding
2416   assert(src != xnoreg, "sanity");
2417   vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2418   emit_int8(0x7F);
2419   emit_operand(src, dst);
2420 }
2421 
2422 // Move Unaligned EVEX-enabled Vector (programmable element width: 8, 16, 32, 64 bits)
2423 void Assembler::evmovdqub(XMMRegister dst, XMMRegister src, int vector_len) {
2424   assert(VM_Version::supports_evex(), "");
2425   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2426   int prefix = (_legacy_mode_bw) ? VEX_SIMD_F2 : VEX_SIMD_F3;
2427   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), (Assembler::VexSimdPrefix)prefix, VEX_OPCODE_0F, &attributes);
2428   emit_int8(0x6F);
2429   emit_int8((unsigned char)(0xC0 | encode));
2430 }
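// Usage sketch: the mnemonic suffix (b/w/l/q = 8/16/32/64-bit elements) fixes
// the element width the move is masked at; a 512-bit byte-wise load would be
//   __ evmovdqub(xmm0, Address(rsi, 0), Assembler::AVX_512bit);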
2431 
2432 void Assembler::evmovdqub(XMMRegister dst, Address src, int vector_len) {
2433   assert(VM_Version::supports_evex(), "");
2434   InstructionMark im(this);
2435   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2436   int prefix = (_legacy_mode_bw) ? VEX_SIMD_F2 : VEX_SIMD_F3;
2437   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2438   vex_prefix(src, 0, dst->encoding(), (Assembler::VexSimdPrefix)prefix, VEX_OPCODE_0F, &attributes);
2439   emit_int8(0x6F);
2440   emit_operand(dst, src);
2441 }
2442 
2443 void Assembler::evmovdqub(Address dst, XMMRegister src, int vector_len) {
2444   assert(VM_Version::supports_evex(), "");
2445   assert(src != xnoreg, "sanity");
2446   InstructionMark im(this);
2447   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2448   int prefix = (_legacy_mode_bw) ? VEX_SIMD_F2 : VEX_SIMD_F3;
2449   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2450   vex_prefix(dst, 0, src->encoding(), (Assembler::VexSimdPrefix)prefix, VEX_OPCODE_0F, &attributes);
2451   emit_int8(0x7F);
2452   emit_operand(src, dst);
2453 }
2454 
2455 void Assembler::evmovdquw(XMMRegister dst, XMMRegister src, int vector_len) {
2456   assert(VM_Version::supports_evex(), "");
2457   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2458   int prefix = (_legacy_mode_bw) ? VEX_SIMD_F2 : VEX_SIMD_F3;
2459   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), (Assembler::VexSimdPrefix)prefix, VEX_OPCODE_0F, &attributes);
2460   emit_int8(0x6F);
2461   emit_int8((unsigned char)(0xC0 | encode));
2462 }
2463 
2464 void Assembler::evmovdquw(XMMRegister dst, Address src, int vector_len) {
2465   assert(VM_Version::supports_evex(), "");
2466   InstructionMark im(this);
2467   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2468   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2469   int prefix = (_legacy_mode_bw) ? VEX_SIMD_F2 : VEX_SIMD_F3;
2470   vex_prefix(src, 0, dst->encoding(), (Assembler::VexSimdPrefix)prefix, VEX_OPCODE_0F, &attributes);
2471   emit_int8(0x6F);
2472   emit_operand(dst, src);
2473 }
2474 
2475 void Assembler::evmovdquw(Address dst, XMMRegister src, int vector_len) {
2476   assert(VM_Version::supports_evex(), "");
2477   assert(src != xnoreg, "sanity");
2478   InstructionMark im(this);
2479   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2480   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2481   int prefix = (_legacy_mode_bw) ? VEX_SIMD_F2 : VEX_SIMD_F3;
2482   vex_prefix(dst, 0, src->encoding(), (Assembler::VexSimdPrefix)prefix, VEX_OPCODE_0F, &attributes);
2483   emit_int8(0x7F);
2484   emit_operand(src, dst);
2485 }
2486 
2487 void Assembler::evmovdqul(XMMRegister dst, XMMRegister src, int vector_len) {
2488   assert(VM_Version::supports_evex(), "");
2489   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2490   attributes.set_is_evex_instruction();
2491   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2492   emit_int8(0x6F);
2493   emit_int8((unsigned char)(0xC0 | encode));
2494 }
2495 
2496 void Assembler::evmovdqul(XMMRegister dst, Address src, int vector_len) {
2497   assert(VM_Version::supports_evex(), "");
2498   InstructionMark im(this);
2499   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false , /* uses_vl */ true);
2500   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2501   attributes.set_is_evex_instruction();
2502   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2503   emit_int8(0x6F);
2504   emit_operand(dst, src);
2505 }
2506 
2507 void Assembler::evmovdqul(Address dst, XMMRegister src, int vector_len) {
2508   assert(VM_Version::supports_evex(), "");
2509   assert(src != xnoreg, "sanity");
2510   InstructionMark im(this);
2511   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2512   attributes.set_is_evex_instruction();
2513   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2514   vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2515   emit_int8(0x7F);
2516   emit_operand(src, dst);
2517 }
2518 
2519 void Assembler::evmovdquq(XMMRegister dst, XMMRegister src, int vector_len) {
2520   assert(VM_Version::supports_evex(), "");
2521   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2522   attributes.set_is_evex_instruction();
2523   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2524   emit_int8(0x6F);
2525   emit_int8((unsigned char)(0xC0 | encode));
2526 }
2527 
2528 void Assembler::evmovdquq(XMMRegister dst, Address src, int vector_len) {
2529   assert(VM_Version::supports_evex(), "");
2530   InstructionMark im(this);
2531   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2532   attributes.set_is_evex_instruction();
2533   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2534   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2535   emit_int8(0x6F);
2536   emit_operand(dst, src);
2537 }
2538 
2539 void Assembler::evmovdquq(Address dst, XMMRegister src, int vector_len) {
2540   assert(VM_Version::supports_evex(), "");
2541   assert(src != xnoreg, "sanity");
2542   InstructionMark im(this);
2543   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2544   attributes.set_is_evex_instruction();
2545   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2546   vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2547   emit_int8(0x7F);
2548   emit_operand(src, dst);
2549 }
2550 
2551 // Uses zero extension on 64-bit
2552 
2553 void Assembler::movl(Register dst, int32_t imm32) {
2554   int encode = prefix_and_encode(dst->encoding());
2555   emit_int8((unsigned char)(0xB8 | encode));
2556   emit_int32(imm32);
2557 }
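// Sketch of the zero extension noted above: on 64-bit, writing a 32-bit
// register clears bits 63..32, so
//   __ movl(rax, 1);   // leaves rax == 0x0000000000000001
// suffices wherever a zero-extended 64-bit value is wanted.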
2558 
2559 void Assembler::movl(Register dst, Register src) {
2560   int encode = prefix_and_encode(dst->encoding(), src->encoding());
2561   emit_int8((unsigned char)0x8B);
2562   emit_int8((unsigned char)(0xC0 | encode));
2563 }
2564 
2565 void Assembler::movl(Register dst, Address src) {


2575   emit_int8((unsigned char)0xC7);
2576   emit_operand(rax, dst, 4);
2577   emit_int32(imm32);
2578 }
2579 
2580 void Assembler::movl(Address dst, Register src) {
2581   InstructionMark im(this);
2582   prefix(dst, src);
2583   emit_int8((unsigned char)0x89);
2584   emit_operand(src, dst);
2585 }
2586 
2587 // Newer CPUs require movsd and movss to avoid a partial register stall
2588 // when loading from memory. But for old Opterons use movlpd instead of movsd.
2589 // The selection is done in MacroAssembler::movdbl() and movflt().
2590 void Assembler::movlpd(XMMRegister dst, Address src) {
2591   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2592   InstructionMark im(this);
2593   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
2594   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
2595   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2596   emit_int8(0x12);
2597   emit_operand(dst, src);
2598 }
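// A minimal sketch of that selection (simplified from MacroAssembler::movdbl):
//   if (UseXmmLoadAndClearUpper) { __ movsd(dst, src);  }   // newer CPUs
//   else                         { __ movlpd(dst, src); }   // old Opteron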
2599 
2600 void Assembler::movq( MMXRegister dst, Address src ) {
2601   assert( VM_Version::supports_mmx(), "" );
2602   emit_int8(0x0F);
2603   emit_int8(0x6F);
2604   emit_operand(dst, src);
2605 }
2606 
2607 void Assembler::movq( Address dst, MMXRegister src ) {
2608   assert( VM_Version::supports_mmx(), "" );
2609   emit_int8(0x0F);
2610   emit_int8(0x7F);
2611   // workaround gcc (3.2.1-7a) bug
2612   // In that version of gcc, with only an emit_operand(MMX, Address)
2613   // available, gcc will tail-jump and try to reverse the parameters,
2614   // completely obliterating dst in the process. By having a version
2615   // that doesn't need to swap the args at the tail jump, the bug is
2616   // avoided.
2617   emit_operand(dst, src);
2618 }
2619 
2620 void Assembler::movq(XMMRegister dst, Address src) {
2621   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2622   InstructionMark im(this);
2623   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
2624   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
2625   simd_prefix(dst, xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2626   emit_int8(0x7E);
2627   emit_operand(dst, src);
2628 }
2629 
2630 void Assembler::movq(Address dst, XMMRegister src) {
2631   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2632   InstructionMark im(this);
2633   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
2634   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
2635   simd_prefix(src, xnoreg, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2636   emit_int8((unsigned char)0xD6);
2637   emit_operand(src, dst);
2638 }
2639 
2640 void Assembler::movsbl(Register dst, Address src) { // movsxb
2641   InstructionMark im(this);
2642   prefix(src, dst);
2643   emit_int8(0x0F);
2644   emit_int8((unsigned char)0xBE);
2645   emit_operand(dst, src);
2646 }
2647 
2648 void Assembler::movsbl(Register dst, Register src) { // movsxb
2649   NOT_LP64(assert(src->has_byte_register(), "must have byte register"));
2650   int encode = prefix_and_encode(dst->encoding(), false, src->encoding(), true);
2651   emit_int8(0x0F);
2652   emit_int8((unsigned char)0xBE);
2653   emit_int8((unsigned char)(0xC0 | encode));
2654 }
2655 
2656 void Assembler::movsd(XMMRegister dst, XMMRegister src) {
2657   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2658   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2659   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2660   emit_int8(0x10);
2661   emit_int8((unsigned char)(0xC0 | encode));
2662 }
2663 
2664 void Assembler::movsd(XMMRegister dst, Address src) {
2665   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2666   InstructionMark im(this);
2667   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2668   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
2669   simd_prefix(dst, xnoreg, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2670   emit_int8(0x10);
2671   emit_operand(dst, src);
2672 }
2673 
2674 void Assembler::movsd(Address dst, XMMRegister src) {
2675   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2676   InstructionMark im(this);
2677   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2678   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
2679   simd_prefix(src, xnoreg, dst, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2680   emit_int8(0x11);
2681   emit_operand(src, dst);
2682 }
2683 
2684 void Assembler::movss(XMMRegister dst, XMMRegister src) {
2685   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2686   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2687   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2688   emit_int8(0x10);
2689   emit_int8((unsigned char)(0xC0 | encode));
2690 }
2691 
2692 void Assembler::movss(XMMRegister dst, Address src) {
2693   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2694   InstructionMark im(this);
2695   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2696   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
2697   simd_prefix(dst, xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2698   emit_int8(0x10);


2782 }
2783 
2784 void Assembler::mull(Address src) {
2785   InstructionMark im(this);
2786   prefix(src);
2787   emit_int8((unsigned char)0xF7);
2788   emit_operand(rsp, src);
2789 }
2790 
2791 void Assembler::mull(Register src) {
2792   int encode = prefix_and_encode(src->encoding());
2793   emit_int8((unsigned char)0xF7);
2794   emit_int8((unsigned char)(0xE0 | encode));
2795 }
2796 
2797 void Assembler::mulsd(XMMRegister dst, Address src) {
2798   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2799   InstructionMark im(this);
2800   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2801   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
2802   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2803   emit_int8(0x59);
2804   emit_operand(dst, src);
2805 }
2806 
2807 void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
2808   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2809   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2810   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2811   emit_int8(0x59);
2812   emit_int8((unsigned char)(0xC0 | encode));
2813 }
2814 
2815 void Assembler::mulss(XMMRegister dst, Address src) {
2816   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2817   InstructionMark im(this);
2818   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2819   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
2820   simd_prefix(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2821   emit_int8(0x59);
2822   emit_operand(dst, src);
2823 }
2824 
2825 void Assembler::mulss(XMMRegister dst, XMMRegister src) {
2826   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2827   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2828   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2829   emit_int8(0x59);


3769   assert((UseAVX > 0), "SSE mode requires 16-byte address alignment");
3770   InstructionMark im(this);
3771   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3772   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
3773   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3774   emit_int8(0x62);
3775   emit_operand(dst, src);
3776 }
3777 
3778 void Assembler::punpckldq(XMMRegister dst, XMMRegister src) {
3779   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3780   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3781   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3782   emit_int8(0x62);
3783   emit_int8((unsigned char)(0xC0 | encode));
3784 }
3785 
3786 void Assembler::punpcklqdq(XMMRegister dst, XMMRegister src) {
3787   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3788   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3789   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3790   emit_int8(0x6C);
3791   emit_int8((unsigned char)(0xC0 | encode));
3792 }
3793 
3794 void Assembler::push(int32_t imm32) {
3795   // in 64-bit mode we push 64 bits onto the stack but only
3796   // take a 32-bit immediate
3797   emit_int8(0x68);
3798   emit_int32(imm32);
3799 }
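// Example: push(0x12345678) emits 68 78 56 34 12; in 64-bit mode the
// immediate is sign-extended to 64 bits and rsp drops by 8.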
3800 
3801 void Assembler::push(Register src) {
3802   int encode = prefix_and_encode(src->encoding());
3803 
3804   emit_int8(0x50 | encode);
3805 }
3806 
3807 void Assembler::pushf() {
3808   emit_int8((unsigned char)0x9C);


4064   int encode = prefix_and_encode(dst->encoding());
4065   emit_int8((unsigned char)0xC1);
4066   emit_int8((unsigned char)(0xE8 | encode));
4067   emit_int8(imm8);
4068 }
4069 
4070 void Assembler::shrl(Register dst) {
4071   int encode = prefix_and_encode(dst->encoding());
4072   emit_int8((unsigned char)0xD3);
4073   emit_int8((unsigned char)(0xE8 | encode));
4074 }
4075 
4076 // copies a single word from [esi] to [edi]
4077 void Assembler::smovl() {
4078   emit_int8((unsigned char)0xA5);
4079 }
4080 
4081 void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
4082   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4083   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4084   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4085   emit_int8(0x51);
4086   emit_int8((unsigned char)(0xC0 | encode));
4087 }
4088 
4089 void Assembler::sqrtsd(XMMRegister dst, Address src) {
4090   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4091   InstructionMark im(this);
4092   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4093   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4094   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4095   emit_int8(0x51);
4096   emit_operand(dst, src);
4097 }
4098 
4099 void Assembler::sqrtss(XMMRegister dst, XMMRegister src) {
4100   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4101   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4102   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4103   emit_int8(0x51);
4104   emit_int8((unsigned char)(0xC0 | encode));
4105 }
4106 
4107 void Assembler::std() {
4108   emit_int8((unsigned char)0xFD);
4109 }
4110 
4111 void Assembler::sqrtss(XMMRegister dst, Address src) {
4112   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4113   InstructionMark im(this);


4149 void Assembler::subl_imm32(Register dst, int32_t imm32) {
4150   prefix(dst);
4151   emit_arith_imm32(0x81, 0xE8, dst, imm32);
4152 }
4153 
4154 void Assembler::subl(Register dst, Address src) {
4155   InstructionMark im(this);
4156   prefix(src, dst);
4157   emit_int8(0x2B);
4158   emit_operand(dst, src);
4159 }
4160 
4161 void Assembler::subl(Register dst, Register src) {
4162   (void) prefix_and_encode(dst->encoding(), src->encoding());
4163   emit_arith(0x2B, 0xC0, dst, src);
4164 }
4165 
4166 void Assembler::subsd(XMMRegister dst, XMMRegister src) {
4167   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4168   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4169   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4170   emit_int8(0x5C);
4171   emit_int8((unsigned char)(0xC0 | encode));
4172 }
4173 
4174 void Assembler::subsd(XMMRegister dst, Address src) {
4175   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4176   InstructionMark im(this);
4177   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4178   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4179   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4180   emit_int8(0x5C);
4181   emit_operand(dst, src);
4182 }
4183 
4184 void Assembler::subss(XMMRegister dst, XMMRegister src) {
4185   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4186   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false , /* uses_vl */ false);
4187   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4188   emit_int8(0x5C);
4189   emit_int8((unsigned char)(0xC0 | encode));
4190 }
4191 
4192 void Assembler::subss(XMMRegister dst, Address src) {
4193   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4194   InstructionMark im(this);
4195   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4196   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4197   simd_prefix(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4198   emit_int8(0x5C);


4246   int encode = prefix_and_encode(dst->encoding(), src->encoding());
4247   emit_int8(0x0F);
4248   emit_int8((unsigned char)0xBC);
4249   emit_int8((unsigned char)0xC0 | encode);
4250 }
4251 
4252 void Assembler::tzcntq(Register dst, Register src) {
4253   assert(VM_Version::supports_bmi1(), "tzcnt instruction not supported");
4254   emit_int8((unsigned char)0xF3);
4255   int encode = prefixq_and_encode(dst->encoding(), src->encoding());
4256   emit_int8(0x0F);
4257   emit_int8((unsigned char)0xBC);
4258   emit_int8((unsigned char)(0xC0 | encode));
4259 }
4260 
4261 void Assembler::ucomisd(XMMRegister dst, Address src) {
4262   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4263   InstructionMark im(this);
4264   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
4265   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4266   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4267   emit_int8(0x2E);
4268   emit_operand(dst, src);
4269 }
4270 
4271 void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
4272   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4273   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
4274   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4275   emit_int8(0x2E);
4276   emit_int8((unsigned char)(0xC0 | encode));
4277 }
4278 
4279 void Assembler::ucomiss(XMMRegister dst, Address src) {
4280   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4281   InstructionMark im(this);
4282   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
4283   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4284   simd_prefix(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4285   emit_int8(0x2E);
4286   emit_operand(dst, src);
4287 }
4288 
4289 void Assembler::ucomiss(XMMRegister dst, XMMRegister src) {
4290   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4291   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
4292   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4293   emit_int8(0x2E);


4365 
4366 void Assembler::xorl(Register dst, Register src) {
4367   (void) prefix_and_encode(dst->encoding(), src->encoding());
4368   emit_arith(0x33, 0xC0, dst, src);
4369 }
4370 
4371 void Assembler::xorb(Register dst, Address src) {
4372   InstructionMark im(this);
4373   prefix(src, dst);
4374   emit_int8(0x32);
4375   emit_operand(dst, src);
4376 }
4377 
4378 // AVX 3-operand scalar floating-point arithmetic instructions
4379 
4380 void Assembler::vaddsd(XMMRegister dst, XMMRegister nds, Address src) {
4381   assert(VM_Version::supports_avx(), "");
4382   InstructionMark im(this);
4383   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4384   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4385   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4386   emit_int8(0x58);
4387   emit_operand(dst, src);
4388 }
4389 
4390 void Assembler::vaddsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4391   assert(VM_Version::supports_avx(), "");
4392   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4393   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4394   emit_int8(0x58);
4395   emit_int8((unsigned char)(0xC0 | encode));
4396 }
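// Usage sketch: the 3-operand form is non-destructive and merges the upper
// bits from nds, e.g.
//   __ vaddsd(xmm0, xmm1, xmm2);   // xmm0[63:0]   = xmm1[63:0] + xmm2[63:0]
//                                  // xmm0[127:64] = xmm1[127:64]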
4397 
4398 void Assembler::vaddss(XMMRegister dst, XMMRegister nds, Address src) {
4399   assert(VM_Version::supports_avx(), "");
4400   InstructionMark im(this);
4401   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4402   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4403   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4404   emit_int8(0x58);
4405   emit_operand(dst, src);
4406 }
4407 
4408 void Assembler::vaddss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4409   assert(VM_Version::supports_avx(), "");
4410   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4411   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4412   emit_int8(0x58);
4413   emit_int8((unsigned char)(0xC0 | encode));
4414 }
4415 
4416 void Assembler::vdivsd(XMMRegister dst, XMMRegister nds, Address src) {
4417   assert(VM_Version::supports_avx(), "");
4418   InstructionMark im(this);
4419   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4420   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4421   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4422   emit_int8(0x5E);
4423   emit_operand(dst, src);
4424 }
4425 
4426 void Assembler::vdivsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4427   assert(VM_Version::supports_avx(), "");
4428   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4429   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4430   emit_int8(0x5E);
4431   emit_int8((unsigned char)(0xC0 | encode));
4432 }
4433 
4434 void Assembler::vdivss(XMMRegister dst, XMMRegister nds, Address src) {
4435   assert(VM_Version::supports_avx(), "");
4436   InstructionMark im(this);
4437   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4438   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4439   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4440   emit_int8(0x5E);
4441   emit_operand(dst, src);
4442 }
4443 
4444 void Assembler::vdivss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4445   assert(VM_Version::supports_avx(), "");
4446   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4447   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4448   emit_int8(0x5E);
4449   emit_int8((unsigned char)(0xC0 | encode));
4450 }
4451 
4452 void Assembler::vmulsd(XMMRegister dst, XMMRegister nds, Address src) {
4453   assert(VM_Version::supports_avx(), "");
4454   InstructionMark im(this);
4455   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4456   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4457   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4458   emit_int8(0x59);
4459   emit_operand(dst, src);
4460 }
4461 
4462 void Assembler::vmulsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4463   assert(VM_Version::supports_avx(), "");
4464   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4465   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4466   emit_int8(0x59);
4467   emit_int8((unsigned char)(0xC0 | encode));
4468 }
4469 
4470 void Assembler::vmulss(XMMRegister dst, XMMRegister nds, Address src) {
4471   assert(VM_Version::supports_avx(), "");
4472   InstructionMark im(this);
4473   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4474   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4475   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4476   emit_int8(0x59);
4477   emit_operand(dst, src);
4478 }
4479 
4480 void Assembler::vmulss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4481   assert(VM_Version::supports_avx(), "");
4482   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4483   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4484   emit_int8(0x59);
4485   emit_int8((unsigned char)(0xC0 | encode));
4486 }
4487 
4488 void Assembler::vsubsd(XMMRegister dst, XMMRegister nds, Address src) {
4489   assert(VM_Version::supports_avx(), "");
4490   InstructionMark im(this);
4491   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4492   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4493   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4494   emit_int8(0x5C);
4495   emit_operand(dst, src);
4496 }
4497 
4498 void Assembler::vsubsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4499   assert(VM_Version::supports_avx(), "");
4500   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4501   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4502   emit_int8(0x5C);
4503   emit_int8((unsigned char)(0xC0 | encode));
4504 }
4505 
4506 void Assembler::vsubss(XMMRegister dst, XMMRegister nds, Address src) {
4507   assert(VM_Version::supports_avx(), "");
4508   InstructionMark im(this);
4509   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4510   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4511   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4512   emit_int8(0x5C);
4513   emit_operand(dst, src);
4514 }
4515 
4516 void Assembler::vsubss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4517   assert(VM_Version::supports_avx(), "");
4518   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4519   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4520   emit_int8(0x5C);
4521   emit_int8((unsigned char)(0xC0 | encode));
4522 }
4523 
4524 //====================VECTOR ARITHMETIC=====================================
4525 
4526 // Floating-point vector arithmetic
4527 
4528 void Assembler::addpd(XMMRegister dst, XMMRegister src) {
4529   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4530   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4531   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4532   emit_int8(0x58);
4533   emit_int8((unsigned char)(0xC0 | encode));
4534 }
4535 
4536 void Assembler::addpd(XMMRegister dst, Address src) {
4537   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4538   InstructionMark im(this);
4539   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4540   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4541   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4542   emit_int8(0x58);
4543   emit_operand(dst, src);
4544 }
4545 
4546 
4547 void Assembler::addps(XMMRegister dst, XMMRegister src) {
4548   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4549   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4550   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4551   emit_int8(0x58);
4552   emit_int8((unsigned char)(0xC0 | encode));
4553 }
4554 
4555 void Assembler::vaddpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4556   assert(VM_Version::supports_avx(), "");
4557   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4558   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4559   emit_int8(0x58);
4560   emit_int8((unsigned char)(0xC0 | encode));
4561 }
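// Usage sketch: vector_len selects the operation width, e.g.
//   __ vaddpd(xmm0, xmm1, xmm2, Assembler::AVX_256bit);   // 4 packed doubles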
4562 
4563 void Assembler::vaddps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4564   assert(VM_Version::supports_avx(), "");
4565   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4566   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4567   emit_int8(0x58);
4568   emit_int8((unsigned char)(0xC0 | encode));
4569 }
4570 
4571 void Assembler::vaddpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4572   assert(VM_Version::supports_avx(), "");
4573   InstructionMark im(this);
4574   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4575   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4576   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4577   emit_int8(0x58);
4578   emit_operand(dst, src);
4579 }
4580 
4581 void Assembler::vaddps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4582   assert(VM_Version::supports_avx(), "");
4583   InstructionMark im(this);
4584   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4585   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4586   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4587   emit_int8(0x58);
4588   emit_operand(dst, src);
4589 }
4590 
4591 void Assembler::subpd(XMMRegister dst, XMMRegister src) {
4592   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4593   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4594   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4595   emit_int8(0x5C);
4596   emit_int8((unsigned char)(0xC0 | encode));
4597 }
4598 
4599 void Assembler::subps(XMMRegister dst, XMMRegister src) {
4600   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4601   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4602   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4603   emit_int8(0x5C);
4604   emit_int8((unsigned char)(0xC0 | encode));
4605 }
4606 
4607 void Assembler::vsubpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4608   assert(VM_Version::supports_avx(), "");
4609   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

4610   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4611   emit_int8(0x5C);
4612   emit_int8((unsigned char)(0xC0 | encode));
4613 }
4614 
4615 void Assembler::vsubps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4616   assert(VM_Version::supports_avx(), "");
4617   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4618   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4619   emit_int8(0x5C);
4620   emit_int8((unsigned char)(0xC0 | encode));
4621 }
4622 
4623 void Assembler::vsubpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4624   assert(VM_Version::supports_avx(), "");
4625   InstructionMark im(this);
4626   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4627   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);

4628   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4629   emit_int8(0x5C);
4630   emit_operand(dst, src);
4631 }
4632 
4633 void Assembler::vsubps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4634   assert(VM_Version::supports_avx(), "");
4635   InstructionMark im(this);
4636   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4637   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4638   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4639   emit_int8(0x5C);
4640   emit_operand(dst, src);
4641 }
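// Encoding note (illustrative): the subtract forms mirror the adds with
// opcode 0x5C, e.g. vsubps ymm0, ymm1, ymm2 assembles to C5 F4 5C C2
// (pp = none selects packed single, L = 1 selects the 256-bit width).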
4642 
4643 void Assembler::mulpd(XMMRegister dst, XMMRegister src) {
4644   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4645   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

4646   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4647   emit_int8(0x59);
4648   emit_int8((unsigned char)(0xC0 | encode));
4649 }
4650 
4651 void Assembler::mulpd(XMMRegister dst, Address src) {
4652   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4653   InstructionMark im(this);
4654   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4655   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);

4656   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4657   emit_int8(0x59);
4658   emit_operand(dst, src);
4659 }
4660 
4661 void Assembler::mulps(XMMRegister dst, XMMRegister src) {
4662   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4663   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4664   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4665   emit_int8(0x59);
4666   emit_int8((unsigned char)(0xC0 | encode));
4667 }
4668 
4669 void Assembler::vmulpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4670   assert(VM_Version::supports_avx(), "");
4671   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

4672   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4673   emit_int8(0x59);
4674   emit_int8((unsigned char)(0xC0 | encode));
4675 }
4676 
4677 void Assembler::vmulps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4678   assert(VM_Version::supports_avx(), "");
4679   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4680   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4681   emit_int8(0x59);
4682   emit_int8((unsigned char)(0xC0 | encode));
4683 }
4684 
4685 void Assembler::vmulpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4686   assert(VM_Version::supports_avx(), "");
4687   InstructionMark im(this);
4688   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4689   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);

4690   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4691   emit_int8(0x59);
4692   emit_operand(dst, src);
4693 }
4694 
4695 void Assembler::vmulps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4696   assert(VM_Version::supports_avx(), "");
4697   InstructionMark im(this);
4698   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4699   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4700   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4701   emit_int8(0x59);
4702   emit_operand(dst, src);
4703 }
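// Encoding note (illustrative): multiply uses opcode 0x59 throughout; the
// legacy SSE form mulpd xmm1, xmm2 assembles to 66 0F 59 CA, where 0xCA
// is ModRM with mod = 11, reg = xmm1, rm = xmm2.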
4704 
4705 void Assembler::divpd(XMMRegister dst, XMMRegister src) {
4706   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4707   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

4708   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4709   emit_int8(0x5E);
4710   emit_int8((unsigned char)(0xC0 | encode));
4711 }
4712 
4713 void Assembler::divps(XMMRegister dst, XMMRegister src) {
4714   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4715   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4716   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4717   emit_int8(0x5E);
4718   emit_int8((unsigned char)(0xC0 | encode));
4719 }
4720 
4721 void Assembler::vdivpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4722   assert(VM_Version::supports_avx(), "");
4723   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

4724   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4725   emit_int8(0x5E);
4726   emit_int8((unsigned char)(0xC0 | encode));
4727 }
4728 
4729 void Assembler::vdivps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4730   assert(VM_Version::supports_avx(), "");
4731   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4732   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4733   emit_int8(0x5E);
4734   emit_int8((unsigned char)(0xC0 | encode));
4735 }
4736 
4737 void Assembler::vdivpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4738   assert(VM_Version::supports_avx(), "");
4739   InstructionMark im(this);
4740   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4741   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);

4742   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4743   emit_int8(0x5E);
4744   emit_operand(dst, src);
4745 }
4746 
4747 void Assembler::vdivps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4748   assert(VM_Version::supports_avx(), "");
4749   InstructionMark im(this);
4750   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4751   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4752   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4753   emit_int8(0x5E);
4754   emit_operand(dst, src);
4755 }
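// Encoding note (illustrative): divide uses opcode 0x5E. As with the other
// packed FP memory forms above, the EVEX_FV tuple and input size feed the
// EVEX disp8*N compression, so small displacements still encode in one byte.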
4756 
4757 void Assembler::vsqrtpd(XMMRegister dst, XMMRegister src, int vector_len) {
4758   assert(VM_Version::supports_avx(), "");
4759   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

4760   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4761   emit_int8(0x51);
4762   emit_int8((unsigned char)(0xC0 | encode));
4763 }
4764 
4765 void Assembler::vsqrtpd(XMMRegister dst, Address src, int vector_len) {
4766   assert(VM_Version::supports_avx(), "");
4767   InstructionMark im(this);
4768   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4769   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);

4770   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4771   emit_int8(0x51);
4772   emit_operand(dst, src);
4773 }
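// Usage sketch (hypothetical caller): sqrt takes a single source, so the
// vvvv field is passed as 0 above, e.g.
//   vsqrtpd(xmm0, xmm1, Assembler::AVX_256bit);  // ymm0 = sqrt(ymm1)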
4774 
4775 void Assembler::andpd(XMMRegister dst, XMMRegister src) {
4776   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4777   InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);

4778   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4779   emit_int8(0x54);
4780   emit_int8((unsigned char)(0xC0 | encode));
4781 }
4782 
4783 void Assembler::andps(XMMRegister dst, XMMRegister src) {
4784   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4785   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4786   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4787   emit_int8(0x54);
4788   emit_int8((unsigned char)(0xC0 | encode));
4789 }
4790 
4791 void Assembler::andps(XMMRegister dst, Address src) {
4792   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4793   InstructionMark im(this);
4794   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4795   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4796   simd_prefix(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4797   emit_int8(0x54);
4798   emit_operand(dst, src);
4799 }
4800 
4801 void Assembler::andpd(XMMRegister dst, Address src) {
4802   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4803   InstructionMark im(this);
4804   InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4805   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);

4806   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4807   emit_int8(0x54);
4808   emit_operand(dst, src);
4809 }
4810 
4811 void Assembler::vandpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4812   assert(VM_Version::supports_avx(), "");
4813   InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);

4814   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4815   emit_int8(0x54);
4816   emit_int8((unsigned char)(0xC0 | encode));
4817 }
4818 
4819 void Assembler::vandps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4820   assert(VM_Version::supports_avx(), "");
4821   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4822   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4823   emit_int8(0x54);
4824   emit_int8((unsigned char)(0xC0 | encode));
4825 }
4826 
4827 void Assembler::vandpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4828   assert(VM_Version::supports_avx(), "");
4829   InstructionMark im(this);
4830   InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4831   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);

4832   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4833   emit_int8(0x54);
4834   emit_operand(dst, src);
4835 }
4836 
4837 void Assembler::vandps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4838   assert(VM_Version::supports_avx(), "");
4839   InstructionMark im(this);
4840   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4841   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4842   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4843   emit_int8(0x54);
4844   emit_operand(dst, src);
4845 }
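// Encoding note (illustrative): andps/andpd use opcode 0x54. Under
// AVX512DQ the packed logical ops gain a W bit (hence the !_legacy_mode_dq
// vex_w setting above); without DQ support the helpers fall back to the
// legacy VEX encoding.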
4846 
4847 void Assembler::unpckhpd(XMMRegister dst, XMMRegister src) {
4848   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4849   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

4850   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4851   emit_int8(0x15);
4852   emit_int8((unsigned char)(0xC0 | encode));
4853 }
4854 
4855 void Assembler::unpcklpd(XMMRegister dst, XMMRegister src) {
4856   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4857   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

4858   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4859   emit_int8(0x14);
4860   emit_int8((unsigned char)(0xC0 | encode));
4861 }
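// unpckhpd (0x15) and unpcklpd (0x14) interleave the high and low
// quadwords of the two sources; e.g. unpcklpd xmm1, xmm2 leaves
// xmm1 = { xmm2[63:0], xmm1[63:0] } (high lane listed first).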
4862 
4863 void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
4864   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4865   InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);

4866   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4867   emit_int8(0x57);
4868   emit_int8((unsigned char)(0xC0 | encode));
4869 }
4870 
4871 void Assembler::xorps(XMMRegister dst, XMMRegister src) {
4872   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4873   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4874   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4875   emit_int8(0x57);
4876   emit_int8((unsigned char)(0xC0 | encode));
4877 }
4878 
4879 void Assembler::xorpd(XMMRegister dst, Address src) {
4880   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4881   InstructionMark im(this);
4882   InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4883   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);

4884   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4885   emit_int8(0x57);
4886   emit_operand(dst, src);
4887 }
4888 
4889 void Assembler::xorps(XMMRegister dst, Address src) {
4890   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4891   InstructionMark im(this);
4892   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4893   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4894   simd_prefix(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4895   emit_int8(0x57);
4896   emit_operand(dst, src);
4897 }
4898 
4899 void Assembler::vxorpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4900   assert(VM_Version::supports_avx(), "");
4901   InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);

4902   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4903   emit_int8(0x57);
4904   emit_int8((unsigned char)(0xC0 | encode));
4905 }
4906 
4907 void Assembler::vxorps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4908   assert(VM_Version::supports_avx(), "");
4909   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4910   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4911   emit_int8(0x57);
4912   emit_int8((unsigned char)(0xC0 | encode));
4913 }
4914 
4915 void Assembler::vxorpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4916   assert(VM_Version::supports_avx(), "");
4917   InstructionMark im(this);
4918   InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4919   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);

4920   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4921   emit_int8(0x57);
4922   emit_operand(dst, src);
4923 }
4924 
4925 void Assembler::vxorps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4926   assert(VM_Version::supports_avx(), "");
4927   InstructionMark im(this);
4928   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4929   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4930   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4931   emit_int8(0x57);
4932   emit_operand(dst, src);
4933 }
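// Usage sketch (hypothetical caller): xorps/xorpd (opcode 0x57) against
// self is the idiomatic way to zero a register; xorps(xmm0, xmm0) emits
// 0F 57 C0 and clears xmm0 without touching memory.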
4934 
4935 // Integer vector arithmetic
4936 void Assembler::vphaddw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4937   assert((VM_Version::supports_avx() && (vector_len == 0)) ||
4938          VM_Version::supports_avx2(), "256-bit integer vectors require AVX2");
4939   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);


4970 void Assembler::paddd(XMMRegister dst, XMMRegister src) {
4971   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4972   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4973   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4974   emit_int8((unsigned char)0xFE);
4975   emit_int8((unsigned char)(0xC0 | encode));
4976 }
4977 
4978 void Assembler::paddd(XMMRegister dst, Address src) {
4979   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4980   InstructionMark im(this);
4981   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4982   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4983   emit_int8((unsigned char)0xFE);
4984   emit_operand(dst, src);
4985 }
4986 
4987 void Assembler::paddq(XMMRegister dst, XMMRegister src) {
4988   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4989   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

4990   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4991   emit_int8((unsigned char)0xD4);
4992   emit_int8((unsigned char)(0xC0 | encode));
4993 }
4994 
4995 void Assembler::phaddw(XMMRegister dst, XMMRegister src) {
4996   NOT_LP64(assert(VM_Version::supports_sse3(), ""));
4997   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
4998   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
4999   emit_int8(0x01);
5000   emit_int8((unsigned char)(0xC0 | encode));
5001 }
5002 
5003 void Assembler::phaddd(XMMRegister dst, XMMRegister src) {
5004   NOT_LP64(assert(VM_Version::supports_sse3(), ""));
5005   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
5006   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5007   emit_int8(0x02);
5008   emit_int8((unsigned char)(0xC0 | encode));
5009 }


5018 
5019 void Assembler::vpaddw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5020   assert(UseAVX > 0, "requires some form of AVX");
5021   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5022   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5023   emit_int8((unsigned char)0xFD);
5024   emit_int8((unsigned char)(0xC0 | encode));
5025 }
5026 
5027 void Assembler::vpaddd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5028   assert(UseAVX > 0, "requires some form of AVX");
5029   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5030   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5031   emit_int8((unsigned char)0xFE);
5032   emit_int8((unsigned char)(0xC0 | encode));
5033 }
5034 
5035 void Assembler::vpaddq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5036   assert(UseAVX > 0, "requires some form of AVX");
5037   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

5038   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5039   emit_int8((unsigned char)0xD4);
5040   emit_int8((unsigned char)(0xC0 | encode));
5041 }
5042 
5043 void Assembler::vpaddb(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5044   assert(UseAVX > 0, "requires some form of AVX");
5045   InstructionMark im(this);
5046   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5047   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5048   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5049   emit_int8((unsigned char)0xFC);
5050   emit_operand(dst, src);
5051 }
5052 
5053 void Assembler::vpaddw(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5054   assert(UseAVX > 0, "requires some form of AVX");
5055   InstructionMark im(this);
5056   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5057   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5058   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5059   emit_int8((unsigned char)0xFD);
5060   emit_operand(dst, src);
5061 }
5062 
5063 void Assembler::vpaddd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5064   assert(UseAVX > 0, "requires some form of AVX");
5065   InstructionMark im(this);
5066   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5067   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5068   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5069   emit_int8((unsigned char)0xFE);
5070   emit_operand(dst, src);
5071 }
5072 
5073 void Assembler::vpaddq(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5074   assert(UseAVX > 0, "requires some form of AVX");
5075   InstructionMark im(this);
5076   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5077   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);

5078   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5079   emit_int8((unsigned char)0xD4);
5080   emit_operand(dst, src);
5081 }
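// Encoding note (illustrative): the packed integer adds map to opcodes
// 0xFC/0xFD/0xFE/0xD4 for byte/word/dword/qword elements; only the qword
// form sets vex_w under EVEX, mirroring the element width in the W bit.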
5082 
5083 void Assembler::psubb(XMMRegister dst, XMMRegister src) {
5084   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5085   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5086   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5087   emit_int8((unsigned char)0xF8);
5088   emit_int8((unsigned char)(0xC0 | encode));
5089 }
5090 
5091 void Assembler::psubw(XMMRegister dst, XMMRegister src) {
5092   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5093   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5094   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5095   emit_int8((unsigned char)0xF9);
5096   emit_int8((unsigned char)(0xC0 | encode));
5097 }
5098 
5099 void Assembler::psubd(XMMRegister dst, XMMRegister src) {
       NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5100   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5101   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5102   emit_int8((unsigned char)0xFA);
5103   emit_int8((unsigned char)(0xC0 | encode));
5104 }
5105 
5106 void Assembler::psubq(XMMRegister dst, XMMRegister src) {
5107   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5108   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

5109   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5110   emit_int8((unsigned char)0xFB);
5111   emit_int8((unsigned char)(0xC0 | encode));
5112 }
5113 
5114 void Assembler::vpsubb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5115   assert(UseAVX > 0, "requires some form of AVX");
5116   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5117   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5118   emit_int8((unsigned char)0xF8);
5119   emit_int8((unsigned char)(0xC0 | encode));
5120 }
5121 
5122 void Assembler::vpsubw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5123   assert(UseAVX > 0, "requires some form of AVX");
5124   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5125   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5126   emit_int8((unsigned char)0xF9);
5127   emit_int8((unsigned char)(0xC0 | encode));
5128 }
5129 
5130 void Assembler::vpsubd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5131   assert(UseAVX > 0, "requires some form of AVX");
5132   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5133   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5134   emit_int8((unsigned char)0xFA);
5135   emit_int8((unsigned char)(0xC0 | encode));
5136 }
5137 
5138 void Assembler::vpsubq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5139   assert(UseAVX > 0, "requires some form of AVX");
5140   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

5141   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5142   emit_int8((unsigned char)0xFB);
5143   emit_int8((unsigned char)(0xC0 | encode));
5144 }
5145 
5146 void Assembler::vpsubb(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5147   assert(UseAVX > 0, "requires some form of AVX");
5148   InstructionMark im(this);
5149   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5150   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5151   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5152   emit_int8((unsigned char)0xF8);
5153   emit_operand(dst, src);
5154 }
5155 
5156 void Assembler::vpsubw(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5157   assert(UseAVX > 0, "requires some form of AVX");
5158   InstructionMark im(this);
5159   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5160   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5161   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5162   emit_int8((unsigned char)0xF9);
5163   emit_operand(dst, src);
5164 }
5165 
5166 void Assembler::vpsubd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5167   assert(UseAVX > 0, "requires some form of AVX");
5168   InstructionMark im(this);
5169   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5170   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5171   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5172   emit_int8((unsigned char)0xFA);
5173   emit_operand(dst, src);
5174 }
5175 
5176 void Assembler::vpsubq(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5177   assert(UseAVX > 0, "requires some form of AVX");
5178   InstructionMark im(this);
5179   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5180   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);

5181   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5182   emit_int8((unsigned char)0xFB);
5183   emit_operand(dst, src);
5184 }
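// Encoding note (illustrative): the packed subtracts run in the same
// element order at 0xF8/0xF9/0xFA/0xFB for b/w/d/q; e.g. the SSE form
// psubd xmm1, xmm2 assembles to 66 0F FA CA.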
5185 
5186 void Assembler::pmullw(XMMRegister dst, XMMRegister src) {
5187   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5188   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5189   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5190   emit_int8((unsigned char)0xD5);
5191   emit_int8((unsigned char)(0xC0 | encode));
5192 }
5193 
5194 void Assembler::pmulld(XMMRegister dst, XMMRegister src) {
5195   assert(VM_Version::supports_sse4_1(), "");
5196   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5197   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5198   emit_int8(0x40);
5199   emit_int8((unsigned char)(0xC0 | encode));
5200 }
5201 
5202 void Assembler::vpmullw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5203   assert(UseAVX > 0, "requires some form of AVX");
5204   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5205   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5206   emit_int8((unsigned char)0xD5);
5207   emit_int8((unsigned char)(0xC0 | encode));
5208 }
5209 
5210 void Assembler::vpmulld(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5211   assert(UseAVX > 0, "requires some form of AVX");
5212   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5213   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5214   emit_int8(0x40);
5215   emit_int8((unsigned char)(0xC0 | encode));
5216 }
5217 
5218 void Assembler::vpmullq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5219   assert(UseAVX > 2, "requires AVX-512");
5220   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);

5221   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5222   emit_int8(0x40);
5223   emit_int8((unsigned char)(0xC0 | encode));
5224 }
5225 
5226 void Assembler::vpmullw(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5227   assert(UseAVX > 0, "requires some form of AVX");
5228   InstructionMark im(this);
5229   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5230   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5231   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5232   emit_int8((unsigned char)0xD5);
5233   emit_operand(dst, src);
5234 }
5235 
5236 void Assembler::vpmulld(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5237   assert(UseAVX > 0, "requires some form of AVX");
5238   InstructionMark im(this);
5239   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5240   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5241   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5242   emit_int8(0x40);
5243   emit_operand(dst, src);
5244 }
5245 
5246 void Assembler::vpmullq(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5247   assert(UseAVX > 2, "requires AVX-512");
5248   InstructionMark im(this);
5249   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
5250   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);

5251   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5252   emit_int8(0x40);
5253   emit_operand(dst, src);
5254 }
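// Encoding note (illustrative): pmullw is a legacy 0F-map opcode (0xD5),
// while pmulld and vpmullq sit in the 0F 38 map at 0x40; vpmullq further
// needs EVEX.W1 and AVX512DQ, hence the UseAVX > 2 guard above.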
5255 
5256 // Shift packed integers left by specified number of bits.
5257 void Assembler::psllw(XMMRegister dst, int shift) {
5258   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5259   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5260   // XMM6 is for /6 encoding: 66 0F 71 /6 ib
5261   int encode = simd_prefix_and_encode(xmm6, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5262   emit_int8(0x71);
5263   emit_int8((unsigned char)(0xC0 | encode));
5264   emit_int8(shift & 0xFF);
5265 }
5266 
5267 void Assembler::pslld(XMMRegister dst, int shift) {
5268   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5269   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5270   // XMM6 is for /6 encoding: 66 0F 72 /6 ib


5286 
5287 void Assembler::psllw(XMMRegister dst, XMMRegister shift) {
5288   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5289   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5290   int encode = simd_prefix_and_encode(dst, dst, shift, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5291   emit_int8((unsigned char)0xF1);
5292   emit_int8((unsigned char)(0xC0 | encode));
5293 }
5294 
5295 void Assembler::pslld(XMMRegister dst, XMMRegister shift) {
5296   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5297   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5298   int encode = simd_prefix_and_encode(dst, dst, shift, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5299   emit_int8((unsigned char)0xF2);
5300   emit_int8((unsigned char)(0xC0 | encode));
5301 }
5302 
5303 void Assembler::psllq(XMMRegister dst, XMMRegister shift) {
5304   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5305   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

5306   int encode = simd_prefix_and_encode(dst, dst, shift, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5307   emit_int8((unsigned char)0xF3);
5308   emit_int8((unsigned char)(0xC0 | encode));
5309 }
5310 
5311 void Assembler::vpsllw(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
5312   assert(UseAVX > 0, "requires some form of AVX");
5313   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5314   // XMM6 is for /6 encoding: 66 0F 71 /6 ib
5315   int encode = vex_prefix_and_encode(xmm6->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5316   emit_int8(0x71);
5317   emit_int8((unsigned char)(0xC0 | encode));
5318   emit_int8(shift & 0xFF);
5319 }
5320 
5321 void Assembler::vpslld(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
5322   assert(UseAVX > 0, "requires some form of AVX");
5324   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5325   // XMM6 is for /6 encoding: 66 0F 72 /6 ib
5326   int encode = vex_prefix_and_encode(xmm6->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5327   emit_int8(0x72);
5328   emit_int8((unsigned char)(0xC0 | encode));
5329   emit_int8(shift & 0xFF);
5330 }
5331 
5332 void Assembler::vpsllq(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
5333   assert(UseAVX > 0, "requires some form of AVX");
5334   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

5335   // XMM6 is for /6 encoding: 66 0F 73 /6 ib
5336   int encode = vex_prefix_and_encode(xmm6->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5337   emit_int8(0x73);
5338   emit_int8((unsigned char)(0xC0 | encode));
5339   emit_int8(shift & 0xFF);
5340 }
5341 
5342 void Assembler::vpsllw(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
5343   assert(UseAVX > 0, "requires some form of AVX");
5344   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5345   int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5346   emit_int8((unsigned char)0xF1);
5347   emit_int8((unsigned char)(0xC0 | encode));
5348 }
5349 
5350 void Assembler::vpslld(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
5351   assert(UseAVX > 0, "requires some form of AVX");
5352   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5353   int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5354   emit_int8((unsigned char)0xF2);
5355   emit_int8((unsigned char)(0xC0 | encode));
5356 }
5357 
5358 void Assembler::vpsllq(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
5359   assert(UseAVX > 0, "requires some form of AVX");
5360   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

5361   int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5362   emit_int8((unsigned char)0xF3);
5363   emit_int8((unsigned char)(0xC0 | encode));
5364 }
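// Encoding note (illustrative): the immediate shift-left forms encode the
// operation in the ModRM reg field (/6) of opcodes 0x71/0x72/0x73 for
// w/d/q, e.g. psllw xmm1, 3 assembles to 66 0F 71 F1 03; the
// register-count forms above use opcodes 0xF1/0xF2/0xF3 instead.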
5365 
5366 // Shift packed integers logically right by specified number of bits.
5367 void Assembler::psrlw(XMMRegister dst, int shift) {
5368   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5369   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5370   // XMM2 is for /2 encoding: 66 0F 71 /2 ib
5371   int encode = simd_prefix_and_encode(xmm2, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5372   emit_int8(0x71);
5373   emit_int8((unsigned char)(0xC0 | encode));
5374   emit_int8(shift & 0xFF);
5375 }
5376 
5377 void Assembler::psrld(XMMRegister dst, int shift) {
5378   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5379   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5380   // XMM2 is for /2 encoding: 66 0F 72 /2 ib
5381   int encode = simd_prefix_and_encode(xmm2, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5382   emit_int8(0x72);
5383   emit_int8((unsigned char)(0xC0 | encode));
5384   emit_int8(shift & 0xFF);
5385 }
5386 
5387 void Assembler::psrlq(XMMRegister dst, int shift) {
5388   // Do not confuse this with the SSE2 psrldq instruction, which
5389   // shifts the 128-bit value in an xmm register by a number of bytes.
5390   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5391   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

5392   // XMM2 is for /2 encoding: 66 0F 73 /2 ib
5393   int encode = simd_prefix_and_encode(xmm2, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5394   emit_int8(0x73);
5395   emit_int8((unsigned char)(0xC0 | encode));
5396   emit_int8(shift & 0xFF);
5397 }
5398 
5399 void Assembler::psrlw(XMMRegister dst, XMMRegister shift) {
5400   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5401   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5402   int encode = simd_prefix_and_encode(dst, dst, shift, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5403   emit_int8((unsigned char)0xD1);
5404   emit_int8((unsigned char)(0xC0 | encode));
5405 }
5406 
5407 void Assembler::psrld(XMMRegister dst, XMMRegister shift) {
5408   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5409   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5410   int encode = simd_prefix_and_encode(dst, dst, shift, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5411   emit_int8((unsigned char)0xD2);
5412   emit_int8((unsigned char)(0xC0 | encode));
5413 }
5414 
5415 void Assembler::psrlq(XMMRegister dst, XMMRegister shift) {
5416   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5417   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

5418   int encode = simd_prefix_and_encode(dst, dst, shift, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5419   emit_int8((unsigned char)0xD3);
5420   emit_int8((unsigned char)(0xC0 | encode));
5421 }
5422 
5423 void Assembler::vpsrlw(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
5424   assert(UseAVX > 0, "requires some form of AVX");
5425   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5426   // XMM2 is for /2 encoding: 66 0F 71 /2 ib
5427   int encode = vex_prefix_and_encode(xmm2->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5428   emit_int8(0x71);
5429   emit_int8((unsigned char)(0xC0 | encode));
5430   emit_int8(shift & 0xFF);
5431 }
5432 
5433 void Assembler::vpsrld(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
5434   assert(UseAVX > 0, "requires some form of AVX");
5435   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5436   // XMM2 is for /2 encoding: 66 0F 72 /2 ib
5437   int encode = vex_prefix_and_encode(xmm2->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5438   emit_int8(0x72);
5439   emit_int8((unsigned char)(0xC0 | encode));
5440   emit_int8(shift & 0xFF);
5441 }
5442 
5443 void Assembler::vpsrlq(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
5444   assert(UseAVX > 0, "requires some form of AVX");
5445   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

5446   // XMM2 is for /2 encoding: 66 0F 73 /2 ib
5447   int encode = vex_prefix_and_encode(xmm2->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5448   emit_int8(0x73);
5449   emit_int8((unsigned char)(0xC0 | encode));
5450   emit_int8(shift & 0xFF);
5451 }
5452 
5453 void Assembler::vpsrlw(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
5454   assert(UseAVX > 0, "requires some form of AVX");
5455   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5456   int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5457   emit_int8((unsigned char)0xD1);
5458   emit_int8((unsigned char)(0xC0 | encode));
5459 }
5460 
5461 void Assembler::vpsrld(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
5462   assert(UseAVX > 0, "requires some form of AVX");
5463   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5464   int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5465   emit_int8((unsigned char)0xD2);
5466   emit_int8((unsigned char)(0xC0 | encode));
5467 }
5468 
5469 void Assembler::vpsrlq(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
5470   assert(UseAVX > 0, "requires some form of AVX");
5471   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

5472   int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5473   emit_int8((unsigned char)0xD3);
5474   emit_int8((unsigned char)(0xC0 | encode));
5475 }
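// Encoding note (illustrative): logical right shifts use /2 of opcodes
// 0x71/0x72/0x73 for the immediate forms and 0xD1/0xD2/0xD3 for the
// register-count forms, e.g. psrlq xmm2, 8 assembles to 66 0F 73 D2 08.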
5476 
5477 // Shift packed integers arithmetically right by specified number of bits.
5478 void Assembler::psraw(XMMRegister dst, int shift) {
5479   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5480   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5481   // XMM4 is for /4 encoding: 66 0F 71 /4 ib
5482   int encode = simd_prefix_and_encode(xmm4, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5483   emit_int8(0x71);
5484   emit_int8((unsigned char)(0xC0 | encode));
5485   emit_int8(shift & 0xFF);
5486 }
5487 
5488 void Assembler::psrad(XMMRegister dst, int shift) {
5489   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5490   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5491   // XMM4 is for /4 encoding: 66 0F 72 /4 ib


5561   assert(UseAVX > 0, "requires some form of AVX");
5562   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5563   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5564   emit_int8((unsigned char)0xDB);
5565   emit_int8((unsigned char)(0xC0 | encode));
5566 }
5567 
5568 void Assembler::vpand(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5569   assert(UseAVX > 0, "requires some form of AVX");
5570   InstructionMark im(this);
5571   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5572   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5573   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5574   emit_int8((unsigned char)0xDB);
5575   emit_operand(dst, src);
5576 }
5577 
5578 void Assembler::pandn(XMMRegister dst, XMMRegister src) {
5579   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5580   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

5581   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5582   emit_int8((unsigned char)0xDF);
5583   emit_int8((unsigned char)(0xC0 | encode));
5584 }
5585 
5586 void Assembler::por(XMMRegister dst, XMMRegister src) {
5587   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5588   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5589   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5590   emit_int8((unsigned char)0xEB);
5591   emit_int8((unsigned char)(0xC0 | encode));
5592 }
5593 
5594 void Assembler::vpor(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5595   assert(UseAVX > 0, "requires some form of AVX");
5596   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5597   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5598   emit_int8((unsigned char)0xEB);
5599   emit_int8((unsigned char)(0xC0 | encode));
5600 }


5850 }
5851 
5852 void Assembler::vextracti32x4(Address dst, XMMRegister src, uint8_t imm8) {
5853   assert(VM_Version::supports_evex(), "");
5854   assert(src != xnoreg, "sanity");
5855   assert(imm8 <= 0x03, "imm8: %u", imm8);
5856   InstructionMark im(this);
5857   InstructionAttr attributes(AVX_512bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5858   attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5859   vex_prefix(dst, 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5860   emit_int8(0x39);
5861   emit_operand(src, dst);
5862   // 0x00 - extract from bits 127:0
5863   // 0x01 - extract from bits 255:128
5864   // 0x02 - extract from bits 383:256
5865   // 0x03 - extract from bits 511:384
5866   emit_int8(imm8 & 0x03);
5867 }
5868 
5869 void Assembler::vextracti64x2(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5870   assert(VM_Version::supports_evex(), "");
5871   assert(imm8 <= 0x03, "imm8: %u", imm8);
5872   InstructionAttr attributes(AVX_512bit, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5873   int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5874   emit_int8(0x39);
5875   emit_int8((unsigned char)(0xC0 | encode));
5876   // 0x00 - extract from bits 127:0
5877   // 0x01 - extract from bits 255:128
5878   // 0x02 - extract from bits 383:256
5879   // 0x03 - extract from bits 511:384
5880   emit_int8(imm8 & 0x03);
5881 }
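// Usage sketch (hypothetical caller): imm8 selects the 128-bit lane to
// extract, e.g. vextracti64x2(xmm0, xmm1, 2) copies bits 383:256 of the
// (zmm-width) source into the destination (EVEX.66.0F3A.W1 39 /r ib).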
5882 
5883 void Assembler::vextracti64x4(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5884   assert(VM_Version::supports_evex(), "");
5885   assert(imm8 <= 0x01, "imm8: %u", imm8);
5886   InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5887   int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5888   emit_int8(0x3B);
5889   emit_int8((unsigned char)(0xC0 | encode));
5890   // 0x00 - extract from lower 256 bits
5891   // 0x01 - extract from upper 256 bits
5892   emit_int8(imm8 & 0x01);


5940 }
5941 
5942 void Assembler::vextractf32x4(Address dst, XMMRegister src, uint8_t imm8) {
5943   assert(VM_Version::supports_evex(), "");
5944   assert(src != xnoreg, "sanity");
5945   assert(imm8 <= 0x03, "imm8: %u", imm8);
5946   InstructionMark im(this);
5947   InstructionAttr attributes(AVX_512bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5948   attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5949   vex_prefix(dst, 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5950   emit_int8(0x19);
5951   emit_operand(src, dst);
5952   // 0x00 - extract from bits 127:0
5953   // 0x01 - extract from bits 255:128
5954   // 0x02 - extract from bits 383:256
5955   // 0x03 - extract from bits 511:384
5956   emit_int8(imm8 & 0x03);
5957 }
5958 
5959 void Assembler::vextractf64x2(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5960   assert(VM_Version::supports_evex(), "");
5961   assert(imm8 <= 0x03, "imm8: %u", imm8);
5962   InstructionAttr attributes(AVX_512bit, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5963   int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5964   emit_int8(0x19);
5965   emit_int8((unsigned char)(0xC0 | encode));
5966   // 0x00 - extract from bits 127:0
5967   // 0x01 - extract from bits 255:128
5968   // 0x02 - extract from bits 383:256
5969   // 0x03 - extract from bits 511:384
5970   emit_int8(imm8 & 0x03);
5971 }
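// The vextractf* forms (0x19/0x1B) are the floating-point-typed twins of
// the vextracti* forms above; the extracted bits are identical, only the
// declared element type of the instruction differs.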
5972 
5973 void Assembler::vextractf64x4(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5974   assert(VM_Version::supports_evex(), "");
5975   assert(imm8 <= 0x01, "imm8: %u", imm8);
5976   InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5977   int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5978   emit_int8(0x1B);
5979   emit_int8((unsigned char)(0xC0 | encode));
5980   // 0x00 - extract from lower 256 bits
5981   // 0x01 - extract from upper 256 bits
5982   emit_int8(imm8 & 0x01);


6067   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6068   emit_int8(0x58);
6069   emit_int8((unsigned char)(0xC0 | encode));
6070 }
6071 
6072 void Assembler::evpbroadcastd(XMMRegister dst, Address src, int vector_len) {
6073   assert(VM_Version::supports_evex(), "");
6074   assert(dst != xnoreg, "sanity");
6075   InstructionMark im(this);
6076   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6077   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
6078   // swap src<->dst for encoding
6079   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6080   emit_int8(0x58);
6081   emit_operand(dst, src);
6082 }
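// Usage sketch (operands illustrative): replicate the dword at [rsi] into
// every 32-bit lane of a 512-bit vector:
//   evpbroadcastd(xmm5, Address(rsi, 0), Assembler::AVX_512bit);
// With EVEX_T1S/EVEX_32bit the memory operand is a single scalar, so any
// disp8 is compressed at 4-byte granularity.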
6083 
6084 // duplicate 8-byte integer data from src into programmed locations in dest; requires AVX512VL
6085 void Assembler::evpbroadcastq(XMMRegister dst, XMMRegister src, int vector_len) {
6086   assert(VM_Version::supports_evex(), "");
6087   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

6088   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6089   emit_int8(0x59);
6090   emit_int8((unsigned char)(0xC0 | encode));
6091 }
6092 
6093 void Assembler::evpbroadcastq(XMMRegister dst, Address src, int vector_len) {
6094   assert(VM_Version::supports_evex(), "");
6095   assert(dst != xnoreg, "sanity");
6096   InstructionMark im(this);
6097   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

6098   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
6099   // swap src<->dst for encoding
6100   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6101   emit_int8(0x59);
6102   emit_operand(dst, src);
6103 }
6104 
6105 
6106 // scalar single/double precision replicate
6107 
6108 // duplicate single precision data from src into programmed locations in dest; requires AVX512VL
6109 void Assembler::evpbroadcastss(XMMRegister dst, XMMRegister src, int vector_len) {
6110   assert(VM_Version::supports_evex(), "");
6111   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6112   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6113   emit_int8(0x18);
6114   emit_int8((unsigned char)(0xC0 | encode));
6115 }
6116 
6117 void Assembler::evpbroadcastss(XMMRegister dst, Address src, int vector_len) {
6118   assert(VM_Version::supports_evex(), "");
6119   assert(dst != xnoreg, "sanity");
6120   InstructionMark im(this);
6121   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6122   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
6123   // swap src<->dst for encoding
6124   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6125   emit_int8(0x18);
6126   emit_operand(dst, src);
6127 }
6128 
6129 // duplicate double precision data from src into programmed locations in dest; requires AVX512VL
6130 void Assembler::evpbroadcastsd(XMMRegister dst, XMMRegister src, int vector_len) {
6131   assert(VM_Version::supports_evex(), "");
6132   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

6133   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6134   emit_int8(0x19);
6135   emit_int8((unsigned char)(0xC0 | encode));
6136 }
6137 
6138 void Assembler::evpbroadcastsd(XMMRegister dst, Address src, int vector_len) {
6139   assert(VM_Version::supports_evex(), "");
6140   assert(dst != xnoreg, "sanity");
6141   InstructionMark im(this);
6142   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6143   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);

6144   // swap src<->dst for encoding
6145   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6146   emit_int8(0x19);
6147   emit_operand(dst, src);
6148 }
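// Usage sketch (operands illustrative): fill all eight double lanes from a
// stack slot:
//   evpbroadcastsd(xmm2, Address(rbp, -8), Assembler::AVX_512bit);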
6149 
6150 
6151 // gpr source broadcast forms
6152 
6153 // duplicate 1-byte integer data from src into programmed locations in dest; requires AVX512BW and AVX512VL
6154 void Assembler::evpbroadcastb(XMMRegister dst, Register src, int vector_len) {
6155   assert(VM_Version::supports_evex(), "");
6156   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);

6157   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6158   if (attributes.is_evex_instruction()) {
6159     emit_int8(0x7A);
6160   } else {
6161     emit_int8(0x78);
6162   }
6163   emit_int8((unsigned char)(0xC0 | encode));
6164 }
6165 
6166 // duplicate 2-byte integer data from src into programmed locations in dest; requires AVX512BW and AVX512VL
6167 void Assembler::evpbroadcastw(XMMRegister dst, Register src, int vector_len) {
6168   assert(VM_Version::supports_evex(), "");
6169   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);

6170   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6171   if (attributes.is_evex_instruction()) {
6172     emit_int8(0x7B);
6173   } else {
6174     emit_int8(0x79);
6175   }
6176   emit_int8((unsigned char)(0xC0 | encode));
6177 }
6178 
6179 // duplicate 4-byte integer data from src into programmed locations in dest; requires AVX512VL
6180 void Assembler::evpbroadcastd(XMMRegister dst, Register src, int vector_len) {
6181   assert(VM_Version::supports_evex(), "");
6182   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

6183   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6184   if (attributes.is_evex_instruction()) {
6185     emit_int8(0x7C);
6186   } else {
6187     emit_int8(0x58);
6188   }
6189   emit_int8((unsigned char)(0xC0 | encode));
6190 }
6191 
6192 // duplicate 8-byte integer data from src into programmed locations in dest; requires AVX512VL
6193 void Assembler::evpbroadcastq(XMMRegister dst, Register src, int vector_len) {
6194   assert(VM_Version::supports_evex(), "");
6195   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

6196   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6197   if (attributes.is_evex_instruction()) {
6198     emit_int8(0x7C);
6199   } else {
6200     emit_int8(0x59);
6201   }
6202   emit_int8((unsigned char)(0xC0 | encode));
6203 }
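// In the EVEX forms above, opcodes 0x7A/0x7B/0x7C take the general register
// source directly (VPBROADCASTB/W/D/Q from r32/r64); the non-EVEX fallbacks
// reuse the xmm-source opcodes. Usage sketch (operands illustrative):
//   evpbroadcastq(xmm3, rax, Assembler::AVX_512bit);  // all 8 qword lanes = rax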
6204 
6205 
6206 // Carry-Less Multiplication Quadword
6207 void Assembler::pclmulqdq(XMMRegister dst, XMMRegister src, int mask) {
6208   assert(VM_Version::supports_clmul(), "");
6209   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6210   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6211   emit_int8(0x44);
6212   emit_int8((unsigned char)(0xC0 | encode));
6213   emit_int8((unsigned char)mask);
6214 }
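// The immediate picks the 64-bit halves to multiply: bit 0 selects the
// qword of dst, bit 4 the qword of src, so 0x00 multiplies the two low
// qwords and 0x11 the two high qwords, yielding a 128-bit carry-less
// product.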
6215 
6216 // Carry-Less Multiplication Quadword
6217 void Assembler::vpclmulqdq(XMMRegister dst, XMMRegister nds, XMMRegister src, int mask) {
6218   assert(VM_Version::supports_avx() && VM_Version::supports_clmul(), "");
6219   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6220   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6221   emit_int8(0x44);


6836       bool check_register_bank = NOT_IA32(true) IA32_ONLY(false);
6837       if (check_register_bank) {
6838         // check nds_enc and xreg_enc for upper bank usage
6839         if (nds_enc < 16 && xreg_enc < 16) {
6840           attributes->set_is_legacy_mode();
6841         }
6842       } else {
6843         attributes->set_is_legacy_mode();
6844       }
6845     }
6846   }
6847 
6848   _is_managed = false;
6849   if (UseAVX > 2 && !attributes->is_legacy_mode())
6850   {
6851     bool evex_r = (xreg_enc >= 16);
6852     bool evex_v = (nds_enc >= 16);
6853     attributes->set_is_evex_instruction();
6854     evex_prefix(vex_r, vex_b, vex_x, evex_r, evex_v, nds_enc, pre, opc);
6855   } else {



6856     vex_prefix(vex_r, vex_b, vex_x, nds_enc, pre, opc);
6857   }
6858 }
6859 
6860 int Assembler::vex_prefix_and_encode(int dst_enc, int nds_enc, int src_enc, VexSimdPrefix pre, VexOpcode opc, InstructionAttr *attributes) {
6861   bool vex_r = ((dst_enc & 8) == 8);
6862   bool vex_b = ((src_enc & 8) == 8);
6863   bool vex_x = false;
6864   set_attributes(attributes);
6865   attributes->set_current_assembler(this);
6866   bool check_register_bank = NOT_IA32(true) IA32_ONLY(false);
6867 
6868   // if vector length is turned off, revert to AVX for vectors smaller than 512-bit
6869   if (UseAVX > 2 && _legacy_mode_vl && attributes->uses_vl()) {
6870     switch (attributes->get_vector_len()) {
6871     case AVX_128bit:
6872     case AVX_256bit:
6873       if (check_register_bank) {
6874         if (dst_enc >= 16 || nds_enc >= 16 || src_enc >= 16) {
6875           // up propagate arithmetic instructions to meet RA requirements


6895         // check dst_enc, nds_enc and src_enc for upper bank usage
6896         if (dst_enc < 16 && nds_enc < 16 && src_enc < 16) {
6897           attributes->set_is_legacy_mode();
6898         }
6899       } else {
6900         attributes->set_is_legacy_mode();
6901       }
6902     }
6903   }
6904 
6905   _is_managed = false;
6906   if (UseAVX > 2 && !attributes->is_legacy_mode())
6907   {
6908     bool evex_r = (dst_enc >= 16);
6909     bool evex_v = (nds_enc >= 16);
6910     // can use vex_x as bank extender on rm encoding
6911     vex_x = (src_enc >= 16);
6912     attributes->set_is_evex_instruction();
6913     evex_prefix(vex_r, vex_b, vex_x, evex_r, evex_v, nds_enc, pre, opc);
6914   } else {



6915     vex_prefix(vex_r, vex_b, vex_x, nds_enc, pre, opc);
6916   }
6917 
6918   // return modrm byte components for operands
6919   return (((dst_enc & 7) << 3) | (src_enc & 7));
6920 }
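// Callers combine the returned reg/rm fields with mod = 11 to form the
// register-direct ModRM byte, i.e. emit_int8((unsigned char)(0xC0 | encode)).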
6921 
6922 
6923 void Assembler::simd_prefix(XMMRegister xreg, XMMRegister nds, Address adr, VexSimdPrefix pre,
6924                             VexOpcode opc, InstructionAttr *attributes) {
6925   if (UseAVX > 0) {
6926     int xreg_enc = xreg->encoding();
6927     int nds_enc = nds->is_valid() ? nds->encoding() : 0;
6928     vex_prefix(adr, nds_enc, xreg_enc, pre, opc, attributes);
6929   } else {
6930     assert((nds == xreg) || (nds == xnoreg), "wrong sse encoding");
6931     rex_prefix(adr, xreg, pre, opc, attributes->is_rex_vex_w());
6932   }
6933 }
6934 




1223   emit_int8(0x1F);
1224   emit_int8((unsigned char)0x80);
1225                    // emit_rm(cbuf, 0x2, EAX_enc, EAX_enc);
1226   emit_int32(0);   // 32-bits offset (4 bytes)
1227 }
1228 
1229 void Assembler::addr_nop_8() {
1230   assert(UseAddressNop, "no CPU support");
1231   // 8 bytes: NOP DWORD PTR [EAX+EAX*0+0] 32-bits offset
1232   emit_int8(0x0F);
1233   emit_int8(0x1F);
1234   emit_int8((unsigned char)0x84);
1235                    // emit_rm(cbuf, 0x2, EAX_enc, 0x4);
1236   emit_int8(0x00); // emit_rm(cbuf, 0x0, EAX_enc, EAX_enc);
1237   emit_int32(0);   // 32-bits offset (4 bytes)
1238 }
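// The sequence above assembles to the Intel-recommended 8-byte NOP:
// 0F 1F 84 00 00 00 00 00.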
1239 
1240 void Assembler::addsd(XMMRegister dst, XMMRegister src) {
1241   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1242   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1243   attributes.set_rex_vex_w_reverted();
1244   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1245   emit_int8(0x58);
1246   emit_int8((unsigned char)(0xC0 | encode));
1247 }
1248 
1249 void Assembler::addsd(XMMRegister dst, Address src) {
1250   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1251   InstructionMark im(this);
1252   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1253   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
1254   attributes.set_rex_vex_w_reverted();
1255   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1256   emit_int8(0x58);
1257   emit_operand(dst, src);
1258 }
1259 
1260 void Assembler::addss(XMMRegister dst, XMMRegister src) {
1261   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1262   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1263   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1264   emit_int8(0x58);
1265   emit_int8((unsigned char)(0xC0 | encode));
1266 }
1267 
1268 void Assembler::addss(XMMRegister dst, Address src) {
1269   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1270   InstructionMark im(this);
1271   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1272   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
1273   simd_prefix(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1274   emit_int8(0x58);


1584 }
1585 
1586 // The 8-bit cmpxchg compares the value at adr with the contents of rax;
1587 // if they are equal, reg is stored into adr, otherwise the value at adr is loaded into rax.
1588 // The ZF is set if the compared values were equal, and cleared otherwise.
1589 void Assembler::cmpxchgb(Register reg, Address adr) { // cmpxchg
1590   InstructionMark im(this);
1591   prefix(adr, reg, true);
1592   emit_int8(0x0F);
1593   emit_int8((unsigned char)0xB0);
1594   emit_operand(reg, adr);
1595 }
1596 
1597 void Assembler::comisd(XMMRegister dst, Address src) {
1598   // NOTE: dbx seems to decode this as comiss even though the
1599 // 0x66 is there. Strangely, ucomisd comes out correct.
1600   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1601   InstructionMark im(this);
1602   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1603   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
1604   attributes.set_rex_vex_w_reverted();
1605   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
1606   emit_int8(0x2F);
1607   emit_operand(dst, src);
1608 }
1609 
1610 void Assembler::comisd(XMMRegister dst, XMMRegister src) {
1611   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1612   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1613   attributes.set_rex_vex_w_reverted();
1614   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
1615   emit_int8(0x2F);
1616   emit_int8((unsigned char)(0xC0 | encode));
1617 }
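// comisd/comiss differ from ucomisd/ucomiss only in NaN handling: the
// ordered compares signal #IA on any NaN operand, the unordered forms only
// on a signaling NaN. Both set ZF/PF/CF and clear OF/SF/AF.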
1618 
1619 void Assembler::comiss(XMMRegister dst, Address src) {
1620   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1621   InstructionMark im(this);
1622   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1623   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
1624   simd_prefix(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1625   emit_int8(0x2F);
1626   emit_operand(dst, src);
1627 }
1628 
1629 void Assembler::comiss(XMMRegister dst, XMMRegister src) {
1630   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1631   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1632   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1633   emit_int8(0x2F);


1720 
1721 void Assembler::cvtdq2pd(XMMRegister dst, XMMRegister src) {
1722   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1723   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
1724   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1725   emit_int8((unsigned char)0xE6);
1726   emit_int8((unsigned char)(0xC0 | encode));
1727 }
1728 
1729 void Assembler::cvtdq2ps(XMMRegister dst, XMMRegister src) {
1730   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1731   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
1732   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
1733   emit_int8(0x5B);
1734   emit_int8((unsigned char)(0xC0 | encode));
1735 }
1736 
1737 void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
1738   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1739   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1740   attributes.set_rex_vex_w_reverted();
1741   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1742   emit_int8(0x5A);
1743   emit_int8((unsigned char)(0xC0 | encode));
1744 }
1745 
1746 void Assembler::cvtsd2ss(XMMRegister dst, Address src) {
1747   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1748   InstructionMark im(this);
1749   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1750   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
1751   attributes.set_rex_vex_w_reverted();
1752   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1753   emit_int8(0x5A);
1754   emit_operand(dst, src);
1755 }
1756 
1757 void Assembler::cvtsi2sdl(XMMRegister dst, Register src) {
1758   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1759   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1760   int encode = simd_prefix_and_encode(dst, dst, as_XMMRegister(src->encoding()), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1761   emit_int8(0x2A);
1762   emit_int8((unsigned char)(0xC0 | encode));
1763 }
1764 
1765 void Assembler::cvtsi2sdl(XMMRegister dst, Address src) {
1766   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1767   InstructionMark im(this);
1768   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1769   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
1770   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1771   emit_int8(0x2A);


1829   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1830   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
1831   int encode = simd_prefix_and_encode(as_XMMRegister(dst->encoding()), xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1832   emit_int8(0x2C);
1833   emit_int8((unsigned char)(0xC0 | encode));
1834 }
1835 
1836 void Assembler::decl(Address dst) {
1837   // Don't use it directly. Use MacroAssembler::decrement() instead.
1838   InstructionMark im(this);
1839   prefix(dst);
1840   emit_int8((unsigned char)0xFF);
1841   emit_operand(rcx, dst);
1842 }
1843 
1844 void Assembler::divsd(XMMRegister dst, Address src) {
1845   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1846   InstructionMark im(this);
1847   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1848   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
1849   attributes.set_rex_vex_w_reverted();
1850   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1851   emit_int8(0x5E);
1852   emit_operand(dst, src);
1853 }
1854 
1855 void Assembler::divsd(XMMRegister dst, XMMRegister src) {
1856   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1857   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1858   attributes.set_rex_vex_w_reverted();
1859   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
1860   emit_int8(0x5E);
1861   emit_int8((unsigned char)(0xC0 | encode));
1862 }
1863 
1864 void Assembler::divss(XMMRegister dst, Address src) {
1865   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1866   InstructionMark im(this);
1867   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1868   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
1869   simd_prefix(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1870   emit_int8(0x5E);
1871   emit_operand(dst, src);
1872 }
1873 
1874 void Assembler::divss(XMMRegister dst, XMMRegister src) {
1875   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1876   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
1877   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
1878   emit_int8(0x5E);


2113   emit_int8((unsigned char)0xBD);
2114   emit_int8((unsigned char)(0xC0 | encode));
2115 }
2116 
2117 // Emit mfence instruction
2118 void Assembler::mfence() {
2119   NOT_LP64(assert(VM_Version::supports_sse2(), "unsupported");)
2120   emit_int8(0x0F);
2121   emit_int8((unsigned char)0xAE);
2122   emit_int8((unsigned char)0xF0);
2123 }
2124 
2125 void Assembler::mov(Register dst, Register src) {
2126   LP64_ONLY(movq(dst, src)) NOT_LP64(movl(dst, src));
2127 }
2128 
2129 void Assembler::movapd(XMMRegister dst, XMMRegister src) {
2130   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2131   int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_128bit;
2132   InstructionAttr attributes(vector_len, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2133   attributes.set_rex_vex_w_reverted();
2134   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2135   emit_int8(0x28);
2136   emit_int8((unsigned char)(0xC0 | encode));
2137 }
2138 
2139 void Assembler::movaps(XMMRegister dst, XMMRegister src) {
2140   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2141   int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_128bit;
2142   InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2143   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2144   emit_int8(0x28);
2145   emit_int8((unsigned char)(0xC0 | encode));
2146 }
2147 
2148 void Assembler::movlhps(XMMRegister dst, XMMRegister src) {
2149   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2150   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
2151   int encode = simd_prefix_and_encode(dst, src, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2152   emit_int8(0x16);
2153   emit_int8((unsigned char)(0xC0 | encode));
2154 }
2155 
2156 void Assembler::movb(Register dst, Address src) {
2157   NOT_LP64(assert(dst->has_byte_register(), "must have byte register"));
2158   InstructionMark im(this);
2159   prefix(src, dst, true);
2160   emit_int8((unsigned char)0x8A);
2161   emit_operand(dst, src);
2162 }
2163 
2164 void Assembler::movddup(XMMRegister dst, XMMRegister src) {
2165   NOT_LP64(assert(VM_Version::supports_sse3(), ""));
2166   int vector_len = VM_Version::supports_avx512novl() ? AVX_512bit : AVX_128bit;
2167   InstructionAttr attributes(vector_len, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2168   attributes.set_rex_vex_w_reverted();
2169   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2170   emit_int8(0x12);
2171   emit_int8((unsigned char)(0xC0 | encode));
2172 }
2173 
2174 void Assembler::kmovbl(KRegister dst, Register src) {
2175   assert(VM_Version::supports_avx512dq(), "");
2176   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2177   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2178   emit_int8((unsigned char)0x92);
2179   emit_int8((unsigned char)(0xC0 | encode));
2180 }
2181 
2182 void Assembler::kmovbl(Register dst, KRegister src) {
2183   assert(VM_Version::supports_avx512dq(), "");
2184   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2185   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2186   emit_int8((unsigned char)0x93);
2187   emit_int8((unsigned char)(0xC0 | encode));
2188 }
2189 
2190 void Assembler::kmovwl(KRegister dst, Register src) {
2191   assert(VM_Version::supports_evex(), "");
2192   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2193   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2194   emit_int8((unsigned char)0x92);
2195   emit_int8((unsigned char)(0xC0 | encode));
2196 }
2197 
2198 void Assembler::kmovwl(Register dst, KRegister src) {
2199   assert(VM_Version::supports_evex(), "");
2200   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2201   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2202   emit_int8((unsigned char)0x93);
2203   emit_int8((unsigned char)(0xC0 | encode));
2204 }
2205 
2206 void Assembler::kmovwl(KRegister dst, Address src) {
2207   assert(VM_Version::supports_evex(), "");
2208   InstructionMark im(this);
2209   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2210   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2211   emit_int8((unsigned char)0x90);
2212   emit_operand((Register)dst, src);
2213 }
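// The kmov family moves opmask registers; the b/w/d/q suffix selects an
// 8/16/32/64-bit mask. Usage sketch (operands illustrative):
//   kmovwl(k1, rcx);  // low 16 bits of rcx -> mask k1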
2214 
2215 void Assembler::kmovdl(KRegister dst, Register src) {
2216   assert(VM_Version::supports_avx512bw(), "");
2217   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2218   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2219   emit_int8((unsigned char)0x92);
2220   emit_int8((unsigned char)(0xC0 | encode));
2221 }
2222 
2223 void Assembler::kmovdl(Register dst, KRegister src) {
2224   assert(VM_Version::supports_avx512bw(), "");
2225   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2226   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2227   emit_int8((unsigned char)0x93);
2228   emit_int8((unsigned char)(0xC0 | encode));
2229 }
2230 
2231 void Assembler::kmovql(KRegister dst, KRegister src) {
2232   assert(VM_Version::supports_avx512bw(), "");
2233   InstructionAttr attributes(AVX_128bit, /* rex_w */ true, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2234   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);


2425   emit_int8(0x6F);
2426   emit_operand(dst, src);
2427 }
2428 
2429 void Assembler::vmovdqu(Address dst, XMMRegister src) {
2430   assert(UseAVX > 0, "");
2431   InstructionMark im(this);
2432   InstructionAttr attributes(AVX_256bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2433   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2434   // swap src<->dst for encoding
2435   assert(src != xnoreg, "sanity");
2436   vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2437   emit_int8(0x7F);
2438   emit_operand(src, dst);
2439 }
2440 
2441 // Move unaligned EVEX-enabled vector (programmable element size: 8, 16, 32 or 64 bits)
2442 void Assembler::evmovdqub(XMMRegister dst, XMMRegister src, int vector_len) {
2443   assert(VM_Version::supports_evex(), "");
2444   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2445   attributes.set_is_evex_instruction();
2446   int prefix = (_legacy_mode_bw) ? VEX_SIMD_F2 : VEX_SIMD_F3;
2447   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), (Assembler::VexSimdPrefix)prefix, VEX_OPCODE_0F, &attributes);
2448   emit_int8(0x6F);
2449   emit_int8((unsigned char)(0xC0 | encode));
2450 }
2451 
2452 void Assembler::evmovdqub(XMMRegister dst, Address src, int vector_len) {
2453   assert(VM_Version::supports_evex(), "");
2454   InstructionMark im(this);
2455   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2456   int prefix = (_legacy_mode_bw) ? VEX_SIMD_F2 : VEX_SIMD_F3;
2457   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2458   attributes.set_is_evex_instruction();
2459   vex_prefix(src, 0, dst->encoding(), (Assembler::VexSimdPrefix)prefix, VEX_OPCODE_0F, &attributes);
2460   emit_int8(0x6F);
2461   emit_operand(dst, src);
2462 }
2463 
2464 void Assembler::evmovdqub(Address dst, XMMRegister src, int vector_len) {
2465   assert(VM_Version::supports_evex(), "");
2466   assert(src != xnoreg, "sanity");
2467   InstructionMark im(this);
2468   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2469   int prefix = (_legacy_mode_bw) ? VEX_SIMD_F2 : VEX_SIMD_F3;
2470   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2471   attributes.set_is_evex_instruction();
2472   vex_prefix(dst, 0, src->encoding(), (Assembler::VexSimdPrefix)prefix, VEX_OPCODE_0F, &attributes);
2473   emit_int8(0x7F);
2474   emit_operand(src, dst);
2475 }
2476 
2477 void Assembler::evmovdquw(XMMRegister dst, XMMRegister src, int vector_len) {
2478   assert(VM_Version::supports_evex(), "");
2479   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2480   attributes.set_is_evex_instruction();
2481   int prefix = (_legacy_mode_bw) ? VEX_SIMD_F2 : VEX_SIMD_F3;
2482   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), (Assembler::VexSimdPrefix)prefix, VEX_OPCODE_0F, &attributes);
2483   emit_int8(0x6F);
2484   emit_int8((unsigned char)(0xC0 | encode));
2485 }
2486 
2487 void Assembler::evmovdquw(XMMRegister dst, Address src, int vector_len) {
2488   assert(VM_Version::supports_evex(), "");
2489   InstructionMark im(this);
2490   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2491   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2492   attributes.set_is_evex_instruction();
2493   int prefix = (_legacy_mode_bw) ? VEX_SIMD_F2 : VEX_SIMD_F3;
2494   vex_prefix(src, 0, dst->encoding(), (Assembler::VexSimdPrefix)prefix, VEX_OPCODE_0F, &attributes);
2495   emit_int8(0x6F);
2496   emit_operand(dst, src);
2497 }
2498 
2499 void Assembler::evmovdquw(Address dst, XMMRegister src, int vector_len) {
2500   assert(VM_Version::supports_evex(), "");
2501   assert(src != xnoreg, "sanity");
2502   InstructionMark im(this);
2503   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2504   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2505   attributes.set_is_evex_instruction();
2506   int prefix = (_legacy_mode_bw) ? VEX_SIMD_F2 : VEX_SIMD_F3;
2507   vex_prefix(dst, 0, src->encoding(), (Assembler::VexSimdPrefix)prefix, VEX_OPCODE_0F, &attributes);
2508   emit_int8(0x7F);
2509   emit_operand(src, dst);
2510 }
2511 
2512 void Assembler::evmovdqul(XMMRegister dst, XMMRegister src, int vector_len) {
2513   assert(VM_Version::supports_evex(), "");
2514   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2515   attributes.set_is_evex_instruction();
2516   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2517   emit_int8(0x6F);
2518   emit_int8((unsigned char)(0xC0 | encode));
2519 }
2520 
2521 void Assembler::evmovdqul(XMMRegister dst, Address src, int vector_len) {
2522   assert(VM_Version::supports_evex(), "");
2523   InstructionMark im(this);
2524   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false , /* uses_vl */ true);
2525   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2526   attributes.set_is_evex_instruction();
2527   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2528   emit_int8(0x6F);
2529   emit_operand(dst, src);
2530 }
2531 
2532 void Assembler::evmovdqul(Address dst, XMMRegister src, int vector_len) {
2533   assert(VM_Version::supports_evex(), "");
2534   assert(src != xnoreg, "sanity");
2535   InstructionMark im(this);
2536   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

2537   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2538   attributes.set_is_evex_instruction();
2539   vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2540   emit_int8(0x7F);
2541   emit_operand(src, dst);
2542 }
2543 
2544 void Assembler::evmovdquq(XMMRegister dst, XMMRegister src, int vector_len) {
2545   assert(VM_Version::supports_evex(), "");
2546   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2547   attributes.set_is_evex_instruction();
2548   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2549   emit_int8(0x6F);
2550   emit_int8((unsigned char)(0xC0 | encode));
2551 }
2552 
2553 void Assembler::evmovdquq(XMMRegister dst, Address src, int vector_len) {
2554   assert(VM_Version::supports_evex(), "");
2555   InstructionMark im(this);
2556   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

2557   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2558   attributes.set_is_evex_instruction();
2559   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2560   emit_int8(0x6F);
2561   emit_operand(dst, src);
2562 }
2563 
2564 void Assembler::evmovdquq(Address dst, XMMRegister src, int vector_len) {
2565   assert(VM_Version::supports_evex(), "");
2566   assert(src != xnoreg, "sanity");
2567   InstructionMark im(this);
2568   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);

2569   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2570   attributes.set_is_evex_instruction();
2571   vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2572   emit_int8(0x7F);
2573   emit_operand(src, dst);
2574 }
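// evmovdqul and evmovdquq differ only in EVEX.W (vex_w false vs. true),
// selecting vmovdqu32 vs. vmovdqu64; the element size matters for masking
// and disp8 compression, not for the number of bytes moved.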
2575 
2576 // Uses zero extension on 64-bit
2577 
2578 void Assembler::movl(Register dst, int32_t imm32) {
2579   int encode = prefix_and_encode(dst->encoding());
2580   emit_int8((unsigned char)(0xB8 | encode));
2581   emit_int32(imm32);
2582 }
2583 
2584 void Assembler::movl(Register dst, Register src) {
2585   int encode = prefix_and_encode(dst->encoding(), src->encoding());
2586   emit_int8((unsigned char)0x8B);
2587   emit_int8((unsigned char)(0xC0 | encode));
2588 }
2589 
2590 void Assembler::movl(Register dst, Address src) {


2600   emit_int8((unsigned char)0xC7);
2601   emit_operand(rax, dst, 4);
2602   emit_int32(imm32);
2603 }
2604 
2605 void Assembler::movl(Address dst, Register src) {
2606   InstructionMark im(this);
2607   prefix(dst, src);
2608   emit_int8((unsigned char)0x89);
2609   emit_operand(src, dst);
2610 }
2611 
2612 // Newer CPUs require movsd and movss to avoid partial register stalls
2613 // when loading from memory. But for old Opterons use movlpd instead of movsd.
2614 // The selection is done in MacroAssembler::movdbl() and movflt().
2615 void Assembler::movlpd(XMMRegister dst, Address src) {
2616   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2617   InstructionMark im(this);
2618   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
2619   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
2620   attributes.set_rex_vex_w_reverted();
2621   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2622   emit_int8(0x12);
2623   emit_operand(dst, src);
2624 }
2625 
2626 void Assembler::movq( MMXRegister dst, Address src ) {
2627   assert( VM_Version::supports_mmx(), "" );
2628   emit_int8(0x0F);
2629   emit_int8(0x6F);
2630   emit_operand(dst, src);
2631 }
2632 
2633 void Assembler::movq( Address dst, MMXRegister src ) {
2634   assert( VM_Version::supports_mmx(), "" );
2635   emit_int8(0x0F);
2636   emit_int8(0x7F);
2637   // Workaround for a gcc (3.2.1-7a) bug.
2638   // In that version of gcc, with only an emit_operand(MMX, Address)
2639   // available, gcc will tail jump and try to reverse the parameters,
2640   // completely obliterating dst in the process. By having a version
2641   // available that doesn't need to swap the args at the tail jump, the
2642   // bug is avoided.
2643   emit_operand(dst, src);
2644 }
2645 
2646 void Assembler::movq(XMMRegister dst, Address src) {
2647   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2648   InstructionMark im(this);
2649   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
2650   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
2651   attributes.set_rex_vex_w_reverted();
2652   simd_prefix(dst, xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2653   emit_int8(0x7E);
2654   emit_operand(dst, src);
2655 }
2656 
2657 void Assembler::movq(Address dst, XMMRegister src) {
2658   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2659   InstructionMark im(this);
2660   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
2661   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
2662   attributes.set_rex_vex_w_reverted();
2663   simd_prefix(src, xnoreg, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2664   emit_int8((unsigned char)0xD6);
2665   emit_operand(src, dst);
2666 }
2667 
2668 void Assembler::movsbl(Register dst, Address src) { // movsxb
2669   InstructionMark im(this);
2670   prefix(src, dst);
2671   emit_int8(0x0F);
2672   emit_int8((unsigned char)0xBE);
2673   emit_operand(dst, src);
2674 }
2675 
2676 void Assembler::movsbl(Register dst, Register src) { // movsxb
2677   NOT_LP64(assert(src->has_byte_register(), "must have byte register"));
2678   int encode = prefix_and_encode(dst->encoding(), false, src->encoding(), true);
2679   emit_int8(0x0F);
2680   emit_int8((unsigned char)0xBE);
2681   emit_int8((unsigned char)(0xC0 | encode));
2682 }
2683 
2684 void Assembler::movsd(XMMRegister dst, XMMRegister src) {
2685   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2686   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2687   attributes.set_rex_vex_w_reverted();
2688   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2689   emit_int8(0x10);
2690   emit_int8((unsigned char)(0xC0 | encode));
2691 }
2692 
2693 void Assembler::movsd(XMMRegister dst, Address src) {
2694   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2695   InstructionMark im(this);
2696   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2697   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
2698   attributes.set_rex_vex_w_reverted();
2699   simd_prefix(dst, xnoreg, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2700   emit_int8(0x10);
2701   emit_operand(dst, src);
2702 }
2703 
2704 void Assembler::movsd(Address dst, XMMRegister src) {
2705   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2706   InstructionMark im(this);
2707   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2708   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
2709   attributes.set_rex_vex_w_reverted();
2710   simd_prefix(src, xnoreg, dst, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2711   emit_int8(0x11);
2712   emit_operand(src, dst);
2713 }
2714 
2715 void Assembler::movss(XMMRegister dst, XMMRegister src) {
2716   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2717   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2718   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2719   emit_int8(0x10);
2720   emit_int8((unsigned char)(0xC0 | encode));
2721 }
2722 
2723 void Assembler::movss(XMMRegister dst, Address src) {
2724   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2725   InstructionMark im(this);
2726   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2727   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
2728   simd_prefix(dst, xnoreg, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2729   emit_int8(0x10);


2813 }
2814 
2815 void Assembler::mull(Address src) {
2816   InstructionMark im(this);
2817   prefix(src);
2818   emit_int8((unsigned char)0xF7);
2819   emit_operand(rsp, src);
2820 }
2821 
2822 void Assembler::mull(Register src) {
2823   int encode = prefix_and_encode(src->encoding());
2824   emit_int8((unsigned char)0xF7);
2825   emit_int8((unsigned char)(0xE0 | encode));
2826 }
2827 
2828 void Assembler::mulsd(XMMRegister dst, Address src) {
2829   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2830   InstructionMark im(this);
2831   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2832   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
2833   attributes.set_rex_vex_w_reverted();
2834   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2835   emit_int8(0x59);
2836   emit_operand(dst, src);
2837 }
2838 
2839 void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
2840   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
2841   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2842   attributes.set_rex_vex_w_reverted();
2843   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2844   emit_int8(0x59);
2845   emit_int8((unsigned char)(0xC0 | encode));
2846 }
2847 
2848 void Assembler::mulss(XMMRegister dst, Address src) {
2849   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2850   InstructionMark im(this);
2851   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2852   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
2853   simd_prefix(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2854   emit_int8(0x59);
2855   emit_operand(dst, src);
2856 }
2857 
2858 void Assembler::mulss(XMMRegister dst, XMMRegister src) {
2859   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2860   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
2861   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2862   emit_int8(0x59);


3802   assert((UseAVX > 0), "SSE mode requires address alignment 16 bytes");
3803   InstructionMark im(this);
3804   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3805   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
3806   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3807   emit_int8(0x62);
3808   emit_operand(dst, src);
3809 }
3810 
3811 void Assembler::punpckldq(XMMRegister dst, XMMRegister src) {
3812   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3813   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3814   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3815   emit_int8(0x62);
3816   emit_int8((unsigned char)(0xC0 | encode));
3817 }
3818 
3819 void Assembler::punpcklqdq(XMMRegister dst, XMMRegister src) {
3820   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3821   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3822   attributes.set_rex_vex_w_reverted();
3823   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3824   emit_int8(0x6C);
3825   emit_int8((unsigned char)(0xC0 | encode));
3826 }
3827 
3828 void Assembler::push(int32_t imm32) {
3829   // in 64-bit mode we push 64 bits onto the stack but only
3830   // take a 32-bit immediate
3831   emit_int8(0x68);
3832   emit_int32(imm32);
3833 }
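// Encoding sketch: push(0x12345678) emits 68 78 56 34 12; in 64-bit mode
// the immediate is sign-extended and rsp drops by 8.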
3834 
3835 void Assembler::push(Register src) {
3836   int encode = prefix_and_encode(src->encoding());
3837 
3838   emit_int8(0x50 | encode);
3839 }
3840 
3841 void Assembler::pushf() {
3842   emit_int8((unsigned char)0x9C);


4098   int encode = prefix_and_encode(dst->encoding());
4099   emit_int8((unsigned char)0xC1);
4100   emit_int8((unsigned char)(0xE8 | encode));
4101   emit_int8(imm8);
4102 }
4103 
4104 void Assembler::shrl(Register dst) {
4105   int encode = prefix_and_encode(dst->encoding());
4106   emit_int8((unsigned char)0xD3);
4107   emit_int8((unsigned char)(0xE8 | encode));
4108 }
4109 
4110 // copies a single doubleword from [esi] to [edi]
4111 void Assembler::smovl() {
4112   emit_int8((unsigned char)0xA5);
4113 }
4114 
4115 void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
4116   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4117   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4118   attributes.set_rex_vex_w_reverted();
4119   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4120   emit_int8(0x51);
4121   emit_int8((unsigned char)(0xC0 | encode));
4122 }
4123 
4124 void Assembler::sqrtsd(XMMRegister dst, Address src) {
4125   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4126   InstructionMark im(this);
4127   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4128   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4129   attributes.set_rex_vex_w_reverted();
4130   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4131   emit_int8(0x51);
4132   emit_operand(dst, src);
4133 }
4134 
4135 void Assembler::sqrtss(XMMRegister dst, XMMRegister src) {
4136   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4137   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4138   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4139   emit_int8(0x51);
4140   emit_int8((unsigned char)(0xC0 | encode));
4141 }
4142 
4143 void Assembler::std() {
4144   emit_int8((unsigned char)0xFD);
4145 }
4146 
4147 void Assembler::sqrtss(XMMRegister dst, Address src) {
4148   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4149   InstructionMark im(this);


4185 void Assembler::subl_imm32(Register dst, int32_t imm32) {
4186   prefix(dst);
4187   emit_arith_imm32(0x81, 0xE8, dst, imm32);
4188 }
4189 
4190 void Assembler::subl(Register dst, Address src) {
4191   InstructionMark im(this);
4192   prefix(src, dst);
4193   emit_int8(0x2B);
4194   emit_operand(dst, src);
4195 }
4196 
4197 void Assembler::subl(Register dst, Register src) {
4198   (void) prefix_and_encode(dst->encoding(), src->encoding());
4199   emit_arith(0x2B, 0xC0, dst, src);
4200 }
4201 
4202 void Assembler::subsd(XMMRegister dst, XMMRegister src) {
4203   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4204   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4205   attributes.set_rex_vex_w_reverted();
4206   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4207   emit_int8(0x5C);
4208   emit_int8((unsigned char)(0xC0 | encode));
4209 }
4210 
4211 void Assembler::subsd(XMMRegister dst, Address src) {
4212   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4213   InstructionMark im(this);
4214   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4215   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4216   attributes.set_rex_vex_w_reverted();
4217   simd_prefix(dst, dst, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4218   emit_int8(0x5C);
4219   emit_operand(dst, src);
4220 }
4221 
4222 void Assembler::subss(XMMRegister dst, XMMRegister src) {
4223   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4224   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false , /* uses_vl */ false);
4225   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4226   emit_int8(0x5C);
4227   emit_int8((unsigned char)(0xC0 | encode));
4228 }
4229 
4230 void Assembler::subss(XMMRegister dst, Address src) {
4231   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4232   InstructionMark im(this);
4233   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4234   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4235   simd_prefix(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4236   emit_int8(0x5C);


4284   int encode = prefix_and_encode(dst->encoding(), src->encoding());
4285   emit_int8(0x0F);
4286   emit_int8((unsigned char)0xBC);
4287   emit_int8((unsigned char)(0xC0 | encode));
4288 }
4289 
4290 void Assembler::tzcntq(Register dst, Register src) {
4291   assert(VM_Version::supports_bmi1(), "tzcnt instruction not supported");
4292   emit_int8((unsigned char)0xF3);
4293   int encode = prefixq_and_encode(dst->encoding(), src->encoding());
4294   emit_int8(0x0F);
4295   emit_int8((unsigned char)0xBC);
4296   emit_int8((unsigned char)(0xC0 | encode));
4297 }
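// Note that the mandatory F3 prefix is emitted before the REX prefix
// (F3 REX.W 0F BC); without F3 the same bytes decode as bsf.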
4298 
4299 void Assembler::ucomisd(XMMRegister dst, Address src) {
4300   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4301   InstructionMark im(this);
4302   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
4303   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4304   attributes.set_rex_vex_w_reverted();
4305   simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4306   emit_int8(0x2E);
4307   emit_operand(dst, src);
4308 }
4309 
4310 void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
4311   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4312   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
4313   attributes.set_rex_vex_w_reverted();
4314   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4315   emit_int8(0x2E);
4316   emit_int8((unsigned char)(0xC0 | encode));
4317 }
4318 
4319 void Assembler::ucomiss(XMMRegister dst, Address src) {
4320   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4321   InstructionMark im(this);
4322   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
4323   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4324   simd_prefix(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4325   emit_int8(0x2E);
4326   emit_operand(dst, src);
4327 }
4328 
4329 void Assembler::ucomiss(XMMRegister dst, XMMRegister src) {
4330   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4331   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
4332   int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4333   emit_int8(0x2E);


4405 
4406 void Assembler::xorl(Register dst, Register src) {
4407   (void) prefix_and_encode(dst->encoding(), src->encoding());
4408   emit_arith(0x33, 0xC0, dst, src);
4409 }
4410 
4411 void Assembler::xorb(Register dst, Address src) {
4412   InstructionMark im(this);
4413   prefix(src, dst);
4414   emit_int8(0x32);
4415   emit_operand(dst, src);
4416 }
4417 
4418 // AVX 3-operand scalar floating-point arithmetic instructions
4419 
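// Note (editorial sketch): unlike the two-operand SSE forms earlier in this
// file, these VEX-encoded forms are non-destructive: dst receives nds op src
// and nds is left intact. For example, on an AVX-only CPU (plain VEX, no
// EVEX), a call such as
//
//   __ vaddsd(xmm0, xmm1, xmm2);   // xmm0 = xmm1 + xmm2
//
// should emit the two-byte-VEX sequence C5 F3 58 C2 (VEX.LIG.F2.0F 58 /r),
// whereas the legacy addsd(xmm1, xmm2) would clobber its first operand.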
4420 void Assembler::vaddsd(XMMRegister dst, XMMRegister nds, Address src) {
4421   assert(VM_Version::supports_avx(), "");
4422   InstructionMark im(this);
4423   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4424   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4425   attributes.set_rex_vex_w_reverted();
4426   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4427   emit_int8(0x58);
4428   emit_operand(dst, src);
4429 }
4430 
4431 void Assembler::vaddsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4432   assert(VM_Version::supports_avx(), "");
4433   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4434   attributes.set_rex_vex_w_reverted();
4435   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4436   emit_int8(0x58);
4437   emit_int8((unsigned char)(0xC0 | encode));
4438 }
4439 
4440 void Assembler::vaddss(XMMRegister dst, XMMRegister nds, Address src) {
4441   assert(VM_Version::supports_avx(), "");
4442   InstructionMark im(this);
4443   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4444   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4445   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4446   emit_int8(0x58);
4447   emit_operand(dst, src);
4448 }
4449 
4450 void Assembler::vaddss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4451   assert(VM_Version::supports_avx(), "");
4452   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4453   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4454   emit_int8(0x58);
4455   emit_int8((unsigned char)(0xC0 | encode));
4456 }
4457 
4458 void Assembler::vdivsd(XMMRegister dst, XMMRegister nds, Address src) {
4459   assert(VM_Version::supports_avx(), "");
4460   InstructionMark im(this);
4461   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4462   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4463   attributes.set_rex_vex_w_reverted();
4464   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4465   emit_int8(0x5E);
4466   emit_operand(dst, src);
4467 }
4468 
4469 void Assembler::vdivsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4470   assert(VM_Version::supports_avx(), "");
4471   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4472   attributes.set_rex_vex_w_reverted();
4473   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4474   emit_int8(0x5E);
4475   emit_int8((unsigned char)(0xC0 | encode));
4476 }
4477 
4478 void Assembler::vdivss(XMMRegister dst, XMMRegister nds, Address src) {
4479   assert(VM_Version::supports_avx(), "");
4480   InstructionMark im(this);
4481   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4482   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4483   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4484   emit_int8(0x5E);
4485   emit_operand(dst, src);
4486 }
4487 
4488 void Assembler::vdivss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4489   assert(VM_Version::supports_avx(), "");
4490   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4491   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4492   emit_int8(0x5E);
4493   emit_int8((unsigned char)(0xC0 | encode));
4494 }
4495 
4496 void Assembler::vmulsd(XMMRegister dst, XMMRegister nds, Address src) {
4497   assert(VM_Version::supports_avx(), "");
4498   InstructionMark im(this);
4499   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4500   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4501   attributes.set_rex_vex_w_reverted();
4502   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4503   emit_int8(0x59);
4504   emit_operand(dst, src);
4505 }
4506 
4507 void Assembler::vmulsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4508   assert(VM_Version::supports_avx(), "");
4509   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4510   attributes.set_rex_vex_w_reverted();
4511   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4512   emit_int8(0x59);
4513   emit_int8((unsigned char)(0xC0 | encode));
4514 }
4515 
4516 void Assembler::vmulss(XMMRegister dst, XMMRegister nds, Address src) {
4517   assert(VM_Version::supports_avx(), "");
4518   InstructionMark im(this);
4519   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4520   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4521   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4522   emit_int8(0x59);
4523   emit_operand(dst, src);
4524 }
4525 
4526 void Assembler::vmulss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4527   assert(VM_Version::supports_avx(), "");
4528   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4529   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4530   emit_int8(0x59);
4531   emit_int8((unsigned char)(0xC0 | encode));
4532 }
4533 
4534 void Assembler::vsubsd(XMMRegister dst, XMMRegister nds, Address src) {
4535   assert(VM_Version::supports_avx(), "");
4536   InstructionMark im(this);
4537   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4538   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
4539   attributes.set_rex_vex_w_reverted();
4540   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4541   emit_int8(0x5C);
4542   emit_operand(dst, src);
4543 }
4544 
4545 void Assembler::vsubsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4546   assert(VM_Version::supports_avx(), "");
4547   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4548   attributes.set_rex_vex_w_reverted();
4549   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
4550   emit_int8(0x5C);
4551   emit_int8((unsigned char)(0xC0 | encode));
4552 }
4553 
4554 void Assembler::vsubss(XMMRegister dst, XMMRegister nds, Address src) {
4555   assert(VM_Version::supports_avx(), "");
4556   InstructionMark im(this);
4557   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4558   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
4559   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4560   emit_int8(0x5C);
4561   emit_operand(dst, src);
4562 }
4563 
4564 void Assembler::vsubss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
4565   assert(VM_Version::supports_avx(), "");
4566   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
4567   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4568   emit_int8(0x5C);
4569   emit_int8((unsigned char)(0xC0 | encode));
4570 }
4571 
4572 //====================VECTOR ARITHMETIC=====================================
4573 
4574 // Floating-point vector arithmetic
4575 
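// Note (editorial sketch): the packed forms below operate lane-wise across
// the whole register. When EVEX encoding is used, memory operands are tagged
// EVEX_FV (full vector), so the compressed 8-bit displacement is scaled by
// the full vector width (16/32/64 bytes) rather than by the element size
// when no broadcast is in play; e.g.
//
//   __ vaddpd(xmm0, xmm1, Address(rsp, 64), Assembler::AVX_512bit);
//
// can use disp8 == 1 under EVEX because 64 is an exact multiple of the
// 64-byte vector width.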
4576 void Assembler::addpd(XMMRegister dst, XMMRegister src) {
4577   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4578   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4579   attributes.set_rex_vex_w_reverted();
4580   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4581   emit_int8(0x58);
4582   emit_int8((unsigned char)(0xC0 | encode));
4583 }
4584 
4585 void Assembler::addpd(XMMRegister dst, Address src) {
4586   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4587   InstructionMark im(this);
4588   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4589   attributes.set_rex_vex_w_reverted();
4590   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4591   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4592   emit_int8(0x58);
4593   emit_operand(dst, src);
4594 }
4595 
4596 
4597 void Assembler::addps(XMMRegister dst, XMMRegister src) {
4598   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4599   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4600   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4601   emit_int8(0x58);
4602   emit_int8((unsigned char)(0xC0 | encode));
4603 }
4604 
4605 void Assembler::vaddpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4606   assert(VM_Version::supports_avx(), "");
4607   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4608   attributes.set_rex_vex_w_reverted();
4609   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4610   emit_int8(0x58);
4611   emit_int8((unsigned char)(0xC0 | encode));
4612 }
4613 
4614 void Assembler::vaddps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4615   assert(VM_Version::supports_avx(), "");
4616   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4617   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4618   emit_int8(0x58);
4619   emit_int8((unsigned char)(0xC0 | encode));
4620 }
4621 
4622 void Assembler::vaddpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4623   assert(VM_Version::supports_avx(), "");
4624   InstructionMark im(this);
4625   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4626   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4627   attributes.set_rex_vex_w_reverted();
4628   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4629   emit_int8(0x58);
4630   emit_operand(dst, src);
4631 }
4632 
4633 void Assembler::vaddps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4634   assert(VM_Version::supports_avx(), "");
4635   InstructionMark im(this);
4636   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4637   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4638   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4639   emit_int8(0x58);
4640   emit_operand(dst, src);
4641 }
4642 
4643 void Assembler::subpd(XMMRegister dst, XMMRegister src) {
4644   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4645   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4646   attributes.set_rex_vex_w_reverted();
4647   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4648   emit_int8(0x5C);
4649   emit_int8((unsigned char)(0xC0 | encode));
4650 }
4651 
4652 void Assembler::subps(XMMRegister dst, XMMRegister src) {
4653   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4654   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4655   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4656   emit_int8(0x5C);
4657   emit_int8((unsigned char)(0xC0 | encode));
4658 }
4659 
4660 void Assembler::vsubpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4661   assert(VM_Version::supports_avx(), "");
4662   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4663   attributes.set_rex_vex_w_reverted();
4664   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4665   emit_int8(0x5C);
4666   emit_int8((unsigned char)(0xC0 | encode));
4667 }
4668 
4669 void Assembler::vsubps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4670   assert(VM_Version::supports_avx(), "");
4671   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4672   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4673   emit_int8(0x5C);
4674   emit_int8((unsigned char)(0xC0 | encode));
4675 }
4676 
4677 void Assembler::vsubpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4678   assert(VM_Version::supports_avx(), "");
4679   InstructionMark im(this);
4680   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4681   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4682   attributes.set_rex_vex_w_reverted();
4683   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4684   emit_int8(0x5C);
4685   emit_operand(dst, src);
4686 }
4687 
4688 void Assembler::vsubps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4689   assert(VM_Version::supports_avx(), "");
4690   InstructionMark im(this);
4691   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4692   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4693   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4694   emit_int8(0x5C);
4695   emit_operand(dst, src);
4696 }
4697 
4698 void Assembler::mulpd(XMMRegister dst, XMMRegister src) {
4699   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4700   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4701   attributes.set_rex_vex_w_reverted();
4702   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4703   emit_int8(0x59);
4704   emit_int8((unsigned char)(0xC0 | encode));
4705 }
4706 
4707 void Assembler::mulpd(XMMRegister dst, Address src) {
4708   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4709   InstructionMark im(this);
4710   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4711   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4712   attributes.set_rex_vex_w_reverted();
4713   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4714   emit_int8(0x59);
4715   emit_operand(dst, src);
4716 }
4717 
4718 void Assembler::mulps(XMMRegister dst, XMMRegister src) {
4719   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4720   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4721   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4722   emit_int8(0x59);
4723   emit_int8((unsigned char)(0xC0 | encode));
4724 }
4725 
4726 void Assembler::vmulpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4727   assert(VM_Version::supports_avx(), "");
4728   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4729   attributes.set_rex_vex_w_reverted();
4730   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4731   emit_int8(0x59);
4732   emit_int8((unsigned char)(0xC0 | encode));
4733 }
4734 
4735 void Assembler::vmulps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4736   assert(VM_Version::supports_avx(), "");
4737   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4738   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4739   emit_int8(0x59);
4740   emit_int8((unsigned char)(0xC0 | encode));
4741 }
4742 
4743 void Assembler::vmulpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4744   assert(VM_Version::supports_avx(), "");
4745   InstructionMark im(this);
4746   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4747   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4748   attributes.set_rex_vex_w_reverted();
4749   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4750   emit_int8(0x59);
4751   emit_operand(dst, src);
4752 }
4753 
4754 void Assembler::vmulps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4755   assert(VM_Version::supports_avx(), "");
4756   InstructionMark im(this);
4757   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4758   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4759   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4760   emit_int8(0x59);
4761   emit_operand(dst, src);
4762 }
4763 
4764 void Assembler::divpd(XMMRegister dst, XMMRegister src) {
4765   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4766   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4767   attributes.set_rex_vex_w_reverted();
4768   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4769   emit_int8(0x5E);
4770   emit_int8((unsigned char)(0xC0 | encode));
4771 }
4772 
4773 void Assembler::divps(XMMRegister dst, XMMRegister src) {
4774   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4775   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4776   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4777   emit_int8(0x5E);
4778   emit_int8((unsigned char)(0xC0 | encode));
4779 }
4780 
4781 void Assembler::vdivpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4782   assert(VM_Version::supports_avx(), "");
4783   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4784   attributes.set_rex_vex_w_reverted();
4785   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4786   emit_int8(0x5E);
4787   emit_int8((unsigned char)(0xC0 | encode));
4788 }
4789 
4790 void Assembler::vdivps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4791   assert(VM_Version::supports_avx(), "");
4792   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4793   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4794   emit_int8(0x5E);
4795   emit_int8((unsigned char)(0xC0 | encode));
4796 }
4797 
4798 void Assembler::vdivpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4799   assert(VM_Version::supports_avx(), "");
4800   InstructionMark im(this);
4801   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4802   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4803   attributes.set_rex_vex_w_reverted();
4804   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4805   emit_int8(0x5E);
4806   emit_operand(dst, src);
4807 }
4808 
4809 void Assembler::vdivps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4810   assert(VM_Version::supports_avx(), "");
4811   InstructionMark im(this);
4812   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4813   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4814   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4815   emit_int8(0x5E);
4816   emit_operand(dst, src);
4817 }
4818 
4819 void Assembler::vsqrtpd(XMMRegister dst, XMMRegister src, int vector_len) {
4820   assert(VM_Version::supports_avx(), "");
4821   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4822   attributes.set_rex_vex_w_reverted();
4823   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4824   emit_int8(0x51);
4825   emit_int8((unsigned char)(0xC0 | encode));
4826 }
4827 
4828 void Assembler::vsqrtpd(XMMRegister dst, Address src, int vector_len) {
4829   assert(VM_Version::supports_avx(), "");
4830   InstructionMark im(this);
4831   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4832   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4833   attributes.set_rex_vex_w_reverted();
4834   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4835   emit_int8(0x51);
4836   emit_operand(dst, src);
4837 }
4838 
4839 void Assembler::andpd(XMMRegister dst, XMMRegister src) {
4840   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4841   InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4842   attributes.set_rex_vex_w_reverted();
4843   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4844   emit_int8(0x54);
4845   emit_int8((unsigned char)(0xC0 | encode));
4846 }
4847 
4848 void Assembler::andps(XMMRegister dst, XMMRegister src) {
4849   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4850   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4851   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4852   emit_int8(0x54);
4853   emit_int8((unsigned char)(0xC0 | encode));
4854 }
4855 
4856 void Assembler::andps(XMMRegister dst, Address src) {
4857   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4858   InstructionMark im(this);
4859   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4860   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4861   simd_prefix(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4862   emit_int8(0x54);
4863   emit_operand(dst, src);
4864 }
4865 
4866 void Assembler::andpd(XMMRegister dst, Address src) {
4867   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4868   InstructionMark im(this);
4869   InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4870   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4871   attributes.set_rex_vex_w_reverted();
4872   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4873   emit_int8(0x54);
4874   emit_operand(dst, src);
4875 }
4876 
4877 void Assembler::vandpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4878   assert(VM_Version::supports_avx(), "");
4879   InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4880   attributes.set_rex_vex_w_reverted();
4881   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4882   emit_int8(0x54);
4883   emit_int8((unsigned char)(0xC0 | encode));
4884 }
4885 
4886 void Assembler::vandps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4887   assert(VM_Version::supports_avx(), "");
4888   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4889   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4890   emit_int8(0x54);
4891   emit_int8((unsigned char)(0xC0 | encode));
4892 }
4893 
4894 void Assembler::vandpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4895   assert(VM_Version::supports_avx(), "");
4896   InstructionMark im(this);
4897   InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4898   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4899   attributes.set_rex_vex_w_reverted();
4900   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4901   emit_int8(0x54);
4902   emit_operand(dst, src);
4903 }
4904 
4905 void Assembler::vandps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4906   assert(VM_Version::supports_avx(), "");
4907   InstructionMark im(this);
4908   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4909   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4910   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4911   emit_int8(0x54);
4912   emit_operand(dst, src);
4913 }
4914 
4915 void Assembler::unpckhpd(XMMRegister dst, XMMRegister src) {
4916   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4917   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4918   attributes.set_rex_vex_w_reverted();
4919   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4920   emit_int8(0x15);
4921   emit_int8((unsigned char)(0xC0 | encode));
4922 }
4923 
4924 void Assembler::unpcklpd(XMMRegister dst, XMMRegister src) {
4925   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4926   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
4927   attributes.set_rex_vex_w_reverted();
4928   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4929   emit_int8(0x14);
4930   emit_int8((unsigned char)(0xC0 | encode));
4931 }
4932 
4933 void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
4934   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4935   InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4936   attributes.set_rex_vex_w_reverted();
4937   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4938   emit_int8(0x57);
4939   emit_int8((unsigned char)(0xC0 | encode));
4940 }
4941 
4942 void Assembler::xorps(XMMRegister dst, XMMRegister src) {
4943   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4944   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4945   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4946   emit_int8(0x57);
4947   emit_int8((unsigned char)(0xC0 | encode));
4948 }
4949 
4950 void Assembler::xorpd(XMMRegister dst, Address src) {
4951   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
4952   InstructionMark im(this);
4953   InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4954   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4955   attributes.set_rex_vex_w_reverted();
4956   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4957   emit_int8(0x57);
4958   emit_operand(dst, src);
4959 }
4960 
4961 void Assembler::xorps(XMMRegister dst, Address src) {
4962   NOT_LP64(assert(VM_Version::supports_sse(), ""));
4963   InstructionMark im(this);
4964   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4965   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
4966   simd_prefix(dst, dst, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4967   emit_int8(0x57);
4968   emit_operand(dst, src);
4969 }
4970 
4971 void Assembler::vxorpd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4972   assert(VM_Version::supports_avx(), "");
4973   InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4974   attributes.set_rex_vex_w_reverted();
4975   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4976   emit_int8(0x57);
4977   emit_int8((unsigned char)(0xC0 | encode));
4978 }
4979 
4980 void Assembler::vxorps(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
4981   assert(VM_Version::supports_avx(), "");
4982   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4983   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
4984   emit_int8(0x57);
4985   emit_int8((unsigned char)(0xC0 | encode));
4986 }
4987 
4988 void Assembler::vxorpd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
4989   assert(VM_Version::supports_avx(), "");
4990   InstructionMark im(this);
4991   InstructionAttr attributes(vector_len, /* vex_w */ !_legacy_mode_dq, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
4992   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
4993   attributes.set_rex_vex_w_reverted();
4994   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
4995   emit_int8(0x57);
4996   emit_operand(dst, src);
4997 }
4998 
4999 void Assembler::vxorps(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5000   assert(VM_Version::supports_avx(), "");
5001   InstructionMark im(this);
5002   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
5003   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5004   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
5005   emit_int8(0x57);
5006   emit_operand(dst, src);
5007 }
5008 
5009 // Integer vector arithmetic
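// Note (editorial sketch): in the packed byte/word add, subtract and multiply
// forms below, legacy_mode is tied to _legacy_mode_bw: EVEX byte/word
// arithmetic is an AVX512BW extension, so on CPUs without it the assembler
// falls back to VEX encodings capped at 256 bits. The plain dword/qword
// add/sub forms are AVX512F and pass legacy_mode == false.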
5010 void Assembler::vphaddw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5011   assert((VM_Version::supports_avx() && (vector_len == 0)) ||
5012          VM_Version::supports_avx2(), "256-bit integer vectors require AVX2");
5013   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);


5044 void Assembler::paddd(XMMRegister dst, XMMRegister src) {
5045   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5046   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5047   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5048   emit_int8((unsigned char)0xFE);
5049   emit_int8((unsigned char)(0xC0 | encode));
5050 }
5051 
5052 void Assembler::paddd(XMMRegister dst, Address src) {
5053   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5054   InstructionMark im(this);
5055   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5056   simd_prefix(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5057   emit_int8((unsigned char)0xFE);
5058   emit_operand(dst, src);
5059 }
5060 
5061 void Assembler::paddq(XMMRegister dst, XMMRegister src) {
5062   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5063   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5064   attributes.set_rex_vex_w_reverted();
5065   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5066   emit_int8((unsigned char)0xD4);
5067   emit_int8((unsigned char)(0xC0 | encode));
5068 }
5069 
5070 void Assembler::phaddw(XMMRegister dst, XMMRegister src) {
5071   NOT_LP64(assert(VM_Version::supports_ssse3(), ""));
5072   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
5073   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5074   emit_int8(0x01);
5075   emit_int8((unsigned char)(0xC0 | encode));
5076 }
5077 
5078 void Assembler::phaddd(XMMRegister dst, XMMRegister src) {
5079   NOT_LP64(assert(VM_Version::supports_ssse3(), ""));
5080   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
5081   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5082   emit_int8(0x02);
5083   emit_int8((unsigned char)(0xC0 | encode));
5084 }


5093 
5094 void Assembler::vpaddw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5095   assert(UseAVX > 0, "requires some form of AVX");
5096   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5097   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5098   emit_int8((unsigned char)0xFD);
5099   emit_int8((unsigned char)(0xC0 | encode));
5100 }
5101 
5102 void Assembler::vpaddd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5103   assert(UseAVX > 0, "requires some form of AVX");
5104   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5105   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5106   emit_int8((unsigned char)0xFE);
5107   emit_int8((unsigned char)(0xC0 | encode));
5108 }
5109 
5110 void Assembler::vpaddq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5111   assert(UseAVX > 0, "requires some form of AVX");
5112   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5113   attributes.set_rex_vex_w_reverted();
5114   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5115   emit_int8((unsigned char)0xD4);
5116   emit_int8((unsigned char)(0xC0 | encode));
5117 }
5118 
5119 void Assembler::vpaddb(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5120   assert(UseAVX > 0, "requires some form of AVX");
5121   InstructionMark im(this);
5122   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5123   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5124   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5125   emit_int8((unsigned char)0xFC);
5126   emit_operand(dst, src);
5127 }
5128 
5129 void Assembler::vpaddw(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5130   assert(UseAVX > 0, "requires some form of AVX");
5131   InstructionMark im(this);
5132   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5133   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5134   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5135   emit_int8((unsigned char)0xFD);
5136   emit_operand(dst, src);
5137 }
5138 
5139 void Assembler::vpaddd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5140   assert(UseAVX > 0, "requires some form of AVX");
5141   InstructionMark im(this);
5142   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5143   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5144   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5145   emit_int8((unsigned char)0xFE);
5146   emit_operand(dst, src);
5147 }
5148 
5149 void Assembler::vpaddq(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5150   assert(UseAVX > 0, "requires some form of AVX");
5151   InstructionMark im(this);
5152   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5153   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
5154   attributes.set_rex_vex_w_reverted();
5155   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5156   emit_int8((unsigned char)0xD4);
5157   emit_operand(dst, src);
5158 }
5159 
5160 void Assembler::psubb(XMMRegister dst, XMMRegister src) {
5161   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5162   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5163   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5164   emit_int8((unsigned char)0xF8);
5165   emit_int8((unsigned char)(0xC0 | encode));
5166 }
5167 
5168 void Assembler::psubw(XMMRegister dst, XMMRegister src) {
5169   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5170   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5171   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5172   emit_int8((unsigned char)0xF9);
5173   emit_int8((unsigned char)(0xC0 | encode));
5174 }
5175 
5176 void Assembler::psubd(XMMRegister dst, XMMRegister src) {
       NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5177   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5178   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5179   emit_int8((unsigned char)0xFA);
5180   emit_int8((unsigned char)(0xC0 | encode));
5181 }
5182 
5183 void Assembler::psubq(XMMRegister dst, XMMRegister src) {
5184   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5185   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5186   attributes.set_rex_vex_w_reverted();
5187   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5188   emit_int8((unsigned char)0xFB);
5189   emit_int8((unsigned char)(0xC0 | encode));
5190 }
5191 
5192 void Assembler::vpsubb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5193   assert(UseAVX > 0, "requires some form of AVX");
5194   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5195   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5196   emit_int8((unsigned char)0xF8);
5197   emit_int8((unsigned char)(0xC0 | encode));
5198 }
5199 
5200 void Assembler::vpsubw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5201   assert(UseAVX > 0, "requires some form of AVX");
5202   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5203   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5204   emit_int8((unsigned char)0xF9);
5205   emit_int8((unsigned char)(0xC0 | encode));
5206 }
5207 
5208 void Assembler::vpsubd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5209   assert(UseAVX > 0, "requires some form of AVX");
5210   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5211   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5212   emit_int8((unsigned char)0xFA);
5213   emit_int8((unsigned char)(0xC0 | encode));
5214 }
5215 
5216 void Assembler::vpsubq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5217   assert(UseAVX > 0, "requires some form of AVX");
5218   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5219   attributes.set_rex_vex_w_reverted();
5220   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5221   emit_int8((unsigned char)0xFB);
5222   emit_int8((unsigned char)(0xC0 | encode));
5223 }
5224 
5225 void Assembler::vpsubb(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5226   assert(UseAVX > 0, "requires some form of AVX");
5227   InstructionMark im(this);
5228   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5229   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5230   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5231   emit_int8((unsigned char)0xF8);
5232   emit_operand(dst, src);
5233 }
5234 
5235 void Assembler::vpsubw(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5236   assert(UseAVX > 0, "requires some form of AVX");
5237   InstructionMark im(this);
5238   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5239   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5240   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5241   emit_int8((unsigned char)0xF9);
5242   emit_operand(dst, src);
5243 }
5244 
5245 void Assembler::vpsubd(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5246   assert(UseAVX > 0, "requires some form of AVX");
5247   InstructionMark im(this);
5248   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5249   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5250   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5251   emit_int8((unsigned char)0xFA);
5252   emit_operand(dst, src);
5253 }
5254 
5255 void Assembler::vpsubq(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5256   assert(UseAVX > 0, "requires some form of AVX");
5257   InstructionMark im(this);
5258   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5259   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
5260   attributes.set_rex_vex_w_reverted();
5261   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5262   emit_int8((unsigned char)0xFB);
5263   emit_operand(dst, src);
5264 }
5265 
5266 void Assembler::pmullw(XMMRegister dst, XMMRegister src) {
5267   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5268   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5269   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5270   emit_int8((unsigned char)0xD5);
5271   emit_int8((unsigned char)(0xC0 | encode));
5272 }
5273 
5274 void Assembler::pmulld(XMMRegister dst, XMMRegister src) {
5275   assert(VM_Version::supports_sse4_1(), "");
5276   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5277   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5278   emit_int8(0x40);
5279   emit_int8((unsigned char)(0xC0 | encode));
5280 }
5281 
5282 void Assembler::vpmullw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5283   assert(UseAVX > 0, "requires some form of AVX");
5284   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5285   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5286   emit_int8((unsigned char)0xD5);
5287   emit_int8((unsigned char)(0xC0 | encode));
5288 }
5289 
5290 void Assembler::vpmulld(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5291   assert(UseAVX > 0, "requires some form of AVX");
5292   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5293   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5294   emit_int8(0x40);
5295   emit_int8((unsigned char)(0xC0 | encode));
5296 }
5297 
5298 void Assembler::vpmullq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5299   assert(UseAVX > 2, "requires some form of EVEX");
5300   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
5301   attributes.set_is_evex_instruction();
5302   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5303   emit_int8(0x40);
5304   emit_int8((unsigned char)(0xC0 | encode));
5305 }
5306 
5307 void Assembler::vpmullw(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5308   assert(UseAVX > 0, "requires some form of AVX");
5309   InstructionMark im(this);
5310   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5311   attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
5312   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5313   emit_int8((unsigned char)0xD5);
5314   emit_operand(dst, src);
5315 }
5316 
5317 void Assembler::vpmulld(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5318   assert(UseAVX > 0, "requires some form of AVX");
5319   InstructionMark im(this);
5320   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5321   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5322   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5323   emit_int8(0x40);
5324   emit_operand(dst, src);
5325 }
5326 
5327 void Assembler::vpmullq(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5328   assert(UseAVX > 2, "requires some form of EVEX");
5329   InstructionMark im(this);
5330   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ false, /* uses_vl */ true);
5331   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);
5332   attributes.set_is_evex_instruction();
5333   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
5334   emit_int8(0x40);
5335   emit_operand(dst, src);
5336 }
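// Note (editorial sketch): VPMULLQ has no VEX form; it is an AVX-512DQ
// instruction. That is why both overloads above assert UseAVX > 2 and call
// set_is_evex_instruction() to force EVEX output even for 128/256-bit
// vector_len (which additionally requires AVX512VL on the target CPU).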
5337 
5338 // Shift packed integers left by the specified number of bits.
5339 void Assembler::psllw(XMMRegister dst, int shift) {
5340   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5341   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5342   // XMM6 is for /6 encoding: 66 0F 71 /6 ib
5343   int encode = simd_prefix_and_encode(xmm6, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5344   emit_int8(0x71);
5345   emit_int8((unsigned char)(0xC0 | encode));
5346   emit_int8(shift & 0xFF);
5347 }
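// Note (editorial sketch): the immediate-count shifts have no register
// destination field in ModRM; the reg bits hold an opcode extension instead.
// Passing xmm6 as the first operand above is simply how /6 lands in those
// bits:
//
//   ModRM = 0xC0 | (6 << 3) | dst_enc
//
// so, with legacy SSE encoding, psllw(xmm1, 5) should come out as
// 66 0F 71 F1 05, with the shift count trailing as an immediate byte.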
5348 
5349 void Assembler::pslld(XMMRegister dst, int shift) {
5350   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5351   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5352   // XMM6 is for /6 encoding: 66 0F 72 /6 ib


5368 
5369 void Assembler::psllw(XMMRegister dst, XMMRegister shift) {
5370   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5371   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5372   int encode = simd_prefix_and_encode(dst, dst, shift, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5373   emit_int8((unsigned char)0xF1);
5374   emit_int8((unsigned char)(0xC0 | encode));
5375 }
5376 
5377 void Assembler::pslld(XMMRegister dst, XMMRegister shift) {
5378   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5379   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5380   int encode = simd_prefix_and_encode(dst, dst, shift, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5381   emit_int8((unsigned char)0xF2);
5382   emit_int8((unsigned char)(0xC0 | encode));
5383 }
5384 
5385 void Assembler::psllq(XMMRegister dst, XMMRegister shift) {
5386   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5387   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5388   attributes.set_rex_vex_w_reverted();
5389   int encode = simd_prefix_and_encode(dst, dst, shift, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5390   emit_int8((unsigned char)0xF3);
5391   emit_int8((unsigned char)(0xC0 | encode));
5392 }
5393 
5394 void Assembler::vpsllw(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
5395   assert(UseAVX > 0, "requires some form of AVX");
5396   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5397   // XMM6 is for /6 encoding: 66 0F 71 /6 ib
5398   int encode = vex_prefix_and_encode(xmm6->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5399   emit_int8(0x71);
5400   emit_int8((unsigned char)(0xC0 | encode));
5401   emit_int8(shift & 0xFF);
5402 }
5403 
5404 void Assembler::vpslld(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
5405   assert(UseAVX > 0, "requires some form of AVX");
5407   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5408   // XMM6 is for /6 encoding: 66 0F 72 /6 ib
5409   int encode = vex_prefix_and_encode(xmm6->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5410   emit_int8(0x72);
5411   emit_int8((unsigned char)(0xC0 | encode));
5412   emit_int8(shift & 0xFF);
5413 }
5414 
5415 void Assembler::vpsllq(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
5416   assert(UseAVX > 0, "requires some form of AVX");
5417   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5418   attributes.set_rex_vex_w_reverted();
5419   // XMM6 is for /6 encoding: 66 0F 73 /6 ib
5420   int encode = vex_prefix_and_encode(xmm6->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5421   emit_int8(0x73);
5422   emit_int8((unsigned char)(0xC0 | encode));
5423   emit_int8(shift & 0xFF);
5424 }
5425 
5426 void Assembler::vpsllw(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
5427   assert(UseAVX > 0, "requires some form of AVX");
5428   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5429   int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5430   emit_int8((unsigned char)0xF1);
5431   emit_int8((unsigned char)(0xC0 | encode));
5432 }
5433 
5434 void Assembler::vpslld(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
5435   assert(UseAVX > 0, "requires some form of AVX");
5436   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5437   int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5438   emit_int8((unsigned char)0xF2);
5439   emit_int8((unsigned char)(0xC0 | encode));
5440 }
5441 
5442 void Assembler::vpsllq(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
5443   assert(UseAVX > 0, "requires some form of AVX");
5444   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5445   attributes.set_rex_vex_w_reverted();
5446   int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5447   emit_int8((unsigned char)0xF3);
5448   emit_int8((unsigned char)(0xC0 | encode));
5449 }
5450 
5451 // Shift packed integers logically right by the specified number of bits.
5452 void Assembler::psrlw(XMMRegister dst, int shift) {
5453   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5454   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5455   // XMM2 is for /2 encoding: 66 0F 71 /2 ib
5456   int encode = simd_prefix_and_encode(xmm2, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5457   emit_int8(0x71);
5458   emit_int8((unsigned char)(0xC0 | encode));
5459   emit_int8(shift & 0xFF);
5460 }
5461 
5462 void Assembler::psrld(XMMRegister dst, int shift) {
5463   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5464   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5465   // XMM2 is for /2 encoding: 66 0F 72 /2 ib
5466   int encode = simd_prefix_and_encode(xmm2, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5467   emit_int8(0x72);
5468   emit_int8((unsigned char)(0xC0 | encode));
5469   emit_int8(shift & 0xFF);
5470 }
5471 
5472 void Assembler::psrlq(XMMRegister dst, int shift) {
5473   // Do not confuse this with the SSE2 psrldq instruction, which
5474   // shifts the whole 128-bit xmm value right by a number of bytes.
5475   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5476   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5477   attributes.set_rex_vex_w_reverted();
5478   // XMM2 is for /2 encoding: 66 0F 73 /2 ib
5479   int encode = simd_prefix_and_encode(xmm2, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5480   emit_int8(0x73);
5481   emit_int8((unsigned char)(0xC0 | encode));
5482   emit_int8(shift & 0xFF);
5483 }
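
// Editor's note: a hedged contrast with the byte shift mentioned above,
// again assuming an SSE2-only target:
//   __ psrlq(xmm0, 8);   // 66 0F 73 D0 08 : each 64-bit lane >> 8 bits
// psrldq shares the 66 0F 73 opcode but uses the /3 extension and moves
// the whole 128-bit value right by 8 *bytes*.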
5484 
5485 void Assembler::psrlw(XMMRegister dst, XMMRegister shift) {
5486   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5487   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5488   int encode = simd_prefix_and_encode(dst, dst, shift, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5489   emit_int8((unsigned char)0xD1);
5490   emit_int8((unsigned char)(0xC0 | encode));
5491 }
5492 
5493 void Assembler::psrld(XMMRegister dst, XMMRegister shift) {
5494   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5495   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5496   int encode = simd_prefix_and_encode(dst, dst, shift, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5497   emit_int8((unsigned char)0xD2);
5498   emit_int8((unsigned char)(0xC0 | encode));
5499 }
5500 
5501 void Assembler::psrlq(XMMRegister dst, XMMRegister shift) {
5502   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5503   InstructionAttr attributes(AVX_128bit, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5504   attributes.set_rex_vex_w_reverted();
5505   int encode = simd_prefix_and_encode(dst, dst, shift, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5506   emit_int8((unsigned char)0xD3);
5507   emit_int8((unsigned char)(0xC0 | encode));
5508 }
5509 
5510 void Assembler::vpsrlw(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
5511   assert(UseAVX > 0, "requires some form of AVX");
5512   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5513   // XMM2 is for /2 encoding: 66 0F 71 /2 ib
5514   int encode = vex_prefix_and_encode(xmm2->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5515   emit_int8(0x71);
5516   emit_int8((unsigned char)(0xC0 | encode));
5517   emit_int8(shift & 0xFF);
5518 }
5519 
5520 void Assembler::vpsrld(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
5521   assert(UseAVX > 0, "requires some form of AVX");
5522   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5523   // XMM2 is for /2 encoding: 66 0F 72 /2 ib
5524   int encode = vex_prefix_and_encode(xmm2->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5525   emit_int8(0x72);
5526   emit_int8((unsigned char)(0xC0 | encode));
5527   emit_int8(shift & 0xFF);
5528 }
5529 
5530 void Assembler::vpsrlq(XMMRegister dst, XMMRegister src, int shift, int vector_len) {
5531   assert(UseAVX > 0, "requires some form of AVX");
5532   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5533   attributes.set_rex_vex_w_reverted();
5534   // XMM2 is for /2 encoding: 66 0F 73 /2 ib
5535   int encode = vex_prefix_and_encode(xmm2->encoding(), dst->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5536   emit_int8(0x73);
5537   emit_int8((unsigned char)(0xC0 | encode));
5538   emit_int8(shift & 0xFF);
5539 }
5540 
5541 void Assembler::vpsrlw(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
5542   assert(UseAVX > 0, "requires some form of AVX");
5543   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5544   int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5545   emit_int8((unsigned char)0xD1);
5546   emit_int8((unsigned char)(0xC0 | encode));
5547 }
5548 
5549 void Assembler::vpsrld(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
5550   assert(UseAVX > 0, "requires some form of AVX");
5551   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5552   int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5553   emit_int8((unsigned char)0xD2);
5554   emit_int8((unsigned char)(0xC0 | encode));
5555 }
5556 
5557 void Assembler::vpsrlq(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len) {
5558   assert(UseAVX > 0, "requires some form of AVX");
5559   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5560   attributes.set_rex_vex_w_reverted();
5561   int encode = vex_prefix_and_encode(dst->encoding(), src->encoding(), shift->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5562   emit_int8((unsigned char)0xD3);
5563   emit_int8((unsigned char)(0xC0 | encode));
5564 }
5565 
5566 // Shift packed integers arithmetically right by the specified number of bits.
5567 void Assembler::psraw(XMMRegister dst, int shift) {
5568   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5569   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
5570   // XMM4 is for /4 encoding: 66 0F 71 /4 ib
5571   int encode = simd_prefix_and_encode(xmm4, dst, dst, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5572   emit_int8(0x71);
5573   emit_int8((unsigned char)(0xC0 | encode));
5574   emit_int8(shift & 0xFF);
5575 }
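
// Editor's note: the arithmetic forms replicate the sign bit, so a maximal
// count turns each lane into 0 or -1. A hedged sketch for an SSE2-only
// target:
//   __ psraw(xmm2, 15);   // 66 0F 71 E2 0F : each 16-bit lane -> 0x0000/0xFFFF
// Note there is no 64-bit arithmetic shift in this group: vpsraq exists
// only as an EVEX (AVX-512) instruction.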
5576 
5577 void Assembler::psrad(XMMRegister dst, int shift) {
5578   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5579   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5580   // XMM4 is for /4 encoding: 66 0F 72 /4 ib


5650   assert(UseAVX > 0, "requires some form of AVX");
5651   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5652   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5653   emit_int8((unsigned char)0xDB);
5654   emit_int8((unsigned char)(0xC0 | encode));
5655 }
5656 
5657 void Assembler::vpand(XMMRegister dst, XMMRegister nds, Address src, int vector_len) {
5658   assert(UseAVX > 0, "requires some form of AVX");
5659   InstructionMark im(this);
5660   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5661   attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_32bit);
5662   vex_prefix(src, nds->encoding(), dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5663   emit_int8((unsigned char)0xDB);
5664   emit_operand(dst, src);
5665 }
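
// Editor's note (a hedged summary of the EVEX rules): the tuple type
// recorded by set_address_attributes controls disp8*N compression, where an
// 8-bit memory displacement is scaled by N before use. For the EVEX_FV
// (full-vector) tuple N is the vector length in bytes, so e.g. a 64-byte
// offset on a 512-bit vpand still fits the one-byte displacement form.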
5666 
5667 void Assembler::pandn(XMMRegister dst, XMMRegister src) {
5668   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5669   InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5670   attributes.set_rex_vex_w_reverted();
5671   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5672   emit_int8((unsigned char)0xDF);
5673   emit_int8((unsigned char)(0xC0 | encode));
5674 }
5675 
5676 void Assembler::por(XMMRegister dst, XMMRegister src) {
5677   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
5678   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5679   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5680   emit_int8((unsigned char)0xEB);
5681   emit_int8((unsigned char)(0xC0 | encode));
5682 }
5683 
5684 void Assembler::vpor(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
5685   assert(UseAVX > 0, "requires some form of AVX");
5686   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
5687   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
5688   emit_int8((unsigned char)0xEB);
5689   emit_int8((unsigned char)(0xC0 | encode));
5690 }


5940 }
5941 
5942 void Assembler::vextracti32x4(Address dst, XMMRegister src, uint8_t imm8) {
5943   assert(VM_Version::supports_evex(), "");
5944   assert(src != xnoreg, "sanity");
5945   assert(imm8 <= 0x03, "imm8: %u", imm8);
5946   InstructionMark im(this);
5947   InstructionAttr attributes(AVX_512bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5948   attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
5949   vex_prefix(dst, 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5950   emit_int8(0x39);
5951   emit_operand(src, dst);
5952   // 0x00 - extract from bits 127:0
5953   // 0x01 - extract from bits 255:128
5954   // 0x02 - extract from bits 383:256
5955   // 0x03 - extract from bits 511:384
5956   emit_int8(imm8 & 0x03);
5957 }
5958 
5959 void Assembler::vextracti64x2(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5960   assert(VM_Version::supports_avx512dq(), "");
5961   assert(imm8 <= 0x03, "imm8: %u", imm8);
5962   InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5963   int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5964   emit_int8(0x39);
5965   emit_int8((unsigned char)(0xC0 | encode));
5966   // 0x00 - extract from bits 127:0
5967   // 0x01 - extract from bits 255:128
5968   // 0x02 - extract from bits 383:256
5969   // 0x03 - extract from bits 511:384
5970   emit_int8(imm8 & 0x03);
5971 }
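
// Editor's note: a hedged usage sketch (register choices illustrative): to
// pull the second 128-bit lane of a 512-bit register down into an xmm,
//   __ vextracti64x2(xmm0, xmm9, 1);   // xmm0 = bits 255:128 of xmm9
// with the four lane choices enumerated by the imm8 comments above.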
5972 
5973 void Assembler::vextracti64x4(XMMRegister dst, XMMRegister src, uint8_t imm8) {
5974   assert(VM_Version::supports_evex(), "");
5975   assert(imm8 <= 0x01, "imm8: %u", imm8);
5976   InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
5977   int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
5978   emit_int8(0x3B);
5979   emit_int8((unsigned char)(0xC0 | encode));
5980   // 0x00 - extract from lower 256 bits
5981   // 0x01 - extract from upper 256 bits
5982   emit_int8(imm8 & 0x01);


6030 }
6031 
6032 void Assembler::vextractf32x4(Address dst, XMMRegister src, uint8_t imm8) {
6033   assert(VM_Version::supports_evex(), "");
6034   assert(src != xnoreg, "sanity");
6035   assert(imm8 <= 0x03, "imm8: %u", imm8);
6036   InstructionMark im(this);
6037   InstructionAttr attributes(AVX_512bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
6038   attributes.set_address_attributes(/* tuple_type */ EVEX_T4, /* input_size_in_bits */ EVEX_32bit);
6039   vex_prefix(dst, 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6040   emit_int8(0x19);
6041   emit_operand(src, dst);
6042   // 0x00 - extract from bits 127:0
6043   // 0x01 - extract from bits 255:128
6044   // 0x02 - extract from bits 383:256
6045   // 0x03 - extract from bits 511:384
6046   emit_int8(imm8 & 0x03);
6047 }
6048 
6049 void Assembler::vextractf64x2(XMMRegister dst, XMMRegister src, uint8_t imm8) {
6050   assert(VM_Version::supports_avx512dq(), "");
6051   assert(imm8 <= 0x03, "imm8: %u", imm8);
6052   InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
6053   int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6054   emit_int8(0x19);
6055   emit_int8((unsigned char)(0xC0 | encode));
6056   // 0x00 - extract from bits 127:0
6057   // 0x01 - extract from bits 255:128
6058   // 0x02 - extract from bits 383:256
6059   // 0x03 - extract from bits 511:384
6060   emit_int8(imm8 & 0x03);
6061 }
6062 
6063 void Assembler::vextractf64x4(XMMRegister dst, XMMRegister src, uint8_t imm8) {
6064   assert(VM_Version::supports_evex(), "");
6065   assert(imm8 <= 0x01, "imm8: %u", imm8);
6066   InstructionAttr attributes(AVX_512bit, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
6067   int encode = vex_prefix_and_encode(src->encoding(), 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6068   emit_int8(0x1B);
6069   emit_int8((unsigned char)(0xC0 | encode));
6070   // 0x00 - extract from lower 256 bits
6071   // 0x01 - extract from upper 256 bits
6072   emit_int8(imm8 & 0x01);


6157   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6158   emit_int8(0x58);
6159   emit_int8((unsigned char)(0xC0 | encode));
6160 }
6161 
6162 void Assembler::evpbroadcastd(XMMRegister dst, Address src, int vector_len) {
6163   assert(VM_Version::supports_evex(), "");
6164   assert(dst != xnoreg, "sanity");
6165   InstructionMark im(this);
6166   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6167   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
6168   // swap src<->dst for encoding
6169   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6170   emit_int8(0x58);
6171   emit_operand(dst, src);
6172 }
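
// Editor's note: "swap src<->dst for encoding" means the destination
// register travels in the ModRM reg field (the xreg slot of vex_prefix)
// while the Address supplies mod/rm; the operands themselves are not
// reversed. Likewise, "programmed locations" in the surrounding comments
// refers to the destination lanes implied by the vector length (and, for
// masked EVEX forms, a write mask). Hedged example:
//   __ evpbroadcastd(xmm3, Address(rsi, 0), AVX_512bit); // one dword read,
//                                                        // 16 lanes written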
6173 
6174 // duplicate 8-byte integer data from src into programmed locations in dest : requires AVX512VL
6175 void Assembler::evpbroadcastq(XMMRegister dst, XMMRegister src, int vector_len) {
6176   assert(VM_Version::supports_evex(), "");
6177   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6178   attributes.set_rex_vex_w_reverted();
6179   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6180   emit_int8(0x59);
6181   emit_int8((unsigned char)(0xC0 | encode));
6182 }
6183 
6184 void Assembler::evpbroadcastq(XMMRegister dst, Address src, int vector_len) {
6185   assert(VM_Version::supports_evex(), "");
6186   assert(dst != xnoreg, "sanity");
6187   InstructionMark im(this);
6188   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6189   attributes.set_rex_vex_w_reverted();
6190   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
6191   // swap src<->dst for encoding
6192   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6193   emit_int8(0x59);
6194   emit_operand(dst, src);
6195 }
6196 
6197 
6198 // scalar single/double precision replicate
6199 
6200 // duplicate single precision data from src into programmed locations in dest : requires AVX512VL
6201 void Assembler::evpbroadcastss(XMMRegister dst, XMMRegister src, int vector_len) {
6202   assert(VM_Version::supports_evex(), "");
6203   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6204   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6205   emit_int8(0x18);
6206   emit_int8((unsigned char)(0xC0 | encode));
6207 }
6208 
6209 void Assembler::evpbroadcastss(XMMRegister dst, Address src, int vector_len) {
6210   assert(VM_Version::supports_evex(), "");
6211   assert(dst != xnoreg, "sanity");
6212   InstructionMark im(this);
6213   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6214   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
6215   // swap src<->dst for encoding
6216   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6217   emit_int8(0x18);
6218   emit_operand(dst, src);
6219 }
6220 
6221 // duplicate double precision data from src into programmed locations in dest : requires AVX512VL
6222 void Assembler::evpbroadcastsd(XMMRegister dst, XMMRegister src, int vector_len) {
6223   assert(VM_Version::supports_evex(), "");
6224   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6225   attributes.set_rex_vex_w_reverted();
6226   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6227   emit_int8(0x19);
6228   emit_int8((unsigned char)(0xC0 | encode));
6229 }
6230 
6231 void Assembler::evpbroadcastsd(XMMRegister dst, Address src, int vector_len) {
6232   assert(VM_Version::supports_evex(), "");
6233   assert(dst != xnoreg, "sanity");
6234   InstructionMark im(this);
6235   InstructionAttr attributes(vector_len, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6236   attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
6237   attributes.set_rex_vex_w_reverted();
6238   // swap src<->dst for encoding
6239   vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6240   emit_int8(0x19);
6241   emit_operand(dst, src);
6242 }
6243 
6244 
6245 // GPR source broadcast forms
6246 
6247 // duplicate 1-byte integer data from src into programmed locations in dest : requires AVX512BW and AVX512VL
6248 void Assembler::evpbroadcastb(XMMRegister dst, Register src, int vector_len) {
6249   assert(VM_Version::supports_evex(), "");
6250   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6251   attributes.set_is_evex_instruction();
6252   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6253   emit_int8(0x7A);
6254   emit_int8((unsigned char)(0xC0 | encode));
6255 }
6256 
6257 // duplicate 2-byte integer data from src into programmed locations in dest : requires AVX512BW and AVX512VL
6258 void Assembler::evpbroadcastw(XMMRegister dst, Register src, int vector_len) {
6259   assert(VM_Version::supports_evex(), "");
6260   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
6261   attributes.set_is_evex_instruction();
6262   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6263   emit_int8(0x7B);
6264   emit_int8((unsigned char)(0xC0 | encode));
6265 }
6266 
6267 // duplicate 4-byte integer data from src into programmed locations in dest : requires AVX512VL
6268 void Assembler::evpbroadcastd(XMMRegister dst, Register src, int vector_len) {
6269   assert(VM_Version::supports_evex(), "");
6270   InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6271   attributes.set_is_evex_instruction();
6272   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6273   emit_int8(0x7C);
6274   emit_int8((unsigned char)(0xC0 | encode));
6275 }
6276 
6277 // duplicate 8-byte integer data from src into programmed locations in dest : requires AVX512VL
6278 void Assembler::evpbroadcastq(XMMRegister dst, Register src, int vector_len) {
6279   assert(VM_Version::supports_evex(), "");
6280   InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
6281   attributes.set_is_evex_instruction();
6282   int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
6283   emit_int8(0x7C);
6284   emit_int8((unsigned char)(0xC0 | encode));
6285 }
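
// Editor's note: the dword and qword GPR broadcasts intentionally share
// opcode 0x7C; the EVEX.W bit (vex_w true just above) is what selects
// VPBROADCASTQ over VPBROADCASTD, so the repeated emit_int8(0x7C) is not a
// copy/paste slip.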
6286 
6287 
6288 // Carry-Less Multiplication Quadword
6289 void Assembler::pclmulqdq(XMMRegister dst, XMMRegister src, int mask) {
6290   assert(VM_Version::supports_clmul(), "");
6291   InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6292   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6293   emit_int8(0x44);
6294   emit_int8((unsigned char)(0xC0 | encode));
6295   emit_int8((unsigned char)mask);
6296 }
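
// Editor's note on 'mask' (a summary of the PCLMULQDQ definition, hedged
// rather than quoted): bit 0 selects the low or high quadword of dst, bit 4
// that of src, giving the four useful values 0x00, 0x01, 0x10 and 0x11. A
// typical carry-less folding step (register choices illustrative):
//   __ pclmulqdq(xmm1, xmm7, 0x00);   // low(dst)  * low(src)
//   __ pclmulqdq(xmm2, xmm7, 0x11);   // high(dst) * high(src)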
6297 
6298 // Carry-Less Multiplication Quadword
6299 void Assembler::vpclmulqdq(XMMRegister dst, XMMRegister nds, XMMRegister src, int mask) {
6300   assert(VM_Version::supports_avx() && VM_Version::supports_clmul(), "");
6301   InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
6302   int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
6303   emit_int8(0x44);


6918       bool check_register_bank = NOT_IA32(true) IA32_ONLY(false);
6919       if (check_register_bank) {
6920         // check nds_enc and xreg_enc for upper bank usage
6921         if (nds_enc < 16 && xreg_enc < 16) {
6922           attributes->set_is_legacy_mode();
6923         }
6924       } else {
6925         attributes->set_is_legacy_mode();
6926       }
6927     }
6928   }
6929 
6930   _is_managed = false;
6931   if (UseAVX > 2 && !attributes->is_legacy_mode()) {
6933     bool evex_r = (xreg_enc >= 16);
6934     bool evex_v = (nds_enc >= 16);
6935     attributes->set_is_evex_instruction();
6936     evex_prefix(vex_r, vex_b, vex_x, evex_r, evex_v, nds_enc, pre, opc);
6937   } else {
6938     if (UseAVX > 2 && attributes->is_rex_vex_w_reverted()) {
6939       attributes->set_rex_vex_w(false);
6940     }
6941     vex_prefix(vex_r, vex_b, vex_x, nds_enc, pre, opc);
6942   }
6943 }
6944 
6945 int Assembler::vex_prefix_and_encode(int dst_enc, int nds_enc, int src_enc, VexSimdPrefix pre, VexOpcode opc, InstructionAttr *attributes) {
6946   bool vex_r = ((dst_enc & 8) == 8);
6947   bool vex_b = ((src_enc & 8) == 8);
6948   bool vex_x = false;
6949   set_attributes(attributes);
6950   attributes->set_current_assembler(this);
6951   bool check_register_bank = NOT_IA32(true) IA32_ONLY(false);
6952 
6953   // if AVX512VL is unavailable (vector-length support turned off), revert to AVX encodings for vectors smaller than 512 bits
6954   if (UseAVX > 2 && _legacy_mode_vl && attributes->uses_vl()) {
6955     switch (attributes->get_vector_len()) {
6956     case AVX_128bit:
6957     case AVX_256bit:
6958       if (check_register_bank) {
6959         if (dst_enc >= 16 || nds_enc >= 16 || src_enc >= 16) {
6960           // upper-bank registers are in use: keep EVEX and propagate it up through arithmetic instructions to meet RA requirements


6980         // check dst_enc, nds_enc and src_enc for upper bank usage
6981         if (dst_enc < 16 && nds_enc < 16 && src_enc < 16) {
6982           attributes->set_is_legacy_mode();
6983         }
6984       } else {
6985         attributes->set_is_legacy_mode();
6986       }
6987     }
6988   }
6989 
6990   _is_managed = false;
6991   if (UseAVX > 2 && !attributes->is_legacy_mode()) {
6993     bool evex_r = (dst_enc >= 16);
6994     bool evex_v = (nds_enc >= 16);
6995     // can use vex_x as bank extender on rm encoding
6996     vex_x = (src_enc >= 16);
6997     attributes->set_is_evex_instruction();
6998     evex_prefix(vex_r, vex_b, vex_x, evex_r, evex_v, nds_enc, pre, opc);
6999   } else {
7000     if (UseAVX > 2 && attributes->is_rex_vex_w_reverted()) {
7001       attributes->set_rex_vex_w(false);
7002     }
7003     vex_prefix(vex_r, vex_b, vex_x, nds_enc, pre, opc);
7004   }
7005 
7006   // return modrm byte components for operands
7007   return (((dst_enc & 7) << 3) | (src_enc & 7));
7008 }
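
// Editor's note: the value returned above is only the reg/rm pair of the
// ModRM byte; callers complete it themselves, which is why the
// register-register emitters throughout this file follow the pattern
// (sketch mirroring the code above, not a new API):
//   int encode = vex_prefix_and_encode(...);
//   emit_int8((unsigned char)(0xC0 | encode));   // mod = 11b -> reg-reg form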
7009 
7010 
7011 void Assembler::simd_prefix(XMMRegister xreg, XMMRegister nds, Address adr, VexSimdPrefix pre,
7012                             VexOpcode opc, InstructionAttr *attributes) {
7013   if (UseAVX > 0) {
7014     int xreg_enc = xreg->encoding();
7015     int nds_enc = nds->is_valid() ? nds->encoding() : 0;
7016     vex_prefix(adr, nds_enc, xreg_enc, pre, opc, attributes);
7017   } else {
7018     assert((nds == xreg) || (nds == xnoreg), "wrong sse encoding");
7019     rex_prefix(adr, xreg, pre, opc, attributes->is_rex_vex_w());
7020   }
7021 }
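
// Editor's note: when UseAVX == 0 the three-operand (nds) view collapses to
// the destructive two-operand SSE encoding, which has no slot for a third
// register; the assert therefore insists that callers pass nds == xreg
// (read-modify-write) or no nds at all.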
7022 

