2135 emit_int8((unsigned char)(0xC0 | encode));
2136 }
2137
// Load byte: emits MOV r8, r/m8 (opcode 0x8A) moving the byte at src into dst.
2138 void Assembler::movb(Register dst, Address src) {
2139 NOT_LP64(assert(dst->has_byte_register(), "must have byte register"));  // 32-bit mode: only AL..BH are byte-addressable
2140 InstructionMark im(this);
2141 prefix(src, dst, true);  // 'true' selects byte-instruction prefix handling
2142 emit_int8((unsigned char)0x8A);
2143 emit_operand(dst, src);
2144 }
2145
// MOVDDUP: duplicate the low double-precision element of src into dst
// (F2 0F 12 /r). When EVEX is available the instruction is emitted in its
// 512-bit EVEX form (vector_len AVX_512bit, W=1), otherwise legacy 128-bit.
2146 void Assembler::movddup(XMMRegister dst, XMMRegister src) {
2147 NOT_LP64(assert(VM_Version::supports_sse3(), ""));
2148 int vector_len = VM_Version::supports_evex() ? AVX_512bit : AVX_128bit;
2149 InstructionAttr attributes(vector_len, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2150 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2151 emit_int8(0x12);
// Cast added for consistency with every other ModRM byte emission in this
// file; avoids an implicit narrowing of the int expression to int8.
2152 emit_int8((unsigned char)(0xC0 | encode));
2153 }
2154
// KMOVQ k1, k2: copy a 64-bit opmask register to another (VEX.W1 0F 0x90).
2155 void Assembler::kmovql(KRegister dst, KRegister src) {
2156 NOT_LP64(assert(VM_Version::supports_evex(), ""));
2157 InstructionAttr attributes(AVX_128bit, /* rex_w */ true, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2158 int encode = kreg_prefix_and_encode(dst, knoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2159 emit_int8((unsigned char)0x90);
2160 emit_int8((unsigned char)(0xC0 | encode));
2161 }
2162
// KMOVQ k, m64: load a 64-bit opmask register from memory (opcode 0x90).
2163 void Assembler::kmovql(KRegister dst, Address src) {
2164 NOT_LP64(assert(VM_Version::supports_evex(), ""));
2165 InstructionMark im(this);
2166 InstructionAttr attributes(AVX_128bit, /* vex_w */ true, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2167 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2168 emit_int8((unsigned char)0x90);
2169 emit_operand((Register)dst, src);  // cast only reinterprets the encoding number for ModRM emission
2170 }
2171
// KMOVQ m64, k: store a 64-bit opmask register to memory (opcode 0x90 store form).
2172 void Assembler::kmovql(Address dst, KRegister src) {
2173 NOT_LP64(assert(VM_Version::supports_evex(), ""));
2174 InstructionMark im(this);
2175 InstructionAttr attributes(AVX_128bit, /* vex_w */ true, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2176 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2177 emit_int8((unsigned char)0x90);
2178 emit_operand((Register)src, dst);  // src supplies the reg field of the ModRM byte
2179 }
2180
// KMOVQ k, r64: move a general-purpose register into an opmask register (opcode 0x92).
// The F2 prefix / W-bit form is selected only when full AVX512BW k-register
// width is available (_legacy_mode_bw clear).
2181 void Assembler::kmovql(KRegister dst, Register src) {
2182 NOT_LP64(assert(VM_Version::supports_evex(), ""));
2183 VexSimdPrefix pre = !_legacy_mode_bw ? VEX_SIMD_F2 : VEX_SIMD_NONE;
2184 InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_bw, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2185 int encode = kreg_prefix_and_encode(dst, knoreg, src, pre, VEX_OPCODE_0F, &attributes);
2186 emit_int8((unsigned char)0x92);
2187 emit_int8((unsigned char)(0xC0 | encode));
2188 }
2189
// KMOVD k, r32: move a 32-bit GPR into an opmask register (opcode 0x92, W=0).
2190 void Assembler::kmovdl(KRegister dst, Register src) {
2191 NOT_LP64(assert(VM_Version::supports_evex(), ""));
2192 VexSimdPrefix pre = !_legacy_mode_bw ? VEX_SIMD_F2 : VEX_SIMD_NONE;  // F2 form needs AVX512BW-width k-regs
2193 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2194 int encode = kreg_prefix_and_encode(dst, knoreg, src, pre, VEX_OPCODE_0F, &attributes);
2195 emit_int8((unsigned char)0x92);
2196 emit_int8((unsigned char)(0xC0 | encode));
2197 }
2198
// KMOVW k, r32: move the low 16 bits of a GPR into an opmask register (opcode 0x92, no SIMD prefix).
2199 void Assembler::kmovwl(KRegister dst, Register src) {
2200 NOT_LP64(assert(VM_Version::supports_evex(), ""));
2201 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2202 int encode = kreg_prefix_and_encode(dst, knoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2203 emit_int8((unsigned char)0x92);
2204 emit_int8((unsigned char)(0xC0 | encode));
2205 }
2206
// Store immediate byte: MOV m8, imm8 (opcode 0xC6 /0 ib).
2207 void Assembler::movb(Address dst, int imm8) {
2208 InstructionMark im(this);
2209 prefix(dst);
2210 emit_int8((unsigned char)0xC6);
2211 emit_operand(rax, dst, 1);  // rax encodes the /0 opcode extension; trailing '1' accounts for the imm8 that follows
2212 emit_int8(imm8);
2213 }
2214
2215
// Store byte: MOV m8, r8 (opcode 0x88).
2216 void Assembler::movb(Address dst, Register src) {
2217 assert(src->has_byte_register(), "must have byte register");
2218 InstructionMark im(this);
2219 prefix(dst, src, true);  // 'true' selects byte-instruction prefix handling
2220 emit_int8((unsigned char)0x88);
2221 emit_operand(src, dst);
2222 }
2223
2320 InstructionAttr attributes(AVX_256bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
2321 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2322 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2323 emit_int8(0x6F);
2324 emit_operand(dst, src);
2325 }
2326
// VMOVDQU m256, ymm: store an unaligned 256-bit vector (F3 0F 0x7F store form).
2327 void Assembler::vmovdqu(Address dst, XMMRegister src) {
2328 assert(UseAVX > 0, "");
2329 InstructionMark im(this);
2330 InstructionAttr attributes(AVX_256bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2331 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2332 // swap src<->dst for encoding
2333 assert(src != xnoreg, "sanity");
2334 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2335 emit_int8(0x7F);
2336 emit_operand(src, dst);  // store form: register operand goes in the ModRM reg field
2337 }
2338
2339 // Move Unaligned EVEX enabled Vector (programmable : 8,16,32,64)
// Move Unaligned EVEX enabled Vector (programmable : 8,16,32,64)
// EVEX VMOVDQU32 reg-reg form (F3 0F 0x6F), length selected by vector_len.
2340 void Assembler::evmovdqul(XMMRegister dst, XMMRegister src, int vector_len) {
2341 assert(VM_Version::supports_evex(), "");
2342 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2343 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2344 emit_int8(0x6F);
2345 emit_int8((unsigned char)(0xC0 | encode));
2346 }
2347
// EVEX VMOVDQU32 load form: dst <- unaligned vector at src (F3 0F 0x6F).
2348 void Assembler::evmovdqul(XMMRegister dst, Address src, int vector_len) {
2349 assert(VM_Version::supports_evex(), "");
2350 InstructionMark im(this);
2351 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
2352 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);  // full-vector tuple for disp8 compression
2353 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2354 emit_int8(0x6F);
2355 emit_operand(dst, src);
2356 }
2357
2358 void Assembler::evmovdqul(Address dst, XMMRegister src, int vector_len) {
2359 assert(VM_Version::supports_evex(), "");
3016
// PCMPESTRI xmm, m128, imm8 (66 0F 3A 0x61): packed explicit-length string
// compare; the result index is written to ECX (implicit), flags are set.
3017 void Assembler::pcmpestri(XMMRegister dst, Address src, int imm8) {
3018 assert(VM_Version::supports_sse4_2(), "");
3019 InstructionMark im(this);
3020 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3021 simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3022 emit_int8(0x61);
3023 emit_operand(dst, src);
3024 emit_int8(imm8);  // comparison-mode control byte
3025 }
3026
// PCMPESTRI xmm, xmm, imm8 (66 0F 3A 0x61) register-register form.
3027 void Assembler::pcmpestri(XMMRegister dst, XMMRegister src, int imm8) {
3028 assert(VM_Version::supports_sse4_2(), "");
3029 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3030 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3031 emit_int8(0x61);
3032 emit_int8((unsigned char)(0xC0 | encode));
3033 emit_int8(imm8);  // comparison-mode control byte
3034 }
3035
// PCMPEQW xmm, xmm (66 0F 0x75): packed compare of 16-bit words for equality;
// dst is also the first source operand.
3036 void Assembler::pcmpeqw(XMMRegister dst, XMMRegister src) {
3037 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3038 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3039 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3040 emit_int8(0x75);
3041 emit_int8((unsigned char)(0xC0 | encode));
3042 }
3043
// VPCMPEQW: AVX packed 16-bit word equality compare (66 0F 0x75).
// This revision explicitly excludes EVEX targets (see second assert).
3044 void Assembler::vpcmpeqw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3045 assert(VM_Version::supports_avx(), "");
3046 assert(!VM_Version::supports_evex(), "");
3047 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3048 int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv field; 0 when no second source register
3049 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3050 emit_int8(0x75);
3051 emit_int8((unsigned char)(0xC0 | encode));
3052 }
3053
// PMOVMSKB r32, xmm (66 0F 0xD7): extract the sign bits of each byte into a GPR mask.
3054 void Assembler::pmovmskb(Register dst, XMMRegister src) {
3055 assert(VM_Version::supports_sse2(), "");
3056 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3057 int encode = simd_prefix_and_encode(as_XMMRegister(dst->encoding()), xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);  // GPR reuses the XMM encoding path
3058 emit_int8((unsigned char)0xD7);
3059 emit_int8((unsigned char)(0xC0 | encode));
3060 }
3061
// VPMOVMSKB r32, ymm (AVX2, 66 0F 0xD7): 256-bit byte sign-mask extraction.
3062 void Assembler::vpmovmskb(Register dst, XMMRegister src) {
3063 assert(VM_Version::supports_avx2(), "");
3064 InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3065 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3066 emit_int8((unsigned char)0xD7);
3067 emit_int8((unsigned char)(0xC0 | encode));
3068 }
3069
3070 void Assembler::pextrd(Register dst, XMMRegister src, int imm8) {
3071 assert(VM_Version::supports_sse4_1(), "");
3072 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ true, /* uses_vl */ false);
3073 int encode = simd_prefix_and_encode(as_XMMRegister(dst->encoding()), xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3122 }
3123
// PMOVZXBW xmm, m64 (66 0F 38 0x30): zero-extend 8 packed bytes to 8 words.
3124 void Assembler::pmovzxbw(XMMRegister dst, Address src) {
3125 assert(VM_Version::supports_sse4_1(), "");
3126 InstructionMark im(this);
3127 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ false);
3128 attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit);  // half-vector memory tuple: source is half the destination width
3129 simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3130 emit_int8(0x30);
3131 emit_operand(dst, src);
3132 }
3133
// PMOVZXBW xmm, xmm (66 0F 38 0x30) register form.
3134 void Assembler::pmovzxbw(XMMRegister dst, XMMRegister src) {
3135 assert(VM_Version::supports_sse4_1(), "");
3136 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ false);
3137 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3138 emit_int8(0x30);
3139 emit_int8((unsigned char)(0xC0 | encode));
3140 }
3141
// VPMOVZXBW ymm, m128 (66 0F 38 0x30): zero-extend 16 bytes to 16 words,
// fixed 256-bit form in this revision.
3142 void Assembler::vpmovzxbw(XMMRegister dst, Address src) {
3143 assert(VM_Version::supports_avx(), "");
3144 InstructionMark im(this);
3145 assert(dst != xnoreg, "sanity");
3146 InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ false);
3147 attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit);
3148 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3149 emit_int8(0x30);
3150 emit_operand(dst, src);
3151 }
3152
3153 // generic
// POP r64/r32: single-byte opcode 0x58+rd with the register folded into the opcode.
3154 void Assembler::pop(Register dst) {
3155 int encode = prefix_and_encode(dst->encoding());
3156 emit_int8(0x58 | encode);
3157 }
3158
3159 void Assembler::popcntl(Register dst, Address src) {
3160 assert(VM_Version::supports_popcnt(), "must support");
3161 InstructionMark im(this);
3162 emit_int8((unsigned char)0xF3);
3163 prefix(src, dst);
3164 emit_int8(0x0F);
3165 emit_int8((unsigned char)0xB8);
3166 emit_operand(dst, src);
|
2135 emit_int8((unsigned char)(0xC0 | encode));
2136 }
2137
// Load byte: emits MOV r8, r/m8 (opcode 0x8A) moving the byte at src into dst.
2138 void Assembler::movb(Register dst, Address src) {
2139 NOT_LP64(assert(dst->has_byte_register(), "must have byte register"));  // 32-bit mode: only AL..BH are byte-addressable
2140 InstructionMark im(this);
2141 prefix(src, dst, true);  // 'true' selects byte-instruction prefix handling
2142 emit_int8((unsigned char)0x8A);
2143 emit_operand(dst, src);
2144 }
2145
// MOVDDUP: duplicate the low double-precision element of src into dst
// (F2 0F 12 /r). When EVEX is available the instruction is emitted in its
// 512-bit EVEX form (vector_len AVX_512bit, W=1), otherwise legacy 128-bit.
2146 void Assembler::movddup(XMMRegister dst, XMMRegister src) {
2147 NOT_LP64(assert(VM_Version::supports_sse3(), ""));
2148 int vector_len = VM_Version::supports_evex() ? AVX_512bit : AVX_128bit;
2149 InstructionAttr attributes(vector_len, /* rex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2150 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2151 emit_int8(0x12);
// Cast added for consistency with every other ModRM byte emission in this
// file; avoids an implicit narrowing of the int expression to int8.
2152 emit_int8((unsigned char)(0xC0 | encode));
2153 }
2154
// KMOVW k, r32: move the low 16 bits of a GPR into an opmask register (opcode 0x92, no SIMD prefix).
2155 void Assembler::kmovwl(KRegister dst, Register src) {
2156 NOT_LP64(assert(VM_Version::supports_evex(), ""));
2157 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2158 int encode = kreg_prefix_and_encode(dst, knoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2159 emit_int8((unsigned char)0x92);
2160 emit_int8((unsigned char)(0xC0 | encode));
2161 }
2162
// KMOVD k, r32: move a 32-bit GPR into an opmask register (opcode 0x92, W=0).
2163 void Assembler::kmovdl(KRegister dst, Register src) {
2164 NOT_LP64(assert(VM_Version::supports_evex(), ""));
2165 VexSimdPrefix pre = !_legacy_mode_bw ? VEX_SIMD_F2 : VEX_SIMD_NONE;  // F2 form needs AVX512BW-width k-regs
2166 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2167 int encode = kreg_prefix_and_encode(dst, knoreg, src, pre, VEX_OPCODE_0F, &attributes);
2168 emit_int8((unsigned char)0x92);
2169 emit_int8((unsigned char)(0xC0 | encode));
2170 }
2171
// KMOVQ k1, k2: copy a 64-bit opmask register to another (VEX.W1 0F 0x90).
2172 void Assembler::kmovql(KRegister dst, KRegister src) {
2173 NOT_LP64(assert(VM_Version::supports_evex(), ""));
2174 InstructionAttr attributes(AVX_128bit, /* rex_w */ true, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2175 int encode = kreg_prefix_and_encode(dst, knoreg, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2176 emit_int8((unsigned char)0x90);
2177 emit_int8((unsigned char)(0xC0 | encode));
2178 }
2179
// KMOVQ k, m64: load a 64-bit opmask register from memory (opcode 0x90).
2180 void Assembler::kmovql(KRegister dst, Address src) {
2181 NOT_LP64(assert(VM_Version::supports_evex(), ""));
2182 InstructionMark im(this);
2183 InstructionAttr attributes(AVX_128bit, /* vex_w */ true, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2184 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2185 emit_int8((unsigned char)0x90);
2186 emit_operand((Register)dst, src);  // cast only reinterprets the encoding number for ModRM emission
2187 }
2188
// KMOVQ m64, k: store a 64-bit opmask register to memory (opcode 0x90 store form).
2189 void Assembler::kmovql(Address dst, KRegister src) {
2190 NOT_LP64(assert(VM_Version::supports_evex(), ""));
2191 InstructionMark im(this);
2192 InstructionAttr attributes(AVX_128bit, /* vex_w */ true, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2193 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2194 emit_int8((unsigned char)0x90);
2195 emit_operand((Register)src, dst);  // src supplies the reg field of the ModRM byte
2196 }
2197
// KMOVQ k, r64: move a general-purpose register into an opmask register (opcode 0x92).
// F2 prefix / W-bit form is used only with full AVX512BW k-register width.
2198 void Assembler::kmovql(KRegister dst, Register src) {
2199 NOT_LP64(assert(VM_Version::supports_evex(), ""));
2200 VexSimdPrefix pre = !_legacy_mode_bw ? VEX_SIMD_F2 : VEX_SIMD_NONE;
2201 InstructionAttr attributes(AVX_128bit, /* rex_w */ !_legacy_mode_bw, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2202 int encode = kreg_prefix_and_encode(dst, knoreg, src, pre, VEX_OPCODE_0F, &attributes);
2203 emit_int8((unsigned char)0x92);
2204 emit_int8((unsigned char)(0xC0 | encode));
2205 }
2206
2207 // This instruction produces ZF or CF flags
2208 // KORTESTB k1, k2 (66 0F 0x98): OR two 8-bit opmask registers and set flags.
2208 void Assembler::kortestbl(KRegister src1, KRegister src2) {
2209 NOT_LP64(assert(VM_Version::supports_avx512dq(), ""));
2210 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2211 int encode = kreg_prefix_and_encode(src1, knoreg, src2, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2212 emit_int8((unsigned char)0x98);
2213 emit_int8((unsigned char)(0xC0 | encode));
2214 }
2215
2216 // This instruction produces ZF or CF flags
2217 // KORTESTW k1, k2 (0F 0x98, no SIMD prefix): OR two 16-bit opmasks and set flags.
2217 void Assembler::kortestwl(KRegister src1, KRegister src2) {
2218 NOT_LP64(assert(VM_Version::supports_evex(), ""));
2219 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2220 int encode = kreg_prefix_and_encode(src1, knoreg, src2, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2221 emit_int8((unsigned char)0x98);
2222 emit_int8((unsigned char)(0xC0 | encode));
2223 }
2224
2225 // This instruction produces ZF or CF flags
2226 // KORTESTD k1, k2 (66 + W1 0F 0x98): OR two 32-bit opmasks and set flags.
2226 void Assembler::kortestdl(KRegister src1, KRegister src2) {
2227 NOT_LP64(assert(VM_Version::supports_avx512bw(), ""));
2228 InstructionAttr attributes(AVX_128bit, /* rex_w */ true, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2229 int encode = kreg_prefix_and_encode(src1, knoreg, src2, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
2230 emit_int8((unsigned char)0x98);
2231 emit_int8((unsigned char)(0xC0 | encode));
2232 }
2233
2234 // This instruction produces ZF or CF flags
2235 // KORTESTQ k1, k2 (W1 0F 0x98, no SIMD prefix): OR two 64-bit opmasks and set flags.
2235 void Assembler::kortestql(KRegister src1, KRegister src2) {
2236 NOT_LP64(assert(VM_Version::supports_avx512bw(), ""));
2237 InstructionAttr attributes(AVX_128bit, /* rex_w */ true, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ false);
2238 int encode = kreg_prefix_and_encode(src1, knoreg, src2, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2239 emit_int8((unsigned char)0x98);
2240 emit_int8((unsigned char)(0xC0 | encode));
2241 }
2242
// Store immediate byte: MOV m8, imm8 (opcode 0xC6 /0 ib).
2243 void Assembler::movb(Address dst, int imm8) {
2244 InstructionMark im(this);
2245 prefix(dst);
2246 emit_int8((unsigned char)0xC6);
2247 emit_operand(rax, dst, 1);  // rax encodes the /0 opcode extension; trailing '1' accounts for the imm8 that follows
2248 emit_int8(imm8);
2249 }
2250
2251
// Store byte: MOV m8, r8 (opcode 0x88).
2252 void Assembler::movb(Address dst, Register src) {
2253 assert(src->has_byte_register(), "must have byte register");
2254 InstructionMark im(this);
2255 prefix(dst, src, true);  // 'true' selects byte-instruction prefix handling
2256 emit_int8((unsigned char)0x88);
2257 emit_operand(src, dst);
2258 }
2259
2356 InstructionAttr attributes(AVX_256bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
2357 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2358 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2359 emit_int8(0x6F);
2360 emit_operand(dst, src);
2361 }
2362
// VMOVDQU m256, ymm: store an unaligned 256-bit vector (F3 0F 0x7F store form).
2363 void Assembler::vmovdqu(Address dst, XMMRegister src) {
2364 assert(UseAVX > 0, "");
2365 InstructionMark im(this);
2366 InstructionAttr attributes(AVX_256bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2367 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2368 // swap src<->dst for encoding
2369 assert(src != xnoreg, "sanity");
2370 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2371 emit_int8(0x7F);
2372 emit_operand(src, dst);  // store form: register operand goes in the ModRM reg field
2373 }
2374
2375 // Move Unaligned EVEX enabled Vector (programmable : 8,16,32,64)
2376 // EVEX VMOVDQU8 reg-reg form (F2 0F 0x6F, W=0), byte-granular masking variant.
2376 void Assembler::evmovdqub(XMMRegister dst, XMMRegister src, int vector_len) {
2377 assert(VM_Version::supports_evex(), "");
2378 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ true);
2379 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2380 emit_int8(0x6F);
2381 emit_int8((unsigned char)(0xC0 | encode));
2382 }
2383
// EVEX VMOVDQU8 load form: dst <- unaligned byte vector at src (F2 0F 0x6F, W=0).
2384 void Assembler::evmovdqub(XMMRegister dst, Address src, int vector_len) {
2385 assert(VM_Version::supports_evex(), "");
2386 InstructionMark im(this);
2387 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2388 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);  // full-vector tuple for disp8 compression
2389 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2390 emit_int8(0x6F);
2391 emit_operand(dst, src);
2392 }
2393
// EVEX VMOVDQU8 store form: unaligned byte vector at dst <- src (F2 0F 0x7F, W=0).
2394 void Assembler::evmovdqub(Address dst, XMMRegister src, int vector_len) {
2395 assert(VM_Version::supports_evex(), "");
2396 assert(src != xnoreg, "sanity");
2397 InstructionMark im(this);
2398 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ true);
2399 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2400 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2401 emit_int8(0x7F);
2402 emit_operand(src, dst);  // store form: register operand goes in the ModRM reg field
2403 }
2404
// EVEX VMOVDQU16 reg-reg form (F2 0F 0x6F, W=1), word-granular masking variant.
2405 void Assembler::evmovdquw(XMMRegister dst, XMMRegister src, int vector_len) {
2406 assert(VM_Version::supports_evex(), "");
2407 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ true);
2408 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2409 emit_int8(0x6F);
2410 emit_int8((unsigned char)(0xC0 | encode));
2411 }
2412
// EVEX VMOVDQU16 load form: dst <- unaligned word vector at src (F2 0F 0x6F, W=1).
2413 void Assembler::evmovdquw(XMMRegister dst, Address src, int vector_len) {
2414 assert(VM_Version::supports_evex(), "");
2415 InstructionMark im(this);
2416 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true);
2417 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2418 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2419 emit_int8(0x6F);
2420 emit_operand(dst, src);
2421 }
2422
// EVEX VMOVDQU16 store form: unaligned word vector at dst <- src (F2 0F 0x7F, W=1).
2423 void Assembler::evmovdquw(Address dst, XMMRegister src, int vector_len) {
2424 assert(VM_Version::supports_evex(), "");
2425 assert(src != xnoreg, "sanity");
2426 InstructionMark im(this);
2427 InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ true);
2428 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2429 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
2430 emit_int8(0x7F);
2431 emit_operand(src, dst);  // store form: register operand goes in the ModRM reg field
2432 }
// EVEX VMOVDQU32 reg-reg form (F3 0F 0x6F), length selected by vector_len.
2433 void Assembler::evmovdqul(XMMRegister dst, XMMRegister src, int vector_len) {
2434 assert(VM_Version::supports_evex(), "");
2435 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
2436 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2437 emit_int8(0x6F);
2438 emit_int8((unsigned char)(0xC0 | encode));
2439 }
2440
// EVEX VMOVDQU32 load form: dst <- unaligned vector at src (F3 0F 0x6F).
2441 void Assembler::evmovdqul(XMMRegister dst, Address src, int vector_len) {
2442 assert(VM_Version::supports_evex(), "");
2443 InstructionMark im(this);
2444 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
2445 attributes.set_address_attributes(/* tuple_type */ EVEX_FVM, /* input_size_in_bits */ EVEX_NObit);
2446 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
2447 emit_int8(0x6F);
2448 emit_operand(dst, src);
2449 }
2450
2451 void Assembler::evmovdqul(Address dst, XMMRegister src, int vector_len) {
2452 assert(VM_Version::supports_evex(), "");
3109
// PCMPESTRI xmm, m128, imm8 (66 0F 3A 0x61): packed explicit-length string
// compare; the result index is written to ECX (implicit), flags are set.
3110 void Assembler::pcmpestri(XMMRegister dst, Address src, int imm8) {
3111 assert(VM_Version::supports_sse4_2(), "");
3112 InstructionMark im(this);
3113 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3114 simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3115 emit_int8(0x61);
3116 emit_operand(dst, src);
3117 emit_int8(imm8);  // comparison-mode control byte
3118 }
3119
// PCMPESTRI xmm, xmm, imm8 (66 0F 3A 0x61) register-register form.
3120 void Assembler::pcmpestri(XMMRegister dst, XMMRegister src, int imm8) {
3121 assert(VM_Version::supports_sse4_2(), "");
3122 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3123 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3124 emit_int8(0x61);
3125 emit_int8((unsigned char)(0xC0 | encode));
3126 emit_int8(imm8);  // comparison-mode control byte
3127 }
3128
3129 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
3130 // PCMPEQB xmm, xmm (66 0F 0x74): packed byte equality compare; dst is also the first source.
3130 void Assembler::pcmpeqb(XMMRegister dst, XMMRegister src) {
3131 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3132 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3133 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3134 emit_int8(0x74);
3135 emit_int8((unsigned char)(0xC0 | encode));
3136 }
3137
3138 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
3139 // VPCMPEQB: AVX three-operand packed byte equality compare (66 0F 0x74).
3139 void Assembler::vpcmpeqb(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3140 assert(VM_Version::supports_avx(), "");
3141 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3142 int nds_enc = nds->is_valid() ? nds->encoding() : 0;  // vvvv field; 0 when no second source register
3143 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3144 emit_int8(0x74);
3145 emit_int8((unsigned char)(0xC0 | encode));
3146 }
3147
3148 // In this context, kdst is written the mask used to process the equal components
3149 // EVEX VPCMPEQB k, xmm, xmm: byte equality compare writing an opmask register.
3149 void Assembler::evpcmpeqb(KRegister kdst, XMMRegister nds, XMMRegister src, int vector_len) {
3150 assert(VM_Version::supports_avx512bw(), "");
3151 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3152 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
3153 int encode = vex_prefix_and_encode(kdst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);  // destination is the k-register encoding
3154 emit_int8(0x74);
3155 emit_int8((unsigned char)(0xC0 | encode));
3156 }
3157
3158 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
3159 // PCMPEQW xmm, xmm (66 0F 0x75): packed 16-bit word equality compare.
3159 void Assembler::pcmpeqw(XMMRegister dst, XMMRegister src) {
3160 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3161 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3162 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3163 emit_int8(0x75);
3164 emit_int8((unsigned char)(0xC0 | encode));
3165 }
3166
3167 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
3168 // VPCMPEQW: AVX three-operand packed word equality compare (66 0F 0x75).
3168 void Assembler::vpcmpeqw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3169 assert(VM_Version::supports_avx(), "");
3170 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3171 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
3172 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3173 emit_int8(0x75);
3174 emit_int8((unsigned char)(0xC0 | encode));
3175 }
3176
3177 // In this context, kdst is written the mask used to process the equal components
3178 // EVEX VPCMPEQW k, xmm, xmm: word equality compare writing an opmask register.
3178 void Assembler::evpcmpeqw(KRegister kdst, XMMRegister nds, XMMRegister src, int vector_len) {
3179 assert(VM_Version::supports_avx512bw(), "");
3180 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3181 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
3182 int encode = vex_prefix_and_encode(kdst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3183 emit_int8(0x75);
3184 emit_int8((unsigned char)(0xC0 | encode));
3185 }
3186
3187 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
3188 // PCMPEQD xmm, xmm (66 0F 0x76): packed 32-bit dword equality compare.
3188 void Assembler::pcmpeqd(XMMRegister dst, XMMRegister src) {
3189 NOT_LP64(assert(VM_Version::supports_sse2(), ""));
3190 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3191 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3192 emit_int8(0x76);
3193 emit_int8((unsigned char)(0xC0 | encode));
3194 }
3195
3196 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
3197 // VPCMPEQD: AVX three-operand packed dword equality compare (66 0F 0x76).
3197 void Assembler::vpcmpeqd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3198 assert(VM_Version::supports_avx(), "");
3199 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3200 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
3201 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3202 emit_int8(0x76);
3203 emit_int8((unsigned char)(0xC0 | encode));
3204 }
3205
3206 // In this context, kdst is written the mask used to process the equal components
3207 // EVEX VPCMPEQD k, xmm, xmm: dword equality compare writing an opmask register.
3207 void Assembler::evpcmpeqd(KRegister kdst, XMMRegister nds, XMMRegister src, int vector_len) {
3208 assert(VM_Version::supports_evex(), "");
3209 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3210 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
3211 int encode = vex_prefix_and_encode(kdst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3212 emit_int8(0x76);
3213 emit_int8((unsigned char)(0xC0 | encode));
3214 }
3215
3216 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
3217 // PCMPEQQ xmm, xmm (66 0F 38 0x29): packed 64-bit qword equality compare (SSE4.1).
3217 void Assembler::pcmpeqq(XMMRegister dst, XMMRegister src) {
3218 NOT_LP64(assert(VM_Version::supports_sse4_1(), ""));
3219 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3220 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3221 emit_int8(0x29);
3222 emit_int8((unsigned char)(0xC0 | encode));
3223 }
3224
3225 // In this context, the dst vector contains the components that are equal, non equal components are zeroed in dst
3226 // VPCMPEQQ: AVX three-operand packed qword equality compare (66 0F 38 0x29).
3226 void Assembler::vpcmpeqq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
3227 assert(VM_Version::supports_avx(), "");
3228 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3229 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
3230 int encode = vex_prefix_and_encode(dst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3231 emit_int8(0x29);
3232 emit_int8((unsigned char)(0xC0 | encode));
3233 }
3234
3235 // In this context, kdst is written the mask used to process the equal components
3236 // EVEX VPCMPEQQ k, xmm, xmm (W=1): qword equality compare writing an opmask register.
3236 void Assembler::evpcmpeqq(KRegister kdst, XMMRegister nds, XMMRegister src, int vector_len) {
3237 assert(VM_Version::supports_evex(), "");
3238 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3239 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
3240 int encode = vex_prefix_and_encode(kdst->encoding(), nds_enc, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3241 emit_int8(0x29);
3242 emit_int8((unsigned char)(0xC0 | encode));
3243 }
3244
3245 // In this context, kdst is written the mask used to process the equal components
3246 // EVEX VPCMPEQQ k, xmm, m: memory-source qword equality compare writing an opmask.
3246 void Assembler::evpcmpeqq(KRegister kdst, XMMRegister nds, Address src, int vector_len) {
3247 assert(VM_Version::supports_evex(), "");
3248 InstructionMark im(this);
3249 InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
3250 attributes.set_address_attributes(/* tuple_type */ EVEX_FV, /* input_size_in_bits */ EVEX_64bit);  // full-vector tuple, 64-bit element for disp8 scaling
3251 int nds_enc = nds->is_valid() ? nds->encoding() : 0;
3252 int dst_enc = kdst->encoding();
3253 vex_prefix(src, nds_enc, dst_enc, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3254 emit_int8(0x29);
3255 emit_operand(as_Register(dst_enc), src);  // k-register number reused as the ModRM reg field
3256 }
3257
// PMOVMSKB r32, xmm (66 0F 0xD7): extract the sign bits of each byte into a GPR mask.
3258 void Assembler::pmovmskb(Register dst, XMMRegister src) {
3259 assert(VM_Version::supports_sse2(), "");
3260 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3261 int encode = simd_prefix_and_encode(as_XMMRegister(dst->encoding()), xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);  // GPR reuses the XMM encoding path
3262 emit_int8((unsigned char)0xD7);
3263 emit_int8((unsigned char)(0xC0 | encode));
3264 }
3265
// VPMOVMSKB r32, ymm (AVX2, 66 0F 0xD7): 256-bit byte sign-mask extraction.
3266 void Assembler::vpmovmskb(Register dst, XMMRegister src) {
3267 assert(VM_Version::supports_avx2(), "");
3268 InstructionAttr attributes(AVX_256bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
3269 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
3270 emit_int8((unsigned char)0xD7);
3271 emit_int8((unsigned char)(0xC0 | encode));
3272 }
3273
3274 void Assembler::pextrd(Register dst, XMMRegister src, int imm8) {
3275 assert(VM_Version::supports_sse4_1(), "");
3276 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_dq, /* no_mask_reg */ true, /* uses_vl */ false);
3277 int encode = simd_prefix_and_encode(as_XMMRegister(dst->encoding()), xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
3326 }
3327
// PMOVZXBW xmm, m64 (66 0F 38 0x30): zero-extend 8 packed bytes to 8 words.
3328 void Assembler::pmovzxbw(XMMRegister dst, Address src) {
3329 assert(VM_Version::supports_sse4_1(), "");
3330 InstructionMark im(this);
3331 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ false);
3332 attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit);  // half-vector memory tuple: source is half the destination width
3333 simd_prefix(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3334 emit_int8(0x30);
3335 emit_operand(dst, src);
3336 }
3337
// PMOVZXBW xmm, xmm (66 0F 38 0x30) register form.
3338 void Assembler::pmovzxbw(XMMRegister dst, XMMRegister src) {
3339 assert(VM_Version::supports_sse4_1(), "");
3340 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ false);
3341 int encode = simd_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3342 emit_int8(0x30);
3343 emit_int8((unsigned char)(0xC0 | encode));
3344 }
3345
// VPMOVZXBW dst, m (66 0F 38 0x30): zero-extend packed bytes to words; this
// revision takes an explicit vector_len instead of hard-coding 256-bit.
3346 void Assembler::vpmovzxbw(XMMRegister dst, Address src, int vector_len) {
3347 assert(VM_Version::supports_avx(), "");
3348 InstructionMark im(this);
3349 assert(dst != xnoreg, "sanity");
3350 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ false, /* uses_vl */ false);
3351 attributes.set_address_attributes(/* tuple_type */ EVEX_HVM, /* input_size_in_bits */ EVEX_NObit);
3352 vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
3353 emit_int8(0x30);
3354 emit_operand(dst, src);
3355 }
3356
3357 // generic
3358 // POP r64/r32: single-byte opcode 0x58+rd with the register folded into the opcode.
3358 void Assembler::pop(Register dst) {
3359 int encode = prefix_and_encode(dst->encoding());
3360 emit_int8(0x58 | encode);
3361 }
3362
3363 void Assembler::popcntl(Register dst, Address src) {
3364 assert(VM_Version::supports_popcnt(), "must support");
3365 InstructionMark im(this);
3366 emit_int8((unsigned char)0xF3);
3367 prefix(src, dst);
3368 emit_int8(0x0F);
3369 emit_int8((unsigned char)0xB8);
3370 emit_operand(dst, src);
|