// NOTE(review): this fragment appears to be a whitespace-mangled side-by-side diff
// dump, not compilable source — the integers 3949..4016 embedded in the text look
// like the original file's line numbers fused into the code, and the bare '|'
// characters separate two parallel revisions of the same region (the right-hand
// copy additionally contains pmaddwd / vpmaddwd / evpdpwssd emitters). Presumably
// this is HotSpot's assembler_x86.cpp — TODO confirm against the upstream file.
// The first function (an EVEX store-form emitter: opcode 0x31, VEX_SIMD_F3,
// 0F 38 map) is truncated at the top; the final popcntl(Register, Register) is
// truncated at the bottom. Code below is reproduced byte-for-byte.
3949 assert(src != xnoreg, "sanity"); 3950 InstructionMark im(this); 3951 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true); 3952 attributes.set_address_attributes(/* tuple_type */ EVEX_QVM, /* input_size_in_bits */ EVEX_NObit); 3953 attributes.set_is_evex_instruction(); 3954 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F_38, &attributes); 3955 emit_int8(0x31); 3956 emit_operand(src, dst); 3957 } 3958 3959 void Assembler::vpmovzxwd(XMMRegister dst, XMMRegister src, int vector_len) { 3960 assert(vector_len == AVX_128bit? VM_Version::supports_avx() : 3961 vector_len == AVX_256bit? VM_Version::supports_avx2() : 3962 vector_len == AVX_512bit? VM_Version::supports_evex() : 0, " "); 3963 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true); 3964 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes); 3965 emit_int8(0x33); 3966 emit_int8((unsigned char)(0xC0 | encode)); 3967 } 3968 3969 // generic 3970 void Assembler::pop(Register dst) { 3971 int encode = prefix_and_encode(dst->encoding()); 3972 emit_int8(0x58 | encode); 3973 } 3974 3975 void Assembler::popcntl(Register dst, Address src) { 3976 assert(VM_Version::supports_popcnt(), "must support"); 3977 InstructionMark im(this); 3978 emit_int8((unsigned char)0xF3); 3979 prefix(src, dst); 3980 emit_int8(0x0F); 3981 emit_int8((unsigned char)0xB8); 3982 emit_operand(dst, src); 3983 } 3984 3985 void Assembler::popcntl(Register dst, Register src) { 3986 assert(VM_Version::supports_popcnt(), "must support"); 3987 emit_int8((unsigned char)0xF3); 3988 int encode = prefix_and_encode(dst->encoding(), src->encoding()); | 3949 assert(src != xnoreg, "sanity"); 3950 InstructionMark im(this); 3951 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, 
// (review: the line break above falls mid-argument-list of InstructionAttr —
// further evidence the original line structure was lost in extraction)
/* uses_vl */ true); 3952 attributes.set_address_attributes(/* tuple_type */ EVEX_QVM, /* input_size_in_bits */ EVEX_NObit); 3953 attributes.set_is_evex_instruction(); 3954 vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F_38, &attributes); 3955 emit_int8(0x31); 3956 emit_operand(src, dst); 3957 } 3958 3959 void Assembler::vpmovzxwd(XMMRegister dst, XMMRegister src, int vector_len) { 3960 assert(vector_len == AVX_128bit? VM_Version::supports_avx() : 3961 vector_len == AVX_256bit? VM_Version::supports_avx2() : 3962 vector_len == AVX_512bit? VM_Version::supports_evex() : 0, " "); 3963 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true); 3964 int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes); 3965 emit_int8(0x33); 3966 emit_int8((unsigned char)(0xC0 | encode)); 3967 } 3968 3969 void Assembler::pmaddwd(XMMRegister dst, XMMRegister src) { 3970 NOT_LP64(assert(VM_Version::supports_sse2(), "")); 3971 InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true); 3972 int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes); 3973 emit_int8((unsigned char)0xF5); 3974 emit_int8((unsigned char)(0xC0 | encode)); 3975 } 3976 3977 void Assembler::vpmaddwd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) { 3978 assert(vector_len == AVX_128bit ? VM_Version::supports_avx() : 3979 (vector_len == AVX_256bit ? VM_Version::supports_avx2() : 3980 (vector_len == AVX_512bit ? 
// (review: break above falls mid-ternary inside vpmaddwd's assert; the right-hand
// revision continues below with the evpdpwssd (AVX512-VNNI, opcode 0x52) addition)
VM_Version::supports_evex() : 0)), ""); 3981 InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ true); 3982 int encode = simd_prefix_and_encode(dst, nds, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes); 3983 emit_int8((unsigned char)0xF5); 3984 emit_int8((unsigned char)(0xC0 | encode)); 3985 } 3986 3987 void Assembler::evpdpwssd(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) { 3988 assert(VM_Version::supports_evex(), ""); 3989 assert(VM_Version::supports_vnni(), "must support vnni"); 3990 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true); 3991 attributes.set_is_evex_instruction(); 3992 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes); 3993 emit_int8(0x52); 3994 emit_int8((unsigned char)(0xC0 | encode)); 3995 } 3996 3997 // generic 3998 void Assembler::pop(Register dst) { 3999 int encode = prefix_and_encode(dst->encoding()); 4000 emit_int8(0x58 | encode); 4001 } 4002 4003 void Assembler::popcntl(Register dst, Address src) { 4004 assert(VM_Version::supports_popcnt(), "must support"); 4005 InstructionMark im(this); 4006 emit_int8((unsigned char)0xF3); 4007 prefix(src, dst); 4008 emit_int8(0x0F); 4009 emit_int8((unsigned char)0xB8); 4010 emit_operand(dst, src); 4011 } 4012 4013 void Assembler::popcntl(Register dst, Register src) { 4014 assert(VM_Version::supports_popcnt(), "must support"); 4015 emit_int8((unsigned char)0xF3); 4016 int encode = prefix_and_encode(dst->encoding(), src->encoding()); |