1 /*
2 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
7748 vex_prefix(adr, nds_enc, xreg_enc, pre, opc, attributes);
7749 } else {
7750 assert((nds == xreg) || (nds == xnoreg), "wrong sse encoding");
7751 rex_prefix(adr, xreg, pre, opc, attributes->is_rex_vex_w());
7752 }
7753 }
7754
// Emit the instruction prefix for a register-register SIMD operation and
// return the encoding material for the ModRM byte (dst in reg, src in rm).
// With AVX enabled a VEX/EVEX prefix is produced via vex_prefix_and_encode;
// otherwise a legacy REX-prefixed SSE encoding is produced.
int Assembler::simd_prefix_and_encode(XMMRegister dst, XMMRegister nds, XMMRegister src, VexSimdPrefix pre,
                                      VexOpcode opc, InstructionAttr *attributes) {
  int dst_enc = dst->encoding();
  int src_enc = src->encoding();
  if (UseAVX > 0) {
    // AVX path: nds (the extra VEX operand) encodes as 0 when not a valid
    // register, i.e. when the vvvv field is unused by the instruction.
    int nds_enc = nds->is_valid() ? nds->encoding() : 0;
    return vex_prefix_and_encode(dst_enc, nds_enc, src_enc, pre, opc, attributes);
  } else {
    // Legacy SSE is two-operand (destructive), so the nds operand must alias
    // dst or src, or be absent entirely.
    assert((nds == dst) || (nds == src) || (nds == xnoreg), "wrong sse encoding");
    return rex_prefix_and_encode(dst_enc, src_enc, pre, opc, attributes->is_rex_vex_w());
  }
}
7767
7768 void Assembler::cmppd(XMMRegister dst, XMMRegister nds, XMMRegister src, int cop, int vector_len) {
7769 assert(VM_Version::supports_avx(), "");
7770 assert(!VM_Version::supports_evex(), "");
7771 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
7772 int encode = simd_prefix_and_encode(dst, nds, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
7773 emit_int8((unsigned char)0xC2);
7774 emit_int8((unsigned char)(0xC0 | encode));
7775 emit_int8((unsigned char)(0xF & cop));
7776 }
7777
7778 void Assembler::blendvpd(XMMRegister dst, XMMRegister nds, XMMRegister src1, XMMRegister src2, int vector_len) {
7779 assert(VM_Version::supports_avx(), "");
7780 assert(!VM_Version::supports_evex(), "");
7781 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
7782 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src1->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
7783 emit_int8((unsigned char)0x4B);
7784 emit_int8((unsigned char)(0xC0 | encode));
7785 int src2_enc = src2->encoding();
7786 emit_int8((unsigned char)(0xF0 & src2_enc<<4));
7787 }
7788
7789 void Assembler::cmpps(XMMRegister dst, XMMRegister nds, XMMRegister src, int cop, int vector_len) {
7790 assert(VM_Version::supports_avx(), "");
7791 assert(!VM_Version::supports_evex(), "");
7792 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
7793 int encode = simd_prefix_and_encode(dst, nds, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
7794 emit_int8((unsigned char)0xC2);
7795 emit_int8((unsigned char)(0xC0 | encode));
7796 emit_int8((unsigned char)(0xF & cop));
7797 }
7798
7799 void Assembler::blendvps(XMMRegister dst, XMMRegister nds, XMMRegister src1, XMMRegister src2, int vector_len) {
7800 assert(VM_Version::supports_avx(), "");
7801 assert(!VM_Version::supports_evex(), "");
7802 InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
7803 int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src1->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
7804 emit_int8((unsigned char)0x4A);
7805 emit_int8((unsigned char)(0xC0 | encode));
7806 int src2_enc = src2->encoding();
7807 emit_int8((unsigned char)(0xF0 & src2_enc<<4));
7808 }
7809
// VPBLENDD: blend packed dwords from nds/src into dst under the immediate
// mask `imm8` (one selector bit per dword lane). AVX2-only; encoded with a
// VEX 0F 3A prefix, opcode 0x02.
void Assembler::vpblendd(XMMRegister dst, XMMRegister nds, XMMRegister src, int imm8, int vector_len) {
  assert(VM_Version::supports_avx2(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
  emit_int8((unsigned char)0x02);
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
  emit_int8((unsigned char)imm8);             // lane-select mask
}
7818
7819 void Assembler::shlxl(Register dst, Register src1, Register src2) {
7820 assert(VM_Version::supports_bmi2(), "");
7821 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
|
1 /*
2 * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
7748 vex_prefix(adr, nds_enc, xreg_enc, pre, opc, attributes);
7749 } else {
7750 assert((nds == xreg) || (nds == xnoreg), "wrong sse encoding");
7751 rex_prefix(adr, xreg, pre, opc, attributes->is_rex_vex_w());
7752 }
7753 }
7754
// Emit the instruction prefix for a register-register SIMD operation and
// return the encoding material for the ModRM byte (dst in reg, src in rm).
// With AVX enabled a VEX/EVEX prefix is produced via vex_prefix_and_encode;
// otherwise a legacy REX-prefixed SSE encoding is produced.
int Assembler::simd_prefix_and_encode(XMMRegister dst, XMMRegister nds, XMMRegister src, VexSimdPrefix pre,
                                      VexOpcode opc, InstructionAttr *attributes) {
  int dst_enc = dst->encoding();
  int src_enc = src->encoding();
  if (UseAVX > 0) {
    // AVX path: nds (the extra VEX operand) encodes as 0 when not a valid
    // register, i.e. when the vvvv field is unused by the instruction.
    int nds_enc = nds->is_valid() ? nds->encoding() : 0;
    return vex_prefix_and_encode(dst_enc, nds_enc, src_enc, pre, opc, attributes);
  } else {
    // Legacy SSE is two-operand (destructive), so the nds operand must alias
    // dst or src, or be absent entirely.
    assert((nds == dst) || (nds == src) || (nds == xnoreg), "wrong sse encoding");
    return rex_prefix_and_encode(dst_enc, src_enc, pre, opc, attributes->is_rex_vex_w());
  }
}
7767
// VMAXSS: scalar single-precision maximum (opcode 0F 5F, F3 prefix),
// three-operand AVX form: dst = max(nds, src).
void Assembler::vmaxss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5F);
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
7775
// VMAXSD: scalar double-precision maximum (opcode 0F 5F, F2 prefix),
// three-operand AVX form: dst = max(nds, src).
void Assembler::vmaxsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
  assert(VM_Version::supports_avx(), "");
  // vex_w is set only on EVEX-capable CPUs and then marked "reverted" —
  // presumably W-bit handling differs between the VEX and EVEX encodings
  // of this instruction (see InstructionAttr::set_rex_vex_w_reverted).
  InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
  attributes.set_rex_vex_w_reverted();
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5F);
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
7784
// VMINSS: scalar single-precision minimum (opcode 0F 5D, F3 prefix),
// three-operand AVX form: dst = min(nds, src).
void Assembler::vminss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5D);
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
7792
// VMINSD: scalar double-precision minimum (opcode 0F 5D, F2 prefix),
// three-operand AVX form: dst = min(nds, src).
void Assembler::vminsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
  assert(VM_Version::supports_avx(), "");
  // vex_w is set only on EVEX-capable CPUs and then marked "reverted" —
  // presumably W-bit handling differs between the VEX and EVEX encodings
  // of this instruction (see InstructionAttr::set_rex_vex_w_reverted).
  InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
  attributes.set_rex_vex_w_reverted();
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_F2, VEX_OPCODE_0F, &attributes);
  emit_int8(0x5D);
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
}
7801
// CMPPD: packed double-precision compare; the low nibble of the trailing
// imm8 selects the compare predicate `cop`. Legacy-VEX encoded, so the
// vector length may not exceed 256 bits.
void Assembler::cmppd(XMMRegister dst, XMMRegister nds, XMMRegister src, int cop, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  assert(vector_len <= AVX_256bit, "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, nds, src, VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xC2);
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
  emit_int8((unsigned char)(0xF & cop));      // imm8: predicate in low nibble
}
7811
// VBLENDVPD: variable blend of packed doubles; the selector register src2
// is passed via the imm8 high nibble (VEX /is4 operand form, opcode 0x4B).
// Legacy-VEX encoded, so the vector length may not exceed 256 bits.
void Assembler::blendvpd(XMMRegister dst, XMMRegister nds, XMMRegister src1, XMMRegister src2, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  assert(vector_len <= AVX_256bit, "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src1->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
  emit_int8((unsigned char)0x4B);
  emit_int8((unsigned char)(0xC0 | encode));       // ModRM: register-direct form
  int src2_enc = src2->encoding();
  emit_int8((unsigned char)(0xF0 & src2_enc<<4));  // /is4: src2 register in imm8[7:4]
}
7822
// CMPPS: packed single-precision compare; the low nibble of the trailing
// imm8 selects the compare predicate `cop`. Legacy-VEX encoded, so the
// vector length may not exceed 256 bits.
void Assembler::cmpps(XMMRegister dst, XMMRegister nds, XMMRegister src, int cop, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  assert(vector_len <= AVX_256bit, "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = simd_prefix_and_encode(dst, nds, src, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8((unsigned char)0xC2);
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
  emit_int8((unsigned char)(0xF & cop));      // imm8: predicate in low nibble
}
7832
// VBLENDVPS: variable blend of packed floats; the selector register src2
// is passed via the imm8 high nibble (VEX /is4 operand form, opcode 0x4A).
// Legacy-VEX encoded, so the vector length may not exceed 256 bits.
void Assembler::blendvps(XMMRegister dst, XMMRegister nds, XMMRegister src1, XMMRegister src2, int vector_len) {
  assert(VM_Version::supports_avx(), "");
  assert(vector_len <= AVX_256bit, "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src1->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
  emit_int8((unsigned char)0x4A);
  emit_int8((unsigned char)(0xC0 | encode));       // ModRM: register-direct form
  int src2_enc = src2->encoding();
  emit_int8((unsigned char)(0xF0 & src2_enc<<4));  // /is4: src2 register in imm8[7:4]
}
7843
// VPBLENDD: blend packed dwords from nds/src into dst under the immediate
// mask `imm8` (one selector bit per dword lane). AVX2-only; encoded with a
// VEX 0F 3A prefix, opcode 0x02.
void Assembler::vpblendd(XMMRegister dst, XMMRegister nds, XMMRegister src, int imm8, int vector_len) {
  assert(VM_Version::supports_avx2(), "");
  InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
  emit_int8((unsigned char)0x02);
  emit_int8((unsigned char)(0xC0 | encode));  // ModRM: register-direct form
  emit_int8((unsigned char)imm8);             // lane-select mask
}
7852
7853 void Assembler::shlxl(Register dst, Register src1, Register src2) {
7854 assert(VM_Version::supports_bmi2(), "");
7855 InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ true, /* uses_vl */ true);
|