src/cpu/x86/vm/assembler_x86.cpp
Sdiff for 7119644

Old version:

1620 }
1621 
1622 void Assembler::mov(Register dst, Register src) {
1623   LP64_ONLY(movq(dst, src)) NOT_LP64(movl(dst, src));
1624 }
1625 
1626 void Assembler::movapd(XMMRegister dst, XMMRegister src) {
1627   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1628   int encode = simd_prefix_and_encode(dst, src, VEX_SIMD_66);
1629   emit_byte(0x28);
1630   emit_byte(0xC0 | encode);
1631 }
1632 
1633 void Assembler::movaps(XMMRegister dst, XMMRegister src) {
1634   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1635   int encode = simd_prefix_and_encode(dst, src, VEX_SIMD_NONE);
1636   emit_byte(0x28);
1637   emit_byte(0xC0 | encode);
1638 }
1639 
1640 void Assembler::movb(Register dst, Address src) {
1641   NOT_LP64(assert(dst->has_byte_register(), "must have byte register"));
1642   InstructionMark im(this);
1643   prefix(src, dst, true);
1644   emit_byte(0x8A);
1645   emit_operand(dst, src);
1646 }
1647 
1648 
1649 void Assembler::movb(Address dst, int imm8) {
1650   InstructionMark im(this);
1651   prefix(dst);
1652   emit_byte(0xC6);
1653   emit_operand(rax, dst, 1);
1654   emit_byte(imm8);
1655 }
1656 
1657 
1658 void Assembler::movb(Address dst, Register src) {
1659   assert(src->has_byte_register(), "must have byte register");


1669   emit_byte(0x6E);
1670   emit_byte(0xC0 | encode);
1671 }
1672 
1673 void Assembler::movdl(Register dst, XMMRegister src) {
1674   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1675   // swap src/dst to get correct prefix
1676   int encode = simd_prefix_and_encode(src, dst, VEX_SIMD_66);
1677   emit_byte(0x7E);
1678   emit_byte(0xC0 | encode);
1679 }
1680 
1681 void Assembler::movdl(XMMRegister dst, Address src) {
1682   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1683   InstructionMark im(this);
1684   simd_prefix(dst, src, VEX_SIMD_66);
1685   emit_byte(0x6E);
1686   emit_operand(dst, src);
1687 }
1688 
1689 void Assembler::movdqa(XMMRegister dst, XMMRegister src) {
1690   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1691   int encode = simd_prefix_and_encode(dst, src, VEX_SIMD_66);
1692   emit_byte(0x6F);
1693   emit_byte(0xC0 | encode);
1694 }
1695 
1696 void Assembler::movdqu(XMMRegister dst, Address src) {
1697   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1698   InstructionMark im(this);
1699   simd_prefix(dst, src, VEX_SIMD_F3);
1700   emit_byte(0x6F);
1701   emit_operand(dst, src);
1702 }
1703 
1704 void Assembler::movdqu(XMMRegister dst, XMMRegister src) {
1705   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1706   int encode = simd_prefix_and_encode(dst, src, VEX_SIMD_F3);
1707   emit_byte(0x6F);
1708   emit_byte(0xC0 | encode);
1709 }
1710 
1711 void Assembler::movdqu(Address dst, XMMRegister src) {
1712   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1713   InstructionMark im(this);
1714   simd_prefix(dst, src, VEX_SIMD_F3);
1715   emit_byte(0x7F);
1716   emit_operand(src, dst);
1717 }
1718 
1719 // Uses zero extension on 64bit
1720 
1721 void Assembler::movl(Register dst, int32_t imm32) {
1722   int encode = prefix_and_encode(dst->encoding());
1723   emit_byte(0xB8 | encode);
1724   emit_long(imm32);
1725 }
1726 
1727 void Assembler::movl(Register dst, Register src) {
1728   int encode = prefix_and_encode(dst->encoding(), src->encoding());
1729   emit_byte(0x8B);
1730   emit_byte(0xC0 | encode);
1731 }
1732 
1733 void Assembler::movl(Register dst, Address src) {
1734   InstructionMark im(this);
1735   prefix(src, dst);
1736   emit_byte(0x8B);
1737   emit_operand(dst, src);
1738 }


3095   vex_prefix(dst, nds, src, VEX_SIMD_F3);
3096   emit_byte(0x5C);
3097   emit_operand(dst, src);
3098 }
3099 
3100 void Assembler::vsubss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
3101   assert(VM_Version::supports_avx(), "");
3102   int encode = vex_prefix_and_encode(dst, nds, src, VEX_SIMD_F3);
3103   emit_byte(0x5C);
3104   emit_byte(0xC0 | encode);
3105 }
3106 
3107 void Assembler::vxorpd(XMMRegister dst, XMMRegister nds, Address src) {
3108   assert(VM_Version::supports_avx(), "");
3109   InstructionMark im(this);
3110   vex_prefix(dst, nds, src, VEX_SIMD_66); // 128-bit vector
3111   emit_byte(0x57);
3112   emit_operand(dst, src);
3113 }
3114 
3115 void Assembler::vxorps(XMMRegister dst, XMMRegister nds, Address src) {
3116   assert(VM_Version::supports_avx(), "");
3117   InstructionMark im(this);
3118   vex_prefix(dst, nds, src, VEX_SIMD_NONE); // 128-bit vector
3119   emit_byte(0x57);
3120   emit_operand(dst, src);
3121 }
3122 
3123 
3124 #ifndef _LP64
3125 // 32bit only pieces of the assembler
3126 
3127 void Assembler::cmp_literal32(Register src1, int32_t imm32, RelocationHolder const& rspec) {
3128   // NO PREFIX AS NEVER 64BIT
3129   InstructionMark im(this);
3130   emit_byte(0x81);
3131   emit_byte(0xF8 | src1->encoding());
3132   emit_data(imm32, rspec, 0);
3133 }
3134 
3135 void Assembler::cmp_literal32(Address src1, int32_t imm32, RelocationHolder const& rspec) {
3136   // NO PREFIX AS NEVER 64BIT (not even 32bit versions of 64bit regs)
3137   InstructionMark im(this);
3138   emit_byte(0x81);
3139   emit_operand(rdi, src1);
3140   emit_data(imm32, rspec, 0);
3141 }
3142 

New version:

1620 }
1621 
1622 void Assembler::mov(Register dst, Register src) {
1623   LP64_ONLY(movq(dst, src)) NOT_LP64(movl(dst, src));
1624 }
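
A note on the LP64_ONLY/NOT_LP64 pair used in mov(): these HotSpot macros expand to their argument on exactly one of the two word sizes, so the body compiles down to a single call per platform. A minimal sketch of the assumed expansion (the real definitions live in the shared globalDefinitions headers):

#ifdef _LP64
#define LP64_ONLY(code) code
#define NOT_LP64(code)
#else
#define LP64_ONLY(code)
#define NOT_LP64(code) code
#endif

// With these definitions, Assembler::mov(dst, src) reduces to
// movq(dst, src) on a 64-bit build and to movl(dst, src) on a 32-bit build.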
1625 
1626 void Assembler::movapd(XMMRegister dst, XMMRegister src) {
1627   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1628   int encode = simd_prefix_and_encode(dst, src, VEX_SIMD_66);
1629   emit_byte(0x28);
1630   emit_byte(0xC0 | encode);
1631 }
1632 
1633 void Assembler::movaps(XMMRegister dst, XMMRegister src) {
1634   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1635   int encode = simd_prefix_and_encode(dst, src, VEX_SIMD_NONE);
1636   emit_byte(0x28);
1637   emit_byte(0xC0 | encode);
1638 }
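
The emit_byte(0xC0 | encode) idiom that recurs through this file builds the ModRM byte for the register-register form: mod = 11b selects register-direct addressing, and encode is assumed to already carry the reg/rm pair returned by simd_prefix_and_encode(). A standalone sketch of that composition (the helper name is hypothetical, not HotSpot code):

#include <cstdint>
#include <cstdio>

// Stand-in for the value simd_prefix_and_encode() is assumed to return:
// destination register number in the ModRM reg field (bits 5:3),
// source register number in the r/m field (bits 2:0).
static uint8_t modrm_reg_direct(int dst_enc, int src_enc) {
  return uint8_t(0xC0 | ((dst_enc & 7) << 3) | (src_enc & 7));  // mod = 11b
}

int main() {
  // movaps xmm1, xmm2 encodes as 0F 28 CA; the ModRM byte is the CA printed here.
  printf("%02X\n", modrm_reg_direct(1, 2));
  return 0;
}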
1639 
1640 void Assembler::movlhps(XMMRegister dst, XMMRegister src) {
1641   NOT_LP64(assert(VM_Version::supports_sse(), ""));
1642   int encode = simd_prefix_and_encode(dst, src, src, VEX_SIMD_NONE);
1643   emit_byte(0x16);
1644   emit_byte(0xC0 | encode);
1645 }
1646 
1647 void Assembler::movb(Register dst, Address src) {
1648   NOT_LP64(assert(dst->has_byte_register(), "must have byte register"));
1649   InstructionMark im(this);
1650   prefix(src, dst, true);
1651   emit_byte(0x8A);
1652   emit_operand(dst, src);
1653 }
1654 
1655 
1656 void Assembler::movb(Address dst, int imm8) {
1657   InstructionMark im(this);
1658   prefix(dst);
1659   emit_byte(0xC6);
1660   emit_operand(rax, dst, 1);
1661   emit_byte(imm8);
1662 }
1663 
1664 
1665 void Assembler::movb(Address dst, Register src) {
1666   assert(src->has_byte_register(), "must have byte register");


1676   emit_byte(0x6E);
1677   emit_byte(0xC0 | encode);
1678 }
1679 
1680 void Assembler::movdl(Register dst, XMMRegister src) {
1681   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1682   // swap src/dst to get correct prefix
1683   int encode = simd_prefix_and_encode(src, dst, VEX_SIMD_66);
1684   emit_byte(0x7E);
1685   emit_byte(0xC0 | encode);
1686 }
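
The "swap src/dst" comment reflects how the two MOVD opcodes divide the work: 66 0F 6E reads from r/m into the XMM register named in the ModRM reg field, while 66 0F 7E writes the XMM register named in reg back out to r/m. Since the prefix/encode helpers appear to always place their first argument in the reg field, the XMM operand is passed first for the 0x7E store form even though it is the source. Reference encodings from the Intel SDM, for orientation only:

// 66 0F 6E /r   movd xmm, r/m32   (xmm in ModRM.reg, GPR or memory in ModRM.rm)
// 66 0F 7E /r   movd r/m32, xmm   (same field layout, direction reversed)
//
// Assuming plain SSE2 encoding (no VEX, no REX needed for these registers),
// movdl(rcx, xmm3) would therefore be expected to emit 66 0F 7E D9.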
1687 
1688 void Assembler::movdl(XMMRegister dst, Address src) {
1689   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1690   InstructionMark im(this);
1691   simd_prefix(dst, src, VEX_SIMD_66);
1692   emit_byte(0x6E);
1693   emit_operand(dst, src);
1694 }
1695 
1696 void Assembler::movdl(Address dst, XMMRegister src) {
1697   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1698   InstructionMark im(this);
1699   simd_prefix(dst, src, VEX_SIMD_66);
1700   emit_byte(0x7E);
1701   emit_operand(src, dst);
1702 }
1703 
1704 void Assembler::movdqa(XMMRegister dst, XMMRegister src) {
1705   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1706   int encode = simd_prefix_and_encode(dst, src, VEX_SIMD_66);
1707   emit_byte(0x6F);
1708   emit_byte(0xC0 | encode);
1709 }
1710 
1711 void Assembler::movdqu(XMMRegister dst, Address src) {
1712   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1713   InstructionMark im(this);
1714   simd_prefix(dst, src, VEX_SIMD_F3);
1715   emit_byte(0x6F);
1716   emit_operand(dst, src);
1717 }
1718 
1719 void Assembler::movdqu(XMMRegister dst, XMMRegister src) {
1720   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1721   int encode = simd_prefix_and_encode(dst, src, VEX_SIMD_F3);
1722   emit_byte(0x6F);
1723   emit_byte(0xC0 | encode);
1724 }
1725 
1726 void Assembler::movdqu(Address dst, XMMRegister src) {
1727   NOT_LP64(assert(VM_Version::supports_sse2(), ""));
1728   InstructionMark im(this);
1729   simd_prefix(dst, src, VEX_SIMD_F3);
1730   emit_byte(0x7F);
1731   emit_operand(src, dst);
1732 }
1733 
1734 // Move Unaligned 256bit Vector
1735 void Assembler::vmovdqu(XMMRegister dst, XMMRegister src) {
1736   assert(UseAVX, "");
1737   bool vector256 = true;
1738   int encode = vex_prefix_and_encode(dst, xnoreg, src, VEX_SIMD_F3, vector256);
1739   emit_byte(0x6F);
1740   emit_byte(0xC0 | encode);
1741 }
1742 
1743 void Assembler::vmovdqu(XMMRegister dst, Address src) {
1744   assert(UseAVX, "");
1745   InstructionMark im(this);
1746   bool vector256 = true;
1747   vex_prefix(dst, xnoreg, src, VEX_SIMD_F3, vector256);
1748   emit_byte(0x6F);
1749   emit_operand(dst, src);
1750 }
1751 
1752 void Assembler::vmovdqu(Address dst, XMMRegister src) {
1753   assert(UseAVX, "");
1754   InstructionMark im(this);
1755   bool vector256 = true;
1756   // swap src<->dst for encoding
1757   assert(src != xnoreg, "sanity");
1758   vex_prefix(src, xnoreg, dst, VEX_SIMD_F3, vector256);
1759   emit_byte(0x7F);
1760   emit_operand(src, dst);
1761 }
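
The vector256 flag threaded through these vmovdqu variants is assumed to land in the VEX.L bit, which is what widens the operation from 128 to 256 bits. A sketch of the two-byte VEX form (C5 xx) under that assumption; the field layout is the Intel SDM's, the helper below is not HotSpot code:

#include <cstdint>

// Second byte of the 2-byte VEX prefix (the first byte is always 0xC5).
//   bit 7   : ~R     (inverted extension of ModRM.reg)
//   bits 6:3: ~vvvv  (inverted extra source register, 1111b when unused)
//   bit 2   : L      (0 = 128-bit, 1 = 256-bit)  <- where vector256 is assumed to go
//   bits 1:0: pp     (implied prefix: 0 = none, 1 = 66, 2 = F3, 3 = F2)
static uint8_t vex2_byte(bool rex_r, int vvvv, bool vector256, int pp) {
  return uint8_t(((rex_r ? 0u : 1u) << 7)
               | ((~vvvv & 0xF) << 3)
               | ((vector256 ? 1u : 0u) << 2)
               | (pp & 3));
}

// vmovdqu ymm0, [mem] uses pp = 2 (F3) and L = 1:
// vex2_byte(false, 0, true, 2) == 0xFE, so the instruction starts C5 FE 6F ...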
1762 
1763 // Uses zero extension on 64bit
1764 
1765 void Assembler::movl(Register dst, int32_t imm32) {
1766   int encode = prefix_and_encode(dst->encoding());
1767   emit_byte(0xB8 | encode);
1768   emit_long(imm32);
1769 }
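
The zero-extension note above is what makes the short 0xB8+rd form useful on x86-64 as well: writing a 32-bit register clears bits 63:32, so a small non-negative 64-bit constant can be loaded without a REX.W prefix. Byte sequences for comparison (plain instruction encodings, not calls made in this file):

// B8 78 56 34 12           mov eax, 0x12345678   ; rax = 0x0000000012345678 (5 bytes)
// 48 C7 C0 78 56 34 12     mov rax, 0x12345678   ; same result via REX.W    (7 bytes)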
1770 
1771 void Assembler::movl(Register dst, Register src) {
1772   int encode = prefix_and_encode(dst->encoding(), src->encoding());
1773   emit_byte(0x8B);
1774   emit_byte(0xC0 | encode);
1775 }
1776 
1777 void Assembler::movl(Register dst, Address src) {
1778   InstructionMark im(this);
1779   prefix(src, dst);
1780   emit_byte(0x8B);
1781   emit_operand(dst, src);
1782 }


3139   vex_prefix(dst, nds, src, VEX_SIMD_F3);
3140   emit_byte(0x5C);
3141   emit_operand(dst, src);
3142 }
3143 
3144 void Assembler::vsubss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
3145   assert(VM_Version::supports_avx(), "");
3146   int encode = vex_prefix_and_encode(dst, nds, src, VEX_SIMD_F3);
3147   emit_byte(0x5C);
3148   emit_byte(0xC0 | encode);
3149 }
3150 
3151 void Assembler::vxorpd(XMMRegister dst, XMMRegister nds, Address src) {
3152   assert(VM_Version::supports_avx(), "");
3153   InstructionMark im(this);
3154   vex_prefix(dst, nds, src, VEX_SIMD_66); // 128-bit vector
3155   emit_byte(0x57);
3156   emit_operand(dst, src);
3157 }
3158 
3159 void Assembler::vxorpd(XMMRegister dst, XMMRegister nds, XMMRegister src, bool vector256) {
3160   assert(VM_Version::supports_avx(), "");
3161   int encode = vex_prefix_and_encode(dst, nds, src, VEX_SIMD_66, vector256);
3162   emit_byte(0x57);
3163   emit_byte(0xC0 | encode);
3164 }
3165 
3166 void Assembler::vxorps(XMMRegister dst, XMMRegister nds, Address src) {
3167   assert(VM_Version::supports_avx(), "");
3168   InstructionMark im(this);
3169   vex_prefix(dst, nds, src, VEX_SIMD_NONE); // 128-bit vector
3170   emit_byte(0x57);
3171   emit_operand(dst, src);
3172 }
3173 
3174 void Assembler::vxorps(XMMRegister dst, XMMRegister nds, XMMRegister src, bool vector256) {
3175   assert(VM_Version::supports_avx(), "");
3176   int encode = vex_prefix_and_encode(dst, nds, src, VEX_SIMD_NONE, vector256);
3177   emit_byte(0x57);
3178   emit_byte(0xC0 | encode);
3179 }
3180 
3181 void Assembler::vinsertf128h(XMMRegister dst, XMMRegister nds, XMMRegister src) {
3182   assert(VM_Version::supports_avx(), "");
3183   bool vector256 = true;
3184   int encode = vex_prefix_and_encode(dst, nds, src, VEX_SIMD_66, vector256, VEX_OPCODE_0F_3A);
3185   emit_byte(0x18);
3186   emit_byte(0xC0 | encode);
3187   // 0x00 - insert into lower 128 bits
3188   // 0x01 - insert into upper 128 bits
3189   emit_byte(0x01);
3190 }
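
For orientation, the three-byte VEX sequence vinsertf128h(xmm0, xmm1, xmm2) would be expected to produce is shown below; the trailing 0x01 immediate is the "upper 128 bits" selector the comment above describes. Bytes are derived from the Intel SDM encoding of VINSERTF128, not captured from a running VM:

// C4 E3 75 18 C2 01    vinsertf128 ymm0, ymm1, xmm2, 0x01
//   C4 E3 75 : 3-byte VEX (map 0F 3A, vvvv = ~ymm1, L = 1 for 256-bit, pp = 66)
//   18       : VINSERTF128 opcode
//   C2       : ModRM, register-direct, reg = ymm0, rm = xmm2
//   01       : imm8 selector, 1 = insert into the upper 128 bits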
3191 
3192 void Assembler::vzeroupper() {
3193   assert(VM_Version::supports_avx(), "");
3194   (void)vex_prefix_and_encode(xmm0, xmm0, xmm0, VEX_SIMD_NONE);
3195   emit_byte(0x77);
3196 }
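
vzeroupper itself has a fixed three-byte encoding; it clears the upper 128 bits of every YMM register, which is the documented way to avoid the AVX to legacy-SSE transition penalty before control reaches code compiled without VEX encodings. Expected bytes, assuming the default 128-bit VEX prefix produced by the call above:

// C5 F8 77             vzeroupper
//   C5 F8 : 2-byte VEX, ~R = 1, vvvv unused (1111b), L = 0, pp = none
//   77    : shared VZEROUPPER / VZEROALL opcode (L = 0 selects VZEROUPPER)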
3197 
3198 
3199 #ifndef _LP64
3200 // 32bit only pieces of the assembler
3201 
3202 void Assembler::cmp_literal32(Register src1, int32_t imm32, RelocationHolder const& rspec) {
3203   // NO PREFIX AS NEVER 64BIT
3204   InstructionMark im(this);
3205   emit_byte(0x81);
3206   emit_byte(0xF8 | src1->encoding());
3207   emit_data(imm32, rspec, 0);
3208 }
3209 
3210 void Assembler::cmp_literal32(Address src1, int32_t imm32, RelocationHolder const& rspec) {
3211   // NO PREFIX AS NEVER 64BIT (not even 32bit versions of 64bit regs)
3212   InstructionMark im(this);
3213   emit_byte(0x81);
3214   emit_operand(rdi, src1);
3215   emit_data(imm32, rspec, 0);
3216 }
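
In both cmp_literal32 overloads the 0x81 opcode needs the /7 extension in the ModRM reg field to mean CMP. The register form folds it in directly as 0xF8 | encoding, and the address form appears to pass rdi purely because its register number is 7, not because rdi takes part in the comparison; the same convention shows up earlier where emit_operand(rax, dst, 1) supplies /0 for the 0xC6 MOV r/m8, imm8 form. A sketch of how the pieces line up:

// 81 /7 id    cmp r/m32, imm32                      ; reg field = 7 is the CMP extension
//
// register form:  emit_byte(0xF8 | src1->encoding())  // 0xC0 (mod = 11) | 7 << 3 | reg
// memory form:    emit_operand(rdi, src1)             // rdi->encoding() == 7 fills the /7 slot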
3217 

