< prev index next >

src/cpu/x86/vm/assembler_x86.cpp

Print this page




2086 #ifdef ASSERT
2087     intptr_t dist = (intptr_t)entry - ((intptr_t)pc() + short_size);
2088     intptr_t delta = short_branch_delta();
2089     if (delta != 0) {
2090       dist += (dist < 0 ? (-delta) :delta);
2091     }
2092     assert(is8bit(dist), "Dispacement too large for a short jmp");
2093 #endif
2094     intptr_t offs = entry - pc();
2095     emit_int8((unsigned char)0xEB);
2096     emit_int8((offs - short_size) & 0xFF);
2097   } else {
2098     InstructionMark im(this);
2099     L.add_patch_at(code(), locator());
2100     emit_int8((unsigned char)0xEB);
2101     emit_int8(0);
2102   }
2103 }
2104 
// Load the MXCSR control/status register from memory.
// Encoding: 0F AE /2 (LDMXCSR m32).
void Assembler::ldmxcsr( Address src) {
  // SSE availability is only asserted on 32-bit builds (NOT_LP64).
  NOT_LP64(assert(VM_Version::supports_sse(), ""));
  InstructionMark im(this);
  prefix(src);
  emit_int8(0x0F);
  emit_int8((unsigned char)0xAE);
  // Register encoding 2 in the ModRM reg field selects the /2 opcode extension.
  emit_operand(as_Register(2), src);
}
2113 
// Load effective address, 32-bit form (LEA r32, m). Opcode 8D /r.
void Assembler::leal(Register dst, Address src) {
  InstructionMark im(this);
#ifdef _LP64
  // On 64-bit, force 32-bit address-size with the 0x67 (addr32) prefix so the
  // effective address is computed with 32-bit arithmetic.
  emit_int8(0x67); // addr32
  prefix(src, dst);
#endif // LP64
  emit_int8((unsigned char)0x8D);
  emit_operand(dst, src);
}
2123 
// Load fence. Encoding: 0F AE E8 (LFENCE).
void Assembler::lfence() {
  emit_int8(0x0F);
  emit_int8((unsigned char)0xAE);
  emit_int8((unsigned char)0xE8);
}
2129 
2130 void Assembler::lock() {
2131   emit_int8((unsigned char)0xF0);


4399   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4400   emit_int8(0x51);
4401   emit_int8((unsigned char)(0xC0 | encode));
4402 }
4403 
// Set the direction flag (DF). Single-byte opcode 0xFD (STD).
void Assembler::std() {
  emit_int8((unsigned char)0xFD);
}
4407 
// Scalar single-precision square root from memory: F3 0F 51 /r (SQRTSS xmm, m32).
void Assembler::sqrtss(XMMRegister dst, Address src) {
  // SSE availability is only asserted on 32-bit builds (NOT_LP64).
  NOT_LP64(assert(VM_Version::supports_sse(), ""));
  InstructionMark im(this);
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
  // EVEX tuple/size hints describe the 32-bit scalar memory operand.
  attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
  simd_prefix(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
  emit_int8(0x51);
  emit_operand(dst, src);
}
4417 
// Store the MXCSR control/status register to memory.
// Encoding: 0F AE /3 (STMXCSR m32).
void Assembler::stmxcsr( Address dst) {
  // SSE availability is only asserted on 32-bit builds (NOT_LP64).
  NOT_LP64(assert(VM_Version::supports_sse(), ""));
  InstructionMark im(this);
  prefix(dst);
  emit_int8(0x0F);
  emit_int8((unsigned char)0xAE);
  // Register encoding 3 in the ModRM reg field selects the /3 opcode extension.
  emit_operand(as_Register(3), dst);
}
4426 
// SUB m32, imm32 — opcode 0x81 with ModRM extension /5.
void Assembler::subl(Address dst, int32_t imm32) {
  InstructionMark im(this);
  prefix(dst);
  // rbp (encoding 5) supplies the /5 opcode extension in the ModRM reg field.
  emit_arith_operand(0x81, rbp, dst, imm32);
}
4432 
// SUB m32, r32 — opcode 0x29 /r.
void Assembler::subl(Address dst, Register src) {
  InstructionMark im(this);
  prefix(dst, src);
  emit_int8(0x29);
  emit_operand(src, dst);
}
4439 
// SUB r32, imm32 — opcode 0x81, ModRM base 0xE8 (mod=11, /5 extension).
void Assembler::subl(Register dst, int32_t imm32) {
  prefix(dst);
  emit_arith(0x81, 0xE8, dst, imm32);
}
4444 


// Carry-less multiplication: 66 0F 3A 44 /r ib (PCLMULQDQ xmm, xmm, imm8).
// 'mask' is the imm8 selecting which quadwords of each source are multiplied.
void Assembler::pclmulqdq(XMMRegister dst, XMMRegister src, int mask) {
  assert(VM_Version::supports_clmul(), "");
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
  emit_int8(0x44);
  emit_int8((unsigned char)(0xC0 | encode));
  emit_int8((unsigned char)mask);
}
6611 
6612 // Carry-Less Multiplication Quadword
// Carry-Less Multiplication Quadword, VEX-encoded three-operand form
// (VPCLMULQDQ xmm, xmm, xmm, imm8). Requires both AVX and CLMUL.
void Assembler::vpclmulqdq(XMMRegister dst, XMMRegister nds, XMMRegister src, int mask) {
  assert(VM_Version::supports_avx() && VM_Version::supports_clmul(), "");
  InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
  emit_int8(0x44);
  emit_int8((unsigned char)(0xC0 | encode));
  emit_int8((unsigned char)mask);
}
6621 
// Zero the upper bits of all YMM registers: VEX.128 0F 77 (VZEROUPPER).
void Assembler::vzeroupper() {
  assert(VM_Version::supports_avx(), "");
  InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  // Only the VEX prefix is needed; the encode result is unused.
  (void)vex_prefix_and_encode(0, 0, 0, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
  emit_int8(0x77);
}
6628 
6629 #ifndef _LP64
6630 // 32bit only pieces of the assembler
6631 
// CMP r32, imm32 with relocation info for the immediate (32-bit only).
// Opcode 0x81, ModRM 0xF8|reg (mod=11, /7 extension).
void Assembler::cmp_literal32(Register src1, int32_t imm32, RelocationHolder const& rspec) {
  // NO PREFIX AS NEVER 64BIT
  InstructionMark im(this);
  emit_int8((unsigned char)0x81);
  emit_int8((unsigned char)(0xF8 | src1->encoding()));
  emit_data(imm32, rspec, 0);
}
6639 
// CMP m32, imm32 with relocation info for the immediate (32-bit only).
// Opcode 0x81 /7; rdi (encoding 7) supplies the /7 extension.
void Assembler::cmp_literal32(Address src1, int32_t imm32, RelocationHolder const& rspec) {
  // NO PREFIX AS NEVER 64BIT (not even 32bit versions of 64bit regs
  InstructionMark im(this);
  emit_int8((unsigned char)0x81);
  emit_operand(rdi, src1);
  emit_data(imm32, rspec, 0);
}




2086 #ifdef ASSERT
2087     intptr_t dist = (intptr_t)entry - ((intptr_t)pc() + short_size);
2088     intptr_t delta = short_branch_delta();
2089     if (delta != 0) {
2090       dist += (dist < 0 ? (-delta) :delta);
2091     }
2092     assert(is8bit(dist), "Dispacement too large for a short jmp");
2093 #endif
2094     intptr_t offs = entry - pc();
2095     emit_int8((unsigned char)0xEB);
2096     emit_int8((offs - short_size) & 0xFF);
2097   } else {
2098     InstructionMark im(this);
2099     L.add_patch_at(code(), locator());
2100     emit_int8((unsigned char)0xEB);
2101     emit_int8(0);
2102   }
2103 }
2104 
2105 void Assembler::ldmxcsr( Address src) {
2106   if (UseAVX > 0 ) {
2107     InstructionMark im(this);
2108     InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
2109     vex_prefix(src, 0, 0, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
2110     emit_int8((unsigned char)0xAE);
2111     emit_operand(as_Register(2), src);
2112   } else {
2113     NOT_LP64(assert(VM_Version::supports_sse(), ""));
2114     InstructionMark im(this);
2115     prefix(src);
2116     emit_int8(0x0F);
2117     emit_int8((unsigned char)0xAE);
2118     emit_operand(as_Register(2), src);
2119   }
2120 }
2121 
// Load effective address, 32-bit form (LEA r32, m). Opcode 8D /r.
void Assembler::leal(Register dst, Address src) {
  InstructionMark im(this);
#ifdef _LP64
  // On 64-bit, force 32-bit address-size with the 0x67 (addr32) prefix so the
  // effective address is computed with 32-bit arithmetic.
  emit_int8(0x67); // addr32
  prefix(src, dst);
#endif // LP64
  emit_int8((unsigned char)0x8D);
  emit_operand(dst, src);
}
2131 
// Load fence. Encoding: 0F AE E8 (LFENCE).
void Assembler::lfence() {
  emit_int8(0x0F);
  emit_int8((unsigned char)0xAE);
  emit_int8((unsigned char)0xE8);
}
2137 
2138 void Assembler::lock() {
2139   emit_int8((unsigned char)0xF0);


4407   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
4408   emit_int8(0x51);
4409   emit_int8((unsigned char)(0xC0 | encode));
4410 }
4411 
// Set the direction flag (DF). Single-byte opcode 0xFD (STD).
void Assembler::std() {
  emit_int8((unsigned char)0xFD);
}
4415 
// Scalar single-precision square root from memory: F3 0F 51 /r (SQRTSS xmm, m32).
void Assembler::sqrtss(XMMRegister dst, Address src) {
  // SSE availability is only asserted on 32-bit builds (NOT_LP64).
  NOT_LP64(assert(VM_Version::supports_sse(), ""));
  InstructionMark im(this);
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ false);
  // EVEX tuple/size hints describe the 32-bit scalar memory operand.
  attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_32bit);
  simd_prefix(dst, dst, src, VEX_SIMD_F3, VEX_OPCODE_0F, &attributes);
  emit_int8(0x51);
  emit_operand(dst, src);
}
4425 
// Store the MXCSR control/status register to memory.
// Uses the VEX encoding (VSTMXCSR) when AVX is in use, otherwise the legacy
// SSE encoding 0F AE /3 (STMXCSR m32).
void Assembler::stmxcsr( Address dst) {
  if (UseAVX > 0 ) {
    // UseAVX > 0 must imply hardware AVX support for the VEX form.
    assert(VM_Version::supports_avx(), "");
    InstructionMark im(this);
    InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
    vex_prefix(dst, 0, 0, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
    emit_int8((unsigned char)0xAE);
    // Register encoding 3 in the ModRM reg field selects the /3 opcode extension.
    emit_operand(as_Register(3), dst);
  } else {
    // SSE availability is only asserted on 32-bit builds (NOT_LP64).
    NOT_LP64(assert(VM_Version::supports_sse(), ""));
    InstructionMark im(this);
    prefix(dst);
    emit_int8(0x0F);
    emit_int8((unsigned char)0xAE);
    emit_operand(as_Register(3), dst);
  }
}
4443 
// SUB m32, imm32 — opcode 0x81 with ModRM extension /5.
void Assembler::subl(Address dst, int32_t imm32) {
  InstructionMark im(this);
  prefix(dst);
  // rbp (encoding 5) supplies the /5 opcode extension in the ModRM reg field.
  emit_arith_operand(0x81, rbp, dst, imm32);
}
4449 
// SUB m32, r32 — opcode 0x29 /r.
void Assembler::subl(Address dst, Register src) {
  InstructionMark im(this);
  prefix(dst, src);
  emit_int8(0x29);
  emit_operand(src, dst);
}
4456 
// SUB r32, imm32 — opcode 0x81, ModRM base 0xE8 (mod=11, /5 extension).
void Assembler::subl(Register dst, int32_t imm32) {
  prefix(dst);
  emit_arith(0x81, 0xE8, dst, imm32);
}
4461 


// Carry-less multiplication: 66 0F 3A 44 /r ib (PCLMULQDQ xmm, xmm, imm8).
// 'mask' is the imm8 selecting which quadwords of each source are multiplied.
void Assembler::pclmulqdq(XMMRegister dst, XMMRegister src, int mask) {
  assert(VM_Version::supports_clmul(), "");
  InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
  emit_int8(0x44);
  emit_int8((unsigned char)(0xC0 | encode));
  emit_int8((unsigned char)mask);
}
6628 
6629 // Carry-Less Multiplication Quadword
// Carry-Less Multiplication Quadword, VEX-encoded three-operand form
// (VPCLMULQDQ xmm, xmm, xmm, imm8). Requires both AVX and CLMUL.
void Assembler::vpclmulqdq(XMMRegister dst, XMMRegister nds, XMMRegister src, int mask) {
  assert(VM_Version::supports_avx() && VM_Version::supports_clmul(), "");
  InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
  int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_3A, &attributes);
  emit_int8(0x44);
  emit_int8((unsigned char)(0xC0 | encode));
  emit_int8((unsigned char)mask);
}
6638 
// Zero the upper bits of all YMM registers: VEX.128 0F 77 (VZEROUPPER).
// Emits nothing when the CPU does not support/need vzeroupper.
void Assembler::vzeroupper() {
  if (VM_Version::supports_vzeroupper()) {
    InstructionAttr attributes(AVX_128bit, /* vex_w */ false, /* legacy_mode */ true, /* no_mask_reg */ false, /* uses_vl */ false);
    // Only the VEX prefix is needed; the encode result is unused.
    (void)vex_prefix_and_encode(0, 0, 0, VEX_SIMD_NONE, VEX_OPCODE_0F, &attributes);
    emit_int8(0x77);
  }
}
6646 
6647 #ifndef _LP64
6648 // 32bit only pieces of the assembler
6649 
// CMP r32, imm32 with relocation info for the immediate (32-bit only).
// Opcode 0x81, ModRM 0xF8|reg (mod=11, /7 extension).
void Assembler::cmp_literal32(Register src1, int32_t imm32, RelocationHolder const& rspec) {
  // NO PREFIX AS NEVER 64BIT
  InstructionMark im(this);
  emit_int8((unsigned char)0x81);
  emit_int8((unsigned char)(0xF8 | src1->encoding()));
  emit_data(imm32, rspec, 0);
}
6657 
// CMP m32, imm32 with relocation info for the immediate (32-bit only).
// Opcode 0x81 /7; rdi (encoding 7) supplies the /7 extension.
void Assembler::cmp_literal32(Address src1, int32_t imm32, RelocationHolder const& rspec) {
  // NO PREFIX AS NEVER 64BIT (not even 32bit versions of 64bit regs
  InstructionMark im(this);
  emit_int8((unsigned char)0x81);
  emit_operand(rdi, src1);
  emit_data(imm32, rspec, 0);
}


< prev index next >