  }
  else {
    emit_opcode(cbuf, 0x8D);      // LEA  reg,[SP+offset]
    emit_rm(cbuf, 0x1, reg, 0x04);
    emit_rm(cbuf, 0x0, 0x04, ESP_enc);
    emit_d8(cbuf, offset);
  }
}

uint BoxLockNode::size(PhaseRegAlloc *ra_) const {
  int offset = ra_->reg2offset(in_RegMask(0).find_first_elem());
  if( offset >= 128 ) {
    return 7;
  }
  else {
    return 4;
  }
}
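// The 7-vs-4 split above mirrors the x86 LEA encoding: an ESP base always
// forces a SIB byte, so LEA reg,[ESP+disp8] is opcode + ModRM + SIB + disp8
// (4 bytes), while an offset of 128 or more needs the disp32 form (7 bytes).
// A minimal standalone sketch of that arithmetic (hypothetical helper, not
// part of this file):
static int lea_esp_size(int offset) {
  const int base = 3;  // opcode 0x8D + ModRM + SIB (SIB required for ESP base)
  return base + ((-128 <= offset && offset <= 127) ? 1 : 4);  // disp8 : disp32
}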
1258
1259 //=============================================================================
1260
// emit call stub, compiled java to interpreter
void emit_java_to_interp(CodeBuffer &cbuf ) {
  // Stub is fixed up when the corresponding call is converted from calling
  // compiled code to calling interpreted code.
  // mov rbx,0
  // jmp -1

  address mark = cbuf.insts_mark();  // get mark within main instrs section

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base =
  __ start_a_stub(Compile::MAX_stubs_size);
  if (base == NULL) return;  // CodeBuffer::expand failed
  // static stub relocation stores the instruction address of the call
  __ relocate(static_stub_Relocation::spec(mark), RELOC_IMM32);
  // static stub relocation also tags the Method* in the code-stream.
  __ mov_metadata(rbx, (Metadata*)NULL);  // method is zapped till fixup time
  // This is recognized as unresolved by relocs/nativeInst/ic code
  __ jump(RuntimeAddress(__ pc()));

  __ end_a_stub();
  // Update current stubs pointer and restore insts_end.
}
// size of call stub, compiled java to interpreter
uint size_java_to_interp() {
  return 10;  // movl (5 bytes) + jmp (5 bytes)
}
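// Those 10 bytes are MOV EBX,imm32 (0xBB plus a 4-byte immediate) followed by
// JMP rel32 (0xE9 plus a 4-byte displacement). An illustrative byte-level view
// of the freshly emitted stub, with both 32-bit fields zapped pending fixup
// (layout sketch only, not part of this file):
static const unsigned char java_to_interp_stub_image[10] = {
  0xBB, 0x00, 0x00, 0x00, 0x00,  // mov ebx, imm32  (Method*, zero until fixup)
  0xE9, 0x00, 0x00, 0x00, 0x00   // jmp rel32       (retargeted at resolve time)
};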
// relocation entries for call stub, compiled java to interpreter
uint reloc_java_to_interp() {
  return 4;  // 3 in emit_java_to_interp + 1 in Java_Static_Call
}

//=============================================================================
#ifndef PRODUCT
void MachUEPNode::format( PhaseRegAlloc *ra_, outputStream* st ) const {
  st->print_cr("CMP    EAX,[ECX+4]\t# Inline cache check");
  st->print_cr("\tJNE    SharedRuntime::handle_ic_miss_stub");
  st->print_cr("\tNOP");
  st->print_cr("\tNOP");
  if( !OptoBreakpoint )
    st->print_cr("\tNOP");
}
#endif
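// In C-level terms the unverified entry point compares the klass the caller
// cached in EAX against the receiver's klass word at [ECX+4] and bails to the
// IC-miss handler on mismatch. A sketch under those assumptions (the types
// below are stand-ins, not HotSpot's):
struct SketchKlass;
struct SketchOop { void* _mark; SketchKlass* _klass; };  // klass word at offset 4

// Mirrors the CMP/JNE pair: true means the inline cache hit, and execution
// may fall through into the verified entry point.
static bool uep_ic_check(SketchOop* receiver /* ECX */, SketchKlass* cached /* EAX */) {
  return receiver->_klass == cached;
}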

void MachUEPNode::emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const {
  MacroAssembler masm(&cbuf);
#ifdef ASSERT
  uint insts_size = cbuf.insts_size();
#endif
  masm.cmpptr(rax, Address(rcx, oopDesc::klass_offset_in_bytes()));
  masm.jump_cc(Assembler::notEqual,
               RuntimeAddress(SharedRuntime::get_ic_miss_stub()));
  /* WARNING these NOPs are critical so that verified entry point is properly
     aligned for patching by NativeJump::patch_verified_entry() */
    MacroAssembler masm(&cbuf);
    masm.fldcw(ExternalAddress(StubRoutines::addr_fpu_cntrl_wrd_24()));
  }
%}

enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
  // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
  // who we intended to call.
  cbuf.set_insts_mark();
  $$$emit8$primary;
  if (!_method) {
    emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
                   runtime_call_Relocation::spec(), RELOC_IMM32 );
  } else if (_optimized_virtual) {
    emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
                   opt_virtual_call_Relocation::spec(), RELOC_IMM32 );
  } else {
    emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
                   static_call_Relocation::spec(), RELOC_IMM32 );
  }
  if (_method) {  // Emit stub for static call
    emit_java_to_interp(cbuf);
  }
%}
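// The "- 4" in each branch above reflects that CALL rel32 is relative to the
// end of the instruction: insts_end() already points past the emitted opcode
// byte, so the 4 bytes of the imm32 field itself must still be subtracted.
// A hypothetical restatement of that displacement math (illustration only):
static int call_rel32_disp(unsigned target, unsigned pc_after_opcode) {
  return (int)(target - pc_after_opcode - 4);  // target - address of next insn
}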

enc_class Java_Dynamic_Call (method meth) %{    // JAVA DYNAMIC CALL
  MacroAssembler _masm(&cbuf);
  __ ic_call((address)$meth$$method);
%}

enc_class Java_Compiled_Call (method meth) %{    // JAVA COMPILED CALL
  int disp = in_bytes(Method::from_compiled_offset());
  assert( -128 <= disp && disp <= 127, "compiled_code_offset isn't small");

  // CALL *[EAX+in_bytes(Method::from_compiled_code_entry_point_offset())]
  cbuf.set_insts_mark();
  $$$emit8$primary;
  emit_rm(cbuf, 0x01, $secondary, EAX_enc );  // R/M byte
  emit_d8(cbuf, disp);                        // Displacement

%}
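// The two bytes emitted after the primary opcode encode CALL *[EAX+disp8]:
// ModRM mod=01 announces an 8-bit displacement, the reg field carries the
// /digit from $secondary, and r/m=0 selects EAX. A hedged byte-level
// re-encoding (assuming the primary opcode is 0xFF and the digit is /2,
// the CALL extension; illustration only):
static void sketch_call_mem_eax(unsigned char* buf, signed char disp) {
  buf[0] = 0xFF;                           // group-5 opcode (assumed $primary)
  buf[1] = (0x1 << 6) | (0x2 << 3) | 0x0;  // ModRM 0x50: mod=01, /2=CALL, r/m=EAX
  buf[2] = (unsigned char)disp;            // disp8 = Method::from_compiled_offset()
}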