src/cpu/sparc/vm/c1_LIRAssembler_sparc.cpp
Old:

 270     //       debugging
 271     __ br(Assembler::always, false, Assembler::pt, *slow_case->entry());
 272     __ delayed()->nop();
 273   }
 274   // done
 275   __ bind(*slow_case->continuation());
 276 }
 277 
 278 
 279 int LIR_Assembler::emit_exception_handler() {
 280   // if the last instruction is a call (typically to do a throw which
 281   // is coming at the end after block reordering) the return address
 282   // must still point into the code area in order to avoid assertion
 283   // failures when searching for the corresponding bci => add a nop
 284   // (was bug 5/14/1999 - gri)
 285   __ nop();
 286 
 287   // generate code for exception handler
 288   ciMethod* method = compilation()->method();
 289 
 290   address handler_base = __ start_a_stub(exception_handler_size);
 291 
 292   if (handler_base == NULL) {
 293     // not enough space left for the handler
 294     bailout("exception handler overflow");
 295     return -1;
 296   }
 297 
 298   int offset = code_offset();
 299 
 300   __ call(Runtime1::entry_for(Runtime1::handle_exception_from_callee_id), relocInfo::runtime_call_type);
 301   __ delayed()->nop();
 302   __ should_not_reach_here();
 303   guarantee(code_offset() - offset <= exception_handler_size, "overflow");
 304   __ end_a_stub();
 305 
 306   return offset;
 307 }
 308 
 309 
 310 // Emit the code to remove the frame from the stack in the exception
 311 // unwind path.
 312 int LIR_Assembler::emit_unwind_handler() {
 313 #ifndef PRODUCT
 314   if (CommentedAssembly) {
 315     _masm->block_comment("Unwind handler");
 316   }
 317 #endif
 318 
 319   int offset = code_offset();
 320 
 321   // Fetch the exception from TLS and clear out exception related thread state
 322   __ ld_ptr(G2_thread, in_bytes(JavaThread::exception_oop_offset()), O0);
 323   __ st_ptr(G0, G2_thread, in_bytes(JavaThread::exception_oop_offset()));


 358 
 359   // Emit the slow path assembly
 360   if (stub != NULL) {
 361     stub->emit_code(this);
 362   }
 363 
 364   return offset;
 365 }
 366 
 367 
 368 int LIR_Assembler::emit_deopt_handler() {
 369   // if the last instruction is a call (typically to do a throw which
 370   // is coming at the end after block reordering) the return address
 371   // must still point into the code area in order to avoid assertion
 372   // failures when searching for the corresponding bci => add a nop
 373   // (was bug 5/14/1999 - gri)
 374   __ nop();
 375 
 376   // generate code for deopt handler
 377   ciMethod* method = compilation()->method();
 378   address handler_base = __ start_a_stub(deopt_handler_size);
 379   if (handler_base == NULL) {
 380     // not enough space left for the handler
 381     bailout("deopt handler overflow");
 382     return -1;
 383   }
 384 
 385   int offset = code_offset();
 386   AddressLiteral deopt_blob(SharedRuntime::deopt_blob()->unpack());
 387   __ JUMP(deopt_blob, G3_scratch, 0); // sethi;jmp
 388   __ delayed()->nop();
 389   guarantee(code_offset() - offset <= deopt_handler_size, "overflow");
 390   __ end_a_stub();
 391 
 392   return offset;
 393 }
 394 
 395 
 396 void LIR_Assembler::jobject2reg(jobject o, Register reg) {
 397   if (o == NULL) {
 398     __ set(NULL_WORD, reg);
 399   } else {
 400     int oop_index = __ oop_recorder()->find_index(o);
 401     assert(Universe::heap()->is_in_reserved(JNIHandles::resolve(o)), "should be real oop");
 402     RelocationHolder rspec = oop_Relocation::spec(oop_index);
 403     __ set(NULL_WORD, reg, rspec); // Will be set when the nmethod is created
 404   }
 405 }
 406 
 407 
 408 void LIR_Assembler::jobject2reg_with_patching(Register reg, CodeEmitInfo *info) {
 409   // Allocate a new index in table to hold the object once it's been patched


1476   __ ld_ptr(L0, 0, G0);
1477   __ ret();
1478   __ delayed()->restore();
1479 }
1480 
1481 
1482 int LIR_Assembler::safepoint_poll(LIR_Opr tmp, CodeEmitInfo* info) {
1483   __ set((intptr_t)os::get_polling_page(), tmp->as_register());
1484   if (info != NULL) {
1485     add_debug_info_for_branch(info);
1486   }
1487   int offset = __ offset();
1488   __ relocate(relocInfo::poll_type);
1489   __ ld_ptr(tmp->as_register(), 0, G0);
1490   return offset;
1491 }
1492 
1493 
1494 void LIR_Assembler::emit_static_call_stub() {
1495   address call_pc = __ pc();
1496   address stub = __ start_a_stub(call_stub_size);
1497   if (stub == NULL) {
1498     bailout("static call stub overflow");
1499     return;
1500   }
1501 
1502   int start = __ offset();
1503   __ relocate(static_stub_Relocation::spec(call_pc));
1504 
1505   __ set_metadata(NULL, G5);
1506   // must be set to -1 at code generation time
1507   AddressLiteral addrlit(-1);
1508   __ jump_to(addrlit, G3);
1509   __ delayed()->nop();
1510 
1511   assert(__ offset() - start <= call_stub_size, "stub too big");
1512   __ end_a_stub();
1513 }
1514 
1515 
1516 void LIR_Assembler::comp_op(LIR_Condition condition, LIR_Opr opr1, LIR_Opr opr2, LIR_Op2* op) {
1517   if (opr1->is_single_fpu()) {
1518     __ fcmp(FloatRegisterImpl::S, Assembler::fcc0, opr1->as_float_reg(), opr2->as_float_reg());
1519   } else if (opr1->is_double_fpu()) {
1520     __ fcmp(FloatRegisterImpl::D, Assembler::fcc0, opr1->as_double_reg(), opr2->as_double_reg());
1521   } else if (opr1->is_single_cpu()) {
1522     if (opr2->is_constant()) {
1523       switch (opr2->as_constant_ptr()->type()) {
1524         case T_INT:
1525           { jint con = opr2->as_constant_ptr()->as_jint();
1526             if (Assembler::is_simm13(con)) {
1527               __ cmp(opr1->as_register(), con);
1528             } else {
1529               __ set(con, O7);
1530               __ cmp(opr1->as_register(), O7);
1531             }


New:

 270     //       debugging
 271     __ br(Assembler::always, false, Assembler::pt, *slow_case->entry());
 272     __ delayed()->nop();
 273   }
 274   // done
 275   __ bind(*slow_case->continuation());
 276 }
 277 
 278 
 279 int LIR_Assembler::emit_exception_handler() {
 280   // if the last instruction is a call (typically to do a throw which
 281   // is coming at the end after block reordering) the return address
 282   // must still point into the code area in order to avoid assertion
 283   // failures when searching for the corresponding bci => add a nop
 284   // (was bug 5/14/1999 - gri)
 285   __ nop();
 286 
 287   // generate code for exception handler
 288   ciMethod* method = compilation()->method();
 289 
 290   address handler_base = __ start_a_stub(exception_handler_size());
 291 
 292   if (handler_base == NULL) {
 293     // not enough space left for the handler
 294     bailout("exception handler overflow");
 295     return -1;
 296   }
 297 
 298   int offset = code_offset();
 299 
 300   __ call(Runtime1::entry_for(Runtime1::handle_exception_from_callee_id), relocInfo::runtime_call_type);
 301   __ delayed()->nop();
 302   __ should_not_reach_here();
 303   guarantee(code_offset() - offset <= exception_handler_size(), "overflow");
 304   __ end_a_stub();
 305 
 306   return offset;
 307 }
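
The handler follows HotSpot's standard stub-emission protocol: reserve space with start_a_stub(), bail out if the code buffer is full, emit the code, then guarantee that the emitted bytes fit within the declared size limit. A minimal standalone model of that protocol, using invented names (ToyBuffer, emit_toy_handler), not HotSpot's real CodeBuffer API:

#include <cassert>
#include <cstdio>

struct ToyBuffer {
  char code[4096];
  int  pos = 0;

  // Analogous to start_a_stub(): returns NULL when not enough room is
  // left, which is what triggers the bailout("... overflow") path above.
  char* start_a_stub(int required) {
    return ((int)sizeof(code) - pos) < required ? nullptr : code + pos;
  }
  void emit_byte(char b) { code[pos++] = b; }
};

int emit_toy_handler(ToyBuffer& buf, int handler_size) {
  if (buf.start_a_stub(handler_size) == nullptr) {
    return -1;                                      // models bailout(); return -1
  }
  int offset = buf.pos;
  for (int i = 0; i < 8; i++) buf.emit_byte(0x01);  // stand-in instructions
  // Models guarantee(code_offset() - offset <= handler_size, "overflow"):
  assert(buf.pos - offset <= handler_size && "overflow");
  return offset;
}

int main() {
  ToyBuffer buf;
  printf("handler emitted at offset %d\n", emit_toy_handler(buf, 32));
  return 0;
}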
 308 
 309 
 310 // Emit the code to remove the frame from the stack in the exception
 311 // unwind path.
 312 int LIR_Assembler::emit_unwind_handler() {
 313 #ifndef PRODUCT
 314   if (CommentedAssembly) {
 315     _masm->block_comment("Unwind handler");
 316   }
 317 #endif
 318 
 319   int offset = code_offset();
 320 
 321   // Fetch the exception from TLS and clear out exception related thread state
 322   __ ld_ptr(G2_thread, in_bytes(JavaThread::exception_oop_offset()), O0);
 323   __ st_ptr(G0, G2_thread, in_bytes(JavaThread::exception_oop_offset()));


 358 
 359   // Emit the slow path assembly
 360   if (stub != NULL) {
 361     stub->emit_code(this);
 362   }
 363 
 364   return offset;
 365 }
 366 
 367 
 368 int LIR_Assembler::emit_deopt_handler() {
 369   // if the last instruction is a call (typically to do a throw which
 370   // is coming at the end after block reordering) the return address
 371   // must still point into the code area in order to avoid assertion
 372   // failures when searching for the corresponding bci => add a nop
 373   // (was bug 5/14/1999 - gri)
 374   __ nop();
 375 
 376   // generate code for deopt handler
 377   ciMethod* method = compilation()->method();
 378   address handler_base = __ start_a_stub(deopt_handler_size());
 379   if (handler_base == NULL) {
 380     // not enough space left for the handler
 381     bailout("deopt handler overflow");
 382     return -1;
 383   }
 384 
 385   int offset = code_offset();
 386   AddressLiteral deopt_blob(SharedRuntime::deopt_blob()->unpack());
 387   __ JUMP(deopt_blob, G3_scratch, 0); // sethi;jmp
 388   __ delayed()->nop();
 389   guarantee(code_offset() - offset <= deopt_handler_size(), "overflow");
 390   __ end_a_stub();
 391 
 392   return offset;
 393 }
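
A note on the recurring __ delayed()->nop(): SPARC control-transfer instructions (br, call, jmp) have an architectural delay slot, meaning the instruction immediately following the branch executes before control actually transfers. The assembler's delayed() emits into that slot; a nop is used when no useful instruction can be hoisted there.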
 394 
 395 
 396 void LIR_Assembler::jobject2reg(jobject o, Register reg) {
 397   if (o == NULL) {
 398     __ set(NULL_WORD, reg);
 399   } else {
 400     int oop_index = __ oop_recorder()->find_index(o);
 401     assert(Universe::heap()->is_in_reserved(JNIHandles::resolve(o)), "should be real oop");
 402     RelocationHolder rspec = oop_Relocation::spec(oop_index);
 403     __ set(NULL_WORD, reg, rspec); // Will be set when the nmethod is created
 404   }
 405 }
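
jobject2reg materializes the oop as a NULL_WORD placeholder plus an oop_Relocation; the real value is written when the nmethod is created. A standalone sketch of that record-then-patch idea, with invented names (ToyCode, RelocEntry), not HotSpot's relocation machinery:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

struct RelocEntry { size_t offset; };       // where a patchable word sits

struct ToyCode {
  std::vector<uint8_t> bytes;
  std::vector<RelocEntry> relocs;

  // Emit a zero placeholder and record its position, like
  // __ set(NULL_WORD, reg, rspec) recording an oop_Relocation.
  void emit_patchable_word() {
    relocs.push_back({bytes.size()});
    for (int i = 0; i < 8; i++) bytes.push_back(0);
  }
  // Later, once the real value exists, rewrite every recorded site.
  void patch_all(uint64_t value) {
    for (const RelocEntry& r : relocs)
      for (int i = 0; i < 8; i++)
        bytes[r.offset + i] = (uint8_t)(value >> (8 * i));
  }
};

int main() {
  ToyCode code;
  code.emit_patchable_word();                // placeholder at emit time
  code.patch_all(0xdeadbeefULL);             // stand-in for the real oop
  printf("%zu relocation(s) patched\n", code.relocs.size());
  return 0;
}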
 406 
 407 
 408 void LIR_Assembler::jobject2reg_with_patching(Register reg, CodeEmitInfo *info) {
 409   // Allocate a new index in table to hold the object once it's been patched


1476   __ ld_ptr(L0, 0, G0);
1477   __ ret();
1478   __ delayed()->restore();
1479 }
1480 
1481 
1482 int LIR_Assembler::safepoint_poll(LIR_Opr tmp, CodeEmitInfo* info) {
1483   __ set((intptr_t)os::get_polling_page(), tmp->as_register());
1484   if (info != NULL) {
1485     add_debug_info_for_branch(info);
1486   }
1487   int offset = __ offset();
1488   __ relocate(relocInfo::poll_type);
1489   __ ld_ptr(tmp->as_register(), 0, G0);
1490   return offset;
1491 }
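
The poll itself is just a load from the polling page into G0, a discarded result. The page is normally readable, so the poll is cheap; to stop threads at a safepoint the VM protects the page, the load faults, and the signal handler routes the thread into the safepoint machinery. A self-contained Linux/POSIX demonstration of that mechanism (a toy model, not HotSpot code):

#include <csetjmp>
#include <csignal>
#include <cstdio>
#include <sys/mman.h>

static sigjmp_buf poll_trap;
static void on_segv(int) { siglongjmp(poll_trap, 1); }

int main() {
  // The "polling page": normally readable, so a poll is one cheap load.
  char* page = (char*)mmap(nullptr, 4096, PROT_READ,
                           MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  struct sigaction sa = {};
  sa.sa_handler = on_segv;
  sigemptyset(&sa.sa_mask);
  sigaction(SIGSEGV, &sa, nullptr);

  volatile char* poll = page;
  (void)*poll;                        // unarmed poll: load succeeds, no trap
  printf("unarmed poll: no trap\n");

  mprotect(page, 4096, PROT_NONE);    // the VM "arms" the safepoint
  if (sigsetjmp(poll_trap, 1) == 0) {
    (void)*poll;                      // armed poll: the load now faults
  } else {
    printf("armed poll trapped: thread would park at the safepoint\n");
  }
  return 0;
}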
1492 
1493 
1494 void LIR_Assembler::emit_static_call_stub() {
1495   address call_pc = __ pc();
1496   address stub = __ start_a_stub(call_stub_size());
1497   if (stub == NULL) {
1498     bailout("static call stub overflow");
1499     return;
1500   }
1501 
1502   int start = __ offset();
1503   __ relocate(static_stub_Relocation::spec(call_pc));
1504 
1505   __ set_metadata(NULL, G5);
1506   // must be set to -1 at code generation time
1507   AddressLiteral addrlit(-1);
1508   __ jump_to(addrlit, G3);
1509   __ delayed()->nop();
1510 
1511   assert(__ offset() - start <= call_stub_size(), "stub too big");
1512   __ end_a_stub();
1513 }
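
The stub emitted here is a patchable trampoline: set_metadata(NULL, G5) and the -1 jump target are placeholders written at resolution time, when the runtime fills in the real Method* and entry point; static_stub_Relocation::spec(call_pc) is what lets the runtime find this stub from its call site.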
1514 
1515 
1516 void LIR_Assembler::comp_op(LIR_Condition condition, LIR_Opr opr1, LIR_Opr opr2, LIR_Op2* op) {
1517   if (opr1->is_single_fpu()) {
1518     __ fcmp(FloatRegisterImpl::S, Assembler::fcc0, opr1->as_float_reg(), opr2->as_float_reg());
1519   } else if (opr1->is_double_fpu()) {
1520     __ fcmp(FloatRegisterImpl::D, Assembler::fcc0, opr1->as_double_reg(), opr2->as_double_reg());
1521   } else if (opr1->is_single_cpu()) {
1522     if (opr2->is_constant()) {
1523       switch (opr2->as_constant_ptr()->type()) {
1524         case T_INT:
1525           { jint con = opr2->as_constant_ptr()->as_jint();
1526             if (Assembler::is_simm13(con)) {
1527               __ cmp(opr1->as_register(), con);
1528             } else {
1529               __ set(con, O7);
1530               __ cmp(opr1->as_register(), O7);
1531             }

