src/share/vm/c1/c1_LIRAssembler.cpp
Webrev side-by-side diff (Sdiff) for src/share/vm/c1/c1_LIRAssembler.cpp — change 6930772. Views available: Index, Unified diffs, Context diffs, Sdiffs, Wdiffs, Patch, New, Old; navigation: Previous File, Next File.

src/share/vm/c1/c1_LIRAssembler.cpp

Print this page




 284       assert(false, "unbound label");
 285     }
 286   }
 287 }
 288 #endif
 289 
 290 //----------------------------------debug info--------------------------------
 291 
 292 
// Records debug info for a branch that is also a safepoint poll site.
// Marks the current pc with a poll-type relocation, flushes any pending
// (non-safepoint) debug info, records this site's debug info, and registers
// its exception handlers (if any) with the compilation.
void LIR_Assembler::add_debug_info_for_branch(CodeEmitInfo* info) {
  // Tag the current pc so the runtime can recognize it as a polling site.
  _masm->code_section()->relocate(pc(), relocInfo::poll_type);
  int pc_offset = code_offset();
  // Pending debug info must be flushed before recording this site's info.
  flush_debug_info(pc_offset);
  info->record_debug_info(compilation()->debug_info_recorder(), pc_offset);
  if (info->exception_handlers() != NULL) {
    compilation()->add_exception_handlers_for_pco(pc_offset, info->exception_handlers());
  }
}
 302 
 303 
// Records debug info for a call site at pc_offset. is_method_handle_invoke
// is forwarded to the debug info recorder so JSR 292 MethodHandle call sites
// are flagged as such. Exception handlers attached to cinfo (if any) are
// registered with the compilation for this pc offset.
void LIR_Assembler::add_call_info(int pc_offset, CodeEmitInfo* cinfo, bool is_method_handle_invoke) {
  // Pending debug info must be flushed before recording this site's info.
  flush_debug_info(pc_offset);
  cinfo->record_debug_info(compilation()->debug_info_recorder(), pc_offset, is_method_handle_invoke);
  if (cinfo->exception_handlers() != NULL) {
    compilation()->add_exception_handlers_for_pco(pc_offset, cinfo->exception_handlers());
  }
}
 311 
 312 static ValueStack* debug_info(Instruction* ins) {
 313   StateSplit* ss = ins->as_StateSplit();
 314   if (ss != NULL) return ss->state();
 315   return ins->lock_stack();
 316 }
 317 
 318 void LIR_Assembler::process_debug_info(LIR_Op* op) {
 319   Instruction* src = op->source();
 320   if (src == NULL)  return;
 321   int pc_offset = code_offset();
 322   if (_pending_non_safepoint == src) {
 323     _pending_non_safepoint_offset = pc_offset;
 324     return;
 325   }
 326   ValueStack* vstack = debug_info(src);


 396   emit_code_stub(stub);
 397 }
 398 
// Records a divide-by-zero check at the current code offset
// (convenience wrapper around add_debug_info_for_div0).
void LIR_Assembler::add_debug_info_for_div0_here(CodeEmitInfo* info) {
  add_debug_info_for_div0(code_offset(), info);
}
 402 
 403 void LIR_Assembler::add_debug_info_for_div0(int pc_offset, CodeEmitInfo* cinfo) {
 404   DivByZeroStub* stub = new DivByZeroStub(pc_offset, cinfo);
 405   emit_code_stub(stub);
 406 }
 407 
// Emits a runtime call: result operand, target address, arguments, temp
// operand and debug info are all taken from the LIR_OpRTCall.
void LIR_Assembler::emit_rtcall(LIR_OpRTCall* op) {
  rt_call(op->result_opr(), op->addr(), op->arguments(), op->tmp(), op->info());
}
 411 
 412 
 413 void LIR_Assembler::emit_call(LIR_OpJavaCall* op) {
 414   verify_oop_map(op->info());
 415 
 416   // JSR 292
 417   // Preserve the SP over MethodHandle call sites.
 418   if (op->is_method_handle_invoke()) {
 419     preserve_SP(op);
 420   }
 421 
 422   if (os::is_MP()) {
 423     // must align calls sites, otherwise they can't be updated atomically on MP hardware
 424     align_call(op->code());
 425   }
 426 
 427   // emit the static call stub stuff out of line
 428   emit_static_call_stub();
 429 
 430   switch (op->code()) {
 431   case lir_static_call:
 432     call(op, relocInfo::static_call_type);
 433     break;
 434   case lir_optvirtual_call:
 435   case lir_dynamic_call:
 436     call(op, relocInfo::opt_virtual_call_type);
 437     break;
 438   case lir_icvirtual_call:
 439     ic_call(op);
 440     break;
 441   case lir_virtual_call:
 442     vtable_call(op);
 443     break;
 444   default: ShouldNotReachHere();
 445   }
 446 
 447   if (op->is_method_handle_invoke()) {
 448     restore_SP(op);
 449   }
 450 
 451 #if defined(X86) && defined(TIERED)
 452   // C2 leave fpu stack dirty clean it
 453   if (UseSSE < 2) {
 454     int i;
 455     for ( i = 1; i <= 7 ; i++ ) {
 456       ffree(i);
 457     }
 458     if (!op->result_opr()->is_float_kind()) {
 459       ffree(0);
 460     }
 461   }
 462 #endif // X86 && TIERED
 463 }
 464 
 465 
// Binds the op's label to the current code position in the assembler.
void LIR_Assembler::emit_opLabel(LIR_OpLabel* op) {
  _masm->bind (*(op->label()));
}
 469 
 470 




 284       assert(false, "unbound label");
 285     }
 286   }
 287 }
 288 #endif
 289 
 290 //----------------------------------debug info--------------------------------
 291 
 292 
// Records debug info for a branch that is also a safepoint poll site.
// Marks the current pc with a poll-type relocation, flushes any pending
// (non-safepoint) debug info, records this site's debug info, and registers
// its exception handlers (if any) with the compilation.
void LIR_Assembler::add_debug_info_for_branch(CodeEmitInfo* info) {
  // Tag the current pc so the runtime can recognize it as a polling site.
  _masm->code_section()->relocate(pc(), relocInfo::poll_type);
  int pc_offset = code_offset();
  // Pending debug info must be flushed before recording this site's info.
  flush_debug_info(pc_offset);
  info->record_debug_info(compilation()->debug_info_recorder(), pc_offset);
  if (info->exception_handlers() != NULL) {
    compilation()->add_exception_handlers_for_pco(pc_offset, info->exception_handlers());
  }
}
 302 
 303 
// Records debug info for a call site at pc_offset: flushes any pending
// (non-safepoint) debug info, records this site's debug info, and registers
// cinfo's exception handlers (if any) with the compilation.
void LIR_Assembler::add_call_info(int pc_offset, CodeEmitInfo* cinfo) {
  // Pending debug info must be flushed before recording this site's info.
  flush_debug_info(pc_offset);
  cinfo->record_debug_info(compilation()->debug_info_recorder(), pc_offset);
  if (cinfo->exception_handlers() != NULL) {
    compilation()->add_exception_handlers_for_pco(pc_offset, cinfo->exception_handlers());
  }
}
 311 
 312 static ValueStack* debug_info(Instruction* ins) {
 313   StateSplit* ss = ins->as_StateSplit();
 314   if (ss != NULL) return ss->state();
 315   return ins->lock_stack();
 316 }
 317 
 318 void LIR_Assembler::process_debug_info(LIR_Op* op) {
 319   Instruction* src = op->source();
 320   if (src == NULL)  return;
 321   int pc_offset = code_offset();
 322   if (_pending_non_safepoint == src) {
 323     _pending_non_safepoint_offset = pc_offset;
 324     return;
 325   }
 326   ValueStack* vstack = debug_info(src);


 396   emit_code_stub(stub);
 397 }
 398 
// Records a divide-by-zero check at the current code offset
// (convenience wrapper around add_debug_info_for_div0).
void LIR_Assembler::add_debug_info_for_div0_here(CodeEmitInfo* info) {
  add_debug_info_for_div0(code_offset(), info);
}
 402 
 403 void LIR_Assembler::add_debug_info_for_div0(int pc_offset, CodeEmitInfo* cinfo) {
 404   DivByZeroStub* stub = new DivByZeroStub(pc_offset, cinfo);
 405   emit_code_stub(stub);
 406 }
 407 
// Emits a runtime call: result operand, target address, arguments, temp
// operand and debug info are all taken from the LIR_OpRTCall.
void LIR_Assembler::emit_rtcall(LIR_OpRTCall* op) {
  rt_call(op->result_opr(), op->addr(), op->arguments(), op->tmp(), op->info());
}
 411 
 412 
 413 void LIR_Assembler::emit_call(LIR_OpJavaCall* op) {
 414   verify_oop_map(op->info());
 415 






 416   if (os::is_MP()) {
 417     // must align calls sites, otherwise they can't be updated atomically on MP hardware
 418     align_call(op->code());
 419   }
 420 
 421   // emit the static call stub stuff out of line
 422   emit_static_call_stub();
 423 
 424   switch (op->code()) {
 425   case lir_static_call:
 426     call(op, relocInfo::static_call_type);
 427     break;
 428   case lir_optvirtual_call:
 429   case lir_dynamic_call:
 430     call(op, relocInfo::opt_virtual_call_type);
 431     break;
 432   case lir_icvirtual_call:
 433     ic_call(op);
 434     break;
 435   case lir_virtual_call:
 436     vtable_call(op);
 437     break;
 438   default: ShouldNotReachHere();
 439   }
 440 




 441 #if defined(X86) && defined(TIERED)
 442   // C2 leave fpu stack dirty clean it
 443   if (UseSSE < 2) {
 444     int i;
 445     for ( i = 1; i <= 7 ; i++ ) {
 446       ffree(i);
 447     }
 448     if (!op->result_opr()->is_float_kind()) {
 449       ffree(0);
 450     }
 451   }
 452 #endif // X86 && TIERED
 453 }
 454 
 455 
// Binds the op's label to the current code position in the assembler.
void LIR_Assembler::emit_opLabel(LIR_OpLabel* op) {
  _masm->bind (*(op->label()));
}
 459 
 460 


src/share/vm/c1/c1_LIRAssembler.cpp
Index Unified diffs Context diffs Sdiffs Wdiffs Patch New Old Previous File Next File