
src/hotspot/share/c1/c1_LIRAssembler.cpp


Old version:

 464   case lir_optvirtual_call:
 465     call(op, relocInfo::opt_virtual_call_type);
 466     break;
 467   case lir_icvirtual_call:
 468     ic_call(op);
 469     break;
 470   case lir_virtual_call:
 471     vtable_call(op);
 472     break;
 473   default:
 474     fatal("unexpected op code: %s", op->name());
 475     break;
 476   }
 477 
 478   // JSR 292
 479   // Record if this method has MethodHandle invokes.
 480   if (op->is_method_handle_invoke()) {
 481     compilation()->set_has_method_handle_invokes(true);
 482   }
 483 
 484 #if defined(X86) && defined(TIERED)
 485   // C2 leaves the FPU stack dirty; clean it up
 486   if (UseSSE < 2) {
 487     int i;
 488     for (i = 1; i <= 7; i++) {
 489       ffree(i);
 490     }
 491     if (!op->result_opr()->is_float_kind()) {
 492       ffree(0);
 493     }
 494   }
 495 #endif // X86 && TIERED
 496 }
 497 
 498 
 499 void LIR_Assembler::emit_opLabel(LIR_OpLabel* op) {
 500   _masm->bind (*(op->label()));
 501 }
 502 
 503 
 504 void LIR_Assembler::emit_op1(LIR_Op1* op) {


 515       }
 516       break;
 517 
 518     case lir_roundfp: {
 519       LIR_OpRoundFP* round_op = op->as_OpRoundFP();
 520       roundfp_op(round_op->in_opr(), round_op->tmp(), round_op->result_opr(), round_op->pop_fpu_stack());
 521       break;
 522     }
 523 
 524     case lir_return:
 525       return_op(op->in_opr());
 526       break;
 527 
 528     case lir_safepoint:
 529       if (compilation()->debug_info_recorder()->last_pc_offset() == code_offset()) {
 530         _masm->nop();
 531       }
 532       safepoint_poll(op->in_opr(), op->info());
 533       break;
 534 

 535     case lir_fxch:
 536       fxch(op->in_opr()->as_jint());
 537       break;
 538 
 539     case lir_fld:
 540       fld(op->in_opr()->as_jint());
 541       break;
 542 
 543     case lir_ffree:
 544       ffree(op->in_opr()->as_jint());
 545       break;
 546 
 547     case lir_branch:
 548       break;
 549 
 550     case lir_push:
 551       push(op->in_opr());
 552       break;
 553 
 554     case lir_pop:
 555       pop(op->in_opr());
 556       break;
 557 
 558     case lir_leal:
 559       leal(op->in_opr(), op->result_opr(), op->patch_code(), op->info());
 560       break;
 561 
 562     case lir_null_check: {
 563       ImplicitNullCheckStub* stub = add_debug_info_for_null_check_here(op->info());
 564 
 565       if (op->in_opr()->is_single_cpu()) {


 619       // init offsets
 620       offsets()->set_value(CodeOffsets::OSR_Entry, _masm->offset());
 621       _masm->align(CodeEntryAlignment);
 622       if (needs_icache(compilation()->method())) {
 623         check_icache();
 624       }
 625       offsets()->set_value(CodeOffsets::Verified_Entry, _masm->offset());
 626       _masm->verified_entry();
 627       if (needs_clinit_barrier_on_entry(compilation()->method())) {
 628         clinit_barrier(compilation()->method());
 629       }
 630       build_frame();
 631       offsets()->set_value(CodeOffsets::Frame_Complete, _masm->offset());
 632       break;
 633 
 634     case lir_osr_entry:
 635       offsets()->set_value(CodeOffsets::OSR_Entry, _masm->offset());
 636       osr_entry();
 637       break;
 638 
 639     case lir_24bit_FPU:
 640       set_24bit_FPU();
 641       break;
 642 
 643     case lir_reset_FPU:
 644       reset_FPU();
 645       break;

 646 
 647     case lir_breakpoint:
 648       breakpoint();
 649       break;
 650 
 651     case lir_fpop_raw:
 652       fpop();
 653       break;
 654 
 655     case lir_membar:
 656       membar();
 657       break;
 658 
 659     case lir_membar_acquire:
 660       membar_acquire();
 661       break;
 662 
 663     case lir_membar_release:
 664       membar_release();
 665       break;
 666 
 667     case lir_membar_loadload:
 668       membar_loadload();
 669       break;
 670 
 671     case lir_membar_storestore:
 672       membar_storestore();
 673       break;
 674 

New version:

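Relative to the old version above, this revision renames the X86 guard around the
FPU-stack cleanup to IA32, fences the x87-only LIR ops (lir_fxch, lir_fld and
lir_fpop_raw) behind #ifdef IA32, and drops the lir_ffree, lir_24bit_FPU and
lir_reset_FPU cases entirely.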
 464   case lir_optvirtual_call:
 465     call(op, relocInfo::opt_virtual_call_type);
 466     break;
 467   case lir_icvirtual_call:
 468     ic_call(op);
 469     break;
 470   case lir_virtual_call:
 471     vtable_call(op);
 472     break;
 473   default:
 474     fatal("unexpected op code: %s", op->name());
 475     break;
 476   }
 477 
 478   // JSR 292
 479   // Record if this method has MethodHandle invokes.
 480   if (op->is_method_handle_invoke()) {
 481     compilation()->set_has_method_handle_invokes(true);
 482   }
 483 
 484 #if defined(IA32) && defined(TIERED)
 485   // C2 leaves the FPU stack dirty; clean it up
 486   if (UseSSE < 2) {
 487     int i;
 488     for (i = 1; i <= 7; i++) {
 489       ffree(i);
 490     }
 491     if (!op->result_opr()->is_float_kind()) {
 492       ffree(0);
 493     }
 494   }
 495 #endif // IA32 && TIERED
 496 }
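
A minimal, self-contained sketch (an assumed model, not JDK code) of what the
UseSSE < 2 cleanup above does: the eight-slot x87 register stack is emptied by
freeing st(1)..st(7) unconditionally, and st(0) as well unless it holds the
floating-point result of the call.

    #include <array>
    #include <cstdio>

    int main() {
      std::array<bool, 8> in_use;
      in_use.fill(true);               // pretend C2 left every x87 slot dirty
      bool result_is_float = true;     // assume the call returned a value in st(0)

      for (int i = 1; i <= 7; i++) {   // mirrors the ffree(i) loop
        in_use[i] = false;
      }
      if (!result_is_float) {          // mirrors the conditional ffree(0)
        in_use[0] = false;
      }
      std::printf("st(0) still in use: %s\n", in_use[0] ? "yes" : "no");
      return 0;
    }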
 497 
 498 
 499 void LIR_Assembler::emit_opLabel(LIR_OpLabel* op) {
 500   _masm->bind (*(op->label()));
 501 }
 502 
 503 
 504 void LIR_Assembler::emit_op1(LIR_Op1* op) {


 515       }
 516       break;
 517 
 518     case lir_roundfp: {
 519       LIR_OpRoundFP* round_op = op->as_OpRoundFP();
 520       roundfp_op(round_op->in_opr(), round_op->tmp(), round_op->result_opr(), round_op->pop_fpu_stack());
 521       break;
 522     }
 523 
 524     case lir_return:
 525       return_op(op->in_opr());
 526       break;
 527 
 528     case lir_safepoint:
 529       if (compilation()->debug_info_recorder()->last_pc_offset() == code_offset()) {
 530         _masm->nop();
 531       }
 532       safepoint_poll(op->in_opr(), op->info());
 533       break;
 534 
 535 #ifdef IA32
 536     case lir_fxch:
 537       fxch(op->in_opr()->as_jint());
 538       break;
 539 
 540     case lir_fld:
 541       fld(op->in_opr()->as_jint());
 542       break;
 543 #endif // IA32



 544 
 545     case lir_branch:
 546       break;
 547 
 548     case lir_push:
 549       push(op->in_opr());
 550       break;
 551 
 552     case lir_pop:
 553       pop(op->in_opr());
 554       break;
 555 
 556     case lir_leal:
 557       leal(op->in_opr(), op->result_opr(), op->patch_code(), op->info());
 558       break;
 559 
 560     case lir_null_check: {
 561       ImplicitNullCheckStub* stub = add_debug_info_for_null_check_here(op->info());
 562 
 563       if (op->in_opr()->is_single_cpu()) {


 617       // init offsets
 618       offsets()->set_value(CodeOffsets::OSR_Entry, _masm->offset());
 619       _masm->align(CodeEntryAlignment);
 620       if (needs_icache(compilation()->method())) {
 621         check_icache();
 622       }
 623       offsets()->set_value(CodeOffsets::Verified_Entry, _masm->offset());
 624       _masm->verified_entry();
 625       if (needs_clinit_barrier_on_entry(compilation()->method())) {
 626         clinit_barrier(compilation()->method());
 627       }
 628       build_frame();
 629       offsets()->set_value(CodeOffsets::Frame_Complete, _masm->offset());
 630       break;
 631 
 632     case lir_osr_entry:
 633       offsets()->set_value(CodeOffsets::OSR_Entry, _masm->offset());
 634       osr_entry();
 635       break;
 636 
 637 #ifdef IA32
 638     case lir_fpop_raw:
 639       fpop();



 640       break;
 641 #endif // IA32
 642 
 643     case lir_breakpoint:
 644       breakpoint();
 645       break;
 646 




 647     case lir_membar:
 648       membar();
 649       break;
 650 
 651     case lir_membar_acquire:
 652       membar_acquire();
 653       break;
 654 
 655     case lir_membar_release:
 656       membar_release();
 657       break;
 658 
 659     case lir_membar_loadload:
 660       membar_loadload();
 661       break;
 662 
 663     case lir_membar_storestore:
 664       membar_storestore();
 665       break;
 666 
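
Not part of the webrev: a hedged C++11 sketch of the fence semantics behind the
membar LIR ops emitted above. Standard C++ has no distinct loadload or
storestore fence, so only the full, acquire and release flavors are modeled
here; the mapping from LIR op to fence is an assumption for illustration.

    #include <atomic>
    #include <cstdio>

    // ~ lir_membar_acquire: later accesses may not float above the fence
    void acquire_fence() { std::atomic_thread_fence(std::memory_order_acquire); }
    // ~ lir_membar_release: earlier accesses may not sink below the fence
    void release_fence() { std::atomic_thread_fence(std::memory_order_release); }
    // ~ lir_membar: a full two-way barrier
    void full_fence() { std::atomic_thread_fence(std::memory_order_seq_cst); }

    int main() {
      release_fence();   // e.g. before publishing a field
      acquire_fence();   // e.g. after reading a published field
      full_fence();
      std::puts("fences issued");
      return 0;
    }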

