src/cpu/x86/vm/c1_LIRAssembler_x86.cpp
Index Unified diffs Context diffs Sdiffs Wdiffs Patch New Old Previous File Next File 6930772 Sdiff src/cpu/x86/vm

src/cpu/x86/vm/c1_LIRAssembler_x86.cpp

Print this page




2719       case lir_dynamic_call:
2720         offset += NativeCall::displacement_offset;
2721         break;
2722       case lir_icvirtual_call:
2723         offset += NativeCall::displacement_offset + NativeMovConstReg::instruction_size;
2724       break;
2725       case lir_virtual_call:  // currently, sparc-specific for niagara
2726       default: ShouldNotReachHere();
2727     }
2728     while (offset++ % BytesPerWord != 0) {
2729       __ nop();
2730     }
2731   }
2732 }
2733 
2734 
// Emits a direct Java call (static / optimized-virtual / dynamic) to the
// resolved target in op->addr() with the given relocation type, then records
// debug info (incl. the method-handle-invoke flag) at the return address.
2735 void LIR_Assembler::call(LIR_OpJavaCall* op, relocInfo::relocType rtype) {
       // On MP, the 4-byte call displacement must land word-aligned (align_call
       // padded with nops earlier for this) — presumably so the displacement
       // can be patched atomically by other threads; verify against align_call.
2736   assert(!os::is_MP() || (__ offset() + NativeCall::displacement_offset) % BytesPerWord == 0,
2737          "must be aligned");
2738   __ call(AddressLiteral(op->addr(), rtype));
       // code_offset() is now the return address; attach oop maps / debug info.
2739   add_call_info(code_offset(), op->info(), op->is_method_handle_invoke());
2740 }
2741 
2742 
// Emits an inline-cache virtual call: a movoop that loads IC_Klass with the
// placeholder non_oop_word (NOTE(review): presumably patched at runtime with
// the receiver klass — the patching itself is not visible here), followed by
// a call to the stub at op->addr().
2743 void LIR_Assembler::ic_call(LIR_OpJavaCall* op) {
       // Relocation anchored at the current pc so the runtime can locate the
       // whole mov+call sequence for this virtual call site.
2744   RelocationHolder rh = virtual_call_Relocation::spec(pc());
2745   __ movoop(IC_Klass, (jobject)Universe::non_oop_word());
       // Same MP word-alignment requirement on the call displacement as in
       // LIR_Assembler::call above; nops were emitted earlier to guarantee it.
2746   assert(!os::is_MP() ||
2747          (__ offset() + NativeCall::displacement_offset) % BytesPerWord == 0,
2748          "must be aligned");
2749   __ call(AddressLiteral(op->addr(), rh));
       // Record debug info (incl. method-handle flag) at the return address.
2750   add_call_info(code_offset(), op->info(), op->is_method_handle_invoke());
2751 }
2752 
2753 
2754 /* Currently, vtable-dispatch is only enabled for sparc platforms */
     // On x86 C1 never selects lir_virtual_call (see the ShouldNotReachHere in
     // the call-type switch above), so this hook must never be invoked.
2755 void LIR_Assembler::vtable_call(LIR_OpJavaCall* op) {
2756   ShouldNotReachHere();
2757 }
2758 
2759 
// Saves the current rsp into the designated method-handle SP-save register
// before a method-handle invoke; restore_SP below reloads it afterwards
// (MH adapters may adjust the stack pointer across the call).
2760 void LIR_Assembler::preserve_SP(LIR_OpJavaCall* op) {
2761   __ movptr(FrameMap::method_handle_invoke_SP_save_opr()->as_register(), rsp);
2762 }
2763 
2764 
// Restores rsp from the method-handle SP-save register — the counterpart of
// preserve_SP, run after a method-handle invoke returns.
2765 void LIR_Assembler::restore_SP(LIR_OpJavaCall* op) {
2766   __ movptr(rsp, FrameMap::method_handle_invoke_SP_save_opr()->as_register());
2767 }
2768 
2769 
2770 void LIR_Assembler::emit_static_call_stub() {
2771   address call_pc = __ pc();
2772   address stub = __ start_a_stub(call_stub_size);
2773   if (stub == NULL) {
2774     bailout("static call stub overflow");
2775     return;
2776   }
2777 
2778   int start = __ offset();
2779   if (os::is_MP()) {
2780     // make sure that the displacement word of the call ends up word aligned
2781     int offset = __ offset() + NativeMovConstReg::instruction_size + NativeCall::displacement_offset;
2782     while (offset++ % BytesPerWord != 0) {
2783       __ nop();
2784     }
2785   }
2786   __ relocate(static_stub_Relocation::spec(call_pc));
2787   __ movoop(rbx, (jobject)NULL);
2788   // must be set to -1 at code generation time
2789   assert(!os::is_MP() || ((__ offset() + 1) % BytesPerWord) == 0, "must be aligned on MP");




2719       case lir_dynamic_call:
2720         offset += NativeCall::displacement_offset;
2721         break;
2722       case lir_icvirtual_call:
2723         offset += NativeCall::displacement_offset + NativeMovConstReg::instruction_size;
2724       break;
2725       case lir_virtual_call:  // currently, sparc-specific for niagara
2726       default: ShouldNotReachHere();
2727     }
2728     while (offset++ % BytesPerWord != 0) {
2729       __ nop();
2730     }
2731   }
2732 }
2733 
2734 
// (new side of the sdiff) Emits a direct Java call to the resolved target in
// op->addr() with the given relocation type, then records debug info at the
// return address. Note: this revision drops the is_method_handle_invoke()
// argument to add_call_info that the old side passed.
2735 void LIR_Assembler::call(LIR_OpJavaCall* op, relocInfo::relocType rtype) {
       // On MP, the 4-byte call displacement must land word-aligned (nops were
       // emitted earlier) — presumably so it can be patched atomically; the
       // visible code only enforces the alignment.
2736   assert(!os::is_MP() || (__ offset() + NativeCall::displacement_offset) % BytesPerWord == 0,
2737          "must be aligned");
2738   __ call(AddressLiteral(op->addr(), rtype));
       // code_offset() is now the return address; attach oop maps / debug info.
2739   add_call_info(code_offset(), op->info());
2740 }
2741 
2742 
// (new side of the sdiff) Emits an inline-cache virtual call: movoop loads
// IC_Klass with the placeholder non_oop_word (NOTE(review): presumably
// runtime-patched with the receiver klass — not visible here), then calls
// the stub at op->addr(). This revision drops the is_method_handle_invoke()
// argument to add_call_info that the old side passed.
2743 void LIR_Assembler::ic_call(LIR_OpJavaCall* op) {
       // Relocation anchored at the current pc so the runtime can locate the
       // whole mov+call sequence for this virtual call site.
2744   RelocationHolder rh = virtual_call_Relocation::spec(pc());
2745   __ movoop(IC_Klass, (jobject)Universe::non_oop_word());
       // Same MP word-alignment requirement on the call displacement as in
       // LIR_Assembler::call; nops were emitted earlier to guarantee it.
2746   assert(!os::is_MP() ||
2747          (__ offset() + NativeCall::displacement_offset) % BytesPerWord == 0,
2748          "must be aligned");
2749   __ call(AddressLiteral(op->addr(), rh));
2750   add_call_info(code_offset(), op->info());
2751 }
2752 
2753 
2754 /* Currently, vtable-dispatch is only enabled for sparc platforms */
     // On x86 C1 never selects lir_virtual_call (see the ShouldNotReachHere in
     // the call-type switch above), so this hook must never be invoked.
2755 void LIR_Assembler::vtable_call(LIR_OpJavaCall* op) {
2756   ShouldNotReachHere();
2757 }
2758 
2759 










2760 void LIR_Assembler::emit_static_call_stub() {
2761   address call_pc = __ pc();
2762   address stub = __ start_a_stub(call_stub_size);
2763   if (stub == NULL) {
2764     bailout("static call stub overflow");
2765     return;
2766   }
2767 
2768   int start = __ offset();
2769   if (os::is_MP()) {
2770     // make sure that the displacement word of the call ends up word aligned
2771     int offset = __ offset() + NativeMovConstReg::instruction_size + NativeCall::displacement_offset;
2772     while (offset++ % BytesPerWord != 0) {
2773       __ nop();
2774     }
2775   }
2776   __ relocate(static_stub_Relocation::spec(call_pc));
2777   __ movoop(rbx, (jobject)NULL);
2778   // must be set to -1 at code generation time
2779   assert(!os::is_MP() || ((__ offset() + 1) % BytesPerWord) == 0, "must be aligned on MP");


src/cpu/x86/vm/c1_LIRAssembler_x86.cpp
Index Unified diffs Context diffs Sdiffs Wdiffs Patch New Old Previous File Next File