src/cpu/sparc/vm/sparc.ad

Old version:

 984 
 985 #ifdef ASSERT
 986   {
 987     MacroAssembler _masm(&cbuf);
 988     if (is_verified_oop_base) {
 989       __ verify_oop(reg_to_register_object(src1_enc));
 990     }
 991     if (is_verified_oop_store) {
 992       __ verify_oop(reg_to_register_object(dst_enc));
 993     }
 994     if (tmp_enc != -1) {
 995       __ mov(O7, reg_to_register_object(tmp_enc));
 996     }
 997     if (is_verified_oop_load) {
 998       __ verify_oop(reg_to_register_object(dst_enc));
 999     }
1000   }
1001 #endif
1002 }
1003 
1004 void emit_call_reloc(CodeBuffer &cbuf, intptr_t entry_point, relocInfo::relocType rtype, bool preserve_g2 = false) {
1005   // The method which records debug information at every safepoint
1006   // expects the call to be the first instruction in the snippet as
1007   // it creates a PcDesc structure which tracks the offset of a call
1008   // from the start of the codeBlob. This offset is computed as
1009   // code_end() - code_begin() of the code which has been emitted
1010   // so far.
1011   // In this particular case we have skirted around the problem by
1012   // putting the "mov" instruction in the delay slot but the problem
1013   // may bite us again at some other point and a cleaner/generic
1014   // solution using relocations would be needed.
1015   MacroAssembler _masm(&cbuf);
1016   __ set_inst_mark();
1017 
1018   // We flush the current window just so that there is a valid stack copy;
1019   // the fact that the current window becomes active again instantly is
1020   // not a problem, as there is nothing live in it.
1021 
1022 #ifdef ASSERT
1023   int startpos = __ offset();
1024 #endif /* ASSERT */
1025 
1026   __ call((address)entry_point, rtype);
1027 
1028   if (preserve_g2)   __ delayed()->mov(G2, L7);
1029   else __ delayed()->nop();
1030 
1031   if (preserve_g2)   __ mov(L7, G2);
1032 
1033 #ifdef ASSERT
1034   if (preserve_g2 && (VerifyCompiledCode || VerifyOops)) {
1035 #ifdef _LP64
1036     // Trash argument dump slots.
1037     __ set(0xb0b8ac0db0b8ac0d, G1);
1038     __ mov(G1, G5);
1039     __ stx(G1, SP, STACK_BIAS + 0x80);
1040     __ stx(G1, SP, STACK_BIAS + 0x88);
1041     __ stx(G1, SP, STACK_BIAS + 0x90);
1042     __ stx(G1, SP, STACK_BIAS + 0x98);
1043     __ stx(G1, SP, STACK_BIAS + 0xA0);
1044     __ stx(G1, SP, STACK_BIAS + 0xA8);
1045 #else // _LP64
1046     // this is also a native call, so smash the first 7 stack locations,


2576 
2577   // Long values come back from native calls in O0:O1 in the 32-bit VM; copy the value
2578   // to G1 so the register allocator will not have to deal with the misaligned register
2579   // pair.
2580   enc_class adjust_long_from_native_call %{
2581 #ifndef _LP64
2582     if (returns_long()) {
2583       //    sllx  O0,32,O0
2584       emit3_simm13( cbuf, Assembler::arith_op, R_O0_enc, Assembler::sllx_op3, R_O0_enc, 0x1020 );
2585       //    srl   O1,0,O1
2586       emit3_simm13( cbuf, Assembler::arith_op, R_O1_enc, Assembler::srl_op3, R_O1_enc, 0x0000 );
2587       //    or    O0,O1,G1
2588       emit3       ( cbuf, Assembler::arith_op, R_G1_enc, Assembler:: or_op3, R_O0_enc, 0, R_O1_enc );
2589     }
2590 #endif
2591   %}
2592 
2593   enc_class Java_To_Runtime (method meth) %{    // CALL Java_To_Runtime
2594     // CALL directly to the runtime
2595     // The user of this is responsible for ensuring that R_L7 is empty (killed).
2596     emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type,
2597                     /*preserve_g2=*/true);
2598   %}
2599 
2600   enc_class preserve_SP %{
2601     MacroAssembler _masm(&cbuf);
2602     __ mov(SP, L7_mh_SP_save);
2603   %}
2604 
2605   enc_class restore_SP %{
2606     MacroAssembler _masm(&cbuf);
2607     __ mov(L7_mh_SP_save, SP);
2608   %}
2609 
2610   enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
2611     // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
2612     // who we intended to call.
2613     if (!_method) {
2614       emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type);
2615     } else if (_optimized_virtual) {
2616       emit_call_reloc(cbuf, $meth$$method, relocInfo::opt_virtual_call_type);
2617     } else {
2618       emit_call_reloc(cbuf, $meth$$method, relocInfo::static_call_type);
2619     }
2620     if (_method) {  // Emit stub for static call.



2621       address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
2622       // Stub does not fit into scratch buffer if TraceJumps is enabled
2623       if (stub == NULL && !(TraceJumps && Compile::current()->in_scratch_emit_size())) {
2624         ciEnv::current()->record_failure("CodeCache is full");
2625         return;
2626       }
2627     }
2628   %}
2629 
2630   enc_class Java_Dynamic_Call (method meth) %{    // JAVA DYNAMIC CALL
2631     MacroAssembler _masm(&cbuf);
2632     __ set_inst_mark();
2633     int vtable_index = this->_vtable_index;
2634     // MachCallDynamicJavaNode::ret_addr_offset uses this same test
2635     if (vtable_index < 0) {
2636       // must be invalid_vtable_index, not nonvirtual_vtable_index
2637       assert(vtable_index == Method::invalid_vtable_index, "correct sentinel value");
2638       Register G5_ic_reg = reg_to_register_object(Matcher::inline_cache_reg_encode());
2639       assert(G5_ic_reg == G5_inline_cache_reg, "G5_inline_cache_reg used in assemble_ic_buffer_code()");
2640       assert(G5_ic_reg == G5_megamorphic_method, "G5_megamorphic_method used in megamorphic call stub");
2641       __ ic_call((address)$meth$$method);
2642     } else {
2643       assert(!UseInlineCaches, "expect vtable calls only if not using ICs");
2644       // Just go thru the vtable
2645       // get receiver klass (receiver already checked for non-null)
2646       // If we end up going thru a c2i adapter interpreter expects method in G5
2647       int off = __ offset();
2648       __ load_klass(O0, G3_scratch);
2649       int klass_load_size;
2650       if (UseCompressedClassPointers) {
2651         assert(Universe::heap() != NULL, "java heap should be initialized");
2652         klass_load_size = MacroAssembler::instr_size_for_decode_klass_not_null() + 1*BytesPerInstWord;
2653       } else {
2654         klass_load_size = 1*BytesPerInstWord;
2655       }
2656       int entry_offset = InstanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size();
2657       int v_off = entry_offset*wordSize + vtableEntry::method_offset_in_bytes();
2658       if (Assembler::is_simm13(v_off)) {
2659         __ ld_ptr(G3, v_off, G5_method);
2660       } else {
2661         // Generate 2 instructions


New version:

 984 
 985 #ifdef ASSERT
 986   {
 987     MacroAssembler _masm(&cbuf);
 988     if (is_verified_oop_base) {
 989       __ verify_oop(reg_to_register_object(src1_enc));
 990     }
 991     if (is_verified_oop_store) {
 992       __ verify_oop(reg_to_register_object(dst_enc));
 993     }
 994     if (tmp_enc != -1) {
 995       __ mov(O7, reg_to_register_object(tmp_enc));
 996     }
 997     if (is_verified_oop_load) {
 998       __ verify_oop(reg_to_register_object(dst_enc));
 999     }
1000   }
1001 #endif
1002 }
1003 
1004 void emit_call_reloc(CodeBuffer &cbuf, intptr_t entry_point, RelocationHolder const& rspec, bool preserve_g2 = false) {
1005   // The method which records debug information at every safepoint
1006   // expects the call to be the first instruction in the snippet as
1007   // it creates a PcDesc structure which tracks the offset of a call
1008   // from the start of the codeBlob. This offset is computed as
1009   // code_end() - code_begin() of the code which has been emitted
1010   // so far.
1011   // In this particular case we have skirted around the problem by
1012   // putting the "mov" instruction in the delay slot but the problem
1013   // may bite us again at some other point and a cleaner/generic
1014   // solution using relocations would be needed.
1015   MacroAssembler _masm(&cbuf);
1016   __ set_inst_mark();
1017 
1018   // We flush the current window just so that there is a valid stack copy;
1019   // the fact that the current window becomes active again instantly is
1020   // not a problem, as there is nothing live in it.
1021 
1022 #ifdef ASSERT
1023   int startpos = __ offset();
1024 #endif /* ASSERT */
1025 
1026   __ call((address)entry_point, rspec);
1027 
1028   if (preserve_g2)   __ delayed()->mov(G2, L7);
1029   else __ delayed()->nop();
1030 
1031   if (preserve_g2)   __ mov(L7, G2);
1032 
1033 #ifdef ASSERT
1034   if (preserve_g2 && (VerifyCompiledCode || VerifyOops)) {
1035 #ifdef _LP64
1036     // Trash argument dump slots.
1037     __ set(0xb0b8ac0db0b8ac0d, G1);
1038     __ mov(G1, G5);
1039     __ stx(G1, SP, STACK_BIAS + 0x80);
1040     __ stx(G1, SP, STACK_BIAS + 0x88);
1041     __ stx(G1, SP, STACK_BIAS + 0x90);
1042     __ stx(G1, SP, STACK_BIAS + 0x98);
1043     __ stx(G1, SP, STACK_BIAS + 0xA0);
1044     __ stx(G1, SP, STACK_BIAS + 0xA8);
1045 #else // _LP64
1046     // this is also a native call, so smash the first 7 stack locations,


2576 
2577   // Long values come back from native calls in O0:O1 in the 32-bit VM; copy the value
2578   // to G1 so the register allocator will not have to deal with the misaligned register
2579   // pair.
2580   enc_class adjust_long_from_native_call %{
2581 #ifndef _LP64
2582     if (returns_long()) {
2583       //    sllx  O0,32,O0
2584       emit3_simm13( cbuf, Assembler::arith_op, R_O0_enc, Assembler::sllx_op3, R_O0_enc, 0x1020 );
2585       //    srl   O1,0,O1
2586       emit3_simm13( cbuf, Assembler::arith_op, R_O1_enc, Assembler::srl_op3, R_O1_enc, 0x0000 );
2587       //    or    O0,O1,G1
2588       emit3       ( cbuf, Assembler::arith_op, R_G1_enc, Assembler:: or_op3, R_O0_enc, 0, R_O1_enc );
2589     }
2590 #endif
2591   %}
2592 
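For readers decoding the raw emit3/emit3_simm13 calls above: the three emitted instructions simply pack the 32-bit halves returned in O0 (high word) and O1 (low word) into a single 64-bit value in G1. A minimal C++ sketch of the same value computation, for illustration only (not HotSpot code):

    #include <stdint.h>

    // Equivalent of:  sllx O0,32,O0 ; srl O1,0,O1 ; or O0,O1,G1
    uint64_t merge_long_return(uint32_t o0_high, uint32_t o1_low) {
      uint64_t high = (uint64_t)o0_high << 32;  // sllx O0,32,O0
      uint64_t low  = (uint64_t)o1_low;         // srl  O1,0,O1 (zero-extends the low half)
      return high | low;                        // or   O0,O1,G1
    }
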
2593   enc_class Java_To_Runtime (method meth) %{    // CALL Java_To_Runtime
2594     // CALL directly to the runtime
2595     // The user of this is responsible for ensuring that R_L7 is empty (killed).
2596     emit_call_reloc(cbuf, $meth$$method, runtime_call_Relocation::spec(), /*preserve_g2=*/true);

2597   %}
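This call site reflects the emit_call_reloc signature change shown earlier in the diff: the old code passed a bare relocInfo::relocType, while the new code passes a RelocationHolder built by a *_Relocation::spec() factory (which, for the Java call encodings below, can also carry a resolved method index). A rough before/after sketch of the call shape; "entry" stands in for the actual entry point:

    // Before: relocation described by a raw type tag.
    emit_call_reloc(cbuf, entry, relocInfo::runtime_call_type, /*preserve_g2=*/true);

    // After: relocation described by a RelocationHolder from a spec() factory.
    emit_call_reloc(cbuf, entry, runtime_call_Relocation::spec(), /*preserve_g2=*/true);
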
2598 
2599   enc_class preserve_SP %{
2600     MacroAssembler _masm(&cbuf);
2601     __ mov(SP, L7_mh_SP_save);
2602   %}
2603 
2604   enc_class restore_SP %{
2605     MacroAssembler _masm(&cbuf);
2606     __ mov(L7_mh_SP_save, SP);
2607   %}
2608 
2609   enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
2610     // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
2611     // who we intended to call.
2612     if (!_method) {
2613       emit_call_reloc(cbuf, $meth$$method, runtime_call_Relocation::spec());


2614     } else {
2615       int method_index = resolved_method_index(cbuf);
2616       RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
2617                                                   : static_call_Relocation::spec(method_index);
2618       emit_call_reloc(cbuf, $meth$$method, rspec);
2619 
2620       // Emit stub for static call.
2621       address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
2622       // Stub does not fit into scratch buffer if TraceJumps is enabled
2623       if (stub == NULL && !(TraceJumps && Compile::current()->in_scratch_emit_size())) {
2624         ciEnv::current()->record_failure("CodeCache is full");
2625         return;
2626       }
2627     }
2628   %}
2629 
2630   enc_class Java_Dynamic_Call (method meth) %{    // JAVA DYNAMIC CALL
2631     MacroAssembler _masm(&cbuf);
2632     __ set_inst_mark();
2633     int vtable_index = this->_vtable_index;
2634     // MachCallDynamicJavaNode::ret_addr_offset uses this same test
2635     if (vtable_index < 0) {
2636       // must be invalid_vtable_index, not nonvirtual_vtable_index
2637       assert(vtable_index == Method::invalid_vtable_index, "correct sentinel value");
2638       Register G5_ic_reg = reg_to_register_object(Matcher::inline_cache_reg_encode());
2639       assert(G5_ic_reg == G5_inline_cache_reg, "G5_inline_cache_reg used in assemble_ic_buffer_code()");
2640       assert(G5_ic_reg == G5_megamorphic_method, "G5_megamorphic_method used in megamorphic call stub");
2641       __ ic_call((address)$meth$$method, /*emit_delay=*/true, resolved_method_index(cbuf));
2642     } else {
2643       assert(!UseInlineCaches, "expect vtable calls only if not using ICs");
2644       // Just go thru the vtable
2645       // get receiver klass (receiver already checked for non-null)
2646       // If we end up going thru a c2i adapter interpreter expects method in G5
2647       int off = __ offset();
2648       __ load_klass(O0, G3_scratch);
2649       int klass_load_size;
2650       if (UseCompressedClassPointers) {
2651         assert(Universe::heap() != NULL, "java heap should be initialized");
2652         klass_load_size = MacroAssembler::instr_size_for_decode_klass_not_null() + 1*BytesPerInstWord;
2653       } else {
2654         klass_load_size = 1*BytesPerInstWord;
2655       }
2656       int entry_offset = InstanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size();
2657       int v_off = entry_offset*wordSize + vtableEntry::method_offset_in_bytes();
2658       if (Assembler::is_simm13(v_off)) {
2659         __ ld_ptr(G3, v_off, G5_method);
2660       } else {
2661         // Generate 2 instructions

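Context for the Assembler::is_simm13(v_off) test above: SPARC load/store displacements are signed 13-bit immediates, so any offset outside -4096..4095 cannot be encoded in a single instruction, and the vtable offset must first be built in a scratch register (the "Generate 2 instructions" path). A small sketch of that range check; the helper name is hypothetical, not the HotSpot API:

    // A value fits a SPARC simm13 field iff it is a signed 13-bit integer.
    inline bool fits_simm13(long x) {
      return x >= -4096 && x <= 4095;
    }
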
