src/cpu/sparc/vm/sparc.ad

Old:

 984 
 985 #ifdef ASSERT
 986   {
 987     MacroAssembler _masm(&cbuf);
 988     if (is_verified_oop_base) {
 989       __ verify_oop(reg_to_register_object(src1_enc));
 990     }
 991     if (is_verified_oop_store) {
 992       __ verify_oop(reg_to_register_object(dst_enc));
 993     }
 994     if (tmp_enc != -1) {
 995       __ mov(O7, reg_to_register_object(tmp_enc));
 996     }
 997     if (is_verified_oop_load) {
 998       __ verify_oop(reg_to_register_object(dst_enc));
 999     }
1000   }
1001 #endif
1002 }
1003 
1004 void emit_call_reloc(CodeBuffer &cbuf, intptr_t entry_point, relocInfo::relocType rtype, bool preserve_g2 = false) {
1005   // The method which records debug information at every safepoint
1006   // expects the call to be the first instruction in the snippet as
1007   // it creates a PcDesc structure which tracks the offset of a call
1008   // from the start of the codeBlob. This offset is computed as
1009   // code_end() - code_begin() of the code which has been emitted
1010   // so far.
1011   // In this particular case we have skirted around the problem by
1012   // putting the "mov" instruction in the delay slot but the problem
1013   // may bite us again at some other point and a cleaner/generic
1014   // solution using relocations would be needed.
1015   MacroAssembler _masm(&cbuf);
1016   __ set_inst_mark();
1017 
 1018   // We flush the current window just so that there is a valid stack copy;
 1019   // the fact that the current window becomes active again instantly is
 1020   // not a problem, as there is nothing live in it.
1021 
1022 #ifdef ASSERT
1023   int startpos = __ offset();
1024 #endif /* ASSERT */
1025 
1026   __ call((address)entry_point, rtype);
1027 
1028   if (preserve_g2)   __ delayed()->mov(G2, L7);
1029   else __ delayed()->nop();
1030 
1031   if (preserve_g2)   __ mov(L7, G2);
1032 
1033 #ifdef ASSERT
1034   if (preserve_g2 && (VerifyCompiledCode || VerifyOops)) {
1035 #ifdef _LP64
1036     // Trash argument dump slots.
1037     __ set(0xb0b8ac0db0b8ac0d, G1);
1038     __ mov(G1, G5);
1039     __ stx(G1, SP, STACK_BIAS + 0x80);
1040     __ stx(G1, SP, STACK_BIAS + 0x88);
1041     __ stx(G1, SP, STACK_BIAS + 0x90);
1042     __ stx(G1, SP, STACK_BIAS + 0x98);
1043     __ stx(G1, SP, STACK_BIAS + 0xA0);
1044     __ stx(G1, SP, STACK_BIAS + 0xA8);
1045 #else // _LP64
1046     // this is also a native call, so smash the first 7 stack locations,


2565 
 2566   // Long values come back from native calls in O0:O1 in the 32-bit VM; copy the value
 2567   // to G1 so the register allocator will not have to deal with the misaligned register
 2568   // pair.
2569   enc_class adjust_long_from_native_call %{
2570 #ifndef _LP64
2571     if (returns_long()) {
2572       //    sllx  O0,32,O0
2573       emit3_simm13( cbuf, Assembler::arith_op, R_O0_enc, Assembler::sllx_op3, R_O0_enc, 0x1020 );
2574       //    srl   O1,0,O1
2575       emit3_simm13( cbuf, Assembler::arith_op, R_O1_enc, Assembler::srl_op3, R_O1_enc, 0x0000 );
2576       //    or    O0,O1,G1
2577       emit3       ( cbuf, Assembler::arith_op, R_G1_enc, Assembler:: or_op3, R_O0_enc, 0, R_O1_enc );
2578     }
2579 #endif
2580   %}
2581 
2582   enc_class Java_To_Runtime (method meth) %{    // CALL Java_To_Runtime
2583     // CALL directly to the runtime
2584     // The user of this is responsible for ensuring that R_L7 is empty (killed).
2585     emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type,
2586                     /*preserve_g2=*/true);
2587   %}
2588 
2589   enc_class preserve_SP %{
2590     MacroAssembler _masm(&cbuf);
2591     __ mov(SP, L7_mh_SP_save);
2592   %}
2593 
2594   enc_class restore_SP %{
2595     MacroAssembler _masm(&cbuf);
2596     __ mov(L7_mh_SP_save, SP);
2597   %}
2598 
2599   enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
2600     // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
2601     // who we intended to call.
2602     if (!_method) {
2603       emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type);
2604     } else if (_optimized_virtual) {
2605       emit_call_reloc(cbuf, $meth$$method, relocInfo::opt_virtual_call_type);
2606     } else {
2607       emit_call_reloc(cbuf, $meth$$method, relocInfo::static_call_type);
2608     }
2609     if (_method) {  // Emit stub for static call.
2610       address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
2611       // Stub does not fit into scratch buffer if TraceJumps is enabled
2612       if (stub == NULL && !(TraceJumps && Compile::current()->in_scratch_emit_size())) {
2613         ciEnv::current()->record_failure("CodeCache is full");
2614         return;
2615       } 
2616     }
2617   %}
2618 
2619   enc_class Java_Dynamic_Call (method meth) %{    // JAVA DYNAMIC CALL
2620     MacroAssembler _masm(&cbuf);
2621     __ set_inst_mark();
2622     int vtable_index = this->_vtable_index;
2623     // MachCallDynamicJavaNode::ret_addr_offset uses this same test
2624     if (vtable_index < 0) {
2625       // must be invalid_vtable_index, not nonvirtual_vtable_index
2626       assert(vtable_index == Method::invalid_vtable_index, "correct sentinel value");
2627       Register G5_ic_reg = reg_to_register_object(Matcher::inline_cache_reg_encode());
2628       assert(G5_ic_reg == G5_inline_cache_reg, "G5_inline_cache_reg used in assemble_ic_buffer_code()");
2629       assert(G5_ic_reg == G5_megamorphic_method, "G5_megamorphic_method used in megamorphic call stub");
2630       __ ic_call((address)$meth$$method);
2631     } else {
2632       assert(!UseInlineCaches, "expect vtable calls only if not using ICs");
 2633       // Just go through the vtable.
 2634       // Get the receiver klass (the receiver has already been checked for non-null).
 2635       // If we end up going through a c2i adapter, the interpreter expects the method in G5.
2636       int off = __ offset();
2637       __ load_klass(O0, G3_scratch);
2638       int klass_load_size;
2639       if (UseCompressedClassPointers) {
2640         assert(Universe::heap() != NULL, "java heap should be initialized");
2641         klass_load_size = MacroAssembler::instr_size_for_decode_klass_not_null() + 1*BytesPerInstWord;
2642       } else {
2643         klass_load_size = 1*BytesPerInstWord;
2644       }
2645       int entry_offset = InstanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size();
2646       int v_off = entry_offset*wordSize + vtableEntry::method_offset_in_bytes();
2647       if (Assembler::is_simm13(v_off)) {
2648         __ ld_ptr(G3, v_off, G5_method);
2649       } else {
2650         // Generate 2 instructions


New:

 984 
 985 #ifdef ASSERT
 986   {
 987     MacroAssembler _masm(&cbuf);
 988     if (is_verified_oop_base) {
 989       __ verify_oop(reg_to_register_object(src1_enc));
 990     }
 991     if (is_verified_oop_store) {
 992       __ verify_oop(reg_to_register_object(dst_enc));
 993     }
 994     if (tmp_enc != -1) {
 995       __ mov(O7, reg_to_register_object(tmp_enc));
 996     }
 997     if (is_verified_oop_load) {
 998       __ verify_oop(reg_to_register_object(dst_enc));
 999     }
1000   }
1001 #endif
1002 }
1003 
1004 void emit_call_reloc(CodeBuffer &cbuf, intptr_t entry_point, RelocationHolder const& rspec, bool preserve_g2 = false) {
1005   // The method which records debug information at every safepoint
1006   // expects the call to be the first instruction in the snippet as
1007   // it creates a PcDesc structure which tracks the offset of a call
1008   // from the start of the codeBlob. This offset is computed as
1009   // code_end() - code_begin() of the code which has been emitted
1010   // so far.
1011   // In this particular case we have skirted around the problem by
1012   // putting the "mov" instruction in the delay slot but the problem
1013   // may bite us again at some other point and a cleaner/generic
1014   // solution using relocations would be needed.
1015   MacroAssembler _masm(&cbuf);
1016   __ set_inst_mark();
1017 
 1018   // We flush the current window just so that there is a valid stack copy;
 1019   // the fact that the current window becomes active again instantly is
 1020   // not a problem, as there is nothing live in it.
1021 
1022 #ifdef ASSERT
1023   int startpos = __ offset();
1024 #endif /* ASSERT */
1025 
1026   __ call((address)entry_point, rspec);
1027 
1028   if (preserve_g2)   __ delayed()->mov(G2, L7);
1029   else __ delayed()->nop();
1030 
1031   if (preserve_g2)   __ mov(L7, G2);
1032 
1033 #ifdef ASSERT
1034   if (preserve_g2 && (VerifyCompiledCode || VerifyOops)) {
1035 #ifdef _LP64
1036     // Trash argument dump slots.
1037     __ set(0xb0b8ac0db0b8ac0d, G1);
1038     __ mov(G1, G5);
1039     __ stx(G1, SP, STACK_BIAS + 0x80);
1040     __ stx(G1, SP, STACK_BIAS + 0x88);
1041     __ stx(G1, SP, STACK_BIAS + 0x90);
1042     __ stx(G1, SP, STACK_BIAS + 0x98);
1043     __ stx(G1, SP, STACK_BIAS + 0xA0);
1044     __ stx(G1, SP, STACK_BIAS + 0xA8);
1045 #else // _LP64
1046     // this is also a native call, so smash the first 7 stack locations,
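
// A minimal sketch of the SPARC V9 frame layout behind the _LP64 offsets
// above (the enum name and constants below are illustrative, not HotSpot
// code): the first 16 extended words past SP+STACK_BIAS (0x00-0x78) are the
// register window save area, and the next 6 extended words (0x80-0xA8) are
// the outgoing argument dump slots -- exactly the range trashed with the
// 0xb0b8ac0d... poison pattern.
enum Sparc64FrameSketch {
  kRegSaveBytes = 16 * 8,             // %l0-%l7 and %i0-%i7 spill area: 0x80 bytes
  kArgDumpSlots = 6,                  // callee may dump %o0-%o5 here
  kArgDumpBytes = kArgDumpSlots * 8   // 0x30 bytes, i.e. offsets 0x80..0xA8
};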


2565 
 2566   // Long values come back from native calls in O0:O1 in the 32-bit VM; copy the value
 2567   // to G1 so the register allocator will not have to deal with the misaligned register
 2568   // pair.
2569   enc_class adjust_long_from_native_call %{
2570 #ifndef _LP64
2571     if (returns_long()) {
2572       //    sllx  O0,32,O0
2573       emit3_simm13( cbuf, Assembler::arith_op, R_O0_enc, Assembler::sllx_op3, R_O0_enc, 0x1020 );
2574       //    srl   O1,0,O1
2575       emit3_simm13( cbuf, Assembler::arith_op, R_O1_enc, Assembler::srl_op3, R_O1_enc, 0x0000 );
2576       //    or    O0,O1,G1
2577       emit3       ( cbuf, Assembler::arith_op, R_G1_enc, Assembler:: or_op3, R_O0_enc, 0, R_O1_enc );
2578     }
2579 #endif
2580   %}
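
// For reference, the three instructions emitted above compute, in effect,
// G1 = (O0 << 32) | zero_extend(O1); the 0x1020 immediate on the sllx
// appears to be the X bit (0x1000) plus a shift count of 32 (0x20).
// A minimal C++ sketch of the same packing, assuming 'o0_hi' and 'o1_lo'
// hold the two 32-bit halves (the helper name is illustrative, not HotSpot
// code):
#include <stdint.h>

static inline uint64_t pack_long_from_o0_o1(uint32_t o0_hi, uint32_t o1_lo) {
  // sllx O0,32,O0 ; srl O1,0,O1 (zero-extends the low half) ; or O0,O1,G1
  return ((uint64_t)o0_hi << 32) | (uint64_t)o1_lo;
}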
2581 
2582   enc_class Java_To_Runtime (method meth) %{    // CALL Java_To_Runtime
2583     // CALL directly to the runtime
2584     // The user of this is responsible for ensuring that R_L7 is empty (killed).
2585     emit_call_reloc(cbuf, $meth$$method, runtime_call_Relocation::spec(), /*preserve_g2=*/true);
2586   %}
2587 
2588   enc_class preserve_SP %{
2589     MacroAssembler _masm(&cbuf);
2590     __ mov(SP, L7_mh_SP_save);
2591   %}
2592 
2593   enc_class restore_SP %{
2594     MacroAssembler _masm(&cbuf);
2595     __ mov(L7_mh_SP_save, SP);
2596   %}
2597 
2598   enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
2599     // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
2600     // who we intended to call.
2601     if (!_method) {
2602       emit_call_reloc(cbuf, $meth$$method, runtime_call_Relocation::spec());
2603     } else {
2604       int method_index = resolved_method_index(cbuf);
2605       RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
2606                                                   : static_call_Relocation::spec(method_index);
2607       emit_call_reloc(cbuf, $meth$$method, rspec);
2608 
2609       // Emit stub for static call.
2610       address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
2611       // Stub does not fit into scratch buffer if TraceJumps is enabled
2612       if (stub == NULL && !(TraceJumps && Compile::current()->in_scratch_emit_size())) {
2613         ciEnv::current()->record_failure("CodeCache is full");
2614         return;
2615       }
2616     }
2617   %}
2618 
2619   enc_class Java_Dynamic_Call (method meth) %{    // JAVA DYNAMIC CALL
2620     MacroAssembler _masm(&cbuf);
2621     __ set_inst_mark();
2622     int vtable_index = this->_vtable_index;
2623     // MachCallDynamicJavaNode::ret_addr_offset uses this same test
2624     if (vtable_index < 0) {
2625       // must be invalid_vtable_index, not nonvirtual_vtable_index
2626       assert(vtable_index == Method::invalid_vtable_index, "correct sentinel value");
2627       Register G5_ic_reg = reg_to_register_object(Matcher::inline_cache_reg_encode());
2628       assert(G5_ic_reg == G5_inline_cache_reg, "G5_inline_cache_reg used in assemble_ic_buffer_code()");
2629       assert(G5_ic_reg == G5_megamorphic_method, "G5_megamorphic_method used in megamorphic call stub");
2630       __ ic_call((address)$meth$$method, /*emit_delay=*/true, resolved_method_index(cbuf));
2631     } else {
2632       assert(!UseInlineCaches, "expect vtable calls only if not using ICs");
 2633       // Just go through the vtable.
 2634       // Get the receiver klass (the receiver has already been checked for non-null).
 2635       // If we end up going through a c2i adapter, the interpreter expects the method in G5.
2636       int off = __ offset();
2637       __ load_klass(O0, G3_scratch);
2638       int klass_load_size;
2639       if (UseCompressedClassPointers) {
2640         assert(Universe::heap() != NULL, "java heap should be initialized");
2641         klass_load_size = MacroAssembler::instr_size_for_decode_klass_not_null() + 1*BytesPerInstWord;
2642       } else {
2643         klass_load_size = 1*BytesPerInstWord;
2644       }
2645       int entry_offset = InstanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size();
2646       int v_off = entry_offset*wordSize + vtableEntry::method_offset_in_bytes();
2647       if (Assembler::is_simm13(v_off)) {
2648         __ ld_ptr(G3, v_off, G5_method);
2649       } else {
2650         // Generate 2 instructions
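
// A brief sketch of the vtable-dispatch offset arithmetic above, assuming
// vtable_start_offset() and vtableEntry::size() are in words while the
// method offset is in bytes; the parameter and helper names below are
// illustrative, not HotSpot code.
static long vtable_method_offset(int  vtable_index,
                                 long vtable_start_words,      // InstanceKlass::vtable_start_offset()
                                 long entry_size_words,        // vtableEntry::size()
                                 long method_offset_in_bytes,  // vtableEntry::method_offset_in_bytes()
                                 long word_size) {
  long entry_offset = vtable_start_words + vtable_index * entry_size_words;  // entry offset, in words
  return entry_offset * word_size + method_offset_in_bytes;                  // v_off, in bytes
}

// SPARC simm13 is a 13-bit signed immediate, so the single-instruction
// ld_ptr(G3, v_off, G5_method) form only works for v_off in [-4096, 4095];
// larger offsets need the two-instruction sequence mentioned above.
static inline bool fits_simm13(long v) { return v >= -4096 && v <= 4095; }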

