src/cpu/sparc/vm/sparc.ad
*** old/src/cpu/sparc/vm/sparc.ad	Mon Nov 23 21:11:14 2015
--- new/src/cpu/sparc/vm/sparc.ad	Mon Nov 23 21:11:13 2015

*** 999,1009 ****
      }
    }
  #endif
  }
  
! void emit_call_reloc(CodeBuffer &cbuf, intptr_t entry_point, relocInfo::relocType rtype, bool preserve_g2 = false) {
    // The method which records debug information at every safepoint
    // expects the call to be the first instruction in the snippet as
    // it creates a PcDesc structure which tracks the offset of a call
    // from the start of the codeBlob. This offset is computed as
    // code_end() - code_begin() of the code which has been emitted
--- 999,1009 ----
      }
    }
  #endif
  }
  
! void emit_call_reloc(CodeBuffer &cbuf, intptr_t entry_point, RelocationHolder const& rspec, bool preserve_g2 = false) {
    // The method which records debug information at every safepoint
    // expects the call to be the first instruction in the snippet as
    // it creates a PcDesc structure which tracks the offset of a call
    // from the start of the codeBlob. This offset is computed as
    // code_end() - code_begin() of the code which has been emitted
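
Note on the hunk above: emit_call_reloc now takes a RelocationHolder instead of a bare relocInfo::relocType enum. A RelocationHolder is a small value object wrapping a concrete Relocation, so a call site can hand over extra payload (notably a resolved-method index) rather than just the relocation kind. A minimal sketch of the two styles, using the spec() factories from HotSpot's relocInfo.hpp; entry and method_index are hypothetical locals:

    // Old style: only the kind of relocation is communicated.
    emit_call_reloc(cbuf, (intptr_t)entry, relocInfo::runtime_call_type);

    // New style: the relocation is built up front and can carry extra
    // data, e.g. which resolved Method* this call site refers to.
    emit_call_reloc(cbuf, (intptr_t)entry, runtime_call_Relocation::spec());
    emit_call_reloc(cbuf, (intptr_t)entry, static_call_Relocation::spec(method_index));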
*** 1021,1031 ****
  #ifdef ASSERT
    int startpos = __ offset();
  #endif /* ASSERT */
  
!   __ call((address)entry_point, rtype);
  
    if (preserve_g2)   __ delayed()->mov(G2, L7);
    else               __ delayed()->nop();
  
    if (preserve_g2)   __ mov(L7, G2);
--- 1021,1031 ----
  #ifdef ASSERT
    int startpos = __ offset();
  #endif /* ASSERT */
  
!   __ call((address)entry_point, rspec);
  
    if (preserve_g2)   __ delayed()->mov(G2, L7);
    else               __ delayed()->nop();
  
    if (preserve_g2)   __ mov(L7, G2);
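
The body change is the matching one-liner: the relocated call is now emitted against the holder. The surrounding logic is untouched and deals with the SPARC call delay slot: G2 (HotSpot's thread register on SPARC) may be clobbered by the callee, so when preserve_g2 is set it is parked in L7 across the call. A hedged sketch of what the helper emits for preserve_g2 == true (the expected SPARC sequence is in the comments, not verified disassembly):

    __ call((address)entry_point, rspec);  // call <entry>, carrying the relocation
    __ delayed()->mov(G2, L7);             // delay slot: save G2 into L7
    __ mov(L7, G2);                        // after the call returns: restore G2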
*** 2591,2602 ****
  %}
  
  enc_class Java_To_Runtime (method meth) %{    // CALL Java_To_Runtime
    // CALL directly to the runtime
    // The user of this is responsible for ensuring that R_L7 is empty (killed).
!   emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type,
!                   /*preserve_g2=*/true);
  %}
  
  enc_class preserve_SP %{
    MacroAssembler _masm(&cbuf);
    __ mov(SP, L7_mh_SP_save);
--- 2591,2601 ----
  %}
  
  enc_class Java_To_Runtime (method meth) %{    // CALL Java_To_Runtime
    // CALL directly to the runtime
    // The user of this is responsible for ensuring that R_L7 is empty (killed).
!   emit_call_reloc(cbuf, $meth$$method, runtime_call_Relocation::spec(), /*preserve_g2=*/true);
  %}
  
  enc_class preserve_SP %{
    MacroAssembler _masm(&cbuf);
    __ mov(SP, L7_mh_SP_save);
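
For Java_To_Runtime the rewrite is behavior-preserving: runtime_call_Relocation::spec() yields a holder whose relocation kind is exactly the relocInfo::runtime_call_type that was passed before. A sketch of the equivalence, assuming RelocationHolder::type() as declared in relocInfo.hpp:

    RelocationHolder rspec = runtime_call_Relocation::spec();
    assert(rspec.type() == relocInfo::runtime_call_type, "same kind as the old enum argument");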
*** 2609,2625 ****
  enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
    // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
    // who we intended to call.
    if (!_method) {
!     emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type);
!   } else if (_optimized_virtual) {
!     emit_call_reloc(cbuf, $meth$$method, relocInfo::opt_virtual_call_type);
!   } else {
!     emit_call_reloc(cbuf, $meth$$method, relocInfo::static_call_type);
!   }
!   if (_method) {  // Emit stub for static call.
      address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
      // Stub does not fit into scratch buffer if TraceJumps is enabled
      if (stub == NULL && !(TraceJumps && Compile::current()->in_scratch_emit_size())) {
        ciEnv::current()->record_failure("CodeCache is full");
        return;
      }
--- 2608,2625 ----
  enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
    // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
    // who we intended to call.
    if (!_method) {
!     emit_call_reloc(cbuf, $meth$$method, runtime_call_Relocation::spec());
!   } else {
!     int method_index = resolved_method_index(cbuf);
+     RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
+                                                 : static_call_Relocation::spec(method_index);
+     emit_call_reloc(cbuf, $meth$$method, rspec);
+ 
+     // Emit stub for static call.
      address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
      // Stub does not fit into scratch buffer if TraceJumps is enabled
      if (stub == NULL && !(TraceJumps && Compile::current()->in_scratch_emit_size())) {
        ciEnv::current()->record_failure("CodeCache is full");
        return;
      }
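
The static-call hunk does more than swap types: the old three-way branch collapses into two, and the Java case now fetches resolved_method_index(cbuf) so both the opt-virtual and the static relocation can name the resolved callee. A hypothetical helper (not part of the change) that mirrors the new branch:

    // Devirtualized monomorphic calls get an opt_virtual relocation,
    // plain static calls a static_call relocation; both embed the
    // method index so the runtime can identify the resolved Method*.
    static RelocationHolder java_call_spec(bool optimized_virtual, int method_index) {
      return optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
                               : static_call_Relocation::spec(method_index);
    }

Folding the branches also keeps the to-interpreter stub on a single path that covers both kinds of Java static call, as before.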
*** 2636,2646 ****
        // must be invalid_vtable_index, not nonvirtual_vtable_index
        assert(vtable_index == Method::invalid_vtable_index, "correct sentinel value");
        Register G5_ic_reg = reg_to_register_object(Matcher::inline_cache_reg_encode());
        assert(G5_ic_reg == G5_inline_cache_reg, "G5_inline_cache_reg used in assemble_ic_buffer_code()");
        assert(G5_ic_reg == G5_megamorphic_method, "G5_megamorphic_method used in megamorphic call stub");
!       __ ic_call((address)$meth$$method);
      } else {
        assert(!UseInlineCaches, "expect vtable calls only if not using ICs");
        // Just go thru the vtable
        // get receiver klass (receiver already checked for non-null)
        // If we end up going thru a c2i adapter interpreter expects method in G5
--- 2636,2646 ----
        // must be invalid_vtable_index, not nonvirtual_vtable_index
        assert(vtable_index == Method::invalid_vtable_index, "correct sentinel value");
        Register G5_ic_reg = reg_to_register_object(Matcher::inline_cache_reg_encode());
        assert(G5_ic_reg == G5_inline_cache_reg, "G5_inline_cache_reg used in assemble_ic_buffer_code()");
        assert(G5_ic_reg == G5_megamorphic_method, "G5_megamorphic_method used in megamorphic call stub");
!       __ ic_call((address)$meth$$method, /*emit_delay=*/true, resolved_method_index(cbuf));
      } else {
        assert(!UseInlineCaches, "expect vtable calls only if not using ICs");
        // Just go thru the vtable
        // get receiver klass (receiver already checked for non-null)
        // If we end up going thru a c2i adapter interpreter expects method in G5
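
The dynamic-call side threads the same index through MacroAssembler::ic_call, whose method_index parameter defaults to 0. resolved_method_index(cbuf) is presumably supplied by the companion MachCallNode change in this webrev: it records the resolved Method* in the code buffer's oop recorder and returns its index, with 0 meaning "resolve via the bytecode's symbolic info". A sketch from memory of that companion code (treat as illustrative, not authoritative):

    int MachCallNode::resolved_method_index(CodeBuffer* cbuf) const {
      if (_override_symbolic_info) {
        // Attach the resolved Method* to the call site so the VM can
        // use it during resolution instead of bytecode symbolic info.
        assert(_method != NULL, "method should be set");
        assert(_method->constant_encoding()->is_method(), "should point to a Method");
        return cbuf->oop_recorder()->find_index(_method->constant_encoding());
      }
      return 0;  // use symbolic info from the bytecode
    }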
