
src/hotspot/cpu/ppc/templateTable_ppc_64.cpp

rev 48251 : 8193257: PPC64, s390 implementation for Thread-local handshakes
Reviewed-by:

*** 1628,1638 ****
      // Bump bcp to target of JSR.
      __ add(R14_bcp, Rdisp, R14_bcp);
      // Push returnAddress for "ret" on stack.
      __ push_ptr(R17_tos);
      // And away we go!
!     __ dispatch_next(vtos);
      return;
    }

    // --------------------------------------------------------------------------
    // Normal (non-jsr) branch handling
--- 1628,1638 ----
      // Bump bcp to target of JSR.
      __ add(R14_bcp, Rdisp, R14_bcp);
      // Push returnAddress for "ret" on stack.
      __ push_ptr(R17_tos);
      // And away we go!
!     __ dispatch_next(vtos, 0, true);
      return;
    }

    // --------------------------------------------------------------------------
    // Normal (non-jsr) branch handling
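The one-line changes above and in the hunks that follow all pass a third argument of true to dispatch_next at bytecodes that end in a control transfer (jsr here; backward branches, ret, wide_ret, the switch bytecodes and returns below). Given the bug title, that extra argument presumably asks the dispatch code to emit a thread-local safepoint poll before jumping to the next bytecode. A minimal, self-contained C++ sketch of that call-site pattern follows; the assumed dispatch_next signature, emit_safepoint_poll and generate_poll are illustrative stand-ins, not the real InterpreterMacroAssembler API.

    #include <cstdio>

    // Illustrative sketch only: a defaulted boolean parameter lets selected
    // call sites opt in to a safepoint poll while every other dispatch site
    // keeps its old behavior and compiles unchanged.
    enum TosState { vtos };

    static void emit_safepoint_poll() {
      std::printf("  <thread-local safepoint poll would be emitted here>\n");
    }

    // Assumed shape of the extended entry point (names are hypothetical).
    static void dispatch_next(TosState /*state*/, int step = 0, bool generate_poll = false) {
      if (generate_poll) {
        emit_safepoint_poll();   // poll only where the caller asked for it
      }
      std::printf("dispatch next bytecode (step %d)\n", step);
    }

    int main() {
      dispatch_next(vtos);           // ordinary dispatch site: no poll
      dispatch_next(vtos, 0, true);  // return/branch/switch site: poll first
      return 0;
    }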
*** 1641,1651 ****
    __ add(R14_bcp, Rdisp, R14_bcp); // Add to bc addr.

    const bool increment_invocation_counter_for_backward_branches = UseCompiler && UseLoopCounter;
    if (increment_invocation_counter_for_backward_branches) {
      Label Lforward;
-     __ dispatch_prolog(vtos);

      // Check branch direction.
      __ cmpdi(CCR0, Rdisp, 0);
      __ bgt(CCR0, Lforward);

--- 1641,1650 ----
*** 1742,1756 ****
          }
        }
      }
      __ bind(Lforward);
-     __ dispatch_epilog(vtos);
-
-   } else {
-     __ dispatch_next(vtos);
    }
  }

  // Helper function for if_cmp* methods below.
  // Factored out common compare and branch code.
  void TemplateTable::if_cmp_common(Register Rfirst, Register Rsecond, Register Rscratch1, Register Rscratch2,
                                    Condition cc, bool is_jint, bool cmp0) {
--- 1741,1752 ----
          }
        }
      }
      __ bind(Lforward);
    }
+   __ dispatch_next(vtos, 0, true);
  }

  // Helper function for if_cmp* methods below.
  // Factored out common compare and branch code.
  void TemplateTable::if_cmp_common(Register Rfirst, Register Rsecond, Register Rscratch1, Register Rscratch2,
                                    Condition cc, bool is_jint, bool cmp0) {
*** 1827,1837 ****
    __ profile_ret(vtos, R17_tos, R11_scratch1, R12_scratch2);

    __ ld(R11_scratch1, in_bytes(Method::const_offset()), R19_method);
    __ add(R11_scratch1, R17_tos, R11_scratch1);
    __ addi(R14_bcp, R11_scratch1, in_bytes(ConstMethod::codes_offset()));
!   __ dispatch_next(vtos);
  }

  void TemplateTable::wide_ret() {
    transition(vtos, vtos);

--- 1823,1833 ----
    __ profile_ret(vtos, R17_tos, R11_scratch1, R12_scratch2);

    __ ld(R11_scratch1, in_bytes(Method::const_offset()), R19_method);
    __ add(R11_scratch1, R17_tos, R11_scratch1);
    __ addi(R14_bcp, R11_scratch1, in_bytes(ConstMethod::codes_offset()));
!   __ dispatch_next(vtos, 0, true);
  }

  void TemplateTable::wide_ret() {
    transition(vtos, vtos);

*** 1844,1854 ****
    __ profile_ret(vtos, R17_tos, Rscratch1, R12_scratch2);
    // Tos now contains the bci, compute the bcp from that.
    __ ld(Rscratch1, in_bytes(Method::const_offset()), R19_method);
    __ addi(Rscratch2, R17_tos, in_bytes(ConstMethod::codes_offset()));
    __ add(R14_bcp, Rscratch1, Rscratch2);
!   __ dispatch_next(vtos);
  }

  void TemplateTable::tableswitch() {
    transition(itos, vtos);

--- 1840,1850 ----
    __ profile_ret(vtos, R17_tos, Rscratch1, R12_scratch2);
    // Tos now contains the bci, compute the bcp from that.
    __ ld(Rscratch1, in_bytes(Method::const_offset()), R19_method);
    __ addi(Rscratch2, R17_tos, in_bytes(ConstMethod::codes_offset()));
    __ add(R14_bcp, Rscratch1, Rscratch2);
!   __ dispatch_next(vtos, 0, true);
  }

  void TemplateTable::tableswitch() {
    transition(itos, vtos);

*** 1894,1904 ****
    __ get_u4(Roffset, Rdef_offset_addr, 0, InterpreterMacroAssembler::Signed);

    __ bind(Ldispatch);

    __ add(R14_bcp, Roffset, R14_bcp);
!   __ dispatch_next(vtos);
  }

  void TemplateTable::lookupswitch() {
    transition(itos, itos);
    __ stop("lookupswitch bytecode should have been rewritten");
--- 1890,1900 ----
    __ get_u4(Roffset, Rdef_offset_addr, 0, InterpreterMacroAssembler::Signed);

    __ bind(Ldispatch);

    __ add(R14_bcp, Roffset, R14_bcp);
!   __ dispatch_next(vtos, 0, true);
  }

  void TemplateTable::lookupswitch() {
    transition(itos, itos);
    __ stop("lookupswitch bytecode should have been rewritten");
*** 1958,1968 ****
      __ profile_switch_case(Rcurrent_pair, Rcount /*scratch*/, Rdef_offset_addr/*scratch*/, Rscratch);
    }

    __ bind(Lcontinue_execution);
    __ add(R14_bcp, Roffset, R14_bcp);
!   __ dispatch_next(vtos);
  }

  // Table switch using binary search (value/offset pairs are ordered).
  // Bytecode stream format:
  // Bytecode (1) | 4-byte padding | default offset (4) | count (4) | value/offset pair1 (8) | value/offset pair2 (8) | ...
--- 1954,1964 ----
      __ profile_switch_case(Rcurrent_pair, Rcount /*scratch*/, Rdef_offset_addr/*scratch*/, Rscratch);
    }

    __ bind(Lcontinue_execution);
    __ add(R14_bcp, Roffset, R14_bcp);
!   __ dispatch_next(vtos, 0, true);
  }

  // Table switch using binary search (value/offset pairs are ordered).
  // Bytecode stream format:
  // Bytecode (1) | 4-byte padding | default offset (4) | count (4) | value/offset pair1 (8) | value/offset pair2 (8) | ...
*** 2091,2101 ****

    __ bind(continue_execution);

    __ extsw(Rj, Rj);
    __ add(R14_bcp, Rj, R14_bcp);
!   __ dispatch_next(vtos);
  }

  void TemplateTable::_return(TosState state) {
    transition(state, state);
    assert(_desc->calls_vm(),
--- 2087,2097 ----

    __ bind(continue_execution);

    __ extsw(Rj, Rj);
    __ add(R14_bcp, Rj, R14_bcp);
!   __ dispatch_next(vtos, 0, true);
  }

  void TemplateTable::_return(TosState state) {
    transition(state, state);
    assert(_desc->calls_vm(),
*** 2122,2131 ****
--- 2118,2138 ----

      __ align(32, 12);
      __ bind(Lskip_register_finalizer);
    }

+   if (SafepointMechanism::uses_thread_local_poll() && _desc->bytecode() != Bytecodes::_return_register_finalizer) {
+     Label no_safepoint;
+     __ ld(R11_scratch1, in_bytes(Thread::polling_page_offset()), R16_thread);
+     __ andi_(R11_scratch1, R11_scratch1, SafepointMechanism::poll_bit());
+     __ beq(CCR0, no_safepoint);
+     __ push(state);
+     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint));
+     __ pop(state);
+     __ bind(no_safepoint);
+   }
+
    // Move the result value into the correct register and remove memory stack frame.
    __ remove_activation(state, /* throw_monitor_exception */ true);
    // Restoration of lr done by remove_activation.
    switch (state) {
      // Narrow result if state is itos but result type is smaller.
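The final hunk adds the only new code block in this file: on a return that is not _return_register_finalizer, the template loads the thread-local polling word from R16_thread at Thread::polling_page_offset(), tests SafepointMechanism::poll_bit() with andi_, and, only if the bit is set, pushes the TOS value and calls InterpreterRuntime::at_safepoint so a pending handshake is honored before the activation is removed. The stand-alone C++ sketch below mirrors that check-bit-then-slow-path shape; polling_word, POLL_BIT and handle_safepoint are illustrative names, not HotSpot's.

    #include <atomic>
    #include <cstdint>
    #include <cstdio>

    // Illustrative stand-ins for the per-thread polling word and poll bit;
    // in HotSpot these sit behind Thread::polling_page_offset() and
    // SafepointMechanism::poll_bit().
    static constexpr uintptr_t POLL_BIT = 1;
    static std::atomic<uintptr_t> polling_word{0};

    // Slow path taken when a handshake/safepoint is pending, playing the
    // role of InterpreterRuntime::at_safepoint in the hunk above.
    static void handle_safepoint() {
      std::printf("pending handshake noticed before return\n");
      polling_word.fetch_and(~POLL_BIT, std::memory_order_relaxed);
    }

    // Fast-path check performed before removing the activation: load the
    // thread-local word, test the bit, take the slow path only if it is set.
    static inline void return_poll() {
      if (polling_word.load(std::memory_order_relaxed) & POLL_BIT) {
        handle_safepoint();
      }
    }

    int main() {
      return_poll();                                                // bit clear: falls through
      polling_word.fetch_or(POLL_BIT, std::memory_order_relaxed);   // another thread arms the poll
      return_poll();                                                // bit set: slow path runs
      return 0;
    }

The generated fast path stays at a load, an andi_ and a not-taken beq, so an unarmed poll adds only a few instructions to each return.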