
src/cpu/ppc/vm/templateInterpreterGenerator_ppc.cpp

rev 11567 : 8160245: C1: Clean up platform #defines in c1_LIR.hpp.
Summary: Also add fnoreg on x86, LIR_Address constructor without scale, and clean up templateInterpreterGenerator.hpp.

*** 850,862 ****
  // without going through the signal handler, i.e., reserved and yellow zones
  // will not be made usable. The shadow zone must suffice to handle the
  // overflow.
  //
  // Kills Rmem_frame_size, Rscratch1.
! void TemplateInterpreterGenerator::generate_stack_overflow_check(Register Rmem_frame_size, Register Rscratch1) {
    Label done;
    assert_different_registers(Rmem_frame_size, Rscratch1);
    BLOCK_COMMENT("stack_overflow_check_with_compare {");
    __ sub(Rmem_frame_size, R1_SP, Rmem_frame_size);
    __ ld(Rscratch1, thread_(stack_overflow_limit));
    __ cmpld(CCR0/*is_stack_overflow*/, Rmem_frame_size, Rscratch1);
--- 850,863 ----
  // without going through the signal handler, i.e., reserved and yellow zones
  // will not be made usable. The shadow zone must suffice to handle the
  // overflow.
  //
  // Kills Rmem_frame_size, Rscratch1.
! void TemplateInterpreterGenerator::generate_stack_overflow_check(Register Rmem_frame_size, Register Rscratch1, Register unused) {
    Label done;
    assert_different_registers(Rmem_frame_size, Rscratch1);
+   assert(unused == noreg, "not needed on ppc");
    BLOCK_COMMENT("stack_overflow_check_with_compare {");
    __ sub(Rmem_frame_size, R1_SP, Rmem_frame_size);
    __ ld(Rscratch1, thread_(stack_overflow_limit));
    __ cmpld(CCR0/*is_stack_overflow*/, Rmem_frame_size, Rscratch1);
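Note on the hunk above: the new Register parameter is unused on ppc; its purpose is presumably to let all platforms share a single declaration of generate_stack_overflow_check in the common templateInterpreterGenerator.hpp (some ports need a second scratch register, ppc does not). The shared header is not part of this file's diff, so the following is only a sketch of the assumed shared declaration, with parameter names invented for illustration:

    // Hypothetical sketch -- the shared header is not shown in this webrev and
    // the parameter names are assumptions. The idea: one declaration for all
    // platforms; ports that do not need the extra scratch register (like ppc
    // above) are handed noreg and assert on it.
    void generate_stack_overflow_check(Register Rframe_size,
                                       Register Rscratch1,
                                       Register Rscratch2);

    // A ppc call site would then pass noreg for the unused register, e.g.:
    //   generate_stack_overflow_check(Rmem_frame_size, Rscratch1, noreg);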
*** 879,892 ****
    __ align(32, 12);
    __ bind(done);
    BLOCK_COMMENT("} stack_overflow_check_with_compare");
  }
  
- void TemplateInterpreterGenerator::unlock_method(bool check_exceptions) {
-   __ unlock_object(R26_monitor, check_exceptions);
- }
- 
  // Lock the current method, interpreter register window must be set up!
  void TemplateInterpreterGenerator::lock_method(Register Rflags, Register Rscratch1, Register Rscratch2,
                                                 bool flags_preloaded) {
    const Register Robj_to_lock = Rscratch2;
    {
--- 880,889 ----
*** 1564,1574 ****
  
    // Handle exceptions
    if (synchronized) {
      // Don't check for exceptions since we're still in the i2n frame. Do that
      // manually afterwards.
!     unlock_method(false);
    }
  
    // Reset active handles after returning from native.
    // thread->active_handles()->clear();
    __ ld(active_handles, thread_(active_handles));
--- 1561,1572 ----
  
    // Handle exceptions
    if (synchronized) {
      // Don't check for exceptions since we're still in the i2n frame. Do that
      // manually afterwards.
!     __ unlock_object(R26_monitor, false); // Can also unlock methods.
!   }
  
    // Reset active handles after returning from native.
    // thread->active_handles()->clear();
    __ ld(active_handles, thread_(active_handles));
*** 1607,1617 ****
    BIND(exception_return_sync_check);
  
    if (synchronized) {
      // Don't check for exceptions since we're still in the i2n frame. Do that
      // manually afterwards.
!     unlock_method(false);
    }
    BIND(exception_return_sync_check_already_unlocked);
  
    const Register return_pc = R31;
--- 1605,1615 ----
    BIND(exception_return_sync_check);
  
    if (synchronized) {
      // Don't check for exceptions since we're still in the i2n frame. Do that
      // manually afterwards.
!     __ unlock_object(R26_monitor, false); // Can also unlock methods.
    }
    BIND(exception_return_sync_check_already_unlocked);
  
    const Register return_pc = R31;