205 OopMap* RegisterSaver::push_frame_reg_args_and_save_live_registers(MacroAssembler* masm,
206 int* out_frame_size_in_bytes,
207 bool generate_oop_map,
208 int return_pc_adjustment,
209 ReturnPCLocation return_pc_location) {
210 // Push an abi_reg_args-frame and store all registers which may be live.
211 // If requested, create an OopMap: Record volatile registers as
212 // callee-save values in an OopMap so their save locations will be
213 // propagated to the RegisterMap of the caller frame during
214 // StackFrameStream construction (needed for deoptimization; see
215 // compiledVFrame::create_stack_value).
216 // If return_pc_adjustment != 0 adjust the return pc by return_pc_adjustment.
217
218 int i;
219 int offset;
220
221 // calculate frame size
222 const int regstosave_num = sizeof(RegisterSaver_LiveRegs) /
223 sizeof(RegisterSaver::LiveRegType);
224 const int register_save_size = regstosave_num * reg_size;
225 const int frame_size_in_bytes = align_up(register_save_size, frame::alignment_in_bytes)
226 + frame::abi_reg_args_size;
227 *out_frame_size_in_bytes = frame_size_in_bytes;
228 const int frame_size_in_slots = frame_size_in_bytes / sizeof(jint);
229 const int register_save_offset = frame_size_in_bytes - register_save_size;
230
231 // OopMap frame size is in c2 stack slots (sizeof(jint)) not bytes or words.
232 OopMap* map = generate_oop_map ? new OopMap(frame_size_in_slots, 0) : NULL;
233
234 BLOCK_COMMENT("push_frame_reg_args_and_save_live_registers {");
235
236 // Save r31 in the last slot of the not yet pushed frame so that we
237 // can use it as scratch reg.
238 __ std(R31, -reg_size, R1_SP);
239 assert(-reg_size == register_save_offset - frame_size_in_bytes + ((regstosave_num-1)*reg_size),
240 "consistency check");
241
242 // save the flags
243 // Do the save_LR_CR by hand and adjust the return pc if requested.
244 __ mfcr(R31);
245 __ std(R31, _abi(cr), R1_SP);
857 const VMRegPair *regs,
858 Label& call_interpreter,
859 const Register& ientry) {
860
861 address c2i_entrypoint;
862
863 const Register sender_SP = R21_sender_SP; // == R21_tmp1
864 const Register code = R22_tmp2;
865 //const Register ientry = R23_tmp3;
866 const Register value_regs[] = { R24_tmp4, R25_tmp5, R26_tmp6 };
867 const int num_value_regs = sizeof(value_regs) / sizeof(Register);
868 int value_regs_index = 0;
869
870 const Register return_pc = R27_tmp7;
871 const Register tmp = R28_tmp8;
872
873 assert_different_registers(sender_SP, code, ientry, return_pc, tmp);
874
875 // Adapter needs TOP_IJAVA_FRAME_ABI.
876 const int adapter_size = frame::top_ijava_frame_abi_size +
877 align_up(total_args_passed * wordSize, frame::alignment_in_bytes);
878
879 // regular (verified) c2i entry point
880 c2i_entrypoint = __ pc();
881
882 // Does compiled code exist? If yes, patch the caller's callsite.
883 __ ld(code, method_(code));
884 __ cmpdi(CCR0, code, 0);
885 __ ld(ientry, method_(interpreter_entry)); // preloaded
886 __ beq(CCR0, call_interpreter);
887
888
889 // Patch caller's callsite, method_(code) was not NULL which means that
890 // compiled code exists.
891 __ mflr(return_pc);
892 __ std(return_pc, _abi(lr), R1_SP);
893 RegisterSaver::push_frame_and_save_argument_registers(masm, tmp, adapter_size, total_args_passed, regs);
894
895 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::fixup_callers_callsite), R19_method, return_pc);
896
897 RegisterSaver::restore_argument_registers_and_pop_frame(masm, adapter_size, total_args_passed, regs);
2187 __ addi(r_box, R1_SP, lock_offset);
2188
2189 # ifdef ASSERT
2190 if (UseBiasedLocking) {
2191 // Making the box point to itself will make it clear it went unused
2192 // but also be obviously invalid.
2193 __ std(r_box, 0, r_box);
2194 }
2195 # endif // ASSERT
2196
2197 // Try fastpath for locking.
2198 // fast_lock kills r_temp_1, r_temp_2, r_temp_3.
2199 __ compiler_fast_lock_object(r_flag, r_oop, r_box, r_temp_1, r_temp_2, r_temp_3);
2200 __ beq(r_flag, locked);
2201
2202 // None of the above fast optimizations worked so we have to get into the
2203 // slow case of monitor enter. Inline a special case of call_VM that
2204 // disallows any pending_exception.
2205
2206 // Save argument registers and leave room for C-compatible ABI_REG_ARGS.
2207 int frame_size = frame::abi_reg_args_size +
2208 align_up(total_c_args * wordSize, frame::alignment_in_bytes);
2209 __ mr(R11_scratch1, R1_SP);
2210 RegisterSaver::push_frame_and_save_argument_registers(masm, R12_scratch2, frame_size, total_c_args, out_regs, out_regs2);
2211
2212 // Do the call.
2213 __ set_last_Java_frame(R11_scratch1, r_return_pc);
2214 assert(r_return_pc->is_nonvolatile(), "expecting return pc to be in non-volatile register");
2215 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::complete_monitor_locking_C), r_oop, r_box, R16_thread);
2216 __ reset_last_Java_frame();
2217
2218 RegisterSaver::restore_argument_registers_and_pop_frame(masm, frame_size, total_c_args, out_regs, out_regs2);
2219
2220 __ asm_assert_mem8_is_zero(thread_(pending_exception),
2221 "no pending exception allowed on exit from SharedRuntime::complete_monitor_locking_C", 0);
2222
2223 __ bind(locked);
2224 }
2225
2226
2227 // Publish thread state
2228 // --------------------------------------------------------------------------
2554 frame_done_pc-start_pc,
2555 stack_slots / VMRegImpl::slots_per_word,
2556 (method_is_static ? in_ByteSize(klass_offset) : in_ByteSize(receiver_offset)),
2557 in_ByteSize(lock_offset),
2558 oop_maps);
2559
2560 if (is_critical_native) {
2561 nm->set_lazy_critical_native(true);
2562 }
2563
2564 return nm;
2565 #else
2566 ShouldNotReachHere();
2567 return NULL;
2568 #endif // COMPILER2
2569 }
2570
2571 // This function returns the adjust size (in number of words) to a c2i adapter
2572 // activation for use during deoptimization.
2573 int Deoptimization::last_frame_adjust(int callee_parameters, int callee_locals) {
2574 return align_up((callee_locals - callee_parameters) * Interpreter::stackElementWords, frame::alignment_in_bytes);
2575 }
2576
2577 uint SharedRuntime::out_preserve_stack_slots() {
2578 #if defined(COMPILER1) || defined(COMPILER2)
2579 return frame::jit_out_preserve_size / VMRegImpl::stack_slot_size;
2580 #else
2581 return 0;
2582 #endif
2583 }
2584
2585 #if defined(COMPILER1) || defined(COMPILER2)
2586 // Frame generation for deopt and uncommon trap blobs.
2587 static void push_skeleton_frame(MacroAssembler* masm, bool deopt,
2588 /* Read */
2589 Register unroll_block_reg,
2590 /* Update */
2591 Register frame_sizes_reg,
2592 Register number_of_frames_reg,
2593 Register pcs_reg,
2594 /* Invalidate */
|
205 OopMap* RegisterSaver::push_frame_reg_args_and_save_live_registers(MacroAssembler* masm,
206 int* out_frame_size_in_bytes,
207 bool generate_oop_map,
208 int return_pc_adjustment,
209 ReturnPCLocation return_pc_location) {
210 // Push an abi_reg_args-frame and store all registers which may be live.
211 // If requested, create an OopMap: Record volatile registers as
212 // callee-save values in an OopMap so their save locations will be
213 // propagated to the RegisterMap of the caller frame during
214 // StackFrameStream construction (needed for deoptimization; see
215 // compiledVFrame::create_stack_value).
216 // If return_pc_adjustment != 0 adjust the return pc by return_pc_adjustment.
217
218 int i;
219 int offset;
220
221 // calculate frame size
222 const int regstosave_num = sizeof(RegisterSaver_LiveRegs) /
223 sizeof(RegisterSaver::LiveRegType);
224 const int register_save_size = regstosave_num * reg_size;
225 const int frame_size_in_bytes = align_up(register_save_size, (int)frame::alignment_in_bytes)
226 + frame::abi_reg_args_size;
227 *out_frame_size_in_bytes = frame_size_in_bytes;
228 const int frame_size_in_slots = frame_size_in_bytes / sizeof(jint);
229 const int register_save_offset = frame_size_in_bytes - register_save_size;
230
231 // OopMap frame size is in c2 stack slots (sizeof(jint)) not bytes or words.
232 OopMap* map = generate_oop_map ? new OopMap(frame_size_in_slots, 0) : NULL;
233
234 BLOCK_COMMENT("push_frame_reg_args_and_save_live_registers {");
235
236 // Save r31 in the last slot of the not yet pushed frame so that we
237 // can use it as scratch reg.
238 __ std(R31, -reg_size, R1_SP);
239 assert(-reg_size == register_save_offset - frame_size_in_bytes + ((regstosave_num-1)*reg_size),
240 "consistency check");
241
242 // save the flags
243 // Do the save_LR_CR by hand and adjust the return pc if requested.
244 __ mfcr(R31);
245 __ std(R31, _abi(cr), R1_SP);
857 const VMRegPair *regs,
858 Label& call_interpreter,
859 const Register& ientry) {
860
861 address c2i_entrypoint;
862
863 const Register sender_SP = R21_sender_SP; // == R21_tmp1
864 const Register code = R22_tmp2;
865 //const Register ientry = R23_tmp3;
866 const Register value_regs[] = { R24_tmp4, R25_tmp5, R26_tmp6 };
867 const int num_value_regs = sizeof(value_regs) / sizeof(Register);
868 int value_regs_index = 0;
869
870 const Register return_pc = R27_tmp7;
871 const Register tmp = R28_tmp8;
872
873 assert_different_registers(sender_SP, code, ientry, return_pc, tmp);
874
875 // Adapter needs TOP_IJAVA_FRAME_ABI.
876 const int adapter_size = frame::top_ijava_frame_abi_size +
877 align_up(total_args_passed * wordSize, (int)frame::alignment_in_bytes);
878
879 // regular (verified) c2i entry point
880 c2i_entrypoint = __ pc();
881
882 // Does compiled code exist? If yes, patch the caller's callsite.
883 __ ld(code, method_(code));
884 __ cmpdi(CCR0, code, 0);
885 __ ld(ientry, method_(interpreter_entry)); // preloaded
886 __ beq(CCR0, call_interpreter);
887
888
889 // Patch caller's callsite, method_(code) was not NULL which means that
890 // compiled code exists.
891 __ mflr(return_pc);
892 __ std(return_pc, _abi(lr), R1_SP);
893 RegisterSaver::push_frame_and_save_argument_registers(masm, tmp, adapter_size, total_args_passed, regs);
894
895 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::fixup_callers_callsite), R19_method, return_pc);
896
897 RegisterSaver::restore_argument_registers_and_pop_frame(masm, adapter_size, total_args_passed, regs);
2187 __ addi(r_box, R1_SP, lock_offset);
2188
2189 # ifdef ASSERT
2190 if (UseBiasedLocking) {
2191 // Making the box point to itself will make it clear it went unused
2192 // but also be obviously invalid.
2193 __ std(r_box, 0, r_box);
2194 }
2195 # endif // ASSERT
2196
2197 // Try fastpath for locking.
2198 // fast_lock kills r_temp_1, r_temp_2, r_temp_3.
2199 __ compiler_fast_lock_object(r_flag, r_oop, r_box, r_temp_1, r_temp_2, r_temp_3);
2200 __ beq(r_flag, locked);
2201
2202 // None of the above fast optimizations worked so we have to get into the
2203 // slow case of monitor enter. Inline a special case of call_VM that
2204 // disallows any pending_exception.
2205
2206 // Save argument registers and leave room for C-compatible ABI_REG_ARGS.
2207 int frame_size = frame::abi_reg_args_size + align_up(total_c_args * wordSize, (int)frame::alignment_in_bytes);
2208 __ mr(R11_scratch1, R1_SP);
2209 RegisterSaver::push_frame_and_save_argument_registers(masm, R12_scratch2, frame_size, total_c_args, out_regs, out_regs2);
2210
2211 // Do the call.
2212 __ set_last_Java_frame(R11_scratch1, r_return_pc);
2213 assert(r_return_pc->is_nonvolatile(), "expecting return pc to be in non-volatile register");
2214 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::complete_monitor_locking_C), r_oop, r_box, R16_thread);
2215 __ reset_last_Java_frame();
2216
2217 RegisterSaver::restore_argument_registers_and_pop_frame(masm, frame_size, total_c_args, out_regs, out_regs2);
2218
2219 __ asm_assert_mem8_is_zero(thread_(pending_exception),
2220 "no pending exception allowed on exit from SharedRuntime::complete_monitor_locking_C", 0);
2221
2222 __ bind(locked);
2223 }
2224
2225
2226 // Publish thread state
2227 // --------------------------------------------------------------------------
2553 frame_done_pc-start_pc,
2554 stack_slots / VMRegImpl::slots_per_word,
2555 (method_is_static ? in_ByteSize(klass_offset) : in_ByteSize(receiver_offset)),
2556 in_ByteSize(lock_offset),
2557 oop_maps);
2558
2559 if (is_critical_native) {
2560 nm->set_lazy_critical_native(true);
2561 }
2562
2563 return nm;
2564 #else
2565 ShouldNotReachHere();
2566 return NULL;
2567 #endif // COMPILER2
2568 }
2569
2570 // This function returns the adjust size (in number of words) to a c2i adapter
2571 // activation for use during deoptimization.
2572 int Deoptimization::last_frame_adjust(int callee_parameters, int callee_locals) {
2573 return align_up((callee_locals - callee_parameters) * Interpreter::stackElementWords, (int)frame::alignment_in_bytes);
2574 }
2575
2576 uint SharedRuntime::out_preserve_stack_slots() {
2577 #if defined(COMPILER1) || defined(COMPILER2)
2578 return frame::jit_out_preserve_size / VMRegImpl::stack_slot_size;
2579 #else
2580 return 0;
2581 #endif
2582 }
2583
2584 #if defined(COMPILER1) || defined(COMPILER2)
2585 // Frame generation for deopt and uncommon trap blobs.
2586 static void push_skeleton_frame(MacroAssembler* masm, bool deopt,
2587 /* Read */
2588 Register unroll_block_reg,
2589 /* Update */
2590 Register frame_sizes_reg,
2591 Register number_of_frames_reg,
2592 Register pcs_reg,
2593 /* Invalidate */
|