src/cpu/aarch64/vm/interp_masm_aarch64.cpp
Index
Unified diffs
Context diffs
Sdiffs
Patch
New
Old
Previous File
Next File
*** old/src/cpu/aarch64/vm/interp_masm_aarch64.cpp Thu Dec 17 17:51:52 2015
--- new/src/cpu/aarch64/vm/interp_masm_aarch64.cpp Thu Dec 17 17:51:52 2015
*** 45,56 ****
--- 45,54 ----
void InterpreterMacroAssembler::jump_to_entry(address entry) {
assert(entry, "Entry must have been generated by now");
b(entry);
}
#ifndef CC_INTERP
void InterpreterMacroAssembler::check_and_handle_popframe(Register java_thread) {
if (JvmtiExport::can_pop_frame()) {
Label L;
// Initiate popframe handling only if it is not already being
// processed. If the flag has the popframe_processing bit set, it
*** 593,604 ****
--- 591,600 ----
// compiled code the saved sender SP was saved in sender_sp, so this
// restores it.
andr(sp, esp, -16);
}
#endif // CC_INTERP
// Lock object
//
// Args:
// c_rarg1: BasicObjectLock to be used for locking
//
*** 756,767 ****
--- 752,761 ----
restore_bcp();
}
}
#ifndef CC_INTERP
void InterpreterMacroAssembler::test_method_data_pointer(Register mdp,
Label& zero_continue) {
assert(ProfileInterpreter, "must be profiling interpreter");
ldr(mdp, Address(rfp, frame::interpreter_frame_mdp_offset * wordSize));
cbz(mdp, zero_continue);
*** 1343,1353 ****
--- 1337,1346 ----
MacroAssembler::verify_oop(reg);
}
}
void InterpreterMacroAssembler::verify_FPU(int stack_depth, TosState state) { ; }
#endif // !CC_INTERP
void InterpreterMacroAssembler::notify_method_entry() {
// Whenever JVMTI is interp_only_mode, method entry/exit events are sent to
// track stack depth. If it is possible to enter interp_only_mode we add
*** 1390,1417 ****
--- 1383,1409 ----
// Note: frame::interpreter_frame_result has a dependency on how the
// method result is saved across the call to post_method_exit. If this
// is changed then the interpreter_frame_result implementation will
// need to be updated too.
// For c++ interpreter the result is always stored at a known location in the frame
// template interpreter will leave it on the top of the stack.
NOT_CC_INTERP(push(state);)
+ // template interpreter will leave the result on the top of the stack.
+ push(state);
ldrw(r3, Address(rthread, JavaThread::interp_only_mode_offset()));
cbz(r3, L);
call_VM(noreg,
CAST_FROM_FN_PTR(address, InterpreterRuntime::post_method_exit));
bind(L);
! NOT_CC_INTERP(pop(state));
}
{
SkipIfEqual skip(this, &DTraceMethodProbes, false);
! NOT_CC_INTERP(push(state));
get_method(c_rarg1);
call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit),
rthread, c_rarg1);
! NOT_CC_INTERP(pop(state));
}
}
// Jump if ((*counter_addr += increment) & mask) satisfies the condition.
src/cpu/aarch64/vm/interp_masm_aarch64.cpp
Index
Unified diffs
Context diffs
Sdiffs
Patch
New
Old
Previous File
Next File