1683
1684   // Now handlize the static class mirror; it's known not-null.
1685 __ str(c_rarg1, Address(sp, klass_offset));
1686 map->set_oop(VMRegImpl::stack2reg(klass_slot_offset));
1687
1688 // Now get the handle
1689 __ lea(c_rarg1, Address(sp, klass_offset));
1690 // and protect the arg if we must spill
1691 c_arg--;
1692 }
1693
1694 // Change state to native (we save the return address in the thread, since it might not
1695 // be pushed on the stack when we do a stack traversal).
1696 // We use the same pc/oopMap repeatedly when we call out
1697
1698 Label native_return;
1699 __ set_last_Java_frame(sp, noreg, native_return, rscratch1);
1700
1701 Label dtrace_method_entry, dtrace_method_entry_done;
1702 {
1703 unsigned long offset;
1704 __ adrp(rscratch1, ExternalAddress((address)&DTraceMethodProbes), offset);
1705 __ ldrb(rscratch1, Address(rscratch1, offset));
1706 __ cbnzw(rscratch1, dtrace_method_entry);
1707 __ bind(dtrace_method_entry_done);
1708 }
1709
1710 // RedefineClasses() tracing support for obsolete method entry
1711 if (log_is_enabled(Trace, redefine, class, obsolete)) {
1712 // protect the args we've loaded
1713 save_args(masm, total_c_args, c_arg, out_regs);
1714 __ mov_metadata(c_rarg1, method());
1715 __ call_VM_leaf(
1716 CAST_FROM_FN_PTR(address, SharedRuntime::rc_trace_method_entry),
1717 rthread, c_rarg1);
1718 restore_args(masm, total_c_args, c_arg, out_regs);
1719 }
1720
1721 // Lock a synchronized method
1722
1723 // Register definitions used by locking and unlocking
1897 __ lea(r0, Address(sp, lock_slot_offset * VMRegImpl::stack_slot_size));
1898 // get old displaced header
1899 __ ldr(old_hdr, Address(r0, 0));
1900
1901 // Atomic swap old header if oop still contains the stack lock
1902 Label succeed;
1903 __ cmpxchg_obj_header(r0, old_hdr, obj_reg, rscratch1, succeed, &slow_path_unlock);
1904 __ bind(succeed);
1905
1906 // slow path re-enters here
1907 __ bind(unlock_done);
1908 if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
1909 restore_native_result(masm, ret_type, stack_slots);
1910 }
1911
1912 __ bind(done);
1913 }
1914
1915 Label dtrace_method_exit, dtrace_method_exit_done;
1916 {
1917 unsigned long offset;
1918 __ adrp(rscratch1, ExternalAddress((address)&DTraceMethodProbes), offset);
1919 __ ldrb(rscratch1, Address(rscratch1, offset));
1920 __ cbnzw(rscratch1, dtrace_method_exit);
1921 __ bind(dtrace_method_exit_done);
1922 }
1923
1924 __ reset_last_Java_frame(false);
1925
1926 // Unbox oop result, e.g. JNIHandles::resolve result.
1927 if (is_reference_type(ret_type)) {
1928 __ resolve_jobject(r0, rthread, rscratch2);
1929 }
1930
1931 if (CheckJNICalls) {
1932 // clear_pending_jni_exception_check
1933 __ str(zr, Address(rthread, JavaThread::pending_jni_exception_check_fn_offset()));
1934 }
1935
1936 if (!is_critical_native) {
1937 // reset handle block
|
1683
1684   // Now handlize the static class mirror; it's known not-null.
1685 __ str(c_rarg1, Address(sp, klass_offset));
1686 map->set_oop(VMRegImpl::stack2reg(klass_slot_offset));
1687
1688 // Now get the handle
1689 __ lea(c_rarg1, Address(sp, klass_offset));
1690 // and protect the arg if we must spill
1691 c_arg--;
1692 }
1693
1694 // Change state to native (we save the return address in the thread, since it might not
1695 // be pushed on the stack when we do a stack traversal).
1696 // We use the same pc/oopMap repeatedly when we call out
1697
1698 Label native_return;
1699 __ set_last_Java_frame(sp, noreg, native_return, rscratch1);
1700
1701 Label dtrace_method_entry, dtrace_method_entry_done;
1702 {
1703 uint64_t offset;
1704 __ adrp(rscratch1, ExternalAddress((address)&DTraceMethodProbes), offset);
1705 __ ldrb(rscratch1, Address(rscratch1, offset));
1706 __ cbnzw(rscratch1, dtrace_method_entry);
1707 __ bind(dtrace_method_entry_done);
1708 }
1709
1710 // RedefineClasses() tracing support for obsolete method entry
1711 if (log_is_enabled(Trace, redefine, class, obsolete)) {
1712 // protect the args we've loaded
1713 save_args(masm, total_c_args, c_arg, out_regs);
1714 __ mov_metadata(c_rarg1, method());
1715 __ call_VM_leaf(
1716 CAST_FROM_FN_PTR(address, SharedRuntime::rc_trace_method_entry),
1717 rthread, c_rarg1);
1718 restore_args(masm, total_c_args, c_arg, out_regs);
1719 }
1720
1721 // Lock a synchronized method
1722
1723 // Register definitions used by locking and unlocking
1897 __ lea(r0, Address(sp, lock_slot_offset * VMRegImpl::stack_slot_size));
1898 // get old displaced header
1899 __ ldr(old_hdr, Address(r0, 0));
1900
1901 // Atomic swap old header if oop still contains the stack lock
1902 Label succeed;
1903 __ cmpxchg_obj_header(r0, old_hdr, obj_reg, rscratch1, succeed, &slow_path_unlock);
1904 __ bind(succeed);
1905
1906 // slow path re-enters here
1907 __ bind(unlock_done);
1908 if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
1909 restore_native_result(masm, ret_type, stack_slots);
1910 }
1911
1912 __ bind(done);
1913 }
1914
1915 Label dtrace_method_exit, dtrace_method_exit_done;
1916 {
1917 uint64_t offset;
1918 __ adrp(rscratch1, ExternalAddress((address)&DTraceMethodProbes), offset);
1919 __ ldrb(rscratch1, Address(rscratch1, offset));
1920 __ cbnzw(rscratch1, dtrace_method_exit);
1921 __ bind(dtrace_method_exit_done);
1922 }
1923
1924 __ reset_last_Java_frame(false);
1925
1926 // Unbox oop result, e.g. JNIHandles::resolve result.
1927 if (is_reference_type(ret_type)) {
1928 __ resolve_jobject(r0, rthread, rscratch2);
1929 }
1930
1931 if (CheckJNICalls) {
1932 // clear_pending_jni_exception_check
1933 __ str(zr, Address(rthread, JavaThread::pending_jni_exception_check_fn_offset()));
1934 }
1935
1936 if (!is_critical_native) {
1937 // reset handle block
|