src/hotspot/cpu/arm/sharedRuntime_arm.cpp

rev 51719 : [mq]: 8210676


1549   __ membar(MacroAssembler::StoreStore, Rtemp);
1550 #endif // !AARCH64
1551 
1552   // RedefineClasses() tracing support for obsolete method entry
1553   if (log_is_enabled(Trace, redefine, class, obsolete)) {
1554 #ifdef AARCH64
1555     __ NOT_TESTED();
1556 #endif
1557     __ save_caller_save_registers();
1558     __ mov(R0, Rthread);
1559     __ mov_metadata(R1, method());
1560     __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::rc_trace_method_entry), R0, R1);
1561     __ restore_caller_save_registers();
1562   }
1563 
1564   const Register sync_handle = AARCH64_ONLY(R20) NOT_AARCH64(R5);
1565   const Register sync_obj    = AARCH64_ONLY(R21) NOT_AARCH64(R6);
1566   const Register disp_hdr    = AARCH64_ONLY(R22) NOT_AARCH64(altFP_7_11);
1567   const Register tmp         = AARCH64_ONLY(R23) NOT_AARCH64(R8);
1568 
1569   Label slow_lock, slow_lock_biased, lock_done, fast_lock, leave;
1570   if (method->is_synchronized()) {
1571     // The first argument is a handle to sync object (a class or an instance)
1572     __ ldr(sync_obj, Address(R1));
1573     // Remember the handle for the unlocking code
1574     __ mov(sync_handle, R1);
1575 
1576     if(UseBiasedLocking) {
1577       __ biased_locking_enter(sync_obj, tmp, disp_hdr/*scratched*/, false, Rtemp, lock_done, slow_lock_biased);
1578     }
1579 
1580     const Register mark = tmp;
1581 #ifdef AARCH64
1582     __ sub(disp_hdr, FP, lock_slot_fp_offset);
1583     assert(oopDesc::mark_offset_in_bytes() == 0, "Required by atomic instructions");
1584 
1585     __ ldr(mark, sync_obj);
1586 
1587     // Test if object is already locked
1588     assert(markOopDesc::unlocked_value == 1, "adjust this code");
1589     __ tbnz(mark, exact_log2(markOopDesc::unlocked_value), fast_lock);


1669   // make sure the store is observed before reading the SafepointSynchronize state and further mem refs
1670   __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreLoad | MacroAssembler::StoreStore), Rtemp);
1671 
1672   __ ldr_s32(R2, Address(R2));
1673   __ ldr_u32(R3, Address(Rthread, JavaThread::suspend_flags_offset()));
1674   __ cmp(R2, SafepointSynchronize::_not_synchronized);
1675   __ cond_cmp(R3, 0, eq);
1676   __ b(call_safepoint_runtime, ne);
1677   __ bind(return_to_java);
1678 
1679   // Perform thread state transition and reguard stack yellow pages if needed
1680   Label reguard, reguard_done;
1681   __ mov(Rtemp, _thread_in_Java);
1682   __ ldr_s32(R2, Address(Rthread, JavaThread::stack_guard_state_offset()));
1683   __ str_32(Rtemp, Address(Rthread, JavaThread::thread_state_offset()));
1684 
1685   __ cmp(R2, JavaThread::stack_guard_yellow_reserved_disabled);
1686   __ b(reguard, eq);
1687   __ bind(reguard_done);
1688 
1689   Label slow_unlock, unlock_done, retry;
1690   if (method->is_synchronized()) {
1691     __ ldr(sync_obj, Address(sync_handle));
1692 
1693     if(UseBiasedLocking) {
1694       __ biased_locking_exit(sync_obj, Rtemp, unlock_done);
1695       // disp_hdr may not have been saved on entry with biased locking
1696       __ sub(disp_hdr, FP, lock_slot_fp_offset);
1697     }
1698 
1699     // See C1_MacroAssembler::unlock_object() for more comments
1700     __ ldr(R2, Address(disp_hdr, BasicLock::displaced_header_offset_in_bytes()));
1701     __ cbz(R2, unlock_done);
1702 
1703     __ cas_for_lock_release(disp_hdr, R2, sync_obj, Rtemp, slow_unlock);
1704 
1705     __ bind(unlock_done);
1706   }
1707 
1708   // Set last java frame and handle block to zero
1709   __ ldr(LR, Address(Rthread, JavaThread::active_handles_offset()));




1549   __ membar(MacroAssembler::StoreStore, Rtemp);
1550 #endif // !AARCH64
1551 
1552   // RedefineClasses() tracing support for obsolete method entry
1553   if (log_is_enabled(Trace, redefine, class, obsolete)) {
1554 #ifdef AARCH64
1555     __ NOT_TESTED();
1556 #endif
1557     __ save_caller_save_registers();
1558     __ mov(R0, Rthread);
1559     __ mov_metadata(R1, method());
1560     __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::rc_trace_method_entry), R0, R1);
1561     __ restore_caller_save_registers();
1562   }
1563 
1564   const Register sync_handle = AARCH64_ONLY(R20) NOT_AARCH64(R5);
1565   const Register sync_obj    = AARCH64_ONLY(R21) NOT_AARCH64(R6);
1566   const Register disp_hdr    = AARCH64_ONLY(R22) NOT_AARCH64(altFP_7_11);
1567   const Register tmp         = AARCH64_ONLY(R23) NOT_AARCH64(R8);
1568 
1569   Label slow_lock, slow_lock_biased, lock_done, fast_lock;
1570   if (method->is_synchronized()) {
1571     // The first argument is a handle to sync object (a class or an instance)
1572     __ ldr(sync_obj, Address(R1));
1573     // Remember the handle for the unlocking code
1574     __ mov(sync_handle, R1);
1575 
1576     if(UseBiasedLocking) {
1577       __ biased_locking_enter(sync_obj, tmp, disp_hdr/*scratched*/, false, Rtemp, lock_done, slow_lock_biased);
1578     }
1579 
1580     const Register mark = tmp;
1581 #ifdef AARCH64
1582     __ sub(disp_hdr, FP, lock_slot_fp_offset);
1583     assert(oopDesc::mark_offset_in_bytes() == 0, "Required by atomic instructions");
1584 
1585     __ ldr(mark, sync_obj);
1586 
1587     // Test if object is already locked
1588     assert(markOopDesc::unlocked_value == 1, "adjust this code");
1589     __ tbnz(mark, exact_log2(markOopDesc::unlocked_value), fast_lock);


1669   // make sure the store is observed before reading the SafepointSynchronize state and further mem refs
1670   __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreLoad | MacroAssembler::StoreStore), Rtemp);
1671 
1672   __ ldr_s32(R2, Address(R2));
1673   __ ldr_u32(R3, Address(Rthread, JavaThread::suspend_flags_offset()));
1674   __ cmp(R2, SafepointSynchronize::_not_synchronized);
1675   __ cond_cmp(R3, 0, eq);
1676   __ b(call_safepoint_runtime, ne);
1677   __ bind(return_to_java);
1678 
1679   // Perform thread state transition and reguard stack yellow pages if needed
1680   Label reguard, reguard_done;
1681   __ mov(Rtemp, _thread_in_Java);
1682   __ ldr_s32(R2, Address(Rthread, JavaThread::stack_guard_state_offset()));
1683   __ str_32(Rtemp, Address(Rthread, JavaThread::thread_state_offset()));
1684 
1685   __ cmp(R2, JavaThread::stack_guard_yellow_reserved_disabled);
1686   __ b(reguard, eq);
1687   __ bind(reguard_done);
1688 
1689   Label slow_unlock, unlock_done;
1690   if (method->is_synchronized()) {
1691     __ ldr(sync_obj, Address(sync_handle));
1692 
1693     if(UseBiasedLocking) {
1694       __ biased_locking_exit(sync_obj, Rtemp, unlock_done);
1695       // disp_hdr may not have been saved on entry with biased locking
1696       __ sub(disp_hdr, FP, lock_slot_fp_offset);
1697     }
1698 
1699     // See C1_MacroAssembler::unlock_object() for more comments
1700     __ ldr(R2, Address(disp_hdr, BasicLock::displaced_header_offset_in_bytes()));
1701     __ cbz(R2, unlock_done);
1702 
1703     __ cas_for_lock_release(disp_hdr, R2, sync_obj, Rtemp, slow_unlock);
1704 
1705     __ bind(unlock_done);
1706   }
1707 
1708   // Set last java frame and handle block to zero
1709   __ ldr(LR, Address(Rthread, JavaThread::active_handles_offset()));
