< prev index next >

src/hotspot/cpu/x86/macroAssembler_x86.cpp

Print this page




1743     // Beware -- there's a subtle invariant that fetch of the markword
1744     // at [FETCH], below, will never observe a biased encoding (*101b).
1745     // If this invariant is not held we risk exclusion (safety) failure.
1746     if (UseBiasedLocking && !UseOptoBiasInlining) {
1747       biased_locking_enter(boxReg, objReg, tmpReg, scrReg, false, DONE_LABEL, NULL, counters);
1748     }
1749 
1750 #if INCLUDE_RTM_OPT
1751     if (UseRTMForStackLocks && use_rtm) {
1752       rtm_stack_locking(objReg, tmpReg, scrReg, cx2Reg,
1753                         stack_rtm_counters, method_data, profile_rtm,
1754                         DONE_LABEL, IsInflated);
1755     }
1756 #endif // INCLUDE_RTM_OPT
1757 
1758     movptr(tmpReg, Address(objReg, oopDesc::mark_offset_in_bytes()));          // [FETCH]
1759     testptr(tmpReg, markOopDesc::monitor_value); // inflated vs stack-locked|neutral|biased
1760     jccb(Assembler::notZero, IsInflated);
1761 
1762     // Attempt stack-locking ...
1763     orptr (tmpReg, markOopDesc::unlocked_value);




1764     movptr(Address(boxReg, 0), tmpReg);          // Anticipate successful CAS
1765     if (os::is_MP()) {
1766       lock();
1767     }
1768     cmpxchgptr(boxReg, Address(objReg, oopDesc::mark_offset_in_bytes()));      // Updates tmpReg
1769     if (counters != NULL) {
1770       cond_inc32(Assembler::equal,
1771                  ExternalAddress((address)counters->fast_path_entry_count_addr()));
1772     }
1773     jcc(Assembler::equal, DONE_LABEL);           // Success
1774 
1775     // Recursive locking.
1776     // The object is stack-locked: markword contains stack pointer to BasicLock.
1777     // Locked by current thread if difference with current SP is less than one page.
1778     subptr(tmpReg, rsp);
1779     // Next instruction sets ZFlag == 1 (Success) if difference is less than one page.
1780     andptr(tmpReg, (int32_t) (NOT_LP64(0xFFFFF003) LP64_ONLY(7 - os::vm_page_size())) );
1781     movptr(Address(boxReg, 0), tmpReg);
1782     if (counters != NULL) {
1783       cond_inc32(Assembler::equal,




1743     // Beware -- there's a subtle invariant that fetch of the markword
1744     // at [FETCH], below, will never observe a biased encoding (*101b).
1745     // If this invariant is not held we risk exclusion (safety) failure.
1746     if (UseBiasedLocking && !UseOptoBiasInlining) {
1747       biased_locking_enter(boxReg, objReg, tmpReg, scrReg, false, DONE_LABEL, NULL, counters);
1748     }
1749 
1750 #if INCLUDE_RTM_OPT
1751     if (UseRTMForStackLocks && use_rtm) {
1752       rtm_stack_locking(objReg, tmpReg, scrReg, cx2Reg,
1753                         stack_rtm_counters, method_data, profile_rtm,
1754                         DONE_LABEL, IsInflated);
1755     }
1756 #endif // INCLUDE_RTM_OPT
1757 
1758     movptr(tmpReg, Address(objReg, oopDesc::mark_offset_in_bytes()));          // [FETCH]
1759     testptr(tmpReg, markOopDesc::monitor_value); // inflated vs stack-locked|neutral|biased
1760     jccb(Assembler::notZero, IsInflated);
1761 
1762     // Attempt stack-locking ...
1763     orptr(tmpReg, markOopDesc::unlocked_value);
1764     if (EnableValhalla && !UseBiasedLocking) {
1765       // Mask always_locked bit such that we go to the slow path if object is a value type
1766       andptr(tmpReg, ~markOopDesc::biased_lock_bit_in_place);
1767     }
1768     movptr(Address(boxReg, 0), tmpReg);          // Anticipate successful CAS
1769     if (os::is_MP()) {
1770       lock();
1771     }
1772     cmpxchgptr(boxReg, Address(objReg, oopDesc::mark_offset_in_bytes()));      // Updates tmpReg
1773     if (counters != NULL) {
1774       cond_inc32(Assembler::equal,
1775                  ExternalAddress((address)counters->fast_path_entry_count_addr()));
1776     }
1777     jcc(Assembler::equal, DONE_LABEL);           // Success
1778 
1779     // Recursive locking.
1780     // The object is stack-locked: markword contains stack pointer to BasicLock.
1781     // Locked by current thread if difference with current SP is less than one page.
1782     subptr(tmpReg, rsp);
1783     // Next instruction sets ZFlag == 1 (Success) if difference is less than one page.
1784     andptr(tmpReg, (int32_t) (NOT_LP64(0xFFFFF003) LP64_ONLY(7 - os::vm_page_size())) );
1785     movptr(Address(boxReg, 0), tmpReg);
1786     if (counters != NULL) {
1787       cond_inc32(Assembler::equal,


< prev index next >