< prev index next >

src/hotspot/cpu/x86/macroAssembler_x86.cpp

Print this page
rev 50900 : [mq]: cleanup-asm.patch

*** 1175,1185 **** // If the low three bits in the xor result aren't clear, that means // the prototype header is no longer biased and we have to revoke // the bias on this object. testptr(header_reg, markOopDesc::biased_lock_mask_in_place); ! jccb_if_possible(Assembler::notZero, try_revoke_bias); // Biasing is still enabled for this data type. See whether the // epoch of the current bias is still valid, meaning that the epoch // bits of the mark word are equal to the epoch bits of the // prototype header. (Note that the prototype header's epoch bits --- 1175,1185 ---- // If the low three bits in the xor result aren't clear, that means // the prototype header is no longer biased and we have to revoke // the bias on this object. testptr(header_reg, markOopDesc::biased_lock_mask_in_place); ! jccb(Assembler::notZero, try_revoke_bias); // Biasing is still enabled for this data type. See whether the // epoch of the current bias is still valid, meaning that the epoch // bits of the mark word are equal to the epoch bits of the // prototype header. (Note that the prototype header's epoch bits
*** 1187,1197 **** // toward the current thread. Note that we must be absolutely sure // that the current epoch is invalid in order to do this because // otherwise the manipulations it performs on the mark word are // illegal. testptr(header_reg, markOopDesc::epoch_mask_in_place); ! jccb_if_possible(Assembler::notZero, try_rebias); // The epoch of the current bias is still valid but we know nothing // about the owner; it might be set or it might be clear. Try to // acquire the bias of the object using an atomic operation. If this // fails we will go in to the runtime to revoke the object's bias. --- 1187,1197 ---- // toward the current thread. Note that we must be absolutely sure // that the current epoch is invalid in order to do this because // otherwise the manipulations it performs on the mark word are // illegal. testptr(header_reg, markOopDesc::epoch_mask_in_place); ! jccb(Assembler::notZero, try_rebias); // The epoch of the current bias is still valid but we know nothing // about the owner; it might be set or it might be clear. Try to // acquire the bias of the object using an atomic operation. If this // fails we will go in to the runtime to revoke the object's bias.
*** 1759,1769 **** } #endif // INCLUDE_RTM_OPT movptr(tmpReg, Address(objReg, oopDesc::mark_offset_in_bytes())); // [FETCH] testptr(tmpReg, markOopDesc::monitor_value); // inflated vs stack-locked|neutral|biased ! jccb_if_possible(Assembler::notZero, IsInflated); // Attempt stack-locking ... orptr (tmpReg, markOopDesc::unlocked_value); movptr(Address(boxReg, 0), tmpReg); // Anticipate successful CAS if (os::is_MP()) { --- 1759,1769 ---- } #endif // INCLUDE_RTM_OPT movptr(tmpReg, Address(objReg, oopDesc::mark_offset_in_bytes())); // [FETCH] testptr(tmpReg, markOopDesc::monitor_value); // inflated vs stack-locked|neutral|biased ! jccb(Assembler::notZero, IsInflated); // Attempt stack-locking ... orptr (tmpReg, markOopDesc::unlocked_value); movptr(Address(boxReg, 0), tmpReg); // Anticipate successful CAS if (os::is_MP()) {
*** 1836,1846 **** } else { // Can suffer RTS->RTO upgrades on shared or cold $ lines // Test-And-CAS instead of CAS movptr(tmpReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner))); // rax, = m->_owner testptr(tmpReg, tmpReg); // Locked ? ! jccb_if_possible(Assembler::notZero, DONE_LABEL); } // Appears unlocked - try to swing _owner from null to non-null. // Ideally, I'd manifest "Self" with get_thread and then attempt // to CAS the register containing Self into m->Owner. --- 1836,1846 ---- } else { // Can suffer RTS->RTO upgrades on shared or cold $ lines // Test-And-CAS instead of CAS movptr(tmpReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner))); // rax, = m->_owner testptr(tmpReg, tmpReg); // Locked ? ! jccb (Assembler::notZero, DONE_LABEL); } // Appears unlocked - try to swing _owner from null to non-null. // Ideally, I'd manifest "Self" with get_thread and then attempt // to CAS the register containing Self into m->Owner.
*** 1854,1864 **** } cmpxchgptr(scrReg, Address(boxReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner))); movptr(Address(scrReg, 0), 3); // box->_displaced_header = 3 // If we weren't able to swing _owner from NULL to the BasicLock // then take the slow path. ! jccb_if_possible(Assembler::notZero, DONE_LABEL); // update _owner from BasicLock to thread get_thread (scrReg); // beware: clobbers ICCs movptr(Address(boxReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner)), scrReg); xorptr(boxReg, boxReg); // set icc.ZFlag = 1 to indicate success --- 1854,1864 ---- } cmpxchgptr(scrReg, Address(boxReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner))); movptr(Address(scrReg, 0), 3); // box->_displaced_header = 3 // If we weren't able to swing _owner from NULL to the BasicLock // then take the slow path. ! jccb (Assembler::notZero, DONE_LABEL); // update _owner from BasicLock to thread get_thread (scrReg); // beware: clobbers ICCs movptr(Address(boxReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner)), scrReg); xorptr(boxReg, boxReg); // set icc.ZFlag = 1 to indicate success
*** 1884,1894 **** xorptr (tmpReg, tmpReg); } else { // Can suffer RTS->RTO upgrades on shared or cold $ lines movptr(tmpReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner))); // rax, = m->_owner testptr(tmpReg, tmpReg); // Locked ? ! jccb_if_possible(Assembler::notZero, DONE_LABEL); } // Appears unlocked - try to swing _owner from null to non-null. // Use either "Self" (in scr) or rsp as thread identity in _owner. // Invariant: tmpReg == 0. tmpReg is EAX which is the implicit cmpxchg comparand. --- 1884,1894 ---- xorptr (tmpReg, tmpReg); } else { // Can suffer RTS->RTO upgrades on shared or cold $ lines movptr(tmpReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner))); // rax, = m->_owner testptr(tmpReg, tmpReg); // Locked ? ! jccb (Assembler::notZero, DONE_LABEL); } // Appears unlocked - try to swing _owner from null to non-null. // Use either "Self" (in scr) or rsp as thread identity in _owner. // Invariant: tmpReg == 0. tmpReg is EAX which is the implicit cmpxchg comparand.
*** 2013,2023 **** int owner_offset = OM_OFFSET_NO_MONITOR_VALUE_TAG(owner); movptr(boxReg, Address(tmpReg, owner_offset)); testptr(boxReg, boxReg); jccb(Assembler::notZero, L_regular_inflated_unlock); xend(); ! jmpb_if_possible(DONE_LABEL); bind(L_regular_inflated_unlock); } #endif // Despite our balanced locking property we still check that m->_owner == Self --- 2013,2023 ---- int owner_offset = OM_OFFSET_NO_MONITOR_VALUE_TAG(owner); movptr(boxReg, Address(tmpReg, owner_offset)); testptr(boxReg, boxReg); jccb(Assembler::notZero, L_regular_inflated_unlock); xend(); ! jmpb(DONE_LABEL); bind(L_regular_inflated_unlock); } #endif // Despite our balanced locking property we still check that m->_owner == Self
*** 2057,2077 **** if ((EmitSync & 65536) == 0 && (EmitSync & 256)) { // Attempt to reduce branch density - AMD's branch predictor. orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(recursions))); orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(EntryList))); orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(cxq))); ! jccb_if_possible(Assembler::notZero, DONE_LABEL); movptr(Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner)), NULL_WORD); ! jmpb_if_possible(DONE_LABEL); } else { orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(recursions))); ! jccb_if_possible(Assembler::notZero, DONE_LABEL); movptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(EntryList))); orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(cxq))); jccb (Assembler::notZero, CheckSucc); movptr(Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner)), NULL_WORD); ! jmpb_if_possible(DONE_LABEL); } // The Following code fragment (EmitSync & 65536) improves the performance of // contended applications and contended synchronization microbenchmarks. // Unfortunately the emission of the code - even though not executed - causes regressions --- 2057,2077 ---- if ((EmitSync & 65536) == 0 && (EmitSync & 256)) { // Attempt to reduce branch density - AMD's branch predictor. orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(recursions))); orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(EntryList))); orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(cxq))); ! jccb (Assembler::notZero, DONE_LABEL); movptr(Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner)), NULL_WORD); ! jmpb (DONE_LABEL); } else { orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(recursions))); ! jccb (Assembler::notZero, DONE_LABEL); movptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(EntryList))); orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(cxq))); jccb (Assembler::notZero, CheckSucc); movptr(Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner)), NULL_WORD); ! jmpb (DONE_LABEL); } // The Following code fragment (EmitSync & 65536) improves the performance of // contended applications and contended synchronization microbenchmarks. // Unfortunately the emission of the code - even though not executed - causes regressions
*** 2137,2151 **** movptr(Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner)), boxReg); // Intentional fall-through into LGoSlowPath ... bind (LGoSlowPath); orptr(boxReg, 1); // set ICC.ZF=0 to indicate failure ! jmpb_if_possible(DONE_LABEL); bind (LSuccess); xorptr(boxReg, boxReg); // set ICC.ZF=1 to indicate success ! jmpb_if_possible(DONE_LABEL); } bind (Stacked); // It's not inflated and it's not recursively stack-locked and it's not biased. // It must be stack-locked. --- 2137,2151 ---- movptr(Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner)), boxReg); // Intentional fall-through into LGoSlowPath ... bind (LGoSlowPath); orptr(boxReg, 1); // set ICC.ZF=0 to indicate failure ! jmpb (DONE_LABEL); bind (LSuccess); xorptr(boxReg, boxReg); // set ICC.ZF=1 to indicate success ! jmpb (DONE_LABEL); } bind (Stacked); // It's not inflated and it's not recursively stack-locked and it's not biased. // It must be stack-locked.
*** 2179,2194 **** xorptr(boxReg, r15_thread); } else { xorptr(boxReg, boxReg); } orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(recursions))); ! jccb_if_possible(Assembler::notZero, DONE_LABEL); movptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(cxq))); orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(EntryList))); jccb (Assembler::notZero, CheckSucc); movptr(Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner)), (int32_t)NULL_WORD); ! jmpb_if_possible(DONE_LABEL); if ((EmitSync & 65536) == 0) { // Try to avoid passing control into the slow_path ... Label LSuccess, LGoSlowPath ; bind (CheckSucc); --- 2179,2194 ---- xorptr(boxReg, r15_thread); } else { xorptr(boxReg, boxReg); } orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(recursions))); ! jccb (Assembler::notZero, DONE_LABEL); movptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(cxq))); orptr(boxReg, Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(EntryList))); jccb (Assembler::notZero, CheckSucc); movptr(Address(tmpReg, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner)), (int32_t)NULL_WORD); ! jmpb (DONE_LABEL); if ((EmitSync & 65536) == 0) { // Try to avoid passing control into the slow_path ... Label LSuccess, LGoSlowPath ; bind (CheckSucc);
*** 2241,2255 **** jccb (Assembler::notEqual, LSuccess); // Intentional fall-through into slow-path bind (LGoSlowPath); orl (boxReg, 1); // set ICC.ZF=0 to indicate failure ! jmpb_if_possible(DONE_LABEL); bind (LSuccess); testl (boxReg, 0); // set ICC.ZF=1 to indicate success ! jmpb_if_possible (DONE_LABEL); } bind (Stacked); movptr(tmpReg, Address (boxReg, 0)); // re-fetch if (os::is_MP()) { lock(); } --- 2241,2255 ---- jccb (Assembler::notEqual, LSuccess); // Intentional fall-through into slow-path bind (LGoSlowPath); orl (boxReg, 1); // set ICC.ZF=0 to indicate failure ! jmpb (DONE_LABEL); bind (LSuccess); testl (boxReg, 0); // set ICC.ZF=1 to indicate success ! jmpb (DONE_LABEL); } bind (Stacked); movptr(tmpReg, Address (boxReg, 0)); // re-fetch if (os::is_MP()) { lock(); }
< prev index next >