< prev index next >

src/hotspot/share/code/nmethod.cpp

Print this page
rev 47408 : [mq]: no_cmpxchg_if_null
rev 47401 : [mq]: cmpxchg_if_null
rev 47400 : [mq]: cmpxchg_ptr
rev 47216 : 8187443: Forest Consolidation: Move files to unified layout
Reviewed-by: darcy, ihse


1647   // This includes oop constants not inlined in the code stream.
1648   for (oop* p = oops_begin(); p < oops_end(); p++) {
1649     if (*p == Universe::non_oop_word())  continue;  // skip non-oops
1650     f->do_oop(p);
1651   }
1652 }
1653 
     // Sentinel "end of list" marker: a non-NULL, non-nmethod value (badAddress),
     // so that even the last nmethod on the mark list has a non-NULL _oops_do_mark_link
     // and therefore still reads as "marked".
1654 #define NMETHOD_SENTINEL ((nmethod*)badAddress)
1655 
     // Head of the global list of nmethods claimed during the current oops_do
     // marking cycle; set to NMETHOD_SENTINEL by oops_do_marking_prologue and
     // drained/reset to NULL by oops_do_marking_epilogue.
1656 nmethod* volatile nmethod::_oops_do_mark_nmethods;
1657 
1658 // An nmethod is "marked" if its _mark_link is set non-null.
1659 // Even if it is the end of the linked list, it will have a non-null link value,
1660 // as long as it is on the list.
1661 // This code must be MP safe, because it is used from parallel GC passes.
     //
     // Returns true if this nmethod was already marked (some other thread won the
     // race, or it was marked earlier in this cycle); returns false if the calling
     // thread successfully claimed it and pushed it onto _oops_do_mark_nmethods.
1662 bool nmethod::test_set_oops_do_mark() {
1663   assert(nmethod::oops_do_marking_is_active(), "oops_do_marking_prologue must be called");
       // Racy first read; a non-NULL value lets us skip the CAS entirely.
1664   nmethod* observed_mark_link = _oops_do_mark_link;
1665   if (observed_mark_link == NULL) {
1666     // Claim this nmethod for this thread to mark.
       // cmpxchg_if_null installs the sentinel only if the link is still NULL and
       // reports success as a bool (cf. the guarantee in oops_do_marking_prologue).
1667     if (Atomic::cmpxchg_if_null(NMETHOD_SENTINEL, &_oops_do_mark_link)) {
1668       // Atomically append this nmethod (now claimed) to the head of the list:
         // Standard lock-free push: the link must point at the observed head
         // BEFORE the cmpxchg publishes this node as the new head; retry on
         // contention with the freshly observed head.
1669       nmethod* observed_mark_nmethods = _oops_do_mark_nmethods;
1670       for (;;) {
1671         nmethod* required_mark_nmethods = observed_mark_nmethods;
1672         _oops_do_mark_link = required_mark_nmethods;
1673         observed_mark_nmethods =
1674           Atomic::cmpxchg(this, &_oops_do_mark_nmethods, required_mark_nmethods);
1675         if (observed_mark_nmethods == required_mark_nmethods)
1676           break;
1677       }
1678       // Mark was clear when we first saw this guy.
1679       if (TraceScavenge) { print_on(tty, "oops_do, mark"); }
1680       return false;
1681     }
1682   }
1683   // On fall through, another racing thread marked this nmethod before we did.
1684   return true;
1685 }
1686 
     // Begin an oops_do marking cycle: publish the sentinel as the (empty) list
     // head so that oops_do_marking_is_active() becomes true for all GC threads.
     // Called by a single thread; the guarantee asserts the absence of races.
1687 void nmethod::oops_do_marking_prologue() {
1688   if (TraceScavenge) { tty->print_cr("[oops_do_marking_prologue"); }
1689   assert(_oops_do_mark_nmethods == NULL, "must not call oops_do_marking_prologue twice in a row");
1690   // We use cmpxchg instead of regular assignment here because the user
1691   // may fork a bunch of threads, and we need them all to see the same state.
     // cmpxchg_if_null returns true iff the NULL -> SENTINEL transition happened here.
1692   bool observed = Atomic::cmpxchg_if_null(NMETHOD_SENTINEL, &_oops_do_mark_nmethods);
1693   guarantee(observed, "no races in this sequential code");
1694 }
1695 
     // End an oops_do marking cycle: walk the claimed-nmethod list, clearing each
     // node's _oops_do_mark_link (un-marking it), then reset the list head to NULL.
     // Runs single-threaded after the parallel marking passes have finished.
1696 void nmethod::oops_do_marking_epilogue() {
1697   assert(_oops_do_mark_nmethods != NULL, "must not call oops_do_marking_epilogue twice in a row");
1698   nmethod* cur = _oops_do_mark_nmethods;
       // The list is terminated by NMETHOD_SENTINEL, never by NULL (see the
       // comment above test_set_oops_do_mark); NULL here means corruption.
1699   while (cur != NMETHOD_SENTINEL) {
1700     assert(cur != NULL, "not NULL-terminated");
         // Read the successor before clearing the link that holds it.
1701     nmethod* next = cur->_oops_do_mark_link;
1702     cur->_oops_do_mark_link = NULL;
1703     DEBUG_ONLY(cur->verify_oop_relocations());
1704     NOT_PRODUCT(if (TraceScavenge)  cur->print_on(tty, "oops_do, unmark"));
1705     cur = next;
1706   }
       // cmpxchg returns the prior value; it must equal what we just observed,
       // since no other thread may touch the list head at this point.
1707   nmethod* required = _oops_do_mark_nmethods;
1708   nmethod* observed = Atomic::cmpxchg((nmethod*)NULL, &_oops_do_mark_nmethods, required);
1709   guarantee(observed == required, "no races in this sequential code");
1710   if (TraceScavenge) { tty->print_cr("oops_do_marking_epilogue]"); }
1711 }
1712 
1713 class DetectScavengeRoot: public OopClosure {




1647   // This includes oop constants not inlined in the code stream.
1648   for (oop* p = oops_begin(); p < oops_end(); p++) {
1649     if (*p == Universe::non_oop_word())  continue;  // skip non-oops
1650     f->do_oop(p);
1651   }
1652 }
1653 
     // Sentinel "end of list" marker: a non-NULL, non-nmethod value (badAddress),
     // so that even the last nmethod on the mark list has a non-NULL _oops_do_mark_link
     // and therefore still reads as "marked".
1654 #define NMETHOD_SENTINEL ((nmethod*)badAddress)
1655 
     // Head of the global list of nmethods claimed during the current oops_do
     // marking cycle; set to NMETHOD_SENTINEL by oops_do_marking_prologue and
     // drained/reset to NULL by oops_do_marking_epilogue.
1656 nmethod* volatile nmethod::_oops_do_mark_nmethods;
1657 
1658 // An nmethod is "marked" if its _mark_link is set non-null.
1659 // Even if it is the end of the linked list, it will have a non-null link value,
1660 // as long as it is on the list.
1661 // This code must be MP safe, because it is used from parallel GC passes.
     //
     // Returns true if this nmethod was already marked (some other thread won the
     // race, or it was marked earlier in this cycle); returns false if the calling
     // thread successfully claimed it and pushed it onto _oops_do_mark_nmethods.
1662 bool nmethod::test_set_oops_do_mark() {
1663   assert(nmethod::oops_do_marking_is_active(), "oops_do_marking_prologue must be called");
       // Racy first read; a non-NULL value lets us skip the CAS entirely.
1664   nmethod* observed_mark_link = _oops_do_mark_link;
1665   if (observed_mark_link == NULL) {
1666     // Claim this nmethod for this thread to mark.
       // Atomic::cmpxchg(exchange, dest, compare) returns the prior value of
       // *dest; a NULL return means the link was still NULL and this thread
       // installed the sentinel, i.e. the claim succeeded.
1667     if (Atomic::cmpxchg(NMETHOD_SENTINEL, &_oops_do_mark_link, (nmethod*)NULL) == NULL) {
1668       // Atomically append this nmethod (now claimed) to the head of the list:
         // Standard lock-free push: the link must point at the observed head
         // BEFORE the cmpxchg publishes this node as the new head; retry on
         // contention with the freshly observed head.
1669       nmethod* observed_mark_nmethods = _oops_do_mark_nmethods;
1670       for (;;) {
1671         nmethod* required_mark_nmethods = observed_mark_nmethods;
1672         _oops_do_mark_link = required_mark_nmethods;
1673         observed_mark_nmethods =
1674           Atomic::cmpxchg(this, &_oops_do_mark_nmethods, required_mark_nmethods);
1675         if (observed_mark_nmethods == required_mark_nmethods)
1676           break;
1677       }
1678       // Mark was clear when we first saw this guy.
1679       if (TraceScavenge) { print_on(tty, "oops_do, mark"); }
1680       return false;
1681     }
1682   }
1683   // On fall through, another racing thread marked this nmethod before we did.
1684   return true;
1685 }
1686 
     // Begin an oops_do marking cycle: publish the sentinel as the (empty) list
     // head so that oops_do_marking_is_active() becomes true for all GC threads.
     // Called by a single thread; the guarantee asserts the absence of races.
1687 void nmethod::oops_do_marking_prologue() {
1688   if (TraceScavenge) { tty->print_cr("[oops_do_marking_prologue"); }
1689   assert(_oops_do_mark_nmethods == NULL, "must not call oops_do_marking_prologue twice in a row");
1690   // We use cmpxchg instead of regular assignment here because the user
1691   // may fork a bunch of threads, and we need them all to see the same state.
     // cmpxchg returns the prior value; NULL means the NULL -> SENTINEL
     // transition happened here, as required.
1692   nmethod* observed = Atomic::cmpxchg(NMETHOD_SENTINEL, &_oops_do_mark_nmethods, (nmethod*)NULL);
1693   guarantee(observed == NULL, "no races in this sequential code");
1694 }
1695 
     // End an oops_do marking cycle: walk the claimed-nmethod list, clearing each
     // node's _oops_do_mark_link (un-marking it), then reset the list head to NULL.
     // Runs single-threaded after the parallel marking passes have finished.
1696 void nmethod::oops_do_marking_epilogue() {
1697   assert(_oops_do_mark_nmethods != NULL, "must not call oops_do_marking_epilogue twice in a row");
1698   nmethod* cur = _oops_do_mark_nmethods;
       // The list is terminated by NMETHOD_SENTINEL, never by NULL (see the
       // comment above test_set_oops_do_mark); NULL here means corruption.
1699   while (cur != NMETHOD_SENTINEL) {
1700     assert(cur != NULL, "not NULL-terminated");
         // Read the successor before clearing the link that holds it.
1701     nmethod* next = cur->_oops_do_mark_link;
1702     cur->_oops_do_mark_link = NULL;
1703     DEBUG_ONLY(cur->verify_oop_relocations());
1704     NOT_PRODUCT(if (TraceScavenge)  cur->print_on(tty, "oops_do, unmark"));
1705     cur = next;
1706   }
       // cmpxchg returns the prior value; it must equal what we just observed,
       // since no other thread may touch the list head at this point.
1707   nmethod* required = _oops_do_mark_nmethods;
1708   nmethod* observed = Atomic::cmpxchg((nmethod*)NULL, &_oops_do_mark_nmethods, required);
1709   guarantee(observed == required, "no races in this sequential code");
1710   if (TraceScavenge) { tty->print_cr("oops_do_marking_epilogue]"); }
1711 }
1712 
1713 class DetectScavengeRoot: public OopClosure {


< prev index next >