src/share/vm/memory/referenceProcessor.cpp

rev 2585 : [mq]: g1-reference-processing

@@ -34,44 +34,14 @@
 #include "runtime/jniHandles.hpp"
 
 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy      = NULL;
 oop              ReferenceProcessor::_sentinelRef = NULL;
-const int        subclasses_of_ref                = REF_PHANTOM - REF_OTHER;
 
-// List of discovered references.
-class DiscoveredList {
-public:
-  DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
-  oop head() const     {
-     return UseCompressedOops ?  oopDesc::decode_heap_oop_not_null(_compressed_head) :
-                                _oop_head;
-  }
-  HeapWord* adr_head() {
-    return UseCompressedOops ? (HeapWord*)&_compressed_head :
-                               (HeapWord*)&_oop_head;
-  }
-  void   set_head(oop o) {
-    if (UseCompressedOops) {
-      // Must compress the head ptr.
-      _compressed_head = oopDesc::encode_heap_oop_not_null(o);
-    } else {
-      _oop_head = o;
-    }
-  }
-  bool   empty() const          { return head() == ReferenceProcessor::sentinel_ref(); }
-  size_t length()               { return _len; }
-  void   set_length(size_t len) { _len = len;  }
-  void   inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
-  void   dec_length(size_t dec) { _len -= dec; }
-private:
-  // Set value depending on UseCompressedOops. This could be a template class
-  // but then we have to fix all the instantiations and declarations that use this class.
-  oop       _oop_head;
-  narrowOop _compressed_head;
-  size_t _len;
-};
+bool DiscoveredList::is_empty() const {
+  return head() == ReferenceProcessor::sentinel_ref();
+}
 
 void referenceProcessor_init() {
   ReferenceProcessor::init_statics();
 }
 

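The DiscoveredList body removed above is presumably hoisted into referenceProcessor.hpp (that header is not part of this file's diff) so the new G1 reference-processing code can name the type directly. A minimal sketch of what the relocated declaration might look like, assuming the accessors keep the signatures shown above and only the renamed is_empty() stays out of line:

class DiscoveredList {
public:
  DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
  oop head() const {
    return UseCompressedOops ? oopDesc::decode_heap_oop_not_null(_compressed_head)
                             : _oop_head;
  }
  HeapWord* adr_head() {
    return UseCompressedOops ? (HeapWord*)&_compressed_head
                             : (HeapWord*)&_oop_head;
  }
  void set_head(oop o) {
    if (UseCompressedOops) {
      _compressed_head = oopDesc::encode_heap_oop_not_null(o); // must compress the head ptr
    } else {
      _oop_head = o;
    }
  }
  bool   is_empty() const;      // defined out of line in referenceProcessor.cpp, see above
  size_t length()               { return _len; }
  void   set_length(size_t len) { _len = len;  }
  void   inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
  void   dec_length(size_t dec) { _len -= dec; }
private:
  // Set value depending on UseCompressedOops. This could be a template class
  // but then we have to fix all the instantiations and declarations that use this class.
  oop       _oop_head;
  narrowOop _compressed_head;
  size_t    _len;
};
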
@@ -121,46 +91,46 @@
   _span = span;
   _discovery_is_atomic = atomic_discovery;
   _discovery_is_mt     = mt_discovery;
   _num_q               = MAX2(1, mt_processing_degree);
   _max_num_q           = MAX2(_num_q, mt_discovery_degree);
-  _discoveredSoftRefs  = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref);
+  _discoveredSoftRefs  = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref());
   if (_discoveredSoftRefs == NULL) {
     vm_exit_during_initialization("Could not allocated RefProc Array");
   }
   _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
   _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
   _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
   assert(sentinel_ref() != NULL, "_sentinelRef is NULL");
   // Initialized all entries to _sentinelRef
-  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
+  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
         _discoveredSoftRefs[i].set_head(sentinel_ref());
     _discoveredSoftRefs[i].set_length(0);
   }
-  // If we do barreirs, cache a copy of the barrier set.
+  // If we do barriers, cache a copy of the barrier set.
   if (discovered_list_needs_barrier) {
     _bs = Universe::heap()->barrier_set();
   }
   setup_policy(false /* default soft ref policy */);
 }
 
 #ifndef PRODUCT
 void ReferenceProcessor::verify_no_references_recorded() {
   guarantee(!_discovering_refs, "Discovering refs?");
-  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
-    guarantee(_discoveredSoftRefs[i].empty(),
+  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
+    guarantee(_discoveredSoftRefs[i].is_empty(),
               "Found non-empty discovered list");
   }
 }
 #endif
 
 void ReferenceProcessor::weak_oops_do(OopClosure* f) {
   // Should this instead be
-  // for (int i = 0; i < subclasses_of_ref; i++_ {
+  // for (int i = 0; i < subclasses_of_ref(); i++) {
   //   for (int j = 0; j < _num_q; j++) {
   //     int index = i * _max_num_q + j;
-  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
+  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
     if (UseCompressedOops) {
       f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
     } else {
       f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
     }

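For reference while reading the index arithmetic in this and the following hunks: the constructor above lays the discovered lists out as subclasses_of_ref() consecutive rows of _max_num_q entries each in one flat array (hence _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q], and so on). A tiny illustrative helper, not part of the patch, showing how a (reference kind, worker queue) pair maps to a flat index:

// Illustrative only: rows are SoftRef = 0, WeakRef = 1, FinalRef = 2, PhantomRef = 3.
static int discovered_list_index(int ref_kind_row, int queue, int max_num_q) {
  // Each kind owns max_num_q consecutive DiscoveredList slots, so the flat index
  // is row * row-width + column; the same expression as the commented-out
  // "int index = i * _max_num_q + j" alternative loop in weak_oops_do() above.
  return ref_kind_row * max_num_q + queue;
}
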
@@ -390,11 +360,11 @@
     // (n_queues) with which that ReferenceProcessor was created.  That
     // is because of the "clever" way the discovered references lists were
     // allocated and are indexed into.
     assert(_n_queues == (int) _ref_processor.max_num_q(), "Different number not expected");
     for (int j = 0;
-         j < subclasses_of_ref;
+         j < ReferenceProcessor::subclasses_of_ref();
          j++, index += _n_queues) {
       _ref_processor.enqueue_discovered_reflist(
         _refs_lists[index], _pending_list_addr);
       _refs_lists[index].set_head(_sentinel_ref);
       _refs_lists[index].set_length(0);

@@ -410,11 +380,11 @@
     RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
                            pending_list_addr, sentinel_ref(), _max_num_q);
     task_executor->execute(tsk);
   } else {
     // Serial code: call the parent class's implementation
-    for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
+    for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
       enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
       _discoveredSoftRefs[i].set_head(sentinel_ref());
       _discoveredSoftRefs[i].set_length(0);
     }
   }

@@ -451,11 +421,29 @@
 
   // Remove the current reference from the list
   inline void remove();
 
   // Make the Reference object active again.
-  inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); }
+  inline void make_active() {
+    // For G1 we don't want to use set_next - it
+    // will dirty the card for the next field of
+    // the reference object and will fail
+    // CT verification.
+    if (UseG1GC) {
+      BarrierSet* bs = oopDesc::bs();
+      HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
+
+      if (UseCompressedOops) {
+        bs->write_ref_field_pre((narrowOop*)next_addr, NULL);
+      } else {
+        bs->write_ref_field_pre((oop*)next_addr, NULL);
+      }
+      java_lang_ref_Reference::set_next_raw(_ref, NULL);
+    } else {
+      java_lang_ref_Reference::set_next(_ref, NULL);
+    }
+  }
 
   // Make the referent alive.
   inline void make_referent_alive() {
     if (UseCompressedOops) {
       _keep_alive->do_oop((narrowOop*)_referent_addr);

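As a standalone illustration of what the UseG1GC branch above avoids (following the comment in the patch rather than any further HotSpot internals): a plain set_next() amounts to a fully barriered store whose post-barrier dirties the card covering the next field, while the new branch keeps only the SATB pre-barrier and then stores raw. The helpers below are stand-ins, not real HotSpot entry points:

static void satb_enqueue(void* /* old_value */)    { /* stand-in for the G1 pre-write barrier */ }
static void dirty_card_for(void* /* field_addr */) { /* stand-in for the card-marking post-write barrier */ }

// Roughly what a barriered store such as set_next() performs:
static void barriered_store(void** field, void* new_value) {
  satb_enqueue(*field);       // record the old value to keep the SATB invariant
  *field = new_value;
  dirty_card_for(field);      // dirties the card; this is what CT verification trips over
}

// The shape of the UseG1GC branch in make_active(): keep the pre-barrier, skip the card mark.
static void pre_barrier_then_raw_store(void** field, void* new_value) {
  satb_enqueue(*field);       // corresponds to bs->write_ref_field_pre(next_addr, NULL)
  *field = new_value;         // corresponds to set_next_raw(_ref, NULL); no card is dirtied
}
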
@@ -493,23 +481,25 @@
   oop                _next;
   HeapWord*          _referent_addr;
   oop                _referent;
   OopClosure*        _keep_alive;
   BoolObjectClosure* _is_alive;
+
   DEBUG_ONLY(
   oop                _first_seen; // cyclic linked list check
   )
+
   NOT_PRODUCT(
   size_t             _processed;
   size_t             _removed;
   )
 };
 
 inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList&    refs_list,
                                                       OopClosure*        keep_alive,
-                                                      BoolObjectClosure* is_alive)
-  : _refs_list(refs_list),
+                                                      BoolObjectClosure* is_alive) :
+    _refs_list(refs_list),
     _prev_next(refs_list.adr_head()),
     _ref(refs_list.head()),
 #ifdef ASSERT
     _first_seen(refs_list.head()),
 #endif

@@ -743,14 +733,13 @@
   refs_list.set_length(0);
 }
 
 void ReferenceProcessor::abandon_partial_discovery() {
   // loop over the lists
-  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
+  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
     if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
-      gclog_or_tty->print_cr("\nAbandoning %s discovered list",
-                             list_name(i));
+      gclog_or_tty->print_cr("\nAbandoning %s discovered list", list_name(i));
     }
     abandon_partial_discovered_list(_discoveredSoftRefs[i]);
   }
 }
 

@@ -865,11 +854,18 @@
         // find an element to split the list on
         for (size_t j = 0; j < refs_to_move; ++j) {
           move_tail = new_head;
           new_head = java_lang_ref_Reference::discovered(new_head);
         }
+
-        java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
+        if (_discovered_list_needs_barrier) {
+          java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
+        } else {
+          HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(move_tail);
+          oop_store_raw(discovered_addr, ref_lists[to_idx].head());
+        }
+
         ref_lists[to_idx].set_head(move_head);
         ref_lists[to_idx].inc_length(refs_to_move);
         ref_lists[from_idx].set_head(new_head);
         ref_lists[from_idx].dec_length(refs_to_move);
         if (ref_lists[from_idx].length() == 0) {

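The loop above detaches the first refs_to_move entries of one discovered list and splices them onto the front of another. A self-contained sketch of that splice on an ordinary NULL-terminated singly linked list (Node, List and move_first_k are illustrative names; the sentinel and barrier handling of the real lists is simplified away):

#include <cassert>
#include <cstddef>

struct Node { Node* discovered; };        // plays the role of a Reference's 'discovered' link
struct List { Node* head; size_t len; };

static void move_first_k(List& from, List& to, size_t k) {
  assert(k > 0 && k <= from.len);
  Node* move_head = from.head;            // first node that moves
  Node* move_tail = move_head;            // will end up as the last moved node
  Node* new_head  = move_head;            // first node that stays behind
  for (size_t j = 0; j < k; ++j) {        // walk k 'discovered' links, as in the loop above
    move_tail = new_head;
    new_head  = new_head->discovered;
  }
  move_tail->discovered = to.head;        // link the moved run in front of 'to'
  to.head    = move_head;
  to.len    += k;
  from.head  = new_head;
  from.len  -= k;
}
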
@@ -979,14 +975,14 @@
 }
 
 void ReferenceProcessor::clean_up_discovered_references() {
   // loop over the lists
   // Should this instead be
-  // for (int i = 0; i < subclasses_of_ref; i++_ {
+  // for (int i = 0; i < subclasses_of_ref(); i++) {
   //   for (int j = 0; j < _num_q; j++) {
   //     int index = i * _max_num_q + j;
-  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
+  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
     if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
       gclog_or_tty->print_cr(
         "\nScrubbing %s discovered list of Null referents",
         list_name(i));
     }

@@ -1203,10 +1199,12 @@
     if (!_current_soft_ref_policy->should_clear_reference(obj)) {
       return false;
     }
   }
 
+  ResourceMark rm;      // Needed for tracing.
+
   HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
   const oop  discovered = java_lang_ref_Reference::discovered(obj);
   assert(discovered->is_oop_or_null(), "bad discovered field");
   if (discovered != NULL) {
     // The reference has already been discovered...

@@ -1417,11 +1415,11 @@
     }
   )
 }
 
 const char* ReferenceProcessor::list_name(int i) {
-   assert(i >= 0 && i <= _max_num_q * subclasses_of_ref, "Out of bounds index");
+   assert(i >= 0 && i <= _max_num_q * subclasses_of_ref(), "Out of bounds index");
    int j = i / _max_num_q;
    switch (j) {
      case 0: return "SoftRef";
      case 1: return "WeakRef";
      case 2: return "FinalRef";

@@ -1442,11 +1440,11 @@
 }
 
 #ifndef PRODUCT
 void ReferenceProcessor::clear_discovered_references() {
   guarantee(!_discovering_refs, "Discovering refs?");
-  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
+  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
     oop obj = _discoveredSoftRefs[i].head();
     while (obj != sentinel_ref()) {
       oop next = java_lang_ref_Reference::discovered(obj);
       java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
       obj = next;