src/share/vm/gc/shared/referenceProcessor.cpp

 100                                        BoolObjectClosure* is_alive_non_header)  :
 101   _discovering_refs(false),
 102   _enqueuing_is_done(false),
 103   _is_alive_non_header(is_alive_non_header),
 104   _processing_is_mt(mt_processing),
 105   _next_id(0)
 106 {
 107   _span = span;
 108   _discovery_is_atomic = atomic_discovery;
 109   _discovery_is_mt     = mt_discovery;
 110   _num_q               = MAX2(1U, mt_processing_degree);
 111   _max_num_q           = MAX2(_num_q, mt_discovery_degree);
 112   _discovered_refs     = NEW_C_HEAP_ARRAY(DiscoveredList,
 113             _max_num_q * number_of_subclasses_of_ref(), mtGC);
 114 
 115   if (_discovered_refs == NULL) {
 116     vm_exit_during_initialization("Could not allocate RefProc Array");
 117   }
 118   _discoveredSoftRefs    = &_discovered_refs[0];
 119   _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
 120   _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];

 121   _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
 122   _discoveredCleanerRefs = &_discoveredPhantomRefs[_max_num_q];
 123 
 124   // Initialize all entries to NULL
 125   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 126     _discovered_refs[i].set_head(NULL);
 127     _discovered_refs[i].set_length(0);
 128   }
 129 
 130   setup_policy(false /* default soft ref policy */);
 131 }
 132 
 133 #ifndef PRODUCT
 134 void ReferenceProcessor::verify_no_references_recorded() {
 135   guarantee(!_discovering_refs, "Discovering refs?");
 136   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 137     guarantee(_discovered_refs[i].is_empty(),
 138               "Found non-empty discovered list");
 139   }
 140 }
 141 #endif
 142 


 191   BoolObjectClosure*           is_alive,
 192   OopClosure*                  keep_alive,
 193   VoidClosure*                 complete_gc,
 194   AbstractRefProcTaskExecutor* task_executor,
 195   GCTimer*                     gc_timer) {
 196 
 197   assert(!enqueuing_is_done(), "If here enqueuing should not be complete");
 198   // Stop treating discovered references specially.
 199   disable_discovery();
 200 
 201   // If discovery was concurrent, someone could have modified
 202   // the value of the static field in the j.l.r.SoftReference
 203   // class that holds the soft reference timestamp clock using
 204   // reflection or Unsafe between when discovery was enabled and
 205   // now. Unconditionally update the static field in ReferenceProcessor
 206   // here so that we use the new value during processing of the
 207   // discovered soft refs.
 208 
 209   _soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
 210 
 211   // Include cleaners in phantom statistics.  We expect Cleaner
 212   // references to be temporary, and don't want to deal with
 213   // possible incompatibilities arising from making them more visible.
 214   ReferenceProcessorStats stats(
 215       total_count(_discoveredSoftRefs),
 216       total_count(_discoveredWeakRefs),

 217       total_count(_discoveredFinalRefs),
 218       total_count(_discoveredPhantomRefs) + total_count(_discoveredCleanerRefs));

 219 
 220   // Soft references
 221   {
 222     GCTraceTime(Debug, gc, ref) tt("SoftReference", gc_timer);
 223     process_discovered_reflist(_discoveredSoftRefs, _current_soft_ref_policy, true,
 224                                is_alive, keep_alive, complete_gc, task_executor);
 225   }
 226 
 227   update_soft_ref_master_clock();
 228 
 229   // Weak references
 230   {
 231     GCTraceTime(Debug, gc, ref) tt("WeakReference", gc_timer);
 232     process_discovered_reflist(_discoveredWeakRefs, NULL, true,
 233                                is_alive, keep_alive, complete_gc, task_executor);
 234   }
 235 
 236   // Final references
 237   {
 238     GCTraceTime(Debug, gc, ref) tt("FinalReference", gc_timer);
 239     process_discovered_reflist(_discoveredFinalRefs, NULL, false,
 240                                is_alive, keep_alive, complete_gc, task_executor);
 241   }
 242 
 243   // Phantom references
 244   {
 245     GCTraceTime(Debug, gc, ref) tt("PhantomReference", gc_timer);
 246     process_discovered_reflist(_discoveredPhantomRefs, NULL, true,
 247                                is_alive, keep_alive, complete_gc, task_executor);
 248 
 249     // Process cleaners, but include them in phantom timing.  We expect
 250     // Cleaner references to be temporary, and don't want to deal with
 251     // possible incompatibilities arising from making them more visible.
 252     process_discovered_reflist(_discoveredCleanerRefs, NULL, true,
 253                                  is_alive, keep_alive, complete_gc, task_executor);
 254   }
 255 
 256   // Weak global JNI references. It would make more sense (semantically) to
 257   // traverse these simultaneously with the regular weak references above, but
 258   // that is not how the JDK 1.2 specification reads. See #4126360. Native code can
 259   // thus use JNI weak references to circumvent the phantom references and
 260   // resurrect a "post-mortem" object.
 261   {
 262     GCTraceTime(Debug, gc, ref) tt("JNI Weak Reference", gc_timer);
 263     if (task_executor != NULL) {
 264       task_executor->set_single_threaded_mode();
 265     }
 266     process_phaseJNI(is_alive, keep_alive, complete_gc);
 267   }
 268 
 269   log_debug(gc, ref)("Ref Counts: Soft: " SIZE_FORMAT " Weak: " SIZE_FORMAT " Final: " SIZE_FORMAT " Phantom: " SIZE_FORMAT,
 270                      stats.soft_count(), stats.weak_count(), stats.final_count(), stats.phantom_count());
 271   log_develop_trace(gc, ref)("JNI Weak Reference count: " SIZE_FORMAT, count_jni_refs());
 272 
 273   return stats;
 274 }
 275 
 276 #ifndef PRODUCT
 277 // Calculate the number of jni handles.
 278 size_t ReferenceProcessor::count_jni_refs() {
 279   class AlwaysAliveClosure: public BoolObjectClosure {
 280   public:
 281     virtual bool do_object_b(oop obj) { return true; }
 282   };
 283 
 284   class CountHandleClosure: public OopClosure {
 285   private:
 286     size_t _count;
 287   public:
 288     CountHandleClosure(): _count(0) {}
 289     void do_oop(oop* unused)       { _count++; }
 290     void do_oop(narrowOop* unused) { ShouldNotReachHere(); }


 573         keep_alive->do_oop((narrowOop*)next_addr);
 574       } else {
 575         keep_alive->do_oop((oop*)next_addr);
 576       }
 577       iter.move_to_next();
 578     } else {
 579       iter.next();
 580     }
 581   }
 582   // Now close the newly reachable set
 583   complete_gc->do_void();
 584   NOT_PRODUCT(
 585     if (iter.processed() > 0) {
 586       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
 587         " Refs in discovered list " INTPTR_FORMAT,
 588         iter.removed(), iter.processed(), p2i(refs_list.head()));
 589     }
 590   )
 591 }
 592 
 593 // Traverse the list and process the referents, by either
 594 // clearing them or keeping them (and their reachable
 595 // closure) alive.
 596 void
 597 ReferenceProcessor::process_phase3(DiscoveredList&    refs_list,
 598                                    bool               clear_referent,

 599                                    BoolObjectClosure* is_alive,
 600                                    OopClosure*        keep_alive,
 601                                    VoidClosure*       complete_gc) {
 602   ResourceMark rm;
 603   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 604   while (iter.has_next()) {
 605     iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
 606     if (clear_referent) {
 607       // NULL out referent pointer
 608       iter.clear_referent();
 609     } else {
 610       // keep the referent around
 611       iter.make_referent_alive();
 612     }
 613     log_develop_trace(gc, ref)("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
 614                                clear_referent ? "cleared " : "", p2i(iter.obj()), iter.obj()->klass()->internal_name());
 615     assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
 616     iter.next();
 617   }
 618   // Close the reachable set
 619   complete_gc->do_void();
 620 }
 621 
 622 void
 623 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
 624   oop obj = NULL;
 625   oop next = refs_list.head();
 626   while (next != obj) {
 627     obj = next;
 628     next = java_lang_ref_Reference::discovered(obj);
 629     java_lang_ref_Reference::set_discovered_raw(obj, NULL);
 630   }
 631   refs_list.set_head(NULL);
 632   refs_list.set_length(0);


 651     : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
 652       _policy(policy)
 653   { }
 654   virtual void work(unsigned int i, BoolObjectClosure& is_alive,
 655                     OopClosure& keep_alive,
 656                     VoidClosure& complete_gc)
 657   {
 658     Thread* thr = Thread::current();
 659     int refs_list_index = ((WorkerThread*)thr)->id();
 660     _ref_processor.process_phase1(_refs_lists[refs_list_index], _policy,
 661                                   &is_alive, &keep_alive, &complete_gc);
 662   }
 663 private:
 664   ReferencePolicy* _policy;
 665 };
 666 
 667 class RefProcPhase2Task: public AbstractRefProcTaskExecutor::ProcessTask {
 668 public:
 669   RefProcPhase2Task(ReferenceProcessor& ref_processor,
 670                     DiscoveredList      refs_lists[],

 671                     bool                marks_oops_alive)
 672     : ProcessTask(ref_processor, refs_lists, marks_oops_alive)

 673   { }
 674   virtual void work(unsigned int i, BoolObjectClosure& is_alive,
 675                     OopClosure& keep_alive,
 676                     VoidClosure& complete_gc)
 677   {
 678     _ref_processor.process_phase2(_refs_lists[i],
 679                                   &is_alive, &keep_alive, &complete_gc);
 680   }
 681 };
 682 
 683 class RefProcPhase3Task: public AbstractRefProcTaskExecutor::ProcessTask {
 684 public:
 685   RefProcPhase3Task(ReferenceProcessor& ref_processor,
 686                     DiscoveredList      refs_lists[],
 687                     bool                clear_referent,

 688                     bool                marks_oops_alive)
 689     : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
 690       _clear_referent(clear_referent)

 691   { }
 692   virtual void work(unsigned int i, BoolObjectClosure& is_alive,
 693                     OopClosure& keep_alive,
 694                     VoidClosure& complete_gc)
 695   {
 696     // Don't use "refs_list_index" calculated in this way because
 697     // balance_queues() has moved the Ref's into the first n queues.
 698     // Thread* thr = Thread::current();
 699     // int refs_list_index = ((WorkerThread*)thr)->id();
 700     // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
 701     _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
 702                                   &is_alive, &keep_alive, &complete_gc);
 703   }
 704 private:
 705   bool _clear_referent;

 706 };
 707 
 708 #ifndef PRODUCT
 709 void ReferenceProcessor::log_reflist_counts(DiscoveredList ref_lists[], size_t total_refs) {
 710   if (!log_is_enabled(Trace, gc, ref)) {
 711     return;
 712   }
 713 
 714   stringStream st;
 715   for (uint i = 0; i < _max_num_q; ++i) {
 716     st.print(SIZE_FORMAT " ", ref_lists[i].length());
 717   }
 718   log_develop_trace(gc, ref)("%s= " SIZE_FORMAT, st.as_string(), total_refs);
 719 }
 720 #endif
 721 
 722 // Balances reference queues.
 723 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
 724 // queues[0, 1, ..., _num_q-1] because only the first _num_q
 725 // corresponding to the active workers will be processed.


 788           break;
 789         }
 790       } else {
 791         to_idx = (to_idx + 1) % _num_q;
 792       }
 793     }
 794   }
 795 #ifdef ASSERT
 796   size_t balanced_total_refs = 0;
 797   for (uint i = 0; i < _max_num_q; ++i) {
 798     balanced_total_refs += ref_lists[i].length();
 799   }
 800   log_reflist_counts(ref_lists, balanced_total_refs);
 801   assert(total_refs == balanced_total_refs, "Balancing was incomplete");
 802 #endif
 803 }
 804 
 805 void ReferenceProcessor::balance_all_queues() {
 806   balance_queues(_discoveredSoftRefs);
 807   balance_queues(_discoveredWeakRefs);

 808   balance_queues(_discoveredFinalRefs);
 809   balance_queues(_discoveredPhantomRefs);
 810   balance_queues(_discoveredCleanerRefs);
 811 }
 812 
 813 void ReferenceProcessor::process_discovered_reflist(
 814   DiscoveredList               refs_lists[],
 815   ReferencePolicy*             policy,
 816   bool                         clear_referent,
 817   BoolObjectClosure*           is_alive,
 818   OopClosure*                  keep_alive,
 819   VoidClosure*                 complete_gc,
 820   AbstractRefProcTaskExecutor* task_executor)
 821 {
 822   bool mt_processing = task_executor != NULL && _processing_is_mt;
 823   // If discovery used MT and a dynamic number of GC threads, then
 824   // the queues must be balanced for correctness if fewer than the
 825   // maximum number of queues were used.  The number of queues used
 826   // during discovery may be different from the number to be used
 827   // for processing, so don't depend on _num_q < _max_num_q as part
 828   // of the test.
 829   bool must_balance = _discovery_is_mt;
 830 


 839   //   policy reasons. Keep alive the transitive closure of all
 840   //   such referents.
 841   if (policy != NULL) {
 842     if (mt_processing) {
 843       RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/);
 844       task_executor->execute(phase1);
 845     } else {
 846       for (uint i = 0; i < _max_num_q; i++) {
 847         process_phase1(refs_lists[i], policy,
 848                        is_alive, keep_alive, complete_gc);
 849       }
 850     }
 851   } else { // policy == NULL
 852     assert(refs_lists != _discoveredSoftRefs,
 853            "Policy must be specified for soft references.");
 854   }
 855 
 856   // Phase 2:
 857   // . Traverse the list and remove any refs whose referents are alive.
 858   if (mt_processing) {
 859     RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/);


 860     task_executor->execute(phase2);
 861   } else {
 862     for (uint i = 0; i < _max_num_q; i++) {
 863       process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);


 864     }
 865   }
 866 
 867   // Phase 3:
 868   // . Traverse the list and process referents as appropriate.
 869   if (mt_processing) {
 870     RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/);


 871     task_executor->execute(phase3);
 872   } else {
 873     for (uint i = 0; i < _max_num_q; i++) {
 874       process_phase3(refs_lists[i], clear_referent,
 875                      is_alive, keep_alive, complete_gc);
 876     }
 877   }
 878 }
 879 
 880 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
 881   uint id = 0;
 882   // Determine the queue index to use for this object.
 883   if (_discovery_is_mt) {
 884     // During a multi-threaded discovery phase,
 885     // each thread saves to its "own" list.
 886     Thread* thr = Thread::current();
 887     id = thr->as_Worker_thread()->id();
 888   } else {
 889     // single-threaded discovery, we save in round-robin
 890     // fashion to each of the lists.
 891     if (_processing_is_mt) {
 892       id = next_id();
 893     }
 894   }
 895   assert(id < _max_num_q, "Id is out-of-bounds (call Freud?)");
 896 
 897   // Get the discovered queue to which we will add
 898   DiscoveredList* list = NULL;
 899   switch (rt) {
 900     case REF_OTHER:
 901       // Unknown reference type, no special treatment
 902       break;
 903     case REF_SOFT:
 904       list = &_discoveredSoftRefs[id];
 905       break;
 906     case REF_WEAK:
 907       list = &_discoveredWeakRefs[id];
 908       break;
 909     case REF_FINAL:
 910       list = &_discoveredFinalRefs[id];
 911       break;
 912     case REF_PHANTOM:
 913       list = &_discoveredPhantomRefs[id];
 914       break;
 915     case REF_CLEANER:
 916       list = &_discoveredCleanerRefs[id];
 917       break;
 918     case REF_NONE:
 919       // we should not reach here if we are an InstanceRefKlass
 920     default:
 921       ShouldNotReachHere();
 922   }
 923   log_develop_trace(gc, ref)("Thread %d gets list " INTPTR_FORMAT, id, p2i(list));
 924   return list;
 925 }
 926 
 927 inline void
 928 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
 929                                               oop             obj,
 930                                               HeapWord*       discovered_addr) {
 931   assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
 932   // First we must make sure this object is only enqueued once. CAS in a non null
 933   // discovered_addr.
 934   oop current_head = refs_list.head();
 935   // The last ref must have its discovered field pointing to itself.
 936   oop next_discovered = (current_head != NULL) ? current_head : obj;
 937 


1099     list->inc_length(1);
1100 
1101     log_develop_trace(gc, ref)("Discovered reference (" INTPTR_FORMAT ": %s)", p2i(obj), obj->klass()->internal_name());
1102   }
1103   assert(obj->is_oop(), "Discovered a bad reference");
1104   verify_referent(obj);
1105   return true;
1106 }
1107 
1108 // Preclean the discovered references by removing those
1109 // whose referents are alive, and by marking from those that
1110 // are not active. These lists can be handled here
1111 // in any order and, indeed, concurrently.
1112 void ReferenceProcessor::preclean_discovered_references(
1113   BoolObjectClosure* is_alive,
1114   OopClosure* keep_alive,
1115   VoidClosure* complete_gc,
1116   YieldClosure* yield,
1117   GCTimer* gc_timer) {
1118 
1119   // Soft references
1120   {
1121     GCTraceTime(Debug, gc, ref) tm("Preclean SoftReferences", gc_timer);
1122     for (uint i = 0; i < _max_num_q; i++) {
1123       if (yield->should_return()) {
1124         return;
1125       }
1126       preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
1127                                   keep_alive, complete_gc, yield);
1128     }
1129   }
1130 
1131   // Weak references
1132   {
1133     GCTraceTime(Debug, gc, ref) tm("Preclean WeakReferences", gc_timer);
1134     for (uint i = 0; i < _max_num_q; i++) {
1135       if (yield->should_return()) {
1136         return;
1137       }
1138       preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,


1145     GCTraceTime(Debug, gc, ref) tm("Preclean FinalReferences", gc_timer);
1146     for (uint i = 0; i < _max_num_q; i++) {
1147       if (yield->should_return()) {
1148         return;
1149       }
1150       preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
1151                                   keep_alive, complete_gc, yield);
1152     }
1153   }
1154 
1155   // Phantom references
1156   {
1157     GCTraceTime(Debug, gc, ref) tm("Preclean PhantomReferences", gc_timer);
1158     for (uint i = 0; i < _max_num_q; i++) {
1159       if (yield->should_return()) {
1160         return;
1161       }
1162       preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
1163                                   keep_alive, complete_gc, yield);
1164     }
1165 
1166     // Cleaner references.  Included in timing for phantom references.  We
1167     // expect Cleaner references to be temporary, and don't want to deal with
1168     // possible incompatibilities arising from making them more visible.
1169     for (uint i = 0; i < _max_num_q; i++) {
1170       if (yield->should_return()) {
1171         return;
1172       }
1173       preclean_discovered_reflist(_discoveredCleanerRefs[i], is_alive,
1174                                   keep_alive, complete_gc, yield);
1175     }
1176   }
1177 }
1178 
1179 // Walk the given discovered ref list, and remove all reference objects
1180 // whose referents are still alive, whose referents are NULL or which
1181 // are not active (have a non-NULL next field). NOTE: When we are
1182 // thus precleaning the ref lists (which happens single-threaded today),
1183 // we do not disable refs discovery to honor the correct semantics of
1184 // java.lang.Reference. As a result, we need to be careful below
1185 // that ref removal steps interleave safely with ref discovery steps
1186 // (in this thread).
1187 void
1188 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList&    refs_list,
1189                                                 BoolObjectClosure* is_alive,
1190                                                 OopClosure*        keep_alive,
1191                                                 VoidClosure*       complete_gc,
1192                                                 YieldClosure*      yield) {
1193   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
1194   while (iter.has_next()) {
1195     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));


1210         keep_alive->do_oop(next_addr);
1211       } else {
1212         oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
1213         keep_alive->do_oop(next_addr);
1214       }
1215       iter.move_to_next();
1216     } else {
1217       iter.next();
1218     }
1219   }
1220   // Close the reachable set
1221   complete_gc->do_void();
1222 
1223   NOT_PRODUCT(
1224     if (iter.processed() > 0) {
1225       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " Refs out of " SIZE_FORMAT " Refs in discovered list " INTPTR_FORMAT,
1226         iter.removed(), iter.processed(), p2i(refs_list.head()));
1227     }
1228   )
1229 }
1230 
1231 const char* ReferenceProcessor::list_name(uint i) {
1232    assert(i < _max_num_q * number_of_subclasses_of_ref(),
1233           "Out of bounds index");
1234 
1235    int j = i / _max_num_q;
1236    switch (j) {
1237      case 0: return "SoftRef";
1238      case 1: return "WeakRef";
1239      case 2: return "FinalRef";
1240      case 3: return "PhantomRef";
1241      case 4: return "CleanerRef";
1242    }
1243    ShouldNotReachHere();
1244    return NULL;
1245 }
1246 

src/share/vm/gc/shared/referenceProcessor.cpp (new version, with Ephemeron support; old version above)

 100                                        BoolObjectClosure* is_alive_non_header)  :
 101   _discovering_refs(false),
 102   _enqueuing_is_done(false),
 103   _is_alive_non_header(is_alive_non_header),
 104   _processing_is_mt(mt_processing),
 105   _next_id(0)
 106 {
 107   _span = span;
 108   _discovery_is_atomic = atomic_discovery;
 109   _discovery_is_mt     = mt_discovery;
 110   _num_q               = MAX2(1U, mt_processing_degree);
 111   _max_num_q           = MAX2(_num_q, mt_discovery_degree);
 112   _discovered_refs     = NEW_C_HEAP_ARRAY(DiscoveredList,
 113             _max_num_q * number_of_subclasses_of_ref(), mtGC);
 114 
 115   if (_discovered_refs == NULL) {
 116     vm_exit_during_initialization("Could not allocate RefProc Array");
 117   }
 118   _discoveredSoftRefs    = &_discovered_refs[0];
 119   _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
 120   _discoveredEphemerons  = &_discoveredWeakRefs[_max_num_q];
 121   _discoveredFinalRefs   = &_discoveredEphemerons[_max_num_q];
 122   _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];

 123 
 124   // Initialize all entries to NULL
 125   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 126     _discovered_refs[i].set_head(NULL);
 127     _discovered_refs[i].set_length(0);
 128   }
 129 
 130   setup_policy(false /* default soft ref policy */);
 131 }
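
For orientation, the constructor above lays the lists out as one flat array of
_max_num_q * number_of_subclasses_of_ref() entries, grouped by subclass in the
order Soft, Weak, Ephemeron, Final, Phantom. A minimal sketch of that indexing
(the helper below is invented for illustration, not part of this change):

    // Sketch only: maps (subclass, queue) onto the flat _discovered_refs
    // layout built above. Subclass order assumed from the base pointers:
    // 0 = Soft, 1 = Weak, 2 = Ephemeron, 3 = Final, 4 = Phantom.
    static DiscoveredList* list_at(DiscoveredList* discovered_refs,
                                   uint subclass, uint q, uint max_num_q) {
      return &discovered_refs[subclass * max_num_q + q];
    }
    // e.g. list_at(_discovered_refs, 2, id, _max_num_q) == &_discoveredEphemerons[id]
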
 132 
 133 #ifndef PRODUCT
 134 void ReferenceProcessor::verify_no_references_recorded() {
 135   guarantee(!_discovering_refs, "Discovering refs?");
 136   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 137     guarantee(_discovered_refs[i].is_empty(),
 138               "Found non-empty discovered list");
 139   }
 140 }
 141 #endif
 142 


 191   BoolObjectClosure*           is_alive,
 192   OopClosure*                  keep_alive,
 193   VoidClosure*                 complete_gc,
 194   AbstractRefProcTaskExecutor* task_executor,
 195   GCTimer*                     gc_timer) {
 196 
 197   assert(!enqueuing_is_done(), "If here enqueuing should not be complete");
 198   // Stop treating discovered references specially.
 199   disable_discovery();
 200 
 201   // If discovery was concurrent, someone could have modified
 202   // the value of the static field in the j.l.r.SoftReference
 203   // class that holds the soft reference timestamp clock using
 204   // reflection or Unsafe between when discovery was enabled and
 205   // now. Unconditionally update the static field in ReferenceProcessor
 206   // here so that we use the new value during processing of the
 207   // discovered soft refs.
 208 
 209   _soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
 210 
 211   ReferenceProcessorStats stats(
 212       total_count(_discoveredSoftRefs),
 213       total_count(_discoveredWeakRefs),
 214       total_count(_discoveredEphemerons),
 215       total_count(_discoveredFinalRefs),
 216       total_count(_discoveredPhantomRefs));
 217 
 218   // Ephemerons (phase2) before Soft references. This closes the hard-reachable
 219   // set that arises when the value of an ephemeron with a hard-reachable key
 220   // refers to an otherwise weaker-than-hard-reachable key of some other ephemeron.
 221   {
 222     GCTraceTime(Debug, gc, ref) tt("Ephemeron-HardClosure", gc_timer);
 223     // balance the queues first, if needed
 224     balance_discovered_ephemerons(task_executor);
 225     process_discovered_ephemerons_ph2(is_alive, keep_alive, complete_gc, task_executor);
 226   }
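
Why this closure must iterate: reviving one ephemeron's value can make another
ephemeron's key reachable, so a single pass over the discovered lists is not
enough. A hedged toy model in plain C++ (not HotSpot code; integers stand in
for oops, a std::set for the mark state):

    #include <cassert>
    #include <set>
    #include <vector>

    struct Ephemeron { int key; int value; };

    // Fixed point mirroring process_discovered_ephemerons_ph2 below: keep
    // marking values of ephemerons whose keys are reachable until a pass
    // removes nothing from the discovered list.
    void close_over_ephemerons(std::set<int>& reachable,
                               std::vector<Ephemeron>& discovered) {
      bool removed;
      do {
        removed = false;
        std::vector<Ephemeron> pending;
        for (const Ephemeron& e : discovered) {
          if (reachable.count(e.key)) {
            reachable.insert(e.value);  // keep_alive + complete_gc in the real code
            removed = true;
          } else {
            pending.push_back(e);       // stays on the discovered list
          }
        }
        discovered.swap(pending);
      } while (removed);
    }

    int main() {
      // E2 = (K2 -> V2) is listed before E1 = (K1 -> K2); only K1 (= 1) is
      // hard-reachable at the start, so E2's key is revived only via E1.
      std::set<int> reachable = {1};
      std::vector<Ephemeron> discovered = {{2, 42}, {1, 2}};
      close_over_ephemerons(reachable, discovered);
      assert(reachable.count(2) == 1 && reachable.count(42) == 1);
      assert(discovered.empty());
      return 0;
    }
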
 227 
 228   // Soft references
 229   {
 230     GCTraceTime(Debug, gc, ref) tt("SoftReference", gc_timer);
 231     process_discovered_reflist(_discoveredSoftRefs, _current_soft_ref_policy, true,
 232                                is_alive, keep_alive, complete_gc, task_executor);
 233   }
 234 
 235   update_soft_ref_master_clock();
 236 
 237   // Ephemerons (phase2) again before Weak references. This closes the soft-reachable
 238   // set that arises when the value of an ephemeron with a soft-reachable key
 239   // refers to an otherwise weaker-than-soft-reachable key of some other ephemeron.
 240   {
 241     GCTraceTime(Debug, gc, ref) tt("Ephemeron-SoftClosure", gc_timer);
 242     process_discovered_ephemerons_ph2(is_alive, keep_alive, complete_gc, task_executor);
 243   }
 244 
 245 
 246   // Weak references
 247   {
 248     GCTraceTime(Debug, gc, ref) tt("WeakReference", gc_timer);
 249     process_discovered_reflist(_discoveredWeakRefs, NULL, true,
 250                                is_alive, keep_alive, complete_gc, task_executor);
 251   }
 252 
 253   // Ephemerons (phase3). This clears any remaining ephemerons.
 254   {
 255     GCTraceTime(Debug, gc, ref) tt("Ephemeron-Clear", gc_timer);
 256     process_discovered_ephemerons_ph3(is_alive, keep_alive, complete_gc, task_executor);
 257   }
 258 
 259   // Final references
 260   {
 261     GCTraceTime(Debug, gc, ref) tt("FinalReference", gc_timer);
 262     process_discovered_reflist(_discoveredFinalRefs, NULL, false,
 263                                is_alive, keep_alive, complete_gc, task_executor);
 264   }
 265 
 266   // Phantom references
 267   {
 268     GCTraceTime(Debug, gc, ref) tt("PhantomReference", gc_timer);
 269     process_discovered_reflist(_discoveredPhantomRefs, NULL, true,
 270                                is_alive, keep_alive, complete_gc, task_executor);
 271   }
 272 
 273   // Weak global JNI references. It would make more sense (semantically) to
 274   // traverse these simultaneously with the regular weak references above, but
 275   // that is not how the JDK 1.2 specification reads. See #4126360. Native code can
 276   // thus use JNI weak references to circumvent the phantom references and
 277   // resurrect a "post-mortem" object.
 278   {
 279     GCTraceTime(Debug, gc, ref) tt("JNI Weak Reference", gc_timer);
 280     if (task_executor != NULL) {
 281       task_executor->set_single_threaded_mode();
 282     }
 283     process_phaseJNI(is_alive, keep_alive, complete_gc);
 284   }
 285 
 286   log_debug(gc, ref)("Ref Counts: Soft: " SIZE_FORMAT " Weak: " SIZE_FORMAT " Ephemeron: " SIZE_FORMAT " Final: " SIZE_FORMAT " Phantom: " SIZE_FORMAT,
 287                      stats.soft_count(), stats.weak_count(), stats.ephemeron_count(), stats.final_count(), stats.phantom_count());
 288   log_develop_trace(gc, ref)("JNI Weak Reference count: " SIZE_FORMAT, count_jni_refs());
 289 
 290   return stats;
 291 }
 292 
 293 #ifndef PRODUCT
 294 // Calculate the number of jni handles.
 295 size_t ReferenceProcessor::count_jni_refs() {
 296   class AlwaysAliveClosure: public BoolObjectClosure {
 297   public:
 298     virtual bool do_object_b(oop obj) { return true; }
 299   };
 300 
 301   class CountHandleClosure: public OopClosure {
 302   private:
 303     size_t _count;
 304   public:
 305     CountHandleClosure(): _count(0) {}
 306     void do_oop(oop* unused)       { _count++; }
 307     void do_oop(narrowOop* unused) { ShouldNotReachHere(); }


 590         keep_alive->do_oop((narrowOop*)next_addr);
 591       } else {
 592         keep_alive->do_oop((oop*)next_addr);
 593       }
 594       iter.move_to_next();
 595     } else {
 596       iter.next();
 597     }
 598   }
 599   // Now close the newly reachable set
 600   complete_gc->do_void();
 601   NOT_PRODUCT(
 602     if (iter.processed() > 0) {
 603       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
 604         " Refs in discovered list " INTPTR_FORMAT,
 605         iter.removed(), iter.processed(), p2i(refs_list.head()));
 606     }
 607   )
 608 }
 609 
 610 bool
 611 ReferenceProcessor::pp2_ephemerons_work(DiscoveredList& refs_list,
 612                                         BoolObjectClosure* is_alive,
 613                                         OopClosure* keep_alive,
 614                                         VoidClosure* complete_gc) {
 615   assert(discovery_is_atomic(), "Error");
 616   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 617   // Temporary list used to reverse the order of ephemerons at each pass to avoid
 618   // pathological cases where the majority of revived ephemeron values point
 619   // to ephemeron keys in the list "preceding" this ephemeron.
 620   DiscoveredList reversed_list;
 621   bool ephemerons_removed = false;
 622   while (iter.has_next()) {
 623     iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
 624     oop obj = iter.obj();
 625     DEBUG_ONLY(oop next = java_lang_ref_Reference::next(obj);)
 626     assert(next == NULL, "Should not discover inactive Reference");
 627     if (iter.is_referent_alive()) {
 628       log_develop_trace(gc, ref)("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
 629                                  p2i(obj), obj->klass()->internal_name());
 630       // The referent (key) is reachable after all.
 631       // Remove Ephemeron object from list.
 632       iter.remove();
 633       // Update the referent (key) pointer as necessary: Note that this
 634       // should not entail any recursive marking because the
 635       // referent must already have been traversed.
 636       iter.make_referent_alive();
 637       // Update the value pointer as necessary.
 638       HeapWord* value_addr = java_lang_ref_Ephemeron::value_addr(obj);
 639       if (UseCompressedOops) {
 640         keep_alive->do_oop((narrowOop*) value_addr);
 641       } else {
 642         keep_alive->do_oop((oop*) value_addr);
 643       }
 644       ephemerons_removed = true;
 645       // Close the newly reachable set as soon as the value is marked to be alive
 646       // to increase the chance other ephemeron referents (keys) are revived as
 647       // we proceed scanning the list.
 648       complete_gc->do_void();
 649     } else {
 650       // Referent (key) is not alive (yet) so move the ephemeron to a reversed_list
 651       // to reverse scanning in the next pass.
 652       iter.remove();
 653       HeapWord* discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
 654       oop current_head = reversed_list.head();
 655       // The last ref must have its discovered field pointing to itself.
 656       oop next_discovered = (current_head != NULL) ? current_head : obj;
 657       oop_store_raw(discovered_addr, next_discovered);
 658       reversed_list.set_head(obj);
 659       reversed_list.inc_length(1);
 660     }
 661     iter.move_to_next();
 662   }
 663   assert(refs_list.length() == 0, "Should be empty");
 664   // replace the list with the reversed list
 665   refs_list = reversed_list;
 666   NOT_PRODUCT(
 667     if (iter.processed() > 0) {
 668       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Ephemerons out of " SIZE_FORMAT
 669                                  " Ephemerons in discovered list " INTPTR_FORMAT,
 670                                  iter.processed() - refs_list.length(), iter.processed(),
 671                                  p2i(refs_list.head()));
 672     }
 673   )
 674   return ephemerons_removed;
 675 }
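
The reversal that pp2_ephemerons_work gets for free from head-insertion into
reversed_list can be seen in isolation. A stand-alone sketch with an invented
Ref struct (not HotSpot types), including the last-ref-points-to-itself
convention used above:

    #include <cstdio>

    struct Ref { int id; Ref* discovered; };

    int main() {
      Ref c = {3, nullptr}, b = {2, &c}, a = {1, &b};  // list: 1 -> 2 -> 3
      Ref* head = &a;
      Ref* reversed = nullptr;
      while (head != nullptr) {
        Ref* next = head->discovered;
        // Head-insert into the reversed list; the first ref moved becomes the
        // last and points to itself, mirroring oop_store_raw(discovered_addr,
        // next_discovered) above.
        head->discovered = (reversed != nullptr) ? reversed : head;
        reversed = head;
        head = next;
      }
      for (Ref* r = reversed; ; r = r->discovered) {
        std::printf("%d ", r->id);  // prints: 3 2 1
        if (r->discovered == r) break;
      }
      std::printf("\n");
      return 0;
    }
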
 676 
 677 bool
 678 ReferenceProcessor::pp2_ephemerons_work_concurrent_discovery(DiscoveredList& refs_list,
 679                                                              BoolObjectClosure* is_alive,
 680                                                              OopClosure* keep_alive,
 681                                                              VoidClosure* complete_gc) {
 682   assert(!discovery_is_atomic(), "Error");
 683   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 684   // Temporary list used to reverse the order of ephemerons at each pass to avoid
 685   // pathological cases where the majority of revived ephemeron values point
 686   // to ephemeron keys in the list "preceding" this ephemeron.
 687   DiscoveredList reversed_list;
 688   bool ephemerons_removed = false;
 689   while (iter.has_next()) {
 690     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
 691     oop obj = iter.obj();
 692     HeapWord* next_addr = java_lang_ref_Reference::next_addr(obj);
 693     oop next = java_lang_ref_Reference::next(obj);
 694     if (iter.referent() == NULL || iter.is_referent_alive() ||
 695         next != NULL) {
 696       assert(next->is_oop_or_null(), "Expected an oop or NULL for next field at " PTR_FORMAT, p2i(next));
 697       // Remove Reference object from list
 698       iter.remove();
 699       // Trace the cohorts
 700       iter.make_referent_alive();
 701       if (UseCompressedOops) {
 702         keep_alive->do_oop((narrowOop*) next_addr);
 703       } else {
 704         keep_alive->do_oop((oop*) next_addr);
 705       }
 706       HeapWord* value_addr = java_lang_ref_Ephemeron::value_addr(obj);
 707       if (UseCompressedOops) {
 708         keep_alive->do_oop((narrowOop*) value_addr);
 709       } else {
 710         keep_alive->do_oop((oop*) value_addr);
 711       }
 712       ephemerons_removed = true;
 713       // Close the newly reachable set as soon as the value is marked to be alive
 714       // to increase the chance other ephemeron keys are revived as we proceed
 715       // scanning the list
 716       complete_gc->do_void();
 717     } else {
 718       // Referent (key) is not alive (yet) so move the ephemeron to a reversed_list
 719       // to reverse scanning in the next pass.
 720       iter.remove();
 721       HeapWord* discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
 722       oop current_head = reversed_list.head();
 723       // The last ref must have its discovered field pointing to itself.
 724       oop next_discovered = (current_head != NULL) ? current_head : obj;
 725       oop_store_raw(discovered_addr, next_discovered);
 726       reversed_list.set_head(obj);
 727       reversed_list.inc_length(1);
 728     }
 729     iter.move_to_next();
 730   }
 731   assert(refs_list.length() == 0, "Should be empty");
 732   // replace the list with the reversed list
 733   refs_list = reversed_list;
 734   // Now close the newly reachable set at least once after the whole list has
 735   // been scanned, even if no ephemerons were removed
 736   if (!ephemerons_removed) {
 737     complete_gc->do_void();
 738   }
 739   NOT_PRODUCT(
 740     if (iter.processed() > 0) {
 741       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Ephemerons out of " SIZE_FORMAT
 742                                  " Ephemerons in discovered list " INTPTR_FORMAT,
 743                                  iter.processed() - refs_list.length(), iter.processed(),
 744                                  p2i(refs_list.head()));
 745     }
 746   )
 747   return ephemerons_removed;
 748 }
 749 
 750 // Traverse the list and process the referents (and values in case of Ephemerons),
 751 // by either clearing them or keeping them (and their reachable
 752 // closure) alive.
 753 void
 754 ReferenceProcessor::process_phase3(DiscoveredList&    refs_list,
 755                                    bool               clear_referent,
 756                                    bool               has_ephemerons,
 757                                    BoolObjectClosure* is_alive,
 758                                    OopClosure*        keep_alive,
 759                                    VoidClosure*       complete_gc) {
 760   ResourceMark rm;
 761   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 762   while (iter.has_next()) {
 763     iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
 764     if (clear_referent) {
 765       // NULL out referent pointer
 766       iter.clear_referent();
 767     } else {
 768       // keep the referent around
 769       iter.make_referent_alive();
 770     }
 771     if (has_ephemerons) {
 772       assert(clear_referent, "Ephemerons should always be cleared");
 773       HeapWord* value_addr = java_lang_ref_Ephemeron::value_addr(iter.obj());
 774       // NULL out value pointer
 775       oop_store_raw(value_addr, NULL);
 776     }
 777     log_develop_trace(gc, ref)("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
 778                                clear_referent ? "cleared " : "", p2i(iter.obj()), iter.obj()->klass()->internal_name());
 779     assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
 780     iter.next();
 781   }
 782   // Close the reachable set
 783   complete_gc->do_void();
 784 }
 785 
 786 void
 787 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
 788   oop obj = NULL;
 789   oop next = refs_list.head();
 790   while (next != obj) {
 791     obj = next;
 792     next = java_lang_ref_Reference::discovered(obj);
 793     java_lang_ref_Reference::set_discovered_raw(obj, NULL);
 794   }
 795   refs_list.set_head(NULL);
 796   refs_list.set_length(0);


 815     : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
 816       _policy(policy)
 817   { }
 818   virtual void work(unsigned int i, BoolObjectClosure& is_alive,
 819                     OopClosure& keep_alive,
 820                     VoidClosure& complete_gc)
 821   {
 822     Thread* thr = Thread::current();
 823     int refs_list_index = ((WorkerThread*)thr)->id();
 824     _ref_processor.process_phase1(_refs_lists[refs_list_index], _policy,
 825                                   &is_alive, &keep_alive, &complete_gc);
 826   }
 827 private:
 828   ReferencePolicy* _policy;
 829 };
 830 
 831 class RefProcPhase2Task: public AbstractRefProcTaskExecutor::ProcessTask {
 832 public:
 833   RefProcPhase2Task(ReferenceProcessor& ref_processor,
 834                     DiscoveredList      refs_lists[],
 835                     bool                has_ephemerons,
 836                     bool                marks_oops_alive)
 837     : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
 838       _has_ephemerons(has_ephemerons), ephemerons_removed(false)
 839   { }
 840   virtual void work(unsigned int i, BoolObjectClosure& is_alive,
 841                     OopClosure& keep_alive,
 842                     VoidClosure& complete_gc)
 843   {
 844     bool r = _ref_processor.process_phase2(_refs_lists[i], _has_ephemerons,
 845                                            &is_alive, &keep_alive, &complete_gc);
 846     if (r) {
 847       ephemerons_removed = true;
 848     }
 849   }
 850 private:
 851   bool _has_ephemerons;
 852 public:
 853   bool ephemerons_removed;
 854 };
 855 
 856 class RefProcPhase3Task: public AbstractRefProcTaskExecutor::ProcessTask {
 857 public:
 858   RefProcPhase3Task(ReferenceProcessor& ref_processor,
 859                     DiscoveredList      refs_lists[],
 860                     bool                clear_referent,
 861                     bool                has_ephemerons,
 862                     bool                marks_oops_alive)
 863     : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
 864       _clear_referent(clear_referent),
 865       _has_ephemerons(has_ephemerons)
 866   { }
 867   virtual void work(unsigned int i, BoolObjectClosure& is_alive,
 868                     OopClosure& keep_alive,
 869                     VoidClosure& complete_gc)
 870   {
 871     // Don't use "refs_list_index" calculated in this way because
 872     // balance_queues() has moved the Ref's into the first n queues.
 873     // Thread* thr = Thread::current();
 874     // int refs_list_index = ((WorkerThread*)thr)->id();
 875     // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
 876     _ref_processor.process_phase3(_refs_lists[i], _clear_referent, _has_ephemerons,
 877                                   &is_alive, &keep_alive, &complete_gc);
 878   }
 879 private:
 880   bool _clear_referent;
 881   bool _has_ephemerons;
 882 };
 883 
 884 #ifndef PRODUCT
 885 void ReferenceProcessor::log_reflist_counts(DiscoveredList ref_lists[], size_t total_refs) {
 886   if (!log_is_enabled(Trace, gc, ref)) {
 887     return;
 888   }
 889 
 890   stringStream st;
 891   for (uint i = 0; i < _max_num_q; ++i) {
 892     st.print(SIZE_FORMAT " ", ref_lists[i].length());
 893   }
 894   log_develop_trace(gc, ref)("%s= " SIZE_FORMAT, st.as_string(), total_refs);
 895 }
 896 #endif
 897 
 898 // Balances reference queues.
 899 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
 900 // queues[0, 1, ..., _num_q-1] because only the first _num_q
 901 // corresponding to the active workers will be processed.


 964           break;
 965         }
 966       } else {
 967         to_idx = (to_idx + 1) % _num_q;
 968       }
 969     }
 970   }
 971 #ifdef ASSERT
 972   size_t balanced_total_refs = 0;
 973   for (uint i = 0; i < _max_num_q; ++i) {
 974     balanced_total_refs += ref_lists[i].length();
 975   }
 976   log_reflist_counts(ref_lists, balanced_total_refs);
 977   assert(total_refs == balanced_total_refs, "Balancing was incomplete");
 978 #endif
 979 }
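
Most of the body of balance_queues is elided from this excerpt; as a rough
sketch of the intent stated in the comment above (lengths only, invented
helper; the real code splices DiscoveredList segments and also evens out the
first _num_q lists):

    void balance_lengths(size_t* len, uint num_q, uint max_num_q) {
      uint to_idx = 0;
      for (uint from_idx = num_q; from_idx < max_num_q; ++from_idx) {
        len[to_idx] += len[from_idx];  // move everything into an active queue
        len[from_idx] = 0;
        to_idx = (to_idx + 1) % num_q; // round-robin over the active queues
      }
    }
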
 980 
 981 void ReferenceProcessor::balance_all_queues() {
 982   balance_queues(_discoveredSoftRefs);
 983   balance_queues(_discoveredWeakRefs);
 984   balance_queues(_discoveredEphemerons);
 985   balance_queues(_discoveredFinalRefs);
 986   balance_queues(_discoveredPhantomRefs);

 987 }
 988 
 989 void ReferenceProcessor::process_discovered_reflist(
 990   DiscoveredList               refs_lists[],
 991   ReferencePolicy*             policy,
 992   bool                         clear_referent,
 993   BoolObjectClosure*           is_alive,
 994   OopClosure*                  keep_alive,
 995   VoidClosure*                 complete_gc,
 996   AbstractRefProcTaskExecutor* task_executor)
 997 {
 998   bool mt_processing = task_executor != NULL && _processing_is_mt;
 999   // If discovery used MT and a dynamic number of GC threads, then
1000   // the queues must be balanced for correctness if fewer than the
1001   // maximum number of queues were used.  The number of queues used
1002   // during discovery may be different from the number to be used
1003   // for processing, so don't depend on _num_q < _max_num_q as part
1004   // of the test.
1005   bool must_balance = _discovery_is_mt;
1006 


1015   //   policy reasons. Keep alive the transitive closure of all
1016   //   such referents.
1017   if (policy != NULL) {
1018     if (mt_processing) {
1019       RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/);
1020       task_executor->execute(phase1);
1021     } else {
1022       for (uint i = 0; i < _max_num_q; i++) {
1023         process_phase1(refs_lists[i], policy,
1024                        is_alive, keep_alive, complete_gc);
1025       }
1026     }
1027   } else { // policy == NULL
1028     assert(refs_lists != _discoveredSoftRefs,
1029            "Policy must be specified for soft references.");
1030   }
1031 
1032   // Phase 2:
1033   // . Traverse the list and remove any refs whose referents are alive.
1034   if (mt_processing) {
1035     RefProcPhase2Task phase2(*this, refs_lists,
1036                              false /*has_ephemerons*/,
1037                              !discovery_is_atomic() /*marks_oops_alive*/);
1038     task_executor->execute(phase2);
1039   } else {
1040     for (uint i = 0; i < _max_num_q; i++) {
1041       process_phase2(refs_lists[i],
1042                      false /*has_ephemerons*/,
1043                      is_alive, keep_alive, complete_gc);
1044     }
1045   }
1046 
1047   // Phase 3:
1048   // . Traverse the list and process referents as appropriate.
1049   if (mt_processing) {
1050     RefProcPhase3Task phase3(*this, refs_lists, clear_referent,
1051                              false /*has_ephemerons*/,
1052                              true /*marks_oops_alive*/);
1053     task_executor->execute(phase3);
1054   } else {
1055     for (uint i = 0; i < _max_num_q; i++) {
1056       process_phase3(refs_lists[i], clear_referent,
1057                      false /*has_ephemerons*/,
1058                      is_alive, keep_alive, complete_gc);
1059     }
1060   }
1061 }
1062 
1063 // Balance ephemerons queues if needed
1064 void ReferenceProcessor::balance_discovered_ephemerons(
1065   AbstractRefProcTaskExecutor* task_executor) {
1066 
1067   bool mt_processing = task_executor != NULL && _processing_is_mt;
1068   // If discovery used MT and a dynamic number of GC threads, then
1069   // the queues must be balanced for correctness if fewer than the
1070   // maximum number of queues were used.  The number of queues used
1071   // during discovery may be different from the number to be used
1072   // for processing, so don't depend on _num_q < _max_num_q as part
1073   // of the test.
1074   bool must_balance = _discovery_is_mt;
1075 
1076   if ((mt_processing && ParallelRefProcBalancingEnabled) ||
1077       must_balance) {
1078     balance_queues(_discoveredEphemerons);
1079   }
1080 }
1081 
1082 // Process ephemerons, phase2
1083 void ReferenceProcessor::process_discovered_ephemerons_ph2(
1084   BoolObjectClosure*           is_alive,
1085   OopClosure*                  keep_alive,
1086   VoidClosure*                 complete_gc,
1087   AbstractRefProcTaskExecutor* task_executor) {
1088 
1089   // Traverse the _discoveredEphemerons lists and for those ephemerons whose keys
1090   // are alive, remove them from the list, mark their values alive and close the
1091   // reachable set. Iterate until lists become stable while reversing the
1092   // direction of scanning in each pass.
1093   bool mt_processing = task_executor != NULL && _processing_is_mt;
1094   bool ephemerons_removed;
1095   bool forward_scan = true;
1096   do {
1097     if (mt_processing) {
1098       RefProcPhase2Task phase2(*this, _discoveredEphemerons,
1099                                true /*has_ephemerons*/,
1100                                !discovery_is_atomic() /*marks_oops_alive*/);
1101       task_executor->execute(phase2);
1102       ephemerons_removed = phase2.ephemerons_removed;
1103     } else {
1104       ephemerons_removed = false;
1105       // alternate direction of selecting individual lists for scanning to avoid
1106       // pathological cases where the majority of revived ephemeron values point
1107       // to ephemeron keys in "previous" lists...
1108       if (forward_scan) {
1109         for (uint i = 0; i < _max_num_q; i++) {
1110           ephemerons_removed |= process_phase2(_discoveredEphemerons[i],
1111                                                true /*has_ephemerons*/,
1112                                                is_alive, keep_alive, complete_gc);
1113         }
1114       } else {
1115         for (uint i = _max_num_q - 1; i < (uint)-1; i--) {
1116           ephemerons_removed |= process_phase2(_discoveredEphemerons[i],
1117                                                true /*has_ephemerons*/,
1118                                                is_alive, keep_alive, complete_gc);
1119         }
1120       }
1121       forward_scan = !forward_scan;
1122     }
1123   } while (ephemerons_removed);
1124 }
1125 
1126 // Process ephemerons, phase3
1127 void ReferenceProcessor::process_discovered_ephemerons_ph3(
1128   BoolObjectClosure*           is_alive,
1129   OopClosure*                  keep_alive,
1130   VoidClosure*                 complete_gc,
1131   AbstractRefProcTaskExecutor* task_executor) {
1132 
1133   // Traverse the _discoveredEphemerons lists and clear ephemerons.
1134   bool mt_processing = task_executor != NULL && _processing_is_mt;
1135   if (mt_processing) {
1136     RefProcPhase3Task phase3(*this, _discoveredEphemerons,
1137                              true /*clear_referent*/,
1138                              true /*has_ephemerons*/,
1139                              true /*marks_oops_alive*/);
1140     task_executor->execute(phase3);
1141   } else {
1142     for (uint i = 0; i < _max_num_q; i++) {
1143       process_phase3(_discoveredEphemerons[i],
1144                      true /*clear_referent*/,
1145                      true /*has_ephemerons*/,
1146                      is_alive, keep_alive, complete_gc);
1147     }
1148   }
1149 }
1150 
1151 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
1152   uint id = 0;
1153   // Determine the queue index to use for this object.
1154   if (_discovery_is_mt) {
1155     // During a multi-threaded discovery phase,
1156     // each thread saves to its "own" list.
1157     Thread* thr = Thread::current();
1158     id = thr->as_Worker_thread()->id();
1159   } else {
1160     // single-threaded discovery, we save in round-robin
1161     // fashion to each of the lists.
1162     if (_processing_is_mt) {
1163       id = next_id();
1164     }
1165   }
1166   assert(id < _max_num_q, "Id is out-of-bounds (call Freud?)");
1167 
1168   // Get the discovered queue to which we will add
1169   DiscoveredList* list = NULL;
1170   switch (rt) {
1171     case REF_OTHER:
1172       // Unknown reference type, no special treatment
1173       break;
1174     case REF_SOFT:
1175       list = &_discoveredSoftRefs[id];
1176       break;
1177     case REF_WEAK:
1178       list = &_discoveredWeakRefs[id];
1179       break;
1180     case REF_EPHEMERON:
1181       list = &_discoveredEphemerons[id];
1182       break;
1183     case REF_FINAL:
1184       list = &_discoveredFinalRefs[id];
1185       break;
1186     case REF_PHANTOM:
1187       list = &_discoveredPhantomRefs[id];
1188       break;
1189     case REF_NONE:
1190       // we should not reach here if we are an InstanceRefKlass
1191     default:
1192       ShouldNotReachHere();
1193   }
1194   log_develop_trace(gc, ref)("Thread %d gets list " INTPTR_FORMAT, id, p2i(list));
1195   return list;
1196 }
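
next_id() is not shown in this excerpt; for the single-threaded-discovery,
MT-processing path above it is assumed to hand out queue indexes round-robin,
roughly:

    // Assumed shape of next_id() (not shown in this diff): cycle through the
    // active queues so single-threaded discovery still spreads references
    // across the lists that the workers will later process.
    static uint next_id_sketch(uint& next_id, uint num_q) {
      uint id = next_id;
      if (++next_id == num_q) {
        next_id = 0;
      }
      return id;
    }
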
1197 
1198 inline void
1199 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
1200                                               oop             obj,
1201                                               HeapWord*       discovered_addr) {
1202   assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
1203   // First we must make sure this object is only enqueued once. CAS in a non null
1204   // discovered_addr.
1205   oop current_head = refs_list.head();
1206   // The last ref must have its discovered field pointing to itself.
1207   oop next_discovered = (current_head != NULL) ? current_head : obj;
1208 


1370     list->inc_length(1);
1371 
1372     log_develop_trace(gc, ref)("Discovered reference (" INTPTR_FORMAT ": %s)", p2i(obj), obj->klass()->internal_name());
1373   }
1374   assert(obj->is_oop(), "Discovered a bad reference");
1375   verify_referent(obj);
1376   return true;
1377 }
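
The CAS step of add_to_discovered_list_mt falls in the elided region above. A
minimal stand-alone model of that publish-once idiom (std::atomic stands in
for the oop CAS; types and names invented):

    #include <atomic>

    struct Node { std::atomic<Node*> discovered{nullptr}; };

    // The CAS on obj's discovered field is the only arbitration: exactly one
    // discovering thread wins, and the winner then pushes obj onto its own
    // thread-local list, so the head update itself needs no CAS.
    bool try_discover(Node*& my_list_head, Node* obj) {
      Node* next = (my_list_head != nullptr) ? my_list_head : obj;  // self-terminated
      Node* expected = nullptr;
      if (!obj->discovered.compare_exchange_strong(expected, next)) {
        return false;  // another thread already discovered obj
      }
      my_list_head = obj;
      return true;
    }
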
1378 
1379 // Preclean the discovered references by removing those
1380 // whose referents are alive, and by marking from those that
1381 // are not active. These lists can be handled here
1382 // in any order and, indeed, concurrently.
1383 void ReferenceProcessor::preclean_discovered_references(
1384   BoolObjectClosure* is_alive,
1385   OopClosure* keep_alive,
1386   VoidClosure* complete_gc,
1387   YieldClosure* yield,
1388   GCTimer* gc_timer) {
1389 
1390   // Ephemerons - iterate until the lists become stable
1391   {
1392     GCTraceTime(Debug, gc, ref) tt("Preclean Ephemerons", gc_timer);
1393     bool ephemerons_removed;
1394     bool forward_scan = true;
1395     do {
1396       ephemerons_removed = false;
1397       // alternate direction of selecting individual lists for scanning to avoid
1398       // pathological cases where the majority of revived ephemeron values point
1399       // to ephemeron keys in "previous" lists...
1400       if (forward_scan) {
1401         for (uint i = 0; i < _max_num_q; i++) {
1402           if (yield->should_return()) {
1403             return;
1404           }
1405           ephemerons_removed |=
1406             preclean_discovered_ephemerons_reflist(_discoveredEphemerons[i], is_alive,
1407                                                    keep_alive, complete_gc, yield);
1408         }
1409       } else {
1410         for (uint i = _max_num_q - 1; i < (uint)-1; i--) {
1411           if (yield->should_return()) {
1412             return;
1413           }
1414           ephemerons_removed |=
1415             preclean_discovered_ephemerons_reflist(_discoveredEphemerons[i], is_alive,
1416                                                    keep_alive, complete_gc, yield);
1417         }
1418       }
1419       forward_scan = !forward_scan;
1420     } while (ephemerons_removed);
1421   }
1422 
1423   // Soft references
1424   {
1425     GCTraceTime(Debug, gc, ref) tm("Preclean SoftReferences", gc_timer);
1426     for (uint i = 0; i < _max_num_q; i++) {
1427       if (yield->should_return()) {
1428         return;
1429       }
1430       preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
1431                                   keep_alive, complete_gc, yield);
1432     }
1433   }
1434 
1435   // Weak references
1436   {
1437     GCTraceTime(Debug, gc, ref) tm("Preclean WeakReferences", gc_timer);
1438     for (uint i = 0; i < _max_num_q; i++) {
1439       if (yield->should_return()) {
1440         return;
1441       }
1442       preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,


1449     GCTraceTime(Debug, gc, ref) tm("Preclean FinalReferences", gc_timer);
1450     for (uint i = 0; i < _max_num_q; i++) {
1451       if (yield->should_return()) {
1452         return;
1453       }
1454       preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
1455                                   keep_alive, complete_gc, yield);
1456     }
1457   }
1458 
1459   // Phantom references
1460   {
1461     GCTraceTime(Debug, gc, ref) tm("Preclean PhantomReferences", gc_timer);
1462     for (uint i = 0; i < _max_num_q; i++) {
1463       if (yield->should_return()) {
1464         return;
1465       }
1466       preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
1467                                   keep_alive, complete_gc, yield);
1468     }
1469   }
1470 }
1471 
1472 // Walk the given discovered ref list, and remove all reference objects
1473 // whose referents are still alive, whose referents are NULL or which
1474 // are not active (have a non-NULL next field). NOTE: When we are
1475 // thus precleaning the ref lists (which happens single-threaded today),
1476 // we do not disable refs discovery to honor the correct semantics of
1477 // java.lang.Reference. As a result, we need to be careful below
1478 // that ref removal steps interleave safely with ref discovery steps
1479 // (in this thread).
1480 void
1481 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList&    refs_list,
1482                                                 BoolObjectClosure* is_alive,
1483                                                 OopClosure*        keep_alive,
1484                                                 VoidClosure*       complete_gc,
1485                                                 YieldClosure*      yield) {
1486   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
1487   while (iter.has_next()) {
1488     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));


1503         keep_alive->do_oop(next_addr);
1504       } else {
1505         oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
1506         keep_alive->do_oop(next_addr);
1507       }
1508       iter.move_to_next();
1509     } else {
1510       iter.next();
1511     }
1512   }
1513   // Close the reachable set
1514   complete_gc->do_void();
1515 
1516   NOT_PRODUCT(
1517     if (iter.processed() > 0) {
1518       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " Refs out of " SIZE_FORMAT " Refs in discovered list " INTPTR_FORMAT,
1519         iter.removed(), iter.processed(), p2i(refs_list.head()));
1520     }
1521   )
1522 }
1523 // The same as above, but specialized for ephemerons; returns true if any
1524 // ephemerons were removed from the ref list.
1525 bool
1526 ReferenceProcessor::preclean_discovered_ephemerons_reflist(DiscoveredList& refs_list,
1527                                                            BoolObjectClosure* is_alive,
1528                                                            OopClosure* keep_alive,
1529                                                            VoidClosure* complete_gc,
1530                                                            YieldClosure* yield) {
1531   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
1532   // Temporary list used to reverse the order of ephemerons at each pass to avoid
1533   // pathological cases where the majority of revived ephemeron values point
1534   // to ephemeron keys in the list "preceding" this ephemeron.
1535   DiscoveredList reversed_list;
1536   bool ephemerons_removed = false;
1537   while (iter.has_next()) {
1538     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
1539     oop obj = iter.obj();
1540     oop next = java_lang_ref_Reference::next(obj);
1541     if (iter.referent() == NULL || iter.is_referent_alive() ||
1542         next != NULL) {
1543       // The referent has been cleared, or is alive, or the Reference is not
1544       // active; we need to trace and mark its cohort.
1545       log_develop_trace(gc, ref)("Precleaning Ephemeron (" INTPTR_FORMAT ": %s)",
1546                                  p2i(obj), obj->klass()->internal_name());
1547       // Remove Reference object from list
1548       iter.remove();
1549       // Keep alive its cohort.
1550       iter.make_referent_alive();
1551       if (UseCompressedOops) {
1552         narrowOop* next_addr = (narrowOop*) java_lang_ref_Reference::next_addr(obj);
1553         keep_alive->do_oop(next_addr);
1554       } else {
1555         oop* next_addr = (oop*) java_lang_ref_Reference::next_addr(obj);
1556         keep_alive->do_oop(next_addr);
1557       }
1558       HeapWord* value_addr = java_lang_ref_Ephemeron::value_addr(obj);
1559       if (UseCompressedOops) {
1560         keep_alive->do_oop((narrowOop*) value_addr);
1561       } else {
1562         keep_alive->do_oop((oop*) value_addr);
1563       }
1564       ephemerons_removed = true;
1565       // Close the newly reachable set as soon as the value is marked to be alive
1566       // to increase the chance other ephemeron referents (keys) are revived as
1567       // we proceed scanning the list.
1568       complete_gc->do_void();
1569     } else {
1570       // Referent (key) is not alive (yet) so move the ephemeron to a reversed_list
1571       // to reverse scanning in the next pass.
1572       iter.remove();
1573       HeapWord* discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
1574       oop current_head = reversed_list.head();
1575       // The last ref must have its discovered field pointing to itself.
1576       oop next_discovered = (current_head != NULL) ? current_head : obj;
1577       oop_store_raw(discovered_addr, next_discovered);
1578       reversed_list.set_head(obj);
1579       reversed_list.inc_length(1);
1580     }
1581     iter.move_to_next();
1582   }
1583   assert(refs_list.length() == 0, "Should be empty");
1584   // replace the list with the reversed list
1585   refs_list = reversed_list;
1586   // Close the reachable set even if no ephemeron was removed from list
1587   if (!ephemerons_removed) {
1588     complete_gc->do_void();
1589   }
1590   NOT_PRODUCT(
1591     if (iter.processed() > 0) {
1592       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " Ephemerons out of " SIZE_FORMAT
1593                                  " Ephemerons in discovered list " INTPTR_FORMAT,
1594                                  iter.processed() - refs_list.length(), iter.processed(),
1595                                  p2i(refs_list.head()));
1596     }
1597   )
1598   return ephemerons_removed;
1599 }
1600 
1601 const char* ReferenceProcessor::list_name(uint i) {
1602    assert(i < _max_num_q * number_of_subclasses_of_ref(),
1603           "Out of bounds index");
1604 
1605    int j = i / _max_num_q;
1606    switch (j) {
1607      case 0: return "SoftRef";
1608      case 1: return "WeakRef";
1609      case 2: return "Ephemeron";
1610      case 3: return "FinalRef";
1611      case 4: return "PhantomRef";
1612    }
1613    ShouldNotReachHere();
1614    return NULL;
1615 }
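
A worked reading of the index math above, under an assumed _max_num_q of 4:

    #include <cassert>

    int main() {
      const unsigned max_num_q = 4;  // assumed value, for illustration only
      // Flat index 9 is queue 9 % 4 == 1 of subclass 9 / 4 == 2, so
      // list_name(9) returns "Ephemeron".
      assert(9 / max_num_q == 2);
      assert(9 % max_num_q == 1);
      return 0;
    }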
1616 