
src/hotspot/share/gc/shared/referenceProcessor.cpp

rev 49831 : imported patch 8201492-properly-implement-non-contiguous-reference-processing
rev 49834 : [mq]: 8202021-cleanup-referenceprocessor


  93 }
  94 
  95 ReferenceProcessor::ReferenceProcessor(BoolObjectClosure* is_subject_to_discovery,
  96                                        bool      mt_processing,
  97                                        uint      mt_processing_degree,
  98                                        bool      mt_discovery,
  99                                        uint      mt_discovery_degree,
 100                                        bool      atomic_discovery,
 101                                        BoolObjectClosure* is_alive_non_header)  :
 102   _is_subject_to_discovery(is_subject_to_discovery),
 103   _discovering_refs(false),
 104   _enqueuing_is_done(false),
 105   _is_alive_non_header(is_alive_non_header),
 106   _processing_is_mt(mt_processing),
 107   _next_id(0)
 108 {
 109   assert(is_subject_to_discovery != NULL, "must be set");
 110 
 111   _discovery_is_atomic = atomic_discovery;
 112   _discovery_is_mt     = mt_discovery;
 113   _num_q               = MAX2(1U, mt_processing_degree);
 114   _max_num_q           = MAX2(_num_q, mt_discovery_degree);
 115   _discovered_refs     = NEW_C_HEAP_ARRAY(DiscoveredList,
 116             _max_num_q * number_of_subclasses_of_ref(), mtGC);
 117 
 118   if (_discovered_refs == NULL) {
  119     vm_exit_during_initialization("Could not allocate RefProc Array");
 120   }
 121   _discoveredSoftRefs    = &_discovered_refs[0];
 122   _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
 123   _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
 124   _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
 125 
 126   // Initialize all entries to NULL
 127   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 128     _discovered_refs[i].set_head(NULL);
 129     _discovered_refs[i].set_length(0);
 130   }
 131 
 132   setup_policy(false /* default soft ref policy */);
 133 }
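
The constructor above carves one C-heap allocation into four contiguous stripes, one per java.lang.ref subclass, each _max_num_q lists wide, which is why _discoveredWeakRefs simply points _max_num_q entries past _discoveredSoftRefs. The list for (subclass s, queue q) therefore sits at flat index s * _max_num_q + q. A minimal standalone sketch of that indexing, with plain ints standing in for DiscoveredList (illustrative names only, not HotSpot code):

  // Sketch of the striped layout used by _discovered_refs:
  // flat index = subclass_index * max_num_queues + queue_id.
  #include <cstdio>

  int main() {
    const unsigned num_subclasses = 4;   // Soft, Weak, Final, Phantom
    const unsigned max_num_queues = 3;   // stand-in for _max_num_q

    int discovered_refs[num_subclasses * max_num_queues] = {};  // stand-in for DiscoveredList[]

    int* soft    = &discovered_refs[0 * max_num_queues];
    int* weak    = &discovered_refs[1 * max_num_queues];
    int* final_  = &discovered_refs[2 * max_num_queues];
    int* phantom = &discovered_refs[3 * max_num_queues];

    printf("weak[2]    is flat index %td\n", &weak[2] - discovered_refs);    // prints 5
    printf("phantom[0] is flat index %td\n", &phantom[0] - discovered_refs); // prints 9
    (void)soft; (void)final_;
    return 0;
  }
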
 134 
 135 SpanReferenceProcessor::SpanReferenceProcessor(MemRegion span,
 136                                                          bool      mt_processing,
 137                                                          uint      mt_processing_degree,
 138                                                          bool      mt_discovery,
 139                                                          uint      mt_discovery_degree,
 140                                                          bool      atomic_discovery,
 141                                                          BoolObjectClosure* is_alive_non_header)  :
 142   ReferenceProcessor(&_span_based_discoverer,
 143                      mt_processing,
 144                      mt_processing_degree,
 145                      mt_discovery,
 146                      mt_discovery_degree,
 147                      atomic_discovery,
 148                      is_alive_non_header),
 149   _span_based_discoverer(span) {
 150 
 151 }
 152 
 153 #ifndef PRODUCT
 154 void ReferenceProcessor::verify_no_references_recorded() {
 155   guarantee(!_discovering_refs, "Discovering refs?");
 156   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 157     guarantee(_discovered_refs[i].is_empty(),
 158               "Found non-empty discovered list at %u", i);
 159   }
 160 }
 161 #endif
 162 
 163 void ReferenceProcessor::weak_oops_do(OopClosure* f) {
 164   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 165     if (UseCompressedOops) {
 166       f->do_oop((narrowOop*)_discovered_refs[i].adr_head());
 167     } else {
 168       f->do_oop((oop*)_discovered_refs[i].adr_head());
 169     }
 170   }
 171 }
 172 
 173 void ReferenceProcessor::update_soft_ref_master_clock() {
 174   // Update (advance) the soft ref master clock field. This must be done
 175   // after processing the soft ref list.
 176 
 177   // We need a monotonically non-decreasing time in ms but
 178   // os::javaTimeMillis() does not guarantee monotonicity.
 179   jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
 180   jlong soft_ref_clock = java_lang_ref_SoftReference::clock();
 181   assert(soft_ref_clock == _soft_ref_timestamp_clock, "soft ref clocks out of sync");
 182 
 183   NOT_PRODUCT(
 184   if (now < _soft_ref_timestamp_clock) {
 185     log_warning(gc)("time warp: " JLONG_FORMAT " to " JLONG_FORMAT,
 186                     _soft_ref_timestamp_clock, now);
 187   }
 188   )
 189   // The values of now and _soft_ref_timestamp_clock are set using
 190   // javaTimeNanos(), which is guaranteed to be monotonically
 191   // non-decreasing provided the underlying platform provides such
 192   // a time source (and it is bug free).
 193   // In product mode, however, protect ourselves from non-monotonicity.
 194   if (now > _soft_ref_timestamp_clock) {
 195     _soft_ref_timestamp_clock = now;
 196     java_lang_ref_SoftReference::set_clock(now);
 197   }
 198   // Else leave clock stalled at its old value until time progresses
 199   // past clock value.
 200 }
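
The update above only ever moves the shared soft-ref timestamp forward: a sample behind the stored clock is logged in debug builds and otherwise ignored, so a non-monotonic time source can never run the clock backwards. A small self-contained sketch of the same clamp, with a plain global standing in for the SoftReference clock field (illustrative only, not HotSpot code):

  // "Advance only forward" update: a new sample is ignored unless it is
  // ahead of the stored value, so a time warp never moves the clock back.
  #include <cstdio>
  #include <cstdint>

  static int64_t g_clock_ms = 0;   // stand-in for _soft_ref_timestamp_clock

  void update_clock(int64_t now_ms) {
    if (now_ms < g_clock_ms) {
      fprintf(stderr, "time warp: %lld to %lld\n",
              (long long)g_clock_ms, (long long)now_ms);
    }
    if (now_ms > g_clock_ms) {     // only ever move forward
      g_clock_ms = now_ms;
    }
    // else: leave the clock stalled until time catches up again
  }

  int main() {
    update_clock(100);
    update_clock(90);    // warp backwards: clock stays at 100
    update_clock(150);
    printf("clock = %lld\n", (long long)g_clock_ms);  // prints 150
    return 0;
  }
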
 201 
 202 size_t ReferenceProcessor::total_count(DiscoveredList lists[]) const {
 203   size_t total = 0;
 204   for (uint i = 0; i < _max_num_q; ++i) {
 205     total += lists[i].length();
 206   }
 207   return total;
 208 }
 209 
 210 ReferenceProcessorStats ReferenceProcessor::process_discovered_references(
 211   BoolObjectClosure*            is_alive,
 212   OopClosure*                   keep_alive,
 213   VoidClosure*                  complete_gc,
 214   AbstractRefProcTaskExecutor*  task_executor,
 215   ReferenceProcessorPhaseTimes* phase_times) {
 216 
 217   double start_time = os::elapsedTime();
 218 
 219   assert(!enqueuing_is_done(), "If here enqueuing should not be complete");
 220   // Stop treating discovered references specially.
 221   disable_discovery();
 222 
 223   // If discovery was concurrent, someone could have modified
 224   // the value of the static field in the j.l.r.SoftReference


 284   // Stop treating discovered references specially.
 285   disable_discovery();
 286 }
 287 
 288 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list) {
 289   // Given a list of refs linked through the "discovered" field
 290   // (java.lang.ref.Reference.discovered), self-loop their "next" field
 291   // thus distinguishing them from active References, then
 292   // prepend them to the pending list.
 293   //
 294   // The Java threads will see the Reference objects linked together through
 295   // the discovered field. Instead of trying to do the write barrier updates
 296   // in all places in the reference processor where we manipulate the discovered
 297   // field we make sure to do the barrier here where we anyway iterate through
 298   // all linked Reference objects. Note that it is important to not dirty any
 299   // cards during reference processing since this will cause card table
 300   // verification to fail for G1.
 301   log_develop_trace(gc, ref)("ReferenceProcessor::enqueue_discovered_reflist list " INTPTR_FORMAT, p2i(&refs_list));
 302 
 303   oop obj = NULL;
 304   oop next_d = refs_list.head();
 305   // Walk down the list, self-looping the next field
 306   // so that the References are not considered active.
 307   while (obj != next_d) {
 308     obj = next_d;
 309     assert(obj->is_instance(), "should be an instance object");
 310     assert(InstanceKlass::cast(obj->klass())->is_reference_instance_klass(), "should be reference object");
 311     next_d = java_lang_ref_Reference::discovered(obj);
 312     log_develop_trace(gc, ref)("        obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT, p2i(obj), p2i(next_d));
 313     assert(java_lang_ref_Reference::next(obj) == NULL,
 314            "Reference not active; should not be discovered");
 315     // Self-loop next, so as to make Ref not active.
 316     java_lang_ref_Reference::set_next_raw(obj, obj);
 317     if (next_d != obj) {
 318       HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(obj, java_lang_ref_Reference::discovered_offset, next_d);
 319     } else {
 320       // This is the last object.
 321       // Swap refs_list into pending list and set obj's
 322       // discovered to what we read from the pending list.
 323       oop old = Universe::swap_reference_pending_list(refs_list.head());
 324       HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(obj, java_lang_ref_Reference::discovered_offset, old);
 325     }
 326   }
 327 }
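
Put differently, the walk above self-loops each Reference's next field so Java code no longer considers it active, re-stores the interior discovered links through HeapAccess so the write barrier runs in one place, and hooks the old pending list behind the last element, prepending the whole chain in one swap. A standalone sketch of that splice, with a plain struct as a stand-in for java.lang.ref.Reference (not HotSpot code):

  #include <cstdio>

  struct Ref {               // stand-in for java.lang.ref.Reference
    const char* name;
    Ref*        next;        // self-loop means "not active"
    Ref*        discovered;  // list linkage; tail self-loops
  };

  Ref* g_pending = nullptr;  // stand-in for the VM's reference pending list

  void enqueue_discovered(Ref* head) {
    Ref* obj = nullptr;
    Ref* next_d = head;
    while (obj != next_d) {
      obj = next_d;
      next_d = obj->discovered;
      obj->next = obj;                 // self-loop next: no longer active
      if (next_d != obj) {
        obj->discovered = next_d;      // interior node: keep the linkage
      } else {                         // last node of the discovered list
        Ref* old = g_pending;          // swap the whole chain onto the
        g_pending = head;              // pending list ...
        obj->discovered = old;         // ... and hook the old list behind it
      }
    }
  }

  int main() {
    Ref b = {"b", nullptr, nullptr};
    Ref a = {"a", nullptr, &b};
    b.discovered = &b;                 // two-element list, tail self-looped
    enqueue_discovered(&a);
    printf("pending: %s -> %s\n", g_pending->name, g_pending->discovered->name);
    return 0;
  }
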
 328 
 329 // Parallel enqueue task
 330 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask {
 331 public:
 332   RefProcEnqueueTask(ReferenceProcessor&           ref_processor,
 333                      DiscoveredList                discovered_refs[],
 334                      int                           n_queues,
 335                      ReferenceProcessorPhaseTimes* phase_times)
 336     : EnqueueTask(ref_processor, discovered_refs, n_queues, phase_times)
 337   { }
 338 
 339   virtual void work(unsigned int work_id) {
 340     RefProcWorkerTimeTracker tt(ReferenceProcessorPhaseTimes::RefEnqueue, _phase_times, work_id);
 341 
 342     assert(work_id < (unsigned int)_ref_processor.max_num_q(), "Index out-of-bounds");
 343     // Simplest first cut: static partitioning.
 344     int index = work_id;
 345     // The increment on "index" must correspond to the maximum number of queues
 346     // (n_queues) with which that ReferenceProcessor was created.  That
 347     // is because of the "clever" way the discovered references lists were
 348     // allocated and are indexed into.
 349     assert(_n_queues == (int) _ref_processor.max_num_q(), "Different number not expected");
 350     for (int j = 0;
 351          j < ReferenceProcessor::number_of_subclasses_of_ref();
 352          j++, index += _n_queues) {
 353       _ref_processor.enqueue_discovered_reflist(_refs_lists[index]);
 354       _refs_lists[index].set_head(NULL);
 355       _refs_lists[index].set_length(0);
 356     }
 357   }
 358 };
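
Given the striped layout set up in the constructor (flat index = subclass * n_queues + queue), starting each worker at its work_id and striding by _n_queues makes it visit exactly one queue of every reference subclass, and no two workers ever touch the same list. A tiny sketch of that partitioning (illustrative constants, not HotSpot code):

  #include <cstdio>

  int main() {
    const unsigned n_queues     = 3;    // max number of queues the processor was built with
    const unsigned n_subclasses = 4;    // Soft, Weak, Final, Phantom

    for (unsigned worker = 0; worker < n_queues; worker++) {
      printf("worker %u handles indices:", worker);
      unsigned index = worker;
      for (unsigned j = 0; j < n_subclasses; j++, index += n_queues) {
        printf(" %u", index);           // flat index into _discovered_refs
      }
      printf("\n");
    }
    return 0;
  }
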
 359 
 360 // Enqueue references that are not made active again
 361 void ReferenceProcessor::enqueue_discovered_reflists(AbstractRefProcTaskExecutor*  task_executor,
 362                                                      ReferenceProcessorPhaseTimes* phase_times) {
 363 
 364   ReferenceProcessorStats stats(total_count(_discoveredSoftRefs),
 365                                 total_count(_discoveredWeakRefs),
 366                                 total_count(_discoveredFinalRefs),
 367                                 total_count(_discoveredPhantomRefs));
 368 
 369   RefProcEnqueueTimeTracker tt(phase_times, stats);
 370 
 371   if (_processing_is_mt && task_executor != NULL) {
 372     // Parallel code
 373     RefProcEnqueueTask tsk(*this, _discovered_refs, _max_num_q, phase_times);
 374     task_executor->execute(tsk);
 375   } else {
  376     // Serial code: enqueue each discovered list directly.
 377     for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 378       enqueue_discovered_reflist(_discovered_refs[i]);
 379       _discovered_refs[i].set_head(NULL);
 380       _discovered_refs[i].set_length(0);
 381     }
 382   }
 383 }
 384 
 385 void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
 386   _discovered_addr = java_lang_ref_Reference::discovered_addr_raw(_ref);
 387   oop discovered = java_lang_ref_Reference::discovered(_ref);
 388   assert(_discovered_addr && oopDesc::is_oop_or_null(discovered),
 389          "Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered));
 390   _next = discovered;
 391   _referent_addr = java_lang_ref_Reference::referent_addr_raw(_ref);
 392   _referent = java_lang_ref_Reference::referent(_ref);
 393   assert(Universe::heap()->is_in_reserved_or_null(_referent),
 394          "Wrong oop found in java.lang.Reference object");
 395   assert(allow_null_referent ?
 396              oopDesc::is_oop_or_null(_referent)
 397            : oopDesc::is_oop(_referent),
 398          "Expected an oop%s for referent field at " PTR_FORMAT,
 399          (allow_null_referent ? " or NULL" : ""),
 400          p2i(_referent));
 401 }
 402 
 403 void DiscoveredListIterator::remove() {
 404   assert(oopDesc::is_oop(_ref), "Dropping a bad reference");
 405   RawAccess<>::oop_store(_discovered_addr, oop(NULL));
 406 
  407   // For the first ref, _prev_next actually points into the DiscoveredList (gross).
 408   oop new_next;
 409   if (_next == _ref) {
 410     // At the end of the list, we should make _prev point to itself.
 411     // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
 412     // and _prev will be NULL.
 413     new_next = _prev;
 414   } else {
 415     new_next = _next;
 416   }
 417   // Remove Reference object from discovered list. Note that G1 does not need a
 418   // pre-barrier here because we know the Reference has already been found/marked,
 419   // that's how it ended up in the discovered list in the first place.
 420   RawAccess<>::oop_store(_prev_next, new_next);
 421   NOT_PRODUCT(_removed++);
 422   _refs_list.dec_length(1);
 423 }
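
Because _prev_next refers either to the DiscoveredList head slot or to the previous Reference's discovered field, a single store through it unlinks the current element. When the removed element was the self-looped tail, the previous element is written so that it self-loops and becomes the new tail (or the head slot becomes NULL if the list is now empty). A standalone sketch of that unlink step, with simplified types (not HotSpot code):

  #include <cstdio>

  struct Ref {
    const char* name;
    Ref*        discovered;   // tail is marked by a self-loop
  };

  void unlink(Ref** prev_next, Ref* prev, Ref* ref) {
    Ref* next = ref->discovered;
    Ref* new_next = (next == ref) ? prev   // ref was the tail
                                  : next;  // ordinary interior removal
    *prev_next = new_next;                 // head slot or prev->discovered
    ref->discovered = nullptr;
  }

  int main() {
    Ref b = {"b", nullptr};
    Ref a = {"a", &b};
    b.discovered = &b;             // tail: self-looped
    Ref* head = &a;

    unlink(&a.discovered, &a, &b); // drop the tail
    printf("a is now the self-looped tail: %s\n", (a.discovered == &a) ? "yes" : "no");

    unlink(&head, nullptr, &a);    // drop the only remaining element
    printf("list head is NULL: %s\n", (head == nullptr) ? "yes" : "no");
    return 0;
  }
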
 424 
 425 void DiscoveredListIterator::clear_referent() {
 426   RawAccess<>::oop_store(_referent_addr, oop(NULL));
 427 }
 428 
 429 // NOTE: process_phase*() are largely similar, and at a high level
 430 // merely iterate over the extant list applying a predicate to
 431 // each of its elements and possibly removing that element from the
 432 // list and applying some further closures to that element.
 433 // We should consider the possibility of replacing these
 434 // process_phase*() methods by abstracting them into
 435 // a single general iterator invocation that receives appropriate
 436 // closures that accomplish this work.
 437 
 438 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
 439 // referents are not alive, but that should be kept alive for policy reasons.
 440 // Keep alive the transitive closure of all such referents.


 540         keep_alive->do_oop((narrowOop*)next_addr);
 541       } else {
 542         keep_alive->do_oop((oop*)next_addr);
 543       }
 544       iter.move_to_next();
 545     } else {
 546       iter.next();
 547     }
 548   }
 549   // Now close the newly reachable set
 550   complete_gc->do_void();
 551   NOT_PRODUCT(
 552     if (iter.processed() > 0) {
 553       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
 554         " Refs in discovered list " INTPTR_FORMAT,
 555         iter.removed(), iter.processed(), p2i(&refs_list));
 556     }
 557   )
 558 }
 559 
 560 // Traverse the list and process the referents, by either
 561 // clearing them or keeping them (and their reachable
 562 // closure) alive.
 563 void
 564 ReferenceProcessor::process_phase3(DiscoveredList&    refs_list,
 565                                    bool               clear_referent,
 566                                    BoolObjectClosure* is_alive,
 567                                    OopClosure*        keep_alive,
 568                                    VoidClosure*       complete_gc) {
 569   ResourceMark rm;
 570   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 571   while (iter.has_next()) {
 572     iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
 573     if (clear_referent) {
 574       // NULL out referent pointer
 575       iter.clear_referent();
 576     } else {
 577       // keep the referent around
 578       iter.make_referent_alive();
 579     }
 580     log_develop_trace(gc, ref)("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
 581                                clear_referent ? "cleared " : "", p2i(iter.obj()), iter.obj()->klass()->internal_name());
 582     assert(oopDesc::is_oop(iter.obj(), UseConcMarkSweepGC), "Adding a bad reference");
 583     iter.next();
 584   }
 585   // Close the reachable set
 586   complete_gc->do_void();
 587 }
 588 
 589 void
 590 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
 591   oop obj = NULL;
 592   oop next = refs_list.head();
 593   while (next != obj) {
 594     obj = next;
 595     next = java_lang_ref_Reference::discovered(obj);
 596     java_lang_ref_Reference::set_discovered_raw(obj, NULL);
 597   }
 598   refs_list.set_head(NULL);
 599   refs_list.set_length(0);
 600 }
 601 
 602 void ReferenceProcessor::abandon_partial_discovery() {
 603   // loop over the lists
 604   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 605     if ((i % _max_num_q) == 0) {
 606       log_develop_trace(gc, ref)("Abandoning %s discovered list", list_name(i));
 607     }
 608     clear_discovered_references(_discovered_refs[i]);
 609   }
 610 }
 611 
 612 size_t ReferenceProcessor::total_reference_count(ReferenceType type) const {
 613   DiscoveredList* list = NULL;
 614 
 615   switch (type) {
 616     case REF_SOFT:
 617       list = _discoveredSoftRefs;
 618       break;
 619     case REF_WEAK:
 620       list = _discoveredWeakRefs;
 621       break;
 622     case REF_FINAL:
 623       list = _discoveredFinalRefs;
 624       break;
 625     case REF_PHANTOM:


 693 
 694     _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
 695                                   &is_alive, &keep_alive, &complete_gc);
 696   }
 697 private:
 698   bool _clear_referent;
 699 };
 700 
 701 #ifndef PRODUCT
 702 void ReferenceProcessor::log_reflist_counts(DiscoveredList ref_lists[], uint active_length, size_t total_refs) {
 703   if (!log_is_enabled(Trace, gc, ref)) {
 704     return;
 705   }
 706 
 707   stringStream st;
 708   for (uint i = 0; i < active_length; ++i) {
 709     st.print(SIZE_FORMAT " ", ref_lists[i].length());
 710   }
 711   log_develop_trace(gc, ref)("%s= " SIZE_FORMAT, st.as_string(), total_refs);
 712 #ifdef ASSERT
 713   for (uint i = active_length; i < _max_num_q; i++) {
 714     assert(ref_lists[i].length() == 0, SIZE_FORMAT " unexpected References in %u",
 715            ref_lists[i].length(), i);
 716   }
 717 #endif
 718 }
 719 #endif
 720 
 721 void ReferenceProcessor::set_active_mt_degree(uint v) {
 722   _num_q = v;
 723   _next_id = 0;
 724 }
 725 
 726 // Balances reference queues.
 727 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
 728 // queues[0, 1, ..., _num_q-1] because only the first _num_q
 729 // corresponding to the active workers will be processed.
 730 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
 731 {
 732   // calculate total length
 733   size_t total_refs = 0;
 734   log_develop_trace(gc, ref)("Balance ref_lists ");
 735 
 736   for (uint i = 0; i < _max_num_q; ++i) {
 737     total_refs += ref_lists[i].length();
 738   }
 739   log_reflist_counts(ref_lists, _max_num_q, total_refs);
 740   size_t avg_refs = total_refs / _num_q + 1;
 741   uint to_idx = 0;
 742   for (uint from_idx = 0; from_idx < _max_num_q; from_idx++) {
 743     bool move_all = false;
 744     if (from_idx >= _num_q) {
 745       move_all = ref_lists[from_idx].length() > 0;
 746     }
 747     while ((ref_lists[from_idx].length() > avg_refs) ||
 748            move_all) {
 749       assert(to_idx < _num_q, "Sanity Check!");
 750       if (ref_lists[to_idx].length() < avg_refs) {
 751         // move superfluous refs
 752         size_t refs_to_move;
  753         // Move all the refs if the from queue will not be processed.
 754         if (move_all) {
 755           refs_to_move = MIN2(ref_lists[from_idx].length(),
 756                               avg_refs - ref_lists[to_idx].length());
 757         } else {
 758           refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
 759                               avg_refs - ref_lists[to_idx].length());
 760         }
 761 
 762         assert(refs_to_move > 0, "otherwise the code below will fail");
 763 
 764         oop move_head = ref_lists[from_idx].head();
 765         oop move_tail = move_head;
 766         oop new_head  = move_head;
 767         // find an element to split the list on
 768         for (size_t j = 0; j < refs_to_move; ++j) {
 769           move_tail = new_head;


 775           // to list is empty. Make a loop at the end.
 776           java_lang_ref_Reference::set_discovered_raw(move_tail, move_tail);
 777         } else {
 778           java_lang_ref_Reference::set_discovered_raw(move_tail, ref_lists[to_idx].head());
 779         }
 780         ref_lists[to_idx].set_head(move_head);
 781         ref_lists[to_idx].inc_length(refs_to_move);
 782 
 783         // Remove the chain from the from list.
 784         if (move_tail == new_head) {
 785           // We found the end of the from list.
 786           ref_lists[from_idx].set_head(NULL);
 787         } else {
 788           ref_lists[from_idx].set_head(new_head);
 789         }
 790         ref_lists[from_idx].dec_length(refs_to_move);
 791         if (ref_lists[from_idx].length() == 0) {
 792           break;
 793         }
 794       } else {
 795         to_idx = (to_idx + 1) % _num_q;
 796       }
 797     }
 798   }
 799 #ifdef ASSERT
 800   size_t balanced_total_refs = 0;
 801   for (uint i = 0; i < _num_q; ++i) {
 802     balanced_total_refs += ref_lists[i].length();
 803   }
 804   log_reflist_counts(ref_lists, _num_q, balanced_total_refs);
 805   assert(total_refs == balanced_total_refs, "Balancing was incomplete");
 806 #endif
 807 }
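
The balancing above targets avg_refs = total / _num_q + 1 entries per active queue and drains the queues beyond _num_q completely, since those will not be processed. The following sketch reproduces only the redistribution arithmetic over plain counters; the actual list splicing (partially elided in the hunk above) is omitted (illustrative only, not HotSpot code):

  #include <cstdio>
  #include <cstddef>
  #include <algorithm>

  int main() {
    const unsigned max_q = 4, num_q = 2;          // 4 discovery queues, 2 active workers
    size_t len[max_q] = {7, 0, 5, 2};             // discovered refs per queue

    size_t total = 0;
    for (unsigned i = 0; i < max_q; i++) total += len[i];
    size_t avg = total / num_q + 1;               // 14 / 2 + 1 == 8

    unsigned to = 0;
    for (unsigned from = 0; from < max_q; from++) {
      bool move_all = (from >= num_q) && len[from] > 0;   // inactive queues drain fully
      while (len[from] > avg || move_all) {
        if (len[to] < avg) {
          size_t n = move_all ? std::min(len[from], avg - len[to])
                              : std::min(len[from] - avg, avg - len[to]);
          len[from] -= n;                         // stand-in for the list splice
          len[to]   += n;
          if (len[from] == 0) break;
        } else {
          to = (to + 1) % num_q;                  // this target is full, try the next
        }
      }
    }
    for (unsigned i = 0; i < max_q; i++) printf("queue %u: %zu refs\n", i, len[i]);
    return 0;                                     // prints 8, 6, 0, 0
  }
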
 808 
 809 void ReferenceProcessor::balance_all_queues() {
 810   balance_queues(_discoveredSoftRefs);
 811   balance_queues(_discoveredWeakRefs);
 812   balance_queues(_discoveredFinalRefs);
 813   balance_queues(_discoveredPhantomRefs);
 814 }
 815 
 816 void ReferenceProcessor::process_discovered_reflist(
 817   DiscoveredList                refs_lists[],
 818   ReferencePolicy*              policy,
 819   bool                          clear_referent,
 820   BoolObjectClosure*            is_alive,
 821   OopClosure*                   keep_alive,
 822   VoidClosure*                  complete_gc,
 823   AbstractRefProcTaskExecutor*  task_executor,
 824   ReferenceProcessorPhaseTimes* phase_times)


 827 
 828   phase_times->set_processing_is_mt(mt_processing);
 829 
 830   if (mt_processing && ParallelRefProcBalancingEnabled) {
 831     RefProcBalanceQueuesTimeTracker tt(phase_times);
 832     balance_queues(refs_lists);
 833   }
 834 
 835   // Phase 1 (soft refs only):
 836   // . Traverse the list and remove any SoftReferences whose
 837   //   referents are not alive, but that should be kept alive for
 838   //   policy reasons. Keep alive the transitive closure of all
 839   //   such referents.
 840   if (policy != NULL) {
 841     RefProcParPhaseTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase1, phase_times);
 842 
 843     if (mt_processing) {
 844       RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/, phase_times);
 845       task_executor->execute(phase1);
 846     } else {
 847       for (uint i = 0; i < _max_num_q; i++) {
 848         process_phase1(refs_lists[i], policy,
 849                        is_alive, keep_alive, complete_gc);
 850       }
 851     }
 852   } else { // policy == NULL
 853     assert(refs_lists != _discoveredSoftRefs,
 854            "Policy must be specified for soft references.");
 855   }
 856 
 857   // Phase 2:
 858   // . Traverse the list and remove any refs whose referents are alive.
 859   {
 860     RefProcParPhaseTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase2, phase_times);
 861 
 862     if (mt_processing) {
 863       RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/, phase_times);
 864       task_executor->execute(phase2);
 865     } else {
 866       for (uint i = 0; i < _max_num_q; i++) {
 867         process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);
 868       }
 869     }
 870   }
 871 
 872   // Phase 3:
 873   // . Traverse the list and process referents as appropriate.
 874   {
 875     RefProcParPhaseTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase3, phase_times);
 876 
 877     if (mt_processing) {
 878       RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/, phase_times);
 879       task_executor->execute(phase3);
 880     } else {
 881       for (uint i = 0; i < _max_num_q; i++) {
 882         process_phase3(refs_lists[i], clear_referent,
 883                        is_alive, keep_alive, complete_gc);
 884       }
 885     }
 886   }
 887 }
 888 
 889 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
 890   uint id = 0;
 891   // Determine the queue index to use for this object.
 892   if (_discovery_is_mt) {
 893     // During a multi-threaded discovery phase,
 894     // each thread saves to its "own" list.
 895     Thread* thr = Thread::current();
 896     id = thr->as_Worker_thread()->id();
 897   } else {
 898     // single-threaded discovery, we save in round-robin
 899     // fashion to each of the lists.
 900     if (_processing_is_mt) {
 901       id = next_id();
 902     }
 903   }
  904   assert(id < _max_num_q, "Id is out-of-bounds (id %u, max id %u)", id, _max_num_q);
 905 
 906   // Get the discovered queue to which we will add
 907   DiscoveredList* list = NULL;
 908   switch (rt) {
 909     case REF_OTHER:
 910       // Unknown reference type, no special treatment
 911       break;
 912     case REF_SOFT:
 913       list = &_discoveredSoftRefs[id];
 914       break;
 915     case REF_WEAK:
 916       list = &_discoveredWeakRefs[id];
 917       break;
 918     case REF_FINAL:
 919       list = &_discoveredFinalRefs[id];
 920       break;
 921     case REF_PHANTOM:
 922       list = &_discoveredPhantomRefs[id];
 923       break;
 924     case REF_NONE:


1098   } else {
1099     // We do a raw store here: the field will be visited later when processing
1100     // the discovered references.
1101     oop current_head = list->head();
1102     // The last ref must have its discovered field pointing to itself.
1103     oop next_discovered = (current_head != NULL) ? current_head : obj;
1104 
1105     assert(discovered == NULL, "control point invariant");
1106     RawAccess<>::oop_store(discovered_addr, next_discovered);
1107     list->set_head(obj);
1108     list->inc_length(1);
1109 
1110     log_develop_trace(gc, ref)("Discovered reference (" INTPTR_FORMAT ": %s)", p2i(obj), obj->klass()->internal_name());
1111   }
1112   assert(oopDesc::is_oop(obj), "Discovered a bad reference");
1113   verify_referent(obj);
1114   return true;
1115 }
1116 
1117 bool ReferenceProcessor::has_discovered_references() {
1118   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
1119     if (!_discovered_refs[i].is_empty()) {
1120       return true;
1121     }
1122   }
1123   return false;
1124 }
1125 
1126 // Preclean the discovered references by removing those
1127 // whose referents are alive, and by marking from those that
1128 // are not active. These lists can be handled here
1129 // in any order and, indeed, concurrently.
1130 void ReferenceProcessor::preclean_discovered_references(
1131   BoolObjectClosure* is_alive,
1132   OopClosure* keep_alive,
1133   VoidClosure* complete_gc,
1134   YieldClosure* yield,
1135   GCTimer* gc_timer) {
1136 
1137   // Soft references
1138   {
1139     GCTraceTime(Debug, gc, ref) tm("Preclean SoftReferences", gc_timer);
1140     for (uint i = 0; i < _max_num_q; i++) {
1141       if (yield->should_return()) {
1142         return;
1143       }
1144       preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
1145                                   keep_alive, complete_gc, yield);
1146     }
1147   }
1148 
1149   // Weak references
1150   {
1151     GCTraceTime(Debug, gc, ref) tm("Preclean WeakReferences", gc_timer);
1152     for (uint i = 0; i < _max_num_q; i++) {
1153       if (yield->should_return()) {
1154         return;
1155       }
1156       preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,
1157                                   keep_alive, complete_gc, yield);
1158     }
1159   }
1160 
1161   // Final references
1162   {
1163     GCTraceTime(Debug, gc, ref) tm("Preclean FinalReferences", gc_timer);
1164     for (uint i = 0; i < _max_num_q; i++) {
1165       if (yield->should_return()) {
1166         return;
1167       }
1168       preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
1169                                   keep_alive, complete_gc, yield);
1170     }
1171   }
1172 
1173   // Phantom references
1174   {
1175     GCTraceTime(Debug, gc, ref) tm("Preclean PhantomReferences", gc_timer);
1176     for (uint i = 0; i < _max_num_q; i++) {
1177       if (yield->should_return()) {
1178         return;
1179       }
1180       preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
1181                                   keep_alive, complete_gc, yield);
1182     }
1183   }
1184 }
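
Precleaning walks the four typed groups of lists in order and consults the yield closure before each list, so a concurrent precleaning phase can give up early without finishing all queues. A minimal sketch of that control flow, with a lambda standing in for the YieldClosure (not HotSpot code):

  #include <cstdio>
  #include <cstddef>
  #include <functional>

  void preclean_all(size_t num_queues, const std::function<bool()>& should_return) {
    const char* kinds[] = {"Soft", "Weak", "Final", "Phantom"};
    for (const char* kind : kinds) {
      for (size_t i = 0; i < num_queues; i++) {
        if (should_return()) {
          return;                                  // yield requested: stop precleaning
        }
        printf("preclean %s list %zu\n", kind, i); // stand-in for preclean_discovered_reflist
      }
    }
  }

  int main() {
    int budget = 5;                                // pretend we may only do 5 steps
    preclean_all(2, [&budget] { return budget-- <= 0; });
    return 0;
  }
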
1185 
1186 // Walk the given discovered ref list, and remove all reference objects
1187 // whose referents are still alive, whose referents are NULL or which
1188 // are not active (have a non-NULL next field). NOTE: When we are
1189 // thus precleaning the ref lists (which happens single-threaded today),
1190 // we do not disable refs discovery to honor the correct semantics of
1191 // java.lang.Reference. As a result, we need to be careful below
1192 // that ref removal steps interleave safely with ref discovery steps
1193 // (in this thread).
1194 void
1195 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList&    refs_list,
1196                                                 BoolObjectClosure* is_alive,


1219         oop* next_addr = (oop*)java_lang_ref_Reference::next_addr_raw(obj);
1220         keep_alive->do_oop(next_addr);
1221       }
1222       iter.move_to_next();
1223     } else {
1224       iter.next();
1225     }
1226   }
1227   // Close the reachable set
1228   complete_gc->do_void();
1229 
1230   NOT_PRODUCT(
1231     if (iter.processed() > 0) {
1232       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " Refs out of " SIZE_FORMAT " Refs in discovered list " INTPTR_FORMAT,
1233         iter.removed(), iter.processed(), p2i(&refs_list));
1234     }
1235   )
1236 }
1237 
1238 const char* ReferenceProcessor::list_name(uint i) {
 1239    assert(i < _max_num_q * number_of_subclasses_of_ref(),
1240           "Out of bounds index");
1241 
1242    int j = i / _max_num_q;
1243    switch (j) {
1244      case 0: return "SoftRef";
1245      case 1: return "WeakRef";
1246      case 2: return "FinalRef";
1247      case 3: return "PhantomRef";
1248    }
1249    ShouldNotReachHere();
1250    return NULL;
1251 }
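
list_name() recovers the reference type from a flat list index by dividing by _max_num_q, the inverse of the striped layout set up in the constructor. A tiny sketch of the mapping, with an illustrative queue count (not HotSpot code):

  #include <cstdio>

  int main() {
    const unsigned max_num_queues = 3;
    const char* names[] = {"SoftRef", "WeakRef", "FinalRef", "PhantomRef"};
    for (unsigned i = 0; i < 4 * max_num_queues; i++) {
      printf("list %2u -> %s\n", i, names[i / max_num_queues]);
    }
    return 0;
  }
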


  93 }
  94 
  95 ReferenceProcessor::ReferenceProcessor(BoolObjectClosure* is_subject_to_discovery,
  96                                        bool      mt_processing,
  97                                        uint      mt_processing_degree,
  98                                        bool      mt_discovery,
  99                                        uint      mt_discovery_degree,
 100                                        bool      atomic_discovery,
 101                                        BoolObjectClosure* is_alive_non_header)  :
 102   _is_subject_to_discovery(is_subject_to_discovery),
 103   _discovering_refs(false),
 104   _enqueuing_is_done(false),
 105   _is_alive_non_header(is_alive_non_header),
 106   _processing_is_mt(mt_processing),
 107   _next_id(0)
 108 {
 109   assert(is_subject_to_discovery != NULL, "must be set");
 110 
 111   _discovery_is_atomic = atomic_discovery;
 112   _discovery_is_mt     = mt_discovery;
 113   _num_queues          = MAX2(1U, mt_processing_degree);
 114   _max_num_queues      = MAX2(_num_queues, mt_discovery_degree);
 115   _discovered_refs     = NEW_C_HEAP_ARRAY(DiscoveredList,
 116             _max_num_queues * number_of_subclasses_of_ref(), mtGC);
 117 
 118   if (_discovered_refs == NULL) {
  119     vm_exit_during_initialization("Could not allocate RefProc Array");
 120   }
 121   _discoveredSoftRefs    = &_discovered_refs[0];
 122   _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_queues];
 123   _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_queues];
 124   _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_queues];
 125 
 126   // Initialize all entries to NULL
 127   for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
 128     _discovered_refs[i].set_head(NULL);
 129     _discovered_refs[i].set_length(0);
 130   }
 131 
 132   setup_policy(false /* default soft ref policy */);
 133 }
 134 
 135 SpanReferenceProcessor::SpanReferenceProcessor(MemRegion span,
 136                                                          bool      mt_processing,
 137                                                          uint      mt_processing_degree,
 138                                                          bool      mt_discovery,
 139                                                          uint      mt_discovery_degree,
 140                                                          bool      atomic_discovery,
 141                                                          BoolObjectClosure* is_alive_non_header)  :
 142   ReferenceProcessor(&_span_based_discoverer,
 143                      mt_processing,
 144                      mt_processing_degree,
 145                      mt_discovery,
 146                      mt_discovery_degree,
 147                      atomic_discovery,
 148                      is_alive_non_header),
 149   _span_based_discoverer(span) {
 150 
 151 }
 152 
 153 #ifndef PRODUCT
 154 void ReferenceProcessor::verify_no_references_recorded() {
 155   guarantee(!_discovering_refs, "Discovering refs?");
 156   for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
 157     guarantee(_discovered_refs[i].is_empty(),
 158               "Found non-empty discovered list at %u", i);
 159   }
 160 }
 161 #endif
 162 
 163 void ReferenceProcessor::weak_oops_do(OopClosure* f) {
 164   for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
 165     if (UseCompressedOops) {
 166       f->do_oop((narrowOop*)_discovered_refs[i].adr_head());
 167     } else {
 168       f->do_oop((oop*)_discovered_refs[i].adr_head());
 169     }
 170   }
 171 }
 172 
 173 void ReferenceProcessor::update_soft_ref_master_clock() {
 174   // Update (advance) the soft ref master clock field. This must be done
 175   // after processing the soft ref list.
 176 
 177   // We need a monotonically non-decreasing time in ms but
 178   // os::javaTimeMillis() does not guarantee monotonicity.
 179   jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
 180   jlong soft_ref_clock = java_lang_ref_SoftReference::clock();
 181   assert(soft_ref_clock == _soft_ref_timestamp_clock, "soft ref clocks out of sync");
 182 
 183   NOT_PRODUCT(
 184   if (now < _soft_ref_timestamp_clock) {
 185     log_warning(gc)("time warp: " JLONG_FORMAT " to " JLONG_FORMAT,
 186                     _soft_ref_timestamp_clock, now);
 187   }
 188   )
 189   // The values of now and _soft_ref_timestamp_clock are set using
 190   // javaTimeNanos(), which is guaranteed to be monotonically
 191   // non-decreasing provided the underlying platform provides such
 192   // a time source (and it is bug free).
 193   // In product mode, however, protect ourselves from non-monotonicity.
 194   if (now > _soft_ref_timestamp_clock) {
 195     _soft_ref_timestamp_clock = now;
 196     java_lang_ref_SoftReference::set_clock(now);
 197   }
 198   // Else leave clock stalled at its old value until time progresses
 199   // past clock value.
 200 }
 201 
 202 size_t ReferenceProcessor::total_count(DiscoveredList lists[]) const {
 203   size_t total = 0;
 204   for (uint i = 0; i < _max_num_queues; ++i) {
 205     total += lists[i].length();
 206   }
 207   return total;
 208 }
 209 
 210 ReferenceProcessorStats ReferenceProcessor::process_discovered_references(
 211   BoolObjectClosure*            is_alive,
 212   OopClosure*                   keep_alive,
 213   VoidClosure*                  complete_gc,
 214   AbstractRefProcTaskExecutor*  task_executor,
 215   ReferenceProcessorPhaseTimes* phase_times) {
 216 
 217   double start_time = os::elapsedTime();
 218 
 219   assert(!enqueuing_is_done(), "If here enqueuing should not be complete");
 220   // Stop treating discovered references specially.
 221   disable_discovery();
 222 
 223   // If discovery was concurrent, someone could have modified
 224   // the value of the static field in the j.l.r.SoftReference


 284   // Stop treating discovered references specially.
 285   disable_discovery();
 286 }
 287 
 288 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list) {
 289   // Given a list of refs linked through the "discovered" field
 290   // (java.lang.ref.Reference.discovered), self-loop their "next" field
 291   // thus distinguishing them from active References, then
 292   // prepend them to the pending list.
 293   //
 294   // The Java threads will see the Reference objects linked together through
 295   // the discovered field. Instead of trying to do the write barrier updates
 296   // in all places in the reference processor where we manipulate the discovered
 297   // field we make sure to do the barrier here where we anyway iterate through
 298   // all linked Reference objects. Note that it is important to not dirty any
 299   // cards during reference processing since this will cause card table
 300   // verification to fail for G1.
 301   log_develop_trace(gc, ref)("ReferenceProcessor::enqueue_discovered_reflist list " INTPTR_FORMAT, p2i(&refs_list));
 302 
 303   oop obj = NULL;
 304   oop next_discovered = refs_list.head();
 305   // Walk down the list, self-looping the next field
 306   // so that the References are not considered active.
 307   while (obj != next_discovered) {
 308     obj = next_discovered;
 309     assert(obj->is_instance(), "should be an instance object");
 310     assert(InstanceKlass::cast(obj->klass())->is_reference_instance_klass(), "should be reference object");
 311     next_discovered = java_lang_ref_Reference::discovered(obj);
 312     log_develop_trace(gc, ref)("        obj " INTPTR_FORMAT "/next_discovered " INTPTR_FORMAT, p2i(obj), p2i(next_discovered));
 313     assert(java_lang_ref_Reference::next(obj) == NULL,
 314            "Reference not active; should not be discovered");
 315     // Self-loop next, so as to make Ref not active.
 316     java_lang_ref_Reference::set_next_raw(obj, obj);
 317     if (next_discovered != obj) {
 318       HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(obj, java_lang_ref_Reference::discovered_offset, next_discovered);
 319     } else {
 320       // This is the last object.
 321       // Swap refs_list into pending list and set obj's
 322       // discovered to what we read from the pending list.
 323       oop old = Universe::swap_reference_pending_list(refs_list.head());
 324       HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(obj, java_lang_ref_Reference::discovered_offset, old);
 325     }
 326   }
 327 }
 328 
 329 // Parallel enqueue task
 330 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask {
 331 public:
 332   RefProcEnqueueTask(ReferenceProcessor&           ref_processor,
 333                      DiscoveredList                discovered_refs[],
 334                      int                           n_queues,
 335                      ReferenceProcessorPhaseTimes* phase_times)
 336     : EnqueueTask(ref_processor, discovered_refs, n_queues, phase_times)
 337   { }
 338 
 339   virtual void work(unsigned int work_id) {
 340     RefProcWorkerTimeTracker tt(ReferenceProcessorPhaseTimes::RefEnqueue, _phase_times, work_id);
 341 
 342     assert(work_id < (unsigned int)_ref_processor.max_num_queues(), "Index out-of-bounds");
 343     // Simplest first cut: static partitioning.
 344     int index = work_id;
 345     // The increment on "index" must correspond to the maximum number of queues
 346     // (n_queues) with which that ReferenceProcessor was created.  That
 347     // is because of the "clever" way the discovered references lists were
 348     // allocated and are indexed into.
 349     assert(_n_queues == (int) _ref_processor.max_num_queues(), "Different number not expected");
 350     for (int j = 0;
 351          j < ReferenceProcessor::number_of_subclasses_of_ref();
 352          j++, index += _n_queues) {
 353       _ref_processor.enqueue_discovered_reflist(_refs_lists[index]);
 354       _refs_lists[index].set_head(NULL);
 355       _refs_lists[index].set_length(0);
 356     }
 357   }
 358 };
 359 
 360 // Enqueue references that are not made active again
 361 void ReferenceProcessor::enqueue_discovered_reflists(AbstractRefProcTaskExecutor*  task_executor,
 362                                                      ReferenceProcessorPhaseTimes* phase_times) {
 363 
 364   ReferenceProcessorStats stats(total_count(_discoveredSoftRefs),
 365                                 total_count(_discoveredWeakRefs),
 366                                 total_count(_discoveredFinalRefs),
 367                                 total_count(_discoveredPhantomRefs));
 368 
 369   RefProcEnqueueTimeTracker tt(phase_times, stats);
 370 
 371   if (_processing_is_mt && task_executor != NULL) {
 372     // Parallel code
 373     RefProcEnqueueTask tsk(*this, _discovered_refs, _max_num_queues, phase_times);
 374     task_executor->execute(tsk);
 375   } else {
  376     // Serial code: enqueue each discovered list directly.
 377     for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
 378       enqueue_discovered_reflist(_discovered_refs[i]);
 379       _discovered_refs[i].set_head(NULL);
 380       _discovered_refs[i].set_length(0);
 381     }
 382   }
 383 }
 384 
 385 void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
 386   _discovered_addr = java_lang_ref_Reference::discovered_addr_raw(_ref);
 387   oop discovered = java_lang_ref_Reference::discovered(_ref);
 388   assert(_discovered_addr && oopDesc::is_oop_or_null(discovered),
 389          "Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered));
 390   _next_discovered = discovered;
 391   _referent_addr = java_lang_ref_Reference::referent_addr_raw(_ref);
 392   _referent = java_lang_ref_Reference::referent(_ref);
 393   assert(Universe::heap()->is_in_reserved_or_null(_referent),
 394          "Wrong oop found in java.lang.Reference object");
 395   assert(allow_null_referent ?
 396              oopDesc::is_oop_or_null(_referent)
 397            : oopDesc::is_oop(_referent),
 398          "Expected an oop%s for referent field at " PTR_FORMAT,
 399          (allow_null_referent ? " or NULL" : ""),
 400          p2i(_referent));
 401 }
 402 
 403 void DiscoveredListIterator::remove() {
 404   assert(oopDesc::is_oop(_ref), "Dropping a bad reference");
 405   RawAccess<>::oop_store(_discovered_addr, oop(NULL));
 406 
  407   // For the first ref, _prev_discovered_addr actually points into the DiscoveredList (gross).
 408   oop new_next;
 409   if (_next_discovered == _ref) {
  410     // At the end of the list, we should make _prev_discovered point to itself.
  411     // If _ref is the first ref, then _prev_discovered_addr will be in the DiscoveredList,
  412     // and _prev_discovered will be NULL.
 413     new_next = _prev_discovered;
 414   } else {
 415     new_next = _next_discovered;
 416   }
 417   // Remove Reference object from discovered list. Note that G1 does not need a
 418   // pre-barrier here because we know the Reference has already been found/marked,
 419   // that's how it ended up in the discovered list in the first place.
 420   RawAccess<>::oop_store(_prev_discovered_addr, new_next);
 421   NOT_PRODUCT(_removed++);
 422   _refs_list.dec_length(1);
 423 }
 424 
 425 void DiscoveredListIterator::clear_referent() {
 426   RawAccess<>::oop_store(_referent_addr, oop(NULL));
 427 }
 428 
 429 // NOTE: process_phase*() are largely similar, and at a high level
 430 // merely iterate over the extant list applying a predicate to
 431 // each of its elements and possibly removing that element from the
 432 // list and applying some further closures to that element.
 433 // We should consider the possibility of replacing these
 434 // process_phase*() methods by abstracting them into
 435 // a single general iterator invocation that receives appropriate
 436 // closures that accomplish this work.
 437 
 438 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
 439 // referents are not alive, but that should be kept alive for policy reasons.
 440 // Keep alive the transitive closure of all such referents.


 540         keep_alive->do_oop((narrowOop*)next_addr);
 541       } else {
 542         keep_alive->do_oop((oop*)next_addr);
 543       }
 544       iter.move_to_next();
 545     } else {
 546       iter.next();
 547     }
 548   }
 549   // Now close the newly reachable set
 550   complete_gc->do_void();
 551   NOT_PRODUCT(
 552     if (iter.processed() > 0) {
 553       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
 554         " Refs in discovered list " INTPTR_FORMAT,
 555         iter.removed(), iter.processed(), p2i(&refs_list));
 556     }
 557   )
 558 }
 559 
 560 void ReferenceProcessor::process_phase3(DiscoveredList&    refs_list,




 561                                         bool               clear_referent,
 562                                         BoolObjectClosure* is_alive,
 563                                         OopClosure*        keep_alive,
 564                                         VoidClosure*       complete_gc) {
 565   ResourceMark rm;
 566   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 567   while (iter.has_next()) {
 568     iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
 569     if (clear_referent) {
 570       // NULL out referent pointer
 571       iter.clear_referent();
 572     } else {
 573       // keep the referent around
 574       iter.make_referent_alive();
 575     }
 576     log_develop_trace(gc, ref)("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
 577                                clear_referent ? "cleared " : "", p2i(iter.obj()), iter.obj()->klass()->internal_name());
 578     assert(oopDesc::is_oop(iter.obj(), UseConcMarkSweepGC), "Adding a bad reference");
 579     iter.next();
 580   }
 581   // Close the reachable set
 582   complete_gc->do_void();
 583 }
 584 
 585 void
 586 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
 587   oop obj = NULL;
 588   oop next = refs_list.head();
 589   while (next != obj) {
 590     obj = next;
 591     next = java_lang_ref_Reference::discovered(obj);
 592     java_lang_ref_Reference::set_discovered_raw(obj, NULL);
 593   }
 594   refs_list.set_head(NULL);
 595   refs_list.set_length(0);
 596 }
 597 
 598 void ReferenceProcessor::abandon_partial_discovery() {
 599   // loop over the lists
 600   for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
 601     if ((i % _max_num_queues) == 0) {
 602       log_develop_trace(gc, ref)("Abandoning %s discovered list", list_name(i));
 603     }
 604     clear_discovered_references(_discovered_refs[i]);
 605   }
 606 }
 607 
 608 size_t ReferenceProcessor::total_reference_count(ReferenceType type) const {
 609   DiscoveredList* list = NULL;
 610 
 611   switch (type) {
 612     case REF_SOFT:
 613       list = _discoveredSoftRefs;
 614       break;
 615     case REF_WEAK:
 616       list = _discoveredWeakRefs;
 617       break;
 618     case REF_FINAL:
 619       list = _discoveredFinalRefs;
 620       break;
 621     case REF_PHANTOM:


 689 
 690     _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
 691                                   &is_alive, &keep_alive, &complete_gc);
 692   }
 693 private:
 694   bool _clear_referent;
 695 };
 696 
 697 #ifndef PRODUCT
 698 void ReferenceProcessor::log_reflist_counts(DiscoveredList ref_lists[], uint active_length, size_t total_refs) {
 699   if (!log_is_enabled(Trace, gc, ref)) {
 700     return;
 701   }
 702 
 703   stringStream st;
 704   for (uint i = 0; i < active_length; ++i) {
 705     st.print(SIZE_FORMAT " ", ref_lists[i].length());
 706   }
 707   log_develop_trace(gc, ref)("%s= " SIZE_FORMAT, st.as_string(), total_refs);
 708 #ifdef ASSERT
 709   for (uint i = active_length; i < _max_num_queues; i++) {
 710     assert(ref_lists[i].length() == 0, SIZE_FORMAT " unexpected References in %u",
 711            ref_lists[i].length(), i);
 712   }
 713 #endif
 714 }
 715 #endif
 716 
 717 void ReferenceProcessor::set_active_mt_degree(uint v) {
 718   _num_queues = v;
 719   _next_id = 0;
 720 }
 721 
 722 // Balances reference queues.
  723 // Move entries from all queues[0, 1, ..., _max_num_queues-1] to
  724 // queues[0, 1, ..., _num_queues-1] because only the first _num_queues
  725 // queues, corresponding to the active workers, will be processed.
 726 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
 727 {
 728   // calculate total length
 729   size_t total_refs = 0;
 730   log_develop_trace(gc, ref)("Balance ref_lists ");
 731 
 732   for (uint i = 0; i < _max_num_queues; ++i) {
 733     total_refs += ref_lists[i].length();
 734   }
 735   log_reflist_counts(ref_lists, _max_num_queues, total_refs);
 736   size_t avg_refs = total_refs / _num_queues + 1;
 737   uint to_idx = 0;
 738   for (uint from_idx = 0; from_idx < _max_num_queues; from_idx++) {
 739     bool move_all = false;
 740     if (from_idx >= _num_queues) {
 741       move_all = ref_lists[from_idx].length() > 0;
 742     }
 743     while ((ref_lists[from_idx].length() > avg_refs) ||
 744            move_all) {
 745       assert(to_idx < _num_queues, "Sanity Check!");
 746       if (ref_lists[to_idx].length() < avg_refs) {
 747         // move superfluous refs
 748         size_t refs_to_move;
  749         // Move all the refs if the from queue will not be processed.
 750         if (move_all) {
 751           refs_to_move = MIN2(ref_lists[from_idx].length(),
 752                               avg_refs - ref_lists[to_idx].length());
 753         } else {
 754           refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
 755                               avg_refs - ref_lists[to_idx].length());
 756         }
 757 
 758         assert(refs_to_move > 0, "otherwise the code below will fail");
 759 
 760         oop move_head = ref_lists[from_idx].head();
 761         oop move_tail = move_head;
 762         oop new_head  = move_head;
 763         // find an element to split the list on
 764         for (size_t j = 0; j < refs_to_move; ++j) {
 765           move_tail = new_head;


 771           // to list is empty. Make a loop at the end.
 772           java_lang_ref_Reference::set_discovered_raw(move_tail, move_tail);
 773         } else {
 774           java_lang_ref_Reference::set_discovered_raw(move_tail, ref_lists[to_idx].head());
 775         }
 776         ref_lists[to_idx].set_head(move_head);
 777         ref_lists[to_idx].inc_length(refs_to_move);
 778 
 779         // Remove the chain from the from list.
 780         if (move_tail == new_head) {
 781           // We found the end of the from list.
 782           ref_lists[from_idx].set_head(NULL);
 783         } else {
 784           ref_lists[from_idx].set_head(new_head);
 785         }
 786         ref_lists[from_idx].dec_length(refs_to_move);
 787         if (ref_lists[from_idx].length() == 0) {
 788           break;
 789         }
 790       } else {
 791         to_idx = (to_idx + 1) % _num_queues;
 792       }
 793     }
 794   }
 795 #ifdef ASSERT
 796   size_t balanced_total_refs = 0;
 797   for (uint i = 0; i < _num_queues; ++i) {
 798     balanced_total_refs += ref_lists[i].length();
 799   }
 800   log_reflist_counts(ref_lists, _num_queues, balanced_total_refs);
 801   assert(total_refs == balanced_total_refs, "Balancing was incomplete");
 802 #endif
 803 }
 804 
 805 void ReferenceProcessor::balance_all_queues() {
 806   balance_queues(_discoveredSoftRefs);
 807   balance_queues(_discoveredWeakRefs);
 808   balance_queues(_discoveredFinalRefs);
 809   balance_queues(_discoveredPhantomRefs);
 810 }
 811 
 812 void ReferenceProcessor::process_discovered_reflist(
 813   DiscoveredList                refs_lists[],
 814   ReferencePolicy*              policy,
 815   bool                          clear_referent,
 816   BoolObjectClosure*            is_alive,
 817   OopClosure*                   keep_alive,
 818   VoidClosure*                  complete_gc,
 819   AbstractRefProcTaskExecutor*  task_executor,
 820   ReferenceProcessorPhaseTimes* phase_times)


 823 
 824   phase_times->set_processing_is_mt(mt_processing);
 825 
 826   if (mt_processing && ParallelRefProcBalancingEnabled) {
 827     RefProcBalanceQueuesTimeTracker tt(phase_times);
 828     balance_queues(refs_lists);
 829   }
 830 
 831   // Phase 1 (soft refs only):
 832   // . Traverse the list and remove any SoftReferences whose
 833   //   referents are not alive, but that should be kept alive for
 834   //   policy reasons. Keep alive the transitive closure of all
 835   //   such referents.
 836   if (policy != NULL) {
 837     RefProcParPhaseTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase1, phase_times);
 838 
 839     if (mt_processing) {
 840       RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/, phase_times);
 841       task_executor->execute(phase1);
 842     } else {
 843       for (uint i = 0; i < _max_num_queues; i++) {
 844         process_phase1(refs_lists[i], policy,
 845                        is_alive, keep_alive, complete_gc);
 846       }
 847     }
 848   } else { // policy == NULL
 849     assert(refs_lists != _discoveredSoftRefs,
 850            "Policy must be specified for soft references.");
 851   }
 852 
 853   // Phase 2:
 854   // . Traverse the list and remove any refs whose referents are alive.
 855   {
 856     RefProcParPhaseTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase2, phase_times);
 857 
 858     if (mt_processing) {
 859       RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/, phase_times);
 860       task_executor->execute(phase2);
 861     } else {
 862       for (uint i = 0; i < _max_num_queues; i++) {
 863         process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);
 864       }
 865     }
 866   }
 867 
 868   // Phase 3:
 869   // . Traverse the list and process referents as appropriate.
 870   {
 871     RefProcParPhaseTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase3, phase_times);
 872 
 873     if (mt_processing) {
 874       RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/, phase_times);
 875       task_executor->execute(phase3);
 876     } else {
 877       for (uint i = 0; i < _max_num_queues; i++) {
 878         process_phase3(refs_lists[i], clear_referent,
 879                        is_alive, keep_alive, complete_gc);
 880       }
 881     }
 882   }
 883 }
 884 
 885 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
 886   uint id = 0;
 887   // Determine the queue index to use for this object.
 888   if (_discovery_is_mt) {
 889     // During a multi-threaded discovery phase,
 890     // each thread saves to its "own" list.
 891     Thread* thr = Thread::current();
 892     id = thr->as_Worker_thread()->id();
 893   } else {
 894     // single-threaded discovery, we save in round-robin
 895     // fashion to each of the lists.
 896     if (_processing_is_mt) {
 897       id = next_id();
 898     }
 899   }
  900   assert(id < _max_num_queues, "Id is out-of-bounds (id %u, max id %u)", id, _max_num_queues);
 901 
 902   // Get the discovered queue to which we will add
 903   DiscoveredList* list = NULL;
 904   switch (rt) {
 905     case REF_OTHER:
 906       // Unknown reference type, no special treatment
 907       break;
 908     case REF_SOFT:
 909       list = &_discoveredSoftRefs[id];
 910       break;
 911     case REF_WEAK:
 912       list = &_discoveredWeakRefs[id];
 913       break;
 914     case REF_FINAL:
 915       list = &_discoveredFinalRefs[id];
 916       break;
 917     case REF_PHANTOM:
 918       list = &_discoveredPhantomRefs[id];
 919       break;
 920     case REF_NONE:


1094   } else {
1095     // We do a raw store here: the field will be visited later when processing
1096     // the discovered references.
1097     oop current_head = list->head();
1098     // The last ref must have its discovered field pointing to itself.
1099     oop next_discovered = (current_head != NULL) ? current_head : obj;
1100 
1101     assert(discovered == NULL, "control point invariant");
1102     RawAccess<>::oop_store(discovered_addr, next_discovered);
1103     list->set_head(obj);
1104     list->inc_length(1);
1105 
1106     log_develop_trace(gc, ref)("Discovered reference (" INTPTR_FORMAT ": %s)", p2i(obj), obj->klass()->internal_name());
1107   }
1108   assert(oopDesc::is_oop(obj), "Discovered a bad reference");
1109   verify_referent(obj);
1110   return true;
1111 }
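
The discovery path above pushes the newly discovered Reference onto the head of its list and points its discovered field at the old head, or at itself when the list was empty, which is why these lists are terminated by a self-loop rather than by NULL. A standalone sketch of that push, with simplified types (not HotSpot code):

  #include <cstdio>
  #include <cstddef>

  struct Ref {
    const char* name;
    Ref*        discovered;
  };

  struct DiscoveredListSketch {
    Ref*   head   = nullptr;
    size_t length = 0;

    void push(Ref* obj) {
      // The first element of an empty list points at itself, so the list is
      // terminated by a self-loop rather than by a null discovered field.
      obj->discovered = (head != nullptr) ? head : obj;
      head = obj;
      length++;
    }
  };

  int main() {
    Ref a = {"a", nullptr};
    Ref b = {"b", nullptr};
    DiscoveredListSketch list;
    list.push(&a);   // a.discovered == &a (tail)
    list.push(&b);   // b.discovered == &a
    printf("head=%s length=%zu tail self-looped=%s\n",
           list.head->name, list.length, (a.discovered == &a) ? "yes" : "no");
    return 0;
  }
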
1112 
1113 bool ReferenceProcessor::has_discovered_references() {
1114   for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
1115     if (!_discovered_refs[i].is_empty()) {
1116       return true;
1117     }
1118   }
1119   return false;
1120 }
1121 
1122 // Preclean the discovered references by removing those
1123 // whose referents are alive, and by marking from those that
1124 // are not active. These lists can be handled here
1125 // in any order and, indeed, concurrently.
1126 void ReferenceProcessor::preclean_discovered_references(
1127   BoolObjectClosure* is_alive,
1128   OopClosure* keep_alive,
1129   VoidClosure* complete_gc,
1130   YieldClosure* yield,
1131   GCTimer* gc_timer) {
1132 
1133   // Soft references
1134   {
1135     GCTraceTime(Debug, gc, ref) tm("Preclean SoftReferences", gc_timer);
1136     for (uint i = 0; i < _max_num_queues; i++) {
1137       if (yield->should_return()) {
1138         return;
1139       }
1140       preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
1141                                   keep_alive, complete_gc, yield);
1142     }
1143   }
1144 
1145   // Weak references
1146   {
1147     GCTraceTime(Debug, gc, ref) tm("Preclean WeakReferences", gc_timer);
1148     for (uint i = 0; i < _max_num_queues; i++) {
1149       if (yield->should_return()) {
1150         return;
1151       }
1152       preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,
1153                                   keep_alive, complete_gc, yield);
1154     }
1155   }
1156 
1157   // Final references
1158   {
1159     GCTraceTime(Debug, gc, ref) tm("Preclean FinalReferences", gc_timer);
1160     for (uint i = 0; i < _max_num_queues; i++) {
1161       if (yield->should_return()) {
1162         return;
1163       }
1164       preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
1165                                   keep_alive, complete_gc, yield);
1166     }
1167   }
1168 
1169   // Phantom references
1170   {
1171     GCTraceTime(Debug, gc, ref) tm("Preclean PhantomReferences", gc_timer);
1172     for (uint i = 0; i < _max_num_queues; i++) {
1173       if (yield->should_return()) {
1174         return;
1175       }
1176       preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
1177                                   keep_alive, complete_gc, yield);
1178     }
1179   }
1180 }
1181 
1182 // Walk the given discovered ref list, and remove all reference objects
1183 // whose referents are still alive, whose referents are NULL or which
1184 // are not active (have a non-NULL next field). NOTE: When we are
1185 // thus precleaning the ref lists (which happens single-threaded today),
1186 // we do not disable refs discovery to honor the correct semantics of
1187 // java.lang.Reference. As a result, we need to be careful below
1188 // that ref removal steps interleave safely with ref discovery steps
1189 // (in this thread).
1190 void
1191 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList&    refs_list,
1192                                                 BoolObjectClosure* is_alive,


1215         oop* next_addr = (oop*)java_lang_ref_Reference::next_addr_raw(obj);
1216         keep_alive->do_oop(next_addr);
1217       }
1218       iter.move_to_next();
1219     } else {
1220       iter.next();
1221     }
1222   }
1223   // Close the reachable set
1224   complete_gc->do_void();
1225 
1226   NOT_PRODUCT(
1227     if (iter.processed() > 0) {
1228       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " Refs out of " SIZE_FORMAT " Refs in discovered list " INTPTR_FORMAT,
1229         iter.removed(), iter.processed(), p2i(&refs_list));
1230     }
1231   )
1232 }
1233 
1234 const char* ReferenceProcessor::list_name(uint i) {
 1235    assert(i < _max_num_queues * number_of_subclasses_of_ref(),
1236           "Out of bounds index");
1237 
1238    int j = i / _max_num_queues;
1239    switch (j) {
1240      case 0: return "SoftRef";
1241      case 1: return "WeakRef";
1242      case 2: return "FinalRef";
1243      case 3: return "PhantomRef";
1244    }
1245    ShouldNotReachHere();
1246    return NULL;
1247 }