rev 2691 : [mq]: g1-reference-processing

          --- old/src/share/vm/memory/referenceProcessor.cpp
          +++ new/src/share/vm/memory/referenceProcessor.cpp
(27 lines elided)
  28   28  #include "gc_interface/collectedHeap.hpp"
  29   29  #include "gc_interface/collectedHeap.inline.hpp"
  30   30  #include "memory/referencePolicy.hpp"
  31   31  #include "memory/referenceProcessor.hpp"
  32   32  #include "oops/oop.inline.hpp"
  33   33  #include "runtime/java.hpp"
  34   34  #include "runtime/jniHandles.hpp"
  35   35  
  36   36  ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
  37   37  ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy      = NULL;
  38      -const int        subclasses_of_ref                = REF_PHANTOM - REF_OTHER;
  39   38  bool             ReferenceProcessor::_pending_list_uses_discovered_field = false;
  40   39  
  41      -// List of discovered references.
  42      -class DiscoveredList {
  43      -public:
  44      -  DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
  45      -  oop head() const     {
  46      -     return UseCompressedOops ?  oopDesc::decode_heap_oop(_compressed_head) :
  47      -                                _oop_head;
  48      -  }
  49      -  HeapWord* adr_head() {
  50      -    return UseCompressedOops ? (HeapWord*)&_compressed_head :
  51      -                               (HeapWord*)&_oop_head;
  52      -  }
  53      -  void   set_head(oop o) {
  54      -    if (UseCompressedOops) {
  55      -      // Must compress the head ptr.
  56      -      _compressed_head = oopDesc::encode_heap_oop(o);
  57      -    } else {
  58      -      _oop_head = o;
  59      -    }
  60      -  }
  61      -  bool   empty() const          { return head() == NULL; }
  62      -  size_t length()               { return _len; }
  63      -  void   set_length(size_t len) { _len = len;  }
  64      -  void   inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
  65      -  void   dec_length(size_t dec) { _len -= dec; }
  66      -private:
  67      -  // Set value depending on UseCompressedOops. This could be a template class
  68      -  // but then we have to fix all the instantiations and declarations that use this class.
  69      -  oop       _oop_head;
  70      -  narrowOop _compressed_head;
  71      -  size_t _len;
  72      -};
  73      -
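
The DiscoveredList class and the file-scope subclasses_of_ref constant are deleted from the .cpp here; the new code below calls ReferenceProcessor::number_of_subclasses_of_ref() and DiscoveredList::is_empty(), so both presumably now live in referenceProcessor.hpp, with empty() renamed to is_empty() along the way. A minimal sketch of the relocated count, assuming the same ReferenceType enum arithmetic as the removed constant (the header side of this change is not shown in this webrev):

    // Hypothetical sketch only -- not the actual header diff.
    class ReferenceProcessor: public CHeapObj {
     public:
      // Soft, Weak, Final and Phantom each get a set of discovered lists;
      // the count is derived exactly as the removed constant was.
      static int number_of_subclasses_of_ref() { return (REF_PHANTOM - REF_OTHER); }
      // ...
    };
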
  74   40  void referenceProcessor_init() {
  75   41    ReferenceProcessor::init_statics();
  76   42  }
  77   43  
  78   44  void ReferenceProcessor::init_statics() {
  79   45    // Initialize the master soft ref clock.
  80   46    java_lang_ref_SoftReference::set_clock(os::javaTimeMillis());
  81   47  
  82   48    _always_clear_soft_ref_policy = new AlwaysClearPolicy();
  83   49    _default_soft_ref_policy      = new COMPILER2_PRESENT(LRUMaxHeapPolicy())
(21 lines elided)
 105   71    _discovered_list_needs_barrier(discovered_list_needs_barrier),
 106   72    _bs(NULL),
 107   73    _processing_is_mt(mt_processing),
 108   74    _next_id(0)
 109   75  {
 110   76    _span = span;
 111   77    _discovery_is_atomic = atomic_discovery;
 112   78    _discovery_is_mt     = mt_discovery;
 113   79    _num_q               = MAX2(1, mt_processing_degree);
 114   80    _max_num_q           = MAX2(_num_q, mt_discovery_degree);
 115      -  _discoveredSoftRefs  = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref);
       81 +  _discoveredSoftRefs  = NEW_C_HEAP_ARRAY(DiscoveredList,
       82 +                                          _max_num_q * number_of_subclasses_of_ref());
 116   83    if (_discoveredSoftRefs == NULL) {
  117   84      vm_exit_during_initialization("Could not allocate RefProc Array");
 118   85    }
 119   86    _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
 120   87    _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
 121   88    _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
  122   89    // Initialize all entries to NULL
 123      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
       90 +  for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 124   91      _discoveredSoftRefs[i].set_head(NULL);
 125   92      _discoveredSoftRefs[i].set_length(0);
 126   93    }
 127   94    // If we do barriers, cache a copy of the barrier set.
 128   95    if (discovered_list_needs_barrier) {
 129   96      _bs = Universe::heap()->barrier_set();
 130   97    }
 131   98    setup_policy(false /* default soft ref policy */);
 132   99  }
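
The constructor lays all four kinds of discovered lists out in one flat C-heap array and aliases _discoveredWeakRefs and friends into it at _max_num_q strides; every per-kind loop in this file indexes it accordingly. A worked illustration of the layout, with a hypothetical _max_num_q of 4:

    // index = subclass * _max_num_q + queue   (subclass-major, queue-minor)
    // _discoveredSoftRefs[ 0.. 3]  Soft     == _discoveredSoftRefs    + queue
    // _discoveredSoftRefs[ 4.. 7]  Weak     == _discoveredWeakRefs    + queue
    // _discoveredSoftRefs[ 8..11]  Final    == _discoveredFinalRefs   + queue
    // _discoveredSoftRefs[12..15]  Phantom  == _discoveredPhantomRefs + queue
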
 133  100  
 134  101  #ifndef PRODUCT
 135  102  void ReferenceProcessor::verify_no_references_recorded() {
 136  103    guarantee(!_discovering_refs, "Discovering refs?");
 137      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
 138      -    guarantee(_discoveredSoftRefs[i].empty(),
      104 +  for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
      105 +    guarantee(_discoveredSoftRefs[i].is_empty(),
 139  106                "Found non-empty discovered list");
 140  107    }
 141  108  }
 142  109  #endif
 143  110  
 144  111  void ReferenceProcessor::weak_oops_do(OopClosure* f) {
 145      -  // Should this instead be
 146      -  // for (int i = 0; i < subclasses_of_ref; i++_ {
      112 +  // An alternative implementation of this routine
      113 +  // could use the following nested loop:
      114 +  //
       115 +  //   for (int i = 0; i < number_of_subclasses_of_ref(); i++) {
 147  116    //   for (int j = 0; j < _num_q; j++) {
 148  117    //     int index = i * _max_num_q + j;
 149      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      118 +
      119 +  for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 150  120      if (UseCompressedOops) {
 151  121        f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
 152  122      } else {
 153  123        f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
 154  124      }
 155  125    }
 156  126  }
 157  127  
 158  128  void ReferenceProcessor::update_soft_ref_master_clock() {
 159  129    // Update (advance) the soft ref master clock field. This must be done
(237 lines elided)
 397  367    virtual void work(unsigned int work_id) {
 398  368      assert(work_id < (unsigned int)_ref_processor.max_num_q(), "Index out-of-bounds");
 399  369      // Simplest first cut: static partitioning.
 400  370      int index = work_id;
 401  371      // The increment on "index" must correspond to the maximum number of queues
 402  372      // (n_queues) with which that ReferenceProcessor was created.  That
 403  373      // is because of the "clever" way the discovered references lists were
 404  374      // allocated and are indexed into.
 405  375      assert(_n_queues == (int) _ref_processor.max_num_q(), "Different number not expected");
 406  376      for (int j = 0;
 407      -         j < subclasses_of_ref;
      377 +         j < ReferenceProcessor::number_of_subclasses_of_ref();
 408  378           j++, index += _n_queues) {
 409  379        _ref_processor.enqueue_discovered_reflist(
 410  380          _refs_lists[index], _pending_list_addr);
 411  381        _refs_lists[index].set_head(NULL);
 412  382        _refs_lists[index].set_length(0);
 413  383      }
 414  384    }
 415  385  };
 416  386  
 417  387  // Enqueue references that are not made active again
 418  388  void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr,
 419  389    AbstractRefProcTaskExecutor* task_executor) {
 420  390    if (_processing_is_mt && task_executor != NULL) {
 421  391      // Parallel code
 422  392      RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
 423  393                             pending_list_addr, _max_num_q);
 424  394      task_executor->execute(tsk);
 425  395    } else {
 426  396      // Serial code: call the parent class's implementation
 427      -    for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      397 +    for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 428  398        enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
 429  399        _discoveredSoftRefs[i].set_head(NULL);
 430  400        _discoveredSoftRefs[i].set_length(0);
 431  401      }
 432  402    }
 433  403  }
 434  404  
 435      -// Iterator for the list of discovered references.
 436      -class DiscoveredListIterator {
 437      -public:
 438      -  inline DiscoveredListIterator(DiscoveredList&    refs_list,
 439      -                                OopClosure*        keep_alive,
 440      -                                BoolObjectClosure* is_alive);
 441      -
 442      -  // End Of List.
 443      -  inline bool has_next() const { return _ref != NULL; }
 444      -
 445      -  // Get oop to the Reference object.
 446      -  inline oop obj() const { return _ref; }
 447      -
 448      -  // Get oop to the referent object.
 449      -  inline oop referent() const { return _referent; }
 450      -
 451      -  // Returns true if referent is alive.
 452      -  inline bool is_referent_alive() const;
 453      -
 454      -  // Loads data for the current reference.
 455      -  // The "allow_null_referent" argument tells us to allow for the possibility
 456      -  // of a NULL referent in the discovered Reference object. This typically
 457      -  // happens in the case of concurrent collectors that may have done the
 458      -  // discovery concurrently, or interleaved, with mutator execution.
 459      -  inline void load_ptrs(DEBUG_ONLY(bool allow_null_referent));
 460      -
 461      -  // Move to the next discovered reference.
 462      -  inline void next();
 463      -
 464      -  // Remove the current reference from the list
 465      -  inline void remove();
 466      -
 467      -  // Make the Reference object active again.
 468      -  inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); }
 469      -
 470      -  // Make the referent alive.
 471      -  inline void make_referent_alive() {
 472      -    if (UseCompressedOops) {
 473      -      _keep_alive->do_oop((narrowOop*)_referent_addr);
 474      -    } else {
 475      -      _keep_alive->do_oop((oop*)_referent_addr);
 476      -    }
 477      -  }
 478      -
 479      -  // Update the discovered field.
 480      -  inline void update_discovered() {
 481      -    // First _prev_next ref actually points into DiscoveredList (gross).
 482      -    if (UseCompressedOops) {
 483      -      if (!oopDesc::is_null(*(narrowOop*)_prev_next)) {
 484      -        _keep_alive->do_oop((narrowOop*)_prev_next);
 485      -      }
 486      -    } else {
 487      -      if (!oopDesc::is_null(*(oop*)_prev_next)) {
 488      -        _keep_alive->do_oop((oop*)_prev_next);
 489      -      }
 490      -    }
 491      -  }
 492      -
 493      -  // NULL out referent pointer.
 494      -  inline void clear_referent() { oop_store_raw(_referent_addr, NULL); }
 495      -
 496      -  // Statistics
 497      -  NOT_PRODUCT(
 498      -  inline size_t processed() const { return _processed; }
 499      -  inline size_t removed() const   { return _removed; }
 500      -  )
 501      -
 502      -  inline void move_to_next();
 503      -
 504      -private:
 505      -  DiscoveredList&    _refs_list;
 506      -  HeapWord*          _prev_next;
 507      -  oop                _prev;
 508      -  oop                _ref;
 509      -  HeapWord*          _discovered_addr;
 510      -  oop                _next;
 511      -  HeapWord*          _referent_addr;
 512      -  oop                _referent;
 513      -  OopClosure*        _keep_alive;
 514      -  BoolObjectClosure* _is_alive;
 515      -  DEBUG_ONLY(
 516      -  oop                _first_seen; // cyclic linked list check
 517      -  )
 518      -  NOT_PRODUCT(
 519      -  size_t             _processed;
 520      -  size_t             _removed;
 521      -  )
 522      -};
 523      -
 524      -inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList&    refs_list,
 525      -                                                      OopClosure*        keep_alive,
 526      -                                                      BoolObjectClosure* is_alive)
 527      -  : _refs_list(refs_list),
 528      -    _prev_next(refs_list.adr_head()),
 529      -    _prev(NULL),
 530      -    _ref(refs_list.head()),
 531      -#ifdef ASSERT
 532      -    _first_seen(refs_list.head()),
 533      -#endif
 534      -#ifndef PRODUCT
 535      -    _processed(0),
 536      -    _removed(0),
 537      -#endif
 538      -    _next(NULL),
 539      -    _keep_alive(keep_alive),
 540      -    _is_alive(is_alive)
 541      -{ }
 542      -
 543      -inline bool DiscoveredListIterator::is_referent_alive() const {
 544      -  return _is_alive->do_object_b(_referent);
 545      -}
 546      -
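
The DiscoveredListIterator class body and its inline definitions are likewise removed from the .cpp; the out-of-line definitions of load_ptrs(), remove(), make_active() and clear_referent() that follow suggest the declaration moved to referenceProcessor.hpp with those four members de-inlined. A sketch of the presumed relocated declaration (an assumption; the header is not part of this fragment):

    class DiscoveredListIterator {
     public:
      inline DiscoveredListIterator(DiscoveredList&    refs_list,
                                    OopClosure*        keep_alive,
                                    BoolObjectClosure* is_alive);
      void load_ptrs(DEBUG_ONLY(bool allow_null_referent));
      void remove();
      void make_active();     // now barrier-aware, see below
      void clear_referent();
      // ... remaining members as in the removed declaration above ...
    };
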
 547      -inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
      405 +void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
 548  406    _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
 549  407    oop discovered = java_lang_ref_Reference::discovered(_ref);
 550  408    assert(_discovered_addr && discovered->is_oop_or_null(),
 551  409           "discovered field is bad");
 552  410    _next = discovered;
 553  411    _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
 554  412    _referent = java_lang_ref_Reference::referent(_ref);
 555  413    assert(Universe::heap()->is_in_reserved_or_null(_referent),
 556  414           "Wrong oop found in java.lang.Reference object");
 557  415    assert(allow_null_referent ?
 558  416               _referent->is_oop_or_null()
 559  417             : _referent->is_oop(),
 560  418           "bad referent");
 561  419  }
 562  420  
 563      -inline void DiscoveredListIterator::next() {
 564      -  _prev_next = _discovered_addr;
 565      -  _prev = _ref;
 566      -  move_to_next();
 567      -}
 568      -
 569      -inline void DiscoveredListIterator::remove() {
      421 +void DiscoveredListIterator::remove() {
 570  422    assert(_ref->is_oop(), "Dropping a bad reference");
 571  423    oop_store_raw(_discovered_addr, NULL);
 572  424  
 573  425    // First _prev_next ref actually points into DiscoveredList (gross).
 574  426    oop new_next;
 575  427    if (_next == _ref) {
 576  428      // At the end of the list, we should make _prev point to itself.
 577  429      // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
 578  430      // and _prev will be NULL.
 579  431      new_next = _prev;
(5 lines elided)
 585  437      // Remove Reference object from list.
 586  438      oopDesc::encode_store_heap_oop((narrowOop*)_prev_next, new_next);
 587  439    } else {
 588  440      // Remove Reference object from list.
 589  441      oopDesc::store_heap_oop((oop*)_prev_next, new_next);
 590  442    }
 591  443    NOT_PRODUCT(_removed++);
 592  444    _refs_list.dec_length(1);
 593  445  }
 594  446  
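
remove() depends on the list convention noted in its own comments above: the last Reference on a DiscoveredList has its discovered field pointing at itself, so _next == _ref marks the tail. A minimal traversal sketch under that convention (loop-local names are illustrative):

    // Walking a discovered list whose tail is self-linked.
    for (oop ref = refs_list.head(); ref != NULL; ) {
      oop next = java_lang_ref_Reference::discovered(ref);
      // ... process ref ...
      ref = (next == ref) ? NULL : next;   // self-link terminates the walk
    }
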
 595      -inline void DiscoveredListIterator::move_to_next() {
 596      -  if (_ref == _next) {
 597      -    // End of the list.
 598      -    _ref = NULL;
      447 +// Make the Reference object active again.
      448 +void DiscoveredListIterator::make_active() {
      449 +  // For G1 we don't want to use set_next - it
      450 +  // will dirty the card for the next field of
      451 +  // the reference object and will fail
      452 +  // CT verification.
      453 +  if (UseG1GC) {
      454 +    BarrierSet* bs = oopDesc::bs();
      455 +    HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
      456 +
      457 +    if (UseCompressedOops) {
      458 +      bs->write_ref_field_pre((narrowOop*)next_addr, NULL);
      459 +    } else {
      460 +      bs->write_ref_field_pre((oop*)next_addr, NULL);
      461 +    }
      462 +    java_lang_ref_Reference::set_next_raw(_ref, NULL);
 599  463    } else {
 600      -    _ref = _next;
      464 +    java_lang_ref_Reference::set_next(_ref, NULL);
 601  465    }
 602      -  assert(_ref != _first_seen, "cyclic ref_list found");
 603      -  NOT_PRODUCT(_processed++);
      466 +}
      467 +
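
make_active() now special-cases G1: the regular set_next() would run the post-barrier and dirty the card for the Reference's next field, tripping card-table (CT) verification during the pause, so the G1 path issues only the pre-barrier by hand and then stores raw. A sketch of the presumed difference between the two javaClasses setters (an assumption; javaClasses.hpp is not shown in this webrev):

    // Presumed shape of the setters used above.
    void java_lang_ref_Reference::set_next(oop ref, oop value) {
      ref->obj_field_put(next_offset, value);      // store + post-barrier
    }
    void java_lang_ref_Reference::set_next_raw(oop ref, oop value) {
      ref->obj_field_put_raw(next_offset, value);  // raw store, no barrier
    }
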
      468 +void DiscoveredListIterator::clear_referent() {
      469 +  oop_store_raw(_referent_addr, NULL);
 604  470  }
 605  471  
 606  472  // NOTE: process_phase*() are largely similar, and at a high level
 607  473  // merely iterate over the extant list applying a predicate to
 608  474  // each of its elements and possibly removing that element from the
 609  475  // list and applying some further closures to that element.
 610  476  // We should consider the possibility of replacing these
 611  477  // process_phase*() methods by abstracting them into
 612  478  // a single general iterator invocation that receives appropriate
 613  479  // closures that accomplish this work.
(165 lines elided)
 779  645    refs_list.set_length(0);
 780  646  }
 781  647  
 782  648  void
 783  649  ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
 784  650    clear_discovered_references(refs_list);
 785  651  }
 786  652  
 787  653  void ReferenceProcessor::abandon_partial_discovery() {
 788  654    // loop over the lists
 789      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      655 +  for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 790  656      if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
 791      -      gclog_or_tty->print_cr("\nAbandoning %s discovered list",
 792      -                             list_name(i));
      657 +      gclog_or_tty->print_cr("\nAbandoning %s discovered list", list_name(i));
 793  658      }
 794  659      abandon_partial_discovered_list(_discoveredSoftRefs[i]);
 795  660    }
 796  661  }
 797  662  
 798  663  class RefProcPhase1Task: public AbstractRefProcTaskExecutor::ProcessTask {
 799  664  public:
 800  665    RefProcPhase1Task(ReferenceProcessor& ref_processor,
 801  666                      DiscoveredList      refs_lists[],
 802  667                      ReferencePolicy*    policy,
(48 lines elided)
 851  716      // Thread* thr = Thread::current();
 852  717      // int refs_list_index = ((WorkerThread*)thr)->id();
 853  718      // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
 854  719      _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
 855  720                                    &is_alive, &keep_alive, &complete_gc);
 856  721    }
 857  722  private:
 858  723    bool _clear_referent;
 859  724  };
 860  725  
      726 +void ReferenceProcessor::set_discovered(oop ref, oop value) {
      727 +  if (_discovered_list_needs_barrier) {
      728 +    java_lang_ref_Reference::set_discovered(ref, value);
      729 +  } else {
      730 +    java_lang_ref_Reference::set_discovered_raw(ref, value);
      731 +  }
      732 +}
      733 +
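
The new set_discovered() wrapper lets the queue-balancing code below splice chains without caring which collector is running: when the discovered lists need a barrier (as with G1's concurrent discovery), the write goes through the barriered setter; otherwise a raw store avoids needless card dirtying. Presumed shape of the raw variant, by analogy with set_next_raw() above (an assumption; only its call sites appear in this fragment):

    void java_lang_ref_Reference::set_discovered_raw(oop ref, oop value) {
      ref->obj_field_put_raw(discovered_offset, value);  // no barrier
    }
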
 861  734  // Balances reference queues.
 862  735  // Move entries from all queues[0, 1, ..., _max_num_q-1] to
 863  736  // queues[0, 1, ..., _num_q-1] because only the first _num_q
 864  737  // corresponding to the active workers will be processed.
 865  738  void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
 866  739  {
 867  740    // calculate total length
 868  741    size_t total_refs = 0;
 869  742    if (TraceReferenceGC && PrintGCDetails) {
 870  743      gclog_or_tty->print_cr("\nBalance ref_lists ");
(37 lines elided)
 908  781          oop new_head  = move_head;
 909  782          // find an element to split the list on
 910  783          for (size_t j = 0; j < refs_to_move; ++j) {
 911  784            move_tail = new_head;
 912  785            new_head = java_lang_ref_Reference::discovered(new_head);
 913  786          }
 914  787  
 915  788          // Add the chain to the to list.
 916  789          if (ref_lists[to_idx].head() == NULL) {
 917  790            // to list is empty. Make a loop at the end.
 918      -          java_lang_ref_Reference::set_discovered(move_tail, move_tail);
      791 +          set_discovered(move_tail, move_tail);
 919  792          } else {
 920      -          java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
      793 +          set_discovered(move_tail, ref_lists[to_idx].head());
 921  794          }
 922  795          ref_lists[to_idx].set_head(move_head);
 923  796          ref_lists[to_idx].inc_length(refs_to_move);
 924  797  
 925  798          // Remove the chain from the from list.
 926  799          if (move_tail == new_head) {
 927  800            // We found the end of the from list.
 928  801            ref_lists[from_idx].set_head(NULL);
 929  802          } else {
 930  803            ref_lists[from_idx].set_head(new_head);
(100 lines elided)
1031  904    } else {
1032  905      for (int i = 0; i < _max_num_q; i++) {
1033  906        process_phase3(refs_lists[i], clear_referent,
1034  907                       is_alive, keep_alive, complete_gc);
1035  908      }
1036  909    }
1037  910  }
1038  911  
1039  912  void ReferenceProcessor::clean_up_discovered_references() {
1040  913    // loop over the lists
1041      -  // Should this instead be
1042      -  // for (int i = 0; i < subclasses_of_ref; i++_ {
      914 +
      915 +  // An alternative implementation of this routine could
      916 +  // use the following nested loop:
      917 +  //
      918 +  // for (int i = 0; i < number_of_subclasses_of_ref(); i++) {
1043  919    //   for (int j = 0; j < _num_q; j++) {
1044  920    //     int index = i * _max_num_q + j;
1045      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      921 +
      922 +  for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
1046  923      if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
1047  924        gclog_or_tty->print_cr(
1048  925          "\nScrubbing %s discovered list of Null referents",
1049  926          list_name(i));
1050  927      }
1051  928      clean_up_discovered_reflist(_discoveredSoftRefs[i]);
1052  929    }
1053  930  }
1054  931  
1055  932  void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
(197 lines elided)
1253 1130      // can mark through them now, rather than delaying that
1254 1131      // to the reference-processing phase. Since all current
1255 1132      // time-stamp policies advance the soft-ref clock only
1256 1133      // at a major collection cycle, this is always currently
1257 1134      // accurate.
1258 1135      if (!_current_soft_ref_policy->should_clear_reference(obj)) {
1259 1136        return false;
1260 1137      }
1261 1138    }
1262 1139  
     1140 +  ResourceMark rm;      // Needed for tracing.
     1141 +
1263 1142    HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
1264 1143    const oop  discovered = java_lang_ref_Reference::discovered(obj);
1265 1144    assert(discovered->is_oop_or_null(), "bad discovered field");
1266 1145    if (discovered != NULL) {
1267 1146      // The reference has already been discovered...
1268 1147      if (TraceReferenceGC) {
1269 1148        gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
1270 1149                               obj, obj->blueprint()->internal_name());
1271 1150      }
1272 1151      if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
(192 lines elided)
1465 1344    NOT_PRODUCT(
1466 1345      if (PrintGCDetails && PrintReferenceGC && (iter.processed() > 0)) {
1467 1346        gclog_or_tty->print_cr(" Dropped %d Refs out of %d "
1468 1347          "Refs in discovered list " INTPTR_FORMAT,
1469 1348          iter.removed(), iter.processed(), (address)refs_list.head());
1470 1349      }
1471 1350    )
1472 1351  }
1473 1352  
1474 1353  const char* ReferenceProcessor::list_name(int i) {
1475      -   assert(i >= 0 && i <= _max_num_q * subclasses_of_ref, "Out of bounds index");
      1354 +   assert(i >= 0 && i < _max_num_q * number_of_subclasses_of_ref(),
     1355 +          "Out of bounds index");
     1356 +
1476 1357     int j = i / _max_num_q;
1477 1358     switch (j) {
1478 1359       case 0: return "SoftRef";
1479 1360       case 1: return "WeakRef";
1480 1361       case 2: return "FinalRef";
1481 1362       case 3: return "PhantomRef";
1482 1363     }
1483 1364     ShouldNotReachHere();
1484 1365     return NULL;
1485 1366  }
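
list_name() inverts the flat-array layout by integer division. A worked example, again assuming a hypothetical _max_num_q of 4:

    // i = 9    ->  j = 9 / 4 = 2  ->  "FinalRef" (second queue of the Final lists)
    // i = 0..3 -> "SoftRef", 4..7 -> "WeakRef",
    // i = 8..11 -> "FinalRef", 12..15 -> "PhantomRef"
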
1486 1367  
1487 1368  #ifndef PRODUCT
1488 1369  void ReferenceProcessor::verify_ok_to_handle_reflists() {
1489 1370    // empty for now
1490 1371  }
1491 1372  #endif
1492 1373  
1493 1374  #ifndef PRODUCT
1494 1375  void ReferenceProcessor::clear_discovered_references() {
1495 1376    guarantee(!_discovering_refs, "Discovering refs?");
1496      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
     1377 +  for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
1497 1378      clear_discovered_references(_discoveredSoftRefs[i]);
1498 1379    }
1499 1380  }
1500 1381  
1501 1382  #endif // PRODUCT
    