rev 2661 : [mq]: g1-reference-processing

--- old/src/share/vm/memory/referenceProcessor.cpp
+++ new/src/share/vm/memory/referenceProcessor.cpp
... 27 lines elided ...
  28   28  #include "gc_interface/collectedHeap.hpp"
  29   29  #include "gc_interface/collectedHeap.inline.hpp"
  30   30  #include "memory/referencePolicy.hpp"
  31   31  #include "memory/referenceProcessor.hpp"
  32   32  #include "oops/oop.inline.hpp"
  33   33  #include "runtime/java.hpp"
  34   34  #include "runtime/jniHandles.hpp"
  35   35  
  36   36  ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
  37   37  ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy      = NULL;
  38      -const int        subclasses_of_ref                = REF_PHANTOM - REF_OTHER;
  39   38  bool             ReferenceProcessor::_pending_list_uses_discovered_field = false;
  40   39  
  41      -// List of discovered references.
  42      -class DiscoveredList {
  43      -public:
  44      -  DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
  45      -  oop head() const     {
  46      -     return UseCompressedOops ?  oopDesc::decode_heap_oop(_compressed_head) :
  47      -                                _oop_head;
  48      -  }
  49      -  HeapWord* adr_head() {
  50      -    return UseCompressedOops ? (HeapWord*)&_compressed_head :
  51      -                               (HeapWord*)&_oop_head;
  52      -  }
  53      -  void   set_head(oop o) {
  54      -    if (UseCompressedOops) {
  55      -      // Must compress the head ptr.
  56      -      _compressed_head = oopDesc::encode_heap_oop(o);
  57      -    } else {
  58      -      _oop_head = o;
  59      -    }
  60      -  }
  61      -  bool   empty() const          { return head() == NULL; }
  62      -  size_t length()               { return _len; }
  63      -  void   set_length(size_t len) { _len = len;  }
  64      -  void   inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
  65      -  void   dec_length(size_t dec) { _len -= dec; }
  66      -private:
  67      -  // Set value depending on UseCompressedOops. This could be a template class
  68      -  // but then we have to fix all the instantiations and declarations that use this class.
  69      -  oop       _oop_head;
  70      -  narrowOop _compressed_head;
  71      -  size_t _len;
  72      -};
  73      -
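For context, a minimal sketch of the template alternative that the comment in the removed class rejects (DiscoveredListT is a hypothetical name, not part of this change): parameterizing on the head width pushes the narrowOop/oop choice to every declaration site, which the runtime UseCompressedOops test avoids.

// Hypothetical sketch only; not part of this change.
template <typename HeadType>   // HeadType is narrowOop or oop
class DiscoveredListT {
public:
  DiscoveredListT() : _head(HeadType()), _len(0) { }
  HeadType* adr_head()             { return &_head; }
  size_t    length() const         { return _len; }
  void      set_length(size_t len) { _len = len; }
  // head()/set_head() would need per-width specializations to do the
  // encode/decode that the runtime-checked class handles in one place.
private:
  HeadType _head;
  size_t   _len;
};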
  74   40  void referenceProcessor_init() {
  75   41    ReferenceProcessor::init_statics();
  76   42  }
  77   43  
  78   44  void ReferenceProcessor::init_statics() {
  79   45    // Initialize the master soft ref clock.
  80   46    java_lang_ref_SoftReference::set_clock(os::javaTimeMillis());
  81   47  
  82   48    _always_clear_soft_ref_policy = new AlwaysClearPolicy();
  83   49    _default_soft_ref_policy      = new COMPILER2_PRESENT(LRUMaxHeapPolicy())
... 21 lines elided ...
 105   71    _discovered_list_needs_barrier(discovered_list_needs_barrier),
 106   72    _bs(NULL),
 107   73    _processing_is_mt(mt_processing),
 108   74    _next_id(0)
 109   75  {
 110   76    _span = span;
 111   77    _discovery_is_atomic = atomic_discovery;
 112   78    _discovery_is_mt     = mt_discovery;
 113   79    _num_q               = MAX2(1, mt_processing_degree);
 114   80    _max_num_q           = MAX2(_num_q, mt_discovery_degree);
 115      -  _discoveredSoftRefs  = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref);
       81 +  _discoveredSoftRefs  = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref());
 116   82    if (_discoveredSoftRefs == NULL) {
  117   83      vm_exit_during_initialization("Could not allocate RefProc Array");
 118   84    }
 119   85    _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
 120   86    _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
 121   87    _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
  122   88    // Initialize all entries to NULL
 123      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
       89 +  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
 124   90      _discoveredSoftRefs[i].set_head(NULL);
 125   91      _discoveredSoftRefs[i].set_length(0);
 126   92    }
 127   93    // If we do barriers, cache a copy of the barrier set.
 128   94    if (discovered_list_needs_barrier) {
 129   95      _bs = Universe::heap()->barrier_set();
 130   96    }
 131   97    setup_policy(false /* default soft ref policy */);
 132   98  }
 133   99  
 134  100  #ifndef PRODUCT
 135  101  void ReferenceProcessor::verify_no_references_recorded() {
 136  102    guarantee(!_discovering_refs, "Discovering refs?");
 137      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
 138      -    guarantee(_discoveredSoftRefs[i].empty(),
      103 +  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
      104 +    guarantee(_discoveredSoftRefs[i].is_empty(),
 139  105                "Found non-empty discovered list");
 140  106    }
 141  107  }
 142  108  #endif
 143  109  
 144  110  void ReferenceProcessor::weak_oops_do(OopClosure* f) {
 145  111    // Should this instead be
 146      -  // for (int i = 0; i < subclasses_of_ref; i++_ {
       112 +  // for (int i = 0; i < subclasses_of_ref(); i++) {
 147  113    //   for (int j = 0; j < _num_q; j++) {
 148  114    //     int index = i * _max_num_q + j;
 149      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      115 +  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
 150  116      if (UseCompressedOops) {
 151  117        f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
 152  118      } else {
 153  119        f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
 154  120      }
 155  121    }
 156  122  }
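The loops above all rely on the same flattened layout: _discoveredSoftRefs is a single C-heap array of _max_num_q * subclasses_of_ref() entries, with the Soft/Weak/Final/Phantom segments laid out back to back. A minimal sketch of the index arithmetic (list_for is a hypothetical helper, not part of this change):

// Hypothetical helper; illustrates the layout only.
static DiscoveredList* list_for(DiscoveredList* base,  // _discoveredSoftRefs
                                int max_num_q,         // _max_num_q
                                int subclass,          // 0=Soft, 1=Weak, 2=Final, 3=Phantom
                                int queue) {
  assert(0 <= subclass && subclass < ReferenceProcessor::subclasses_of_ref(),
         "subclass out of bounds");
  assert(0 <= queue && queue < max_num_q, "queue out of bounds");
  return &base[subclass * max_num_q + queue];
}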
 157  123  
 158  124  void ReferenceProcessor::update_soft_ref_master_clock() {
 159  125    // Update (advance) the soft ref master clock field. This must be done
... 237 lines elided ...
 397  363    virtual void work(unsigned int work_id) {
 398  364      assert(work_id < (unsigned int)_ref_processor.max_num_q(), "Index out-of-bounds");
 399  365      // Simplest first cut: static partitioning.
 400  366      int index = work_id;
 401  367      // The increment on "index" must correspond to the maximum number of queues
 402  368      // (n_queues) with which that ReferenceProcessor was created.  That
 403  369      // is because of the "clever" way the discovered references lists were
 404  370      // allocated and are indexed into.
 405  371      assert(_n_queues == (int) _ref_processor.max_num_q(), "Different number not expected");
 406  372      for (int j = 0;
 407      -         j < subclasses_of_ref;
      373 +         j < ReferenceProcessor::subclasses_of_ref();
 408  374           j++, index += _n_queues) {
 409  375        _ref_processor.enqueue_discovered_reflist(
 410  376          _refs_lists[index], _pending_list_addr);
 411  377        _refs_lists[index].set_head(NULL);
 412  378        _refs_lists[index].set_length(0);
 413  379      }
 414  380    }
 415  381  };
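In RefProcEnqueueTask::work() above, each worker owns one queue column and strides across the subclass rows. A worked example, assuming _n_queues == 4 and work_id == 1:

// j == 0: index ==  1   (SoftRef    list, queue 1)
// j == 1: index ==  5   (WeakRef    list, queue 1)
// j == 2: index ==  9   (FinalRef   list, queue 1)
// j == 3: index == 13   (PhantomRef list, queue 1)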
 416  382  
 417  383  // Enqueue references that are not made active again
 418  384  void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr,
 419  385    AbstractRefProcTaskExecutor* task_executor) {
 420  386    if (_processing_is_mt && task_executor != NULL) {
 421  387      // Parallel code
 422  388      RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
 423  389                             pending_list_addr, _max_num_q);
 424  390      task_executor->execute(tsk);
 425  391    } else {
 426  392      // Serial code: call the parent class's implementation
 427      -    for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      393 +    for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
 428  394        enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
 429  395        _discoveredSoftRefs[i].set_head(NULL);
 430  396        _discoveredSoftRefs[i].set_length(0);
 431  397      }
 432  398    }
 433  399  }
 434  400  
 435      -// Iterator for the list of discovered references.
 436      -class DiscoveredListIterator {
 437      -public:
 438      -  inline DiscoveredListIterator(DiscoveredList&    refs_list,
 439      -                                OopClosure*        keep_alive,
 440      -                                BoolObjectClosure* is_alive);
 441      -
 442      -  // End Of List.
 443      -  inline bool has_next() const { return _ref != NULL; }
 444      -
 445      -  // Get oop to the Reference object.
 446      -  inline oop obj() const { return _ref; }
 447      -
 448      -  // Get oop to the referent object.
 449      -  inline oop referent() const { return _referent; }
 450      -
 451      -  // Returns true if referent is alive.
 452      -  inline bool is_referent_alive() const;
 453      -
 454      -  // Loads data for the current reference.
 455      -  // The "allow_null_referent" argument tells us to allow for the possibility
 456      -  // of a NULL referent in the discovered Reference object. This typically
 457      -  // happens in the case of concurrent collectors that may have done the
 458      -  // discovery concurrently, or interleaved, with mutator execution.
 459      -  inline void load_ptrs(DEBUG_ONLY(bool allow_null_referent));
 460      -
 461      -  // Move to the next discovered reference.
 462      -  inline void next();
 463      -
 464      -  // Remove the current reference from the list
 465      -  inline void remove();
 466      -
 467      -  // Make the Reference object active again.
 468      -  inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); }
 469      -
 470      -  // Make the referent alive.
 471      -  inline void make_referent_alive() {
 472      -    if (UseCompressedOops) {
 473      -      _keep_alive->do_oop((narrowOop*)_referent_addr);
 474      -    } else {
 475      -      _keep_alive->do_oop((oop*)_referent_addr);
 476      -    }
 477      -  }
 478      -
 479      -  // Update the discovered field.
 480      -  inline void update_discovered() {
 481      -    // First _prev_next ref actually points into DiscoveredList (gross).
 482      -    if (UseCompressedOops) {
 483      -      if (!oopDesc::is_null(*(narrowOop*)_prev_next)) {
 484      -        _keep_alive->do_oop((narrowOop*)_prev_next);
 485      -      }
 486      -    } else {
 487      -      if (!oopDesc::is_null(*(oop*)_prev_next)) {
 488      -        _keep_alive->do_oop((oop*)_prev_next);
 489      -      }
 490      -    }
 491      -  }
 492      -
 493      -  // NULL out referent pointer.
 494      -  inline void clear_referent() { oop_store_raw(_referent_addr, NULL); }
 495      -
 496      -  // Statistics
 497      -  NOT_PRODUCT(
 498      -  inline size_t processed() const { return _processed; }
 499      -  inline size_t removed() const   { return _removed; }
 500      -  )
 501      -
 502      -  inline void move_to_next();
 503      -
 504      -private:
 505      -  DiscoveredList&    _refs_list;
 506      -  HeapWord*          _prev_next;
 507      -  oop                _prev;
 508      -  oop                _ref;
 509      -  HeapWord*          _discovered_addr;
 510      -  oop                _next;
 511      -  HeapWord*          _referent_addr;
 512      -  oop                _referent;
 513      -  OopClosure*        _keep_alive;
 514      -  BoolObjectClosure* _is_alive;
 515      -  DEBUG_ONLY(
 516      -  oop                _first_seen; // cyclic linked list check
 517      -  )
 518      -  NOT_PRODUCT(
 519      -  size_t             _processed;
 520      -  size_t             _removed;
 521      -  )
 522      -};
 523      -
 524      -inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList&    refs_list,
 525      -                                                      OopClosure*        keep_alive,
 526      -                                                      BoolObjectClosure* is_alive)
 527      -  : _refs_list(refs_list),
 528      -    _prev_next(refs_list.adr_head()),
 529      -    _prev(NULL),
 530      -    _ref(refs_list.head()),
 531      -#ifdef ASSERT
 532      -    _first_seen(refs_list.head()),
 533      -#endif
 534      -#ifndef PRODUCT
 535      -    _processed(0),
 536      -    _removed(0),
 537      -#endif
 538      -    _next(NULL),
 539      -    _keep_alive(keep_alive),
 540      -    _is_alive(is_alive)
 541      -{ }
 542      -
 543      -inline bool DiscoveredListIterator::is_referent_alive() const {
 544      -  return _is_alive->do_object_b(_referent);
 545      -}
 546      -
 547      -inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
      401 +void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
 548  402    _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
 549  403    oop discovered = java_lang_ref_Reference::discovered(_ref);
 550  404    assert(_discovered_addr && discovered->is_oop_or_null(),
 551  405           "discovered field is bad");
 552  406    _next = discovered;
 553  407    _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
 554  408    _referent = java_lang_ref_Reference::referent(_ref);
 555  409    assert(Universe::heap()->is_in_reserved_or_null(_referent),
 556  410           "Wrong oop found in java.lang.Reference object");
 557  411    assert(allow_null_referent ?
 558  412               _referent->is_oop_or_null()
 559  413             : _referent->is_oop(),
 560  414           "bad referent");
 561  415  }
 562  416  
 563      -inline void DiscoveredListIterator::next() {
 564      -  _prev_next = _discovered_addr;
 565      -  _prev = _ref;
 566      -  move_to_next();
 567      -}
 568      -
 569      -inline void DiscoveredListIterator::remove() {
      417 +void DiscoveredListIterator::remove() {
 570  418    assert(_ref->is_oop(), "Dropping a bad reference");
 571  419    oop_store_raw(_discovered_addr, NULL);
 572  420  
 573  421    // First _prev_next ref actually points into DiscoveredList (gross).
 574  422    oop new_next;
 575  423    if (_next == _ref) {
 576  424      // At the end of the list, we should make _prev point to itself.
 577  425      // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
 578  426      // and _prev will be NULL.
 579  427      new_next = _prev;
... 5 lines elided ...
 585  433      // Remove Reference object from list.
 586  434      oopDesc::encode_store_heap_oop((narrowOop*)_prev_next, new_next);
 587  435    } else {
 588  436      // Remove Reference object from list.
 589  437      oopDesc::store_heap_oop((oop*)_prev_next, new_next);
 590  438    }
 591  439    NOT_PRODUCT(_removed++);
 592  440    _refs_list.dec_length(1);
 593  441  }
 594  442  
 595      -inline void DiscoveredListIterator::move_to_next() {
 596      -  if (_ref == _next) {
 597      -    // End of the list.
 598      -    _ref = NULL;
      443 +// Make the Reference object active again.
      444 +void DiscoveredListIterator::make_active() {
      445 +  // For G1 we don't want to use set_next - it
      446 +  // will dirty the card for the next field of
      447 +  // the reference object and will fail
      448 +  // CT verification.
      449 +  if (UseG1GC) {
      450 +    BarrierSet* bs = oopDesc::bs();
      451 +    HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
      452 +
      453 +    if (UseCompressedOops) {
      454 +      bs->write_ref_field_pre((narrowOop*)next_addr, NULL);
      455 +    } else {
      456 +      bs->write_ref_field_pre((oop*)next_addr, NULL);
      457 +    }
      458 +    java_lang_ref_Reference::set_next_raw(_ref, NULL);
 599  459    } else {
 600      -    _ref = _next;
      460 +    java_lang_ref_Reference::set_next(_ref, NULL);
 601  461    }
 602      -  assert(_ref != _first_seen, "cyclic ref_list found");
 603      -  NOT_PRODUCT(_processed++);
      462 +}
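A note on the split above (inferred from this file's conventions): java_lang_ref_Reference::set_next() performs a barriered oop store, so under G1 it would dirty the card spanning the Reference's next field, while set_next_raw() is a bare store; the raw path therefore issues an explicit write_ref_field_pre() (G1's SATB pre-barrier) before storing NULL.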
      463 +
      464 +void DiscoveredListIterator::clear_referent() {
      465 +  oop_store_raw(_referent_addr, NULL);
 604  466  }
 605  467  
 606  468  // NOTE: process_phase*() are largely similar, and at a high level
 607  469  // merely iterate over the extant list applying a predicate to
 608  470  // each of its elements and possibly removing that element from the
 609  471  // list and applying some further closures to that element.
 610  472  // We should consider the possibility of replacing these
 611  473  // process_phase*() methods by abstracting them into
 612  474  // a single general iterator invocation that receives appropriate
 613  475  // closures that accomplish this work.
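As the NOTE above suggests, the phases share one iteration shape. A minimal sketch of that shape using DiscoveredListIterator (mirroring the process_phase*() bodies; refs_list and the closure arguments are assumed to be supplied by the caller):

DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
while (iter.has_next()) {
  iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
  if (iter.is_referent_alive()) {
    // Referent is reachable after all: unlink the Reference from the
    // discovered list and keep the referent alive.
    iter.remove();
    iter.make_referent_alive();
    iter.move_to_next();
  } else {
    iter.next();
  }
}
// complete_gc is the collector-supplied VoidClosure seen in the
// process_phase3() calls further below.
complete_gc->do_void();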
... 165 lines elided ...
 779  641    refs_list.set_length(0);
 780  642  }
 781  643  
 782  644  void
 783  645  ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
 784  646    clear_discovered_references(refs_list);
 785  647  }
 786  648  
 787  649  void ReferenceProcessor::abandon_partial_discovery() {
 788  650    // loop over the lists
 789      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      651 +  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
 790  652      if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
 791      -      gclog_or_tty->print_cr("\nAbandoning %s discovered list",
 792      -                             list_name(i));
      653 +      gclog_or_tty->print_cr("\nAbandoning %s discovered list", list_name(i));
 793  654      }
 794  655      abandon_partial_discovered_list(_discoveredSoftRefs[i]);
 795  656    }
 796  657  }
 797  658  
 798  659  class RefProcPhase1Task: public AbstractRefProcTaskExecutor::ProcessTask {
 799  660  public:
 800  661    RefProcPhase1Task(ReferenceProcessor& ref_processor,
 801  662                      DiscoveredList      refs_lists[],
 802  663                      ReferencePolicy*    policy,
... 103 lines elided ...
 906  767          oop move_head = ref_lists[from_idx].head();
 907  768          oop move_tail = move_head;
 908  769          oop new_head  = move_head;
 909  770          // find an element to split the list on
 910  771          for (size_t j = 0; j < refs_to_move; ++j) {
 911  772            move_tail = new_head;
 912  773            new_head = java_lang_ref_Reference::discovered(new_head);
 913  774          }
 914  775  
 915  776          // Add the chain to the to list.
 916      -        if (ref_lists[to_idx].head() == NULL) {
 917      -          // to list is empty. Make a loop at the end.
 918      -          java_lang_ref_Reference::set_discovered(move_tail, move_tail);
      777 +        if (_discovered_list_needs_barrier) {
      778 +          if (ref_lists[to_idx].head() == NULL) {
      779 +            // to list is empty. Make a loop at the end.
      780 +            java_lang_ref_Reference::set_discovered(move_tail, move_tail);
      781 +          } else {
      782 +            java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
      783 +          }
 919  784          } else {
 920      -          java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
      785 +          HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(move_tail);
      786 +          if (ref_lists[to_idx].head() == NULL) {
      787 +            // to list is empty. Make a loop at the end.
      788 +            oop_store_raw(discovered_addr, move_tail);
      789 +          } else {
      790 +            oop_store_raw(discovered_addr, ref_lists[to_idx].head());
      791 +          }
 921  792          }
 922  793          ref_lists[to_idx].set_head(move_head);
 923  794          ref_lists[to_idx].inc_length(refs_to_move);
 924  795  
 925  796          // Remove the chain from the from list.
 926  797          if (move_tail == new_head) {
 927  798            // We found the end of the from list.
 928  799            ref_lists[from_idx].set_head(NULL);
 929  800          } else {
 930  801            ref_lists[from_idx].set_head(new_head);
... 101 lines elided ...
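Both branches of the splice in the hunk above preserve the list invariant that the tail element's discovered field points to itself (a NULL discovered field means "not discovered"), the same convention DiscoveredListIterator::remove() tests with _next == _ref. A minimal traversal sketch under that convention:

// Sketch of walking a DiscoveredList given the self-loop tail convention.
oop ref = refs_list.head();
while (ref != NULL) {
  oop next = java_lang_ref_Reference::discovered(ref);
  // ... visit ref ...
  ref = (next == ref) ? NULL : next;   // discovered == self marks the tail
}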
1032  903      for (int i = 0; i < _max_num_q; i++) {
1033  904        process_phase3(refs_lists[i], clear_referent,
1034  905                       is_alive, keep_alive, complete_gc);
1035  906      }
1036  907    }
1037  908  }
1038  909  
1039  910  void ReferenceProcessor::clean_up_discovered_references() {
1040  911    // loop over the lists
1041  912    // Should this instead be
1042      -  // for (int i = 0; i < subclasses_of_ref; i++_ {
      913 +  // for (int i = 0; i < subclasses_of_ref(); i++) {
1043  914    //   for (int j = 0; j < _num_q; j++) {
1044  915    //     int index = i * _max_num_q + j;
1045      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      916 +  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
1046  917      if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
1047  918        gclog_or_tty->print_cr(
1048  919          "\nScrubbing %s discovered list of Null referents",
1049  920          list_name(i));
1050  921      }
1051  922      clean_up_discovered_reflist(_discoveredSoftRefs[i]);
1052  923    }
1053  924  }
1054  925  
1055  926  void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
... 197 lines elided ...
1253 1124      // can mark through them now, rather than delaying that
1254 1125      // to the reference-processing phase. Since all current
1255 1126      // time-stamp policies advance the soft-ref clock only
1256 1127      // at a major collection cycle, this is always currently
1257 1128      // accurate.
1258 1129      if (!_current_soft_ref_policy->should_clear_reference(obj)) {
1259 1130        return false;
1260 1131      }
1261 1132    }
1262 1133  
     1134 +  ResourceMark rm;      // Needed for tracing.
     1135 +
1263 1136    HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
1264 1137    const oop  discovered = java_lang_ref_Reference::discovered(obj);
1265 1138    assert(discovered->is_oop_or_null(), "bad discovered field");
1266 1139    if (discovered != NULL) {
1267 1140      // The reference has already been discovered...
1268 1141      if (TraceReferenceGC) {
1269 1142        gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
1270 1143                               obj, obj->blueprint()->internal_name());
1271 1144      }
1272 1145      if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
... 192 lines elided ...
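The should_clear_reference() check in the hunk above defers to _current_soft_ref_policy. For context, a free-function sketch along the lines of the LRU time-stamp policies that init_statics() installs (see referencePolicy.cpp for the real LRUCurrentHeapPolicy/LRUMaxHeapPolicy):

// Sketch of an LRU-style time-stamp policy decision.
static bool should_clear_soft_ref(oop ref, jlong max_interval) {
  // Idle time measured against the master soft-ref clock that
  // update_soft_ref_master_clock() advances at major collections.
  jlong interval = java_lang_ref_SoftReference::clock()
                 - java_lang_ref_SoftReference::timestamp(ref);
  assert(interval >= 0, "Sanity check");
  return interval > max_interval;   // clear refs idle longer than the budget
}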
1465 1338    NOT_PRODUCT(
1466 1339      if (PrintGCDetails && PrintReferenceGC && (iter.processed() > 0)) {
1467 1340        gclog_or_tty->print_cr(" Dropped %d Refs out of %d "
1468 1341          "Refs in discovered list " INTPTR_FORMAT,
1469 1342          iter.removed(), iter.processed(), (address)refs_list.head());
1470 1343      }
1471 1344    )
1472 1345  }
1473 1346  
1474 1347  const char* ReferenceProcessor::list_name(int i) {
1475      -   assert(i >= 0 && i <= _max_num_q * subclasses_of_ref, "Out of bounds index");
      1348 +   assert(i >= 0 && i < _max_num_q * subclasses_of_ref(), "Out of bounds index");
1476 1349     int j = i / _max_num_q;
1477 1350     switch (j) {
1478 1351       case 0: return "SoftRef";
1479 1352       case 1: return "WeakRef";
1480 1353       case 2: return "FinalRef";
1481 1354       case 3: return "PhantomRef";
1482 1355     }
1483 1356     ShouldNotReachHere();
1484 1357     return NULL;
1485 1358  }
1486 1359  
1487 1360  #ifndef PRODUCT
1488 1361  void ReferenceProcessor::verify_ok_to_handle_reflists() {
1489 1362    // empty for now
1490 1363  }
1491 1364  #endif
1492 1365  
1493 1366  #ifndef PRODUCT
1494 1367  void ReferenceProcessor::clear_discovered_references() {
1495 1368    guarantee(!_discovering_refs, "Discovering refs?");
1496      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
     1369 +  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
1497 1370      clear_discovered_references(_discoveredSoftRefs[i]);
1498 1371    }
1499 1372  }
1500 1373  
1501 1374  #endif // PRODUCT
    