rev 2518 : [mq]: g1-reference-processing

          --- old/src/share/vm/memory/referenceProcessor.cpp
          +++ new/src/share/vm/memory/referenceProcessor.cpp
[... 28 lines elided ...]
  29   29  #include "gc_interface/collectedHeap.inline.hpp"
  30   30  #include "memory/referencePolicy.hpp"
  31   31  #include "memory/referenceProcessor.hpp"
  32   32  #include "oops/oop.inline.hpp"
  33   33  #include "runtime/java.hpp"
  34   34  #include "runtime/jniHandles.hpp"
  35   35  
  36   36  ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
  37   37  ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy      = NULL;
  38   38  oop              ReferenceProcessor::_sentinelRef = NULL;
  39      -const int        subclasses_of_ref                = REF_PHANTOM - REF_OTHER;
  40   39  
  41      -// List of discovered references.
  42      -class DiscoveredList {
  43      -public:
  44      -  DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
  45      -  oop head() const     {
  46      -     return UseCompressedOops ?  oopDesc::decode_heap_oop_not_null(_compressed_head) :
  47      -                                _oop_head;
  48      -  }
  49      -  HeapWord* adr_head() {
  50      -    return UseCompressedOops ? (HeapWord*)&_compressed_head :
  51      -                               (HeapWord*)&_oop_head;
  52      -  }
  53      -  void   set_head(oop o) {
  54      -    if (UseCompressedOops) {
  55      -      // Must compress the head ptr.
  56      -      _compressed_head = oopDesc::encode_heap_oop_not_null(o);
  57      -    } else {
  58      -      _oop_head = o;
  59      -    }
  60      -  }
  61      -  bool   empty() const          { return head() == ReferenceProcessor::sentinel_ref(); }
  62      -  size_t length()               { return _len; }
  63      -  void   set_length(size_t len) { _len = len;  }
  64      -  void   inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
  65      -  void   dec_length(size_t dec) { _len -= dec; }
  66      -private:
  67      -  // Set value depending on UseCompressedOops. This could be a template class
  68      -  // but then we have to fix all the instantiations and declarations that use this class.
  69      -  oop       _oop_head;
  70      -  narrowOop _compressed_head;
  71      -  size_t _len;
  72      -};
       40 +bool DiscoveredList::empty() const {
       41 +  return head() == ReferenceProcessor::sentinel_ref();
       42 +}
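
Reviewer note: the DiscoveredList class body deleted above presumably moves to referenceProcessor.hpp largely unchanged, with only empty() out-of-lined here (plausibly because the header cannot call ReferenceProcessor::sentinel_ref() at that point). A minimal sketch of the expected declaration, reconstructed from the deleted lines:

    // Sketch only -- reconstructed from the class body removed above.
    class DiscoveredList {
    public:
      DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
      oop       head() const;
      HeapWord* adr_head();
      void      set_head(oop o);
      bool      empty() const;              // out-of-lined in the .cpp above
      size_t    length()               { return _len; }
      void      set_length(size_t len) { _len = len; }
      void      inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
      void      dec_length(size_t dec) { _len -= dec; }
    private:
      // Set value depending on UseCompressedOops. This could be a template class
      // but then we have to fix all the instantiations and declarations that use this class.
      oop       _oop_head;
      narrowOop _compressed_head;
      size_t    _len;
    };
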
  73   43  
  74   44  void referenceProcessor_init() {
  75   45    ReferenceProcessor::init_statics();
  76   46  }
  77   47  
  78   48  void ReferenceProcessor::init_statics() {
  79   49    assert(_sentinelRef == NULL, "should be initialized precisely once");
  80   50    EXCEPTION_MARK;
  81   51    _sentinelRef = instanceKlass::cast(
  82   52                      SystemDictionary::Reference_klass())->
[... 33 lines elided ...]
 116   86    _discovered_list_needs_barrier(discovered_list_needs_barrier),
 117   87    _bs(NULL),
 118   88    _processing_is_mt(mt_processing),
 119   89    _next_id(0)
 120   90  {
 121   91    _span = span;
 122   92    _discovery_is_atomic = atomic_discovery;
 123   93    _discovery_is_mt     = mt_discovery;
 124   94    _num_q               = MAX2(1, mt_processing_degree);
 125   95    _max_num_q           = MAX2(_num_q, mt_discovery_degree);
 126      -  _discoveredSoftRefs  = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref);
       96 +  _discoveredSoftRefs  = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref());
 127   97    if (_discoveredSoftRefs == NULL) {
 128   98      vm_exit_during_initialization("Could not allocated RefProc Array");
 129   99    }
 130  100    _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
 131  101    _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
 132  102    _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
 133  103    assert(sentinel_ref() != NULL, "_sentinelRef is NULL");
 134  104    // Initialized all entries to _sentinelRef
 135      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      105 +  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
 136  106          _discoveredSoftRefs[i].set_head(sentinel_ref());
 137  107      _discoveredSoftRefs[i].set_length(0);
 138  108    }
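
Reviewer note: the four Reference subclasses share a single C-heap array of _max_num_q * subclasses_of_ref() lists, segmented as [Soft | Weak | Final | Phantom]. A hypothetical helper (not in the patch) showing the index arithmetic this layout implies:

    // Hypothetical illustration only: the list for subclass rt, queue q.
    // REF_SOFT..REF_PHANTOM are contiguous in ReferenceType, so rt - REF_SOFT
    // selects the segment, and each segment is _max_num_q entries wide.
    DiscoveredList* list_for(ReferenceType rt, int q) {
      assert(REF_SOFT <= rt && rt <= REF_PHANTOM, "not a discovered subclass");
      assert(0 <= q && q < _max_num_q, "queue index out of bounds");
      return &_discoveredSoftRefs[(rt - REF_SOFT) * _max_num_q + q];
    }
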
 139      -  // If we do barreirs, cache a copy of the barrier set.
      109 +  // If we do barriers, cache a copy of the barrier set.
 140  110    if (discovered_list_needs_barrier) {
 141  111      _bs = Universe::heap()->barrier_set();
 142  112    }
 143  113    setup_policy(false /* default soft ref policy */);
 144  114  }
 145  115  
 146  116  #ifndef PRODUCT
 147  117  void ReferenceProcessor::verify_no_references_recorded() {
 148  118    guarantee(!_discovering_refs, "Discovering refs?");
 149      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      119 +  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
 150  120      guarantee(_discoveredSoftRefs[i].empty(),
 151  121                "Found non-empty discovered list");
 152  122    }
 153  123  }
 154  124  #endif
 155  125  
 156  126  void ReferenceProcessor::weak_oops_do(OopClosure* f) {
 157  127    // Should this instead be
 158      -  // for (int i = 0; i < subclasses_of_ref; i++_ {
       128 +  // for (int i = 0; i < subclasses_of_ref(); i++) {
 159  129    //   for (int j = 0; j < _num_q; j++) {
 160  130    //     int index = i * _max_num_q + j;
 161      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      131 +  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
 162  132      if (UseCompressedOops) {
 163  133        f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
 164  134      } else {
 165  135        f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
 166  136      }
 167  137    }
 168  138  }
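
Reviewer note: completing the alternative iteration sketched in the comment above. Unlike the flat loop over the whole array, it would visit only the first _num_q queues of each segment rather than all _max_num_q:

    // Hypothetical expansion of the commented-out alternative.
    for (int i = 0; i < subclasses_of_ref(); i++) {
      for (int j = 0; j < _num_q; j++) {
        int index = i * _max_num_q + j;
        if (UseCompressedOops) {
          f->do_oop((narrowOop*)_discoveredSoftRefs[index].adr_head());
        } else {
          f->do_oop((oop*)_discoveredSoftRefs[index].adr_head());
        }
      }
    }
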
 169  139  
 170  140  void ReferenceProcessor::oops_do(OopClosure* f) {
 171  141    f->do_oop(adr_sentinel_ref());
[... 213 lines elided ...]
 385  355    virtual void work(unsigned int work_id) {
 386  356      assert(work_id < (unsigned int)_ref_processor.max_num_q(), "Index out-of-bounds");
 387  357      // Simplest first cut: static partitioning.
 388  358      int index = work_id;
 389  359      // The increment on "index" must correspond to the maximum number of queues
 390  360      // (n_queues) with which that ReferenceProcessor was created.  That
 391  361      // is because of the "clever" way the discovered references lists were
 392  362      // allocated and are indexed into.
 393  363      assert(_n_queues == (int) _ref_processor.max_num_q(), "Different number not expected");
 394  364      for (int j = 0;
 395      -         j < subclasses_of_ref;
      365 +         j < ReferenceProcessor::subclasses_of_ref();
 396  366           j++, index += _n_queues) {
 397  367        _ref_processor.enqueue_discovered_reflist(
 398  368          _refs_lists[index], _pending_list_addr);
 399  369        _refs_lists[index].set_head(_sentinel_ref);
 400  370        _refs_lists[index].set_length(0);
 401  371      }
 402  372    }
 403  373  };
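
Reviewer note: a concrete instance of the static partitioning above, under the assumption _n_queues == 4:

    // Worker work_id == 2 (assuming _n_queues == 4) visits:
    //   index =  2  -> SoftRef    queue 2
    //   index =  6  -> WeakRef    queue 2
    //   index = 10  -> FinalRef   queue 2
    //   index = 14  -> PhantomRef queue 2
    // i.e. one fixed queue per subclass, stepping by _n_queues each time.
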
 404  374  
 405  375  // Enqueue references that are not made active again
 406  376  void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr,
 407  377    AbstractRefProcTaskExecutor* task_executor) {
 408  378    if (_processing_is_mt && task_executor != NULL) {
 409  379      // Parallel code
 410  380      RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
 411  381                             pending_list_addr, sentinel_ref(), _max_num_q);
 412  382      task_executor->execute(tsk);
 413  383    } else {
 414  384      // Serial code: call the parent class's implementation
 415      -    for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      385 +    for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
 416  386        enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
 417  387        _discoveredSoftRefs[i].set_head(sentinel_ref());
 418  388        _discoveredSoftRefs[i].set_length(0);
 419  389      }
 420  390    }
 421  391  }
 422  392  
 423  393  // Iterator for the list of discovered references.
 424  394  class DiscoveredListIterator {
 425  395  public:
[... 20 lines elided ...]
 446  416    // discovery concurrently, or interleaved, with mutator execution.
 447  417    inline void load_ptrs(DEBUG_ONLY(bool allow_null_referent));
 448  418  
 449  419    // Move to the next discovered reference.
 450  420    inline void next();
 451  421  
 452  422    // Remove the current reference from the list
 453  423    inline void remove();
 454  424  
 455  425    // Make the Reference object active again.
 456      -  inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); }
      426 +  inline void make_active() {
      427 +    // For G1 we don't want to use set_next - it
      428 +    // will dirty the card for the next field of
      429 +    // the reference object and will fail
      430 +    // CT verification.
      431 +    if (UseG1GC) {
      432 +      BarrierSet* bs = oopDesc::bs();
      433 +      HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
      434 +
      435 +      if (UseCompressedOops) {
      436 +        bs->write_ref_field_pre((narrowOop*)next_addr, NULL);
      437 +      } else {
      438 +        bs->write_ref_field_pre((oop*)next_addr, NULL);
      439 +      }
      440 +      java_lang_ref_Reference::set_next_raw(_ref, NULL);
      441 +    } else {
      442 +      java_lang_ref_Reference::set_next(_ref, NULL);
      443 +    }
      444 +  }
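
Reviewer note (non-normative reading): on the G1 path, write_ref_field_pre() logs the old value of the next field for SATB concurrent marking, and set_next_raw() then stores NULL without the post-write barrier, so no card is dirtied for the field; the plain set_next() branch keeps the usual barriered store for the other collectors.
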
 457  445  
 458  446    // Make the referent alive.
 459  447    inline void make_referent_alive() {
 460  448      if (UseCompressedOops) {
 461  449        _keep_alive->do_oop((narrowOop*)_referent_addr);
 462  450      } else {
 463  451        _keep_alive->do_oop((oop*)_referent_addr);
 464  452      }
 465  453    }
 466  454  
[... 21 lines elided ...]
 488  476  private:
 489  477    DiscoveredList&    _refs_list;
 490  478    HeapWord*          _prev_next;
 491  479    oop                _ref;
 492  480    HeapWord*          _discovered_addr;
 493  481    oop                _next;
 494  482    HeapWord*          _referent_addr;
 495  483    oop                _referent;
 496  484    OopClosure*        _keep_alive;
 497  485    BoolObjectClosure* _is_alive;
      486 +
 498  487    DEBUG_ONLY(
 499  488    oop                _first_seen; // cyclic linked list check
 500  489    )
      490 +
 501  491    NOT_PRODUCT(
 502  492    size_t             _processed;
 503  493    size_t             _removed;
 504  494    )
 505  495  };
 506  496  
 507  497  inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList&    refs_list,
 508  498                                                        OopClosure*        keep_alive,
 509      -                                                      BoolObjectClosure* is_alive)
 510      -  : _refs_list(refs_list),
      499 +                                                      BoolObjectClosure* is_alive) :
      500 +    _refs_list(refs_list),
 511  501      _prev_next(refs_list.adr_head()),
 512  502      _ref(refs_list.head()),
 513  503  #ifdef ASSERT
 514  504      _first_seen(refs_list.head()),
 515  505  #endif
 516  506  #ifndef PRODUCT
 517  507      _processed(0),
 518  508      _removed(0),
 519  509  #endif
 520  510      _next(refs_list.head()),
[... 217 lines elided ...]
 738  728      oop discovered = java_lang_ref_Reference::discovered(obj);
 739  729      java_lang_ref_Reference::set_discovered_raw(obj, NULL);
 740  730      obj = discovered;
 741  731    }
 742  732    refs_list.set_head(sentinel_ref());
 743  733    refs_list.set_length(0);
 744  734  }
 745  735  
 746  736  void ReferenceProcessor::abandon_partial_discovery() {
 747  737    // loop over the lists
 748      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      738 +  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
 749  739      if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
 750      -      gclog_or_tty->print_cr("\nAbandoning %s discovered list",
 751      -                             list_name(i));
      740 +      gclog_or_tty->print_cr("\nAbandoning %s discovered list", list_name(i));
 752  741      }
 753  742      abandon_partial_discovered_list(_discoveredSoftRefs[i]);
 754  743    }
 755  744  }
 756  745  
 757  746  class RefProcPhase1Task: public AbstractRefProcTaskExecutor::ProcessTask {
 758  747  public:
 759  748    RefProcPhase1Task(ReferenceProcessor& ref_processor,
 760  749                      DiscoveredList      refs_lists[],
 761  750                      ReferencePolicy*    policy,
[... 98 lines elided ...]
 860  849                                avg_refs - ref_lists[to_idx].length());
 861  850          }
 862  851          oop move_head = ref_lists[from_idx].head();
 863  852          oop move_tail = move_head;
 864  853          oop new_head  = move_head;
 865  854          // find an element to split the list on
 866  855          for (size_t j = 0; j < refs_to_move; ++j) {
 867  856            move_tail = new_head;
 868  857            new_head = java_lang_ref_Reference::discovered(new_head);
 869  858          }
 870      -        java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
      859 +
      860 +        if (_discovered_list_needs_barrier) {
      861 +          java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
      862 +        } else {
      863 +          HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(move_tail);
      864 +          oop_store_raw(discovered_addr, ref_lists[to_idx].head());
      865 +        }
       866 +
 871  867          ref_lists[to_idx].set_head(move_head);
 872  868          ref_lists[to_idx].inc_length(refs_to_move);
 873  869          ref_lists[from_idx].set_head(new_head);
 874  870          ref_lists[from_idx].dec_length(refs_to_move);
 875  871          if (ref_lists[from_idx].length() == 0) {
 876  872            break;
 877  873          }
 878  874        } else {
 879  875          to_idx = (to_idx + 1) % _num_q;
 880  876        }
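
Reviewer note: a worked example of the splice above. Assuming from-list F = A -> B -> C -> D -> sentinel and refs_to_move == 2, the walk ends with move_head == A, move_tail == B, new_head == C; B's discovered field is then pointed at the to-list's old head, so the to-list becomes A -> B -> (old head) and F becomes C -> D -> sentinel, with both lengths adjusted by 2.
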
[... 93 lines elided ...]
 974  970      for (int i = 0; i < _max_num_q; i++) {
 975  971        process_phase3(refs_lists[i], clear_referent,
 976  972                       is_alive, keep_alive, complete_gc);
 977  973      }
 978  974    }
 979  975  }
 980  976  
 981  977  void ReferenceProcessor::clean_up_discovered_references() {
 982  978    // loop over the lists
 983  979    // Should this instead be
 984      -  // for (int i = 0; i < subclasses_of_ref; i++_ {
      980 +  // for (int i = 0; i < subclasses_of_ref(); i++) {
 985  981    //   for (int j = 0; j < _num_q; j++) {
 986  982    //     int index = i * _max_num_q + j;
 987      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
      983 +  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
 988  984      if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
 989  985        gclog_or_tty->print_cr(
 990  986          "\nScrubbing %s discovered list of Null referents",
 991  987          list_name(i));
 992  988      }
 993  989      clean_up_discovered_reflist(_discoveredSoftRefs[i]);
 994  990    }
 995  991  }
 996  992  
 997  993  void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
[... 200 lines elided ...]
1198 1194      // can mark through them now, rather than delaying that
1199 1195      // to the reference-processing phase. Since all current
1200 1196      // time-stamp policies advance the soft-ref clock only
1201 1197      // at a major collection cycle, this is always currently
1202 1198      // accurate.
1203 1199      if (!_current_soft_ref_policy->should_clear_reference(obj)) {
1204 1200        return false;
1205 1201      }
1206 1202    }
1207 1203  
     1204 +  ResourceMark rm;      // Needed for tracing.
     1205 +
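Reviewer note: the ResourceMark is presumably needed because the TraceReferenceGC output below calls obj->blueprint()->internal_name(), which returns a string allocated in the current thread's resource area.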
1208 1206    HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
1209 1207    const oop  discovered = java_lang_ref_Reference::discovered(obj);
1210 1208    assert(discovered->is_oop_or_null(), "bad discovered field");
1211 1209    if (discovered != NULL) {
1212 1210      // The reference has already been discovered...
1213 1211      if (TraceReferenceGC) {
1214 1212        gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)",
1215 1213                               obj, obj->blueprint()->internal_name());
1216 1214      }
1217 1215      if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
[... 194 lines elided ...]
1412 1410    NOT_PRODUCT(
1413 1411      if (PrintGCDetails && PrintReferenceGC && (iter.processed() > 0)) {
1414 1412        gclog_or_tty->print_cr(" Dropped %d Refs out of %d "
1415 1413          "Refs in discovered list " INTPTR_FORMAT,
1416 1414          iter.removed(), iter.processed(), (address)refs_list.head());
1417 1415      }
1418 1416    )
1419 1417  }
1420 1418  
1421 1419  const char* ReferenceProcessor::list_name(int i) {
1422      -   assert(i >= 0 && i <= _max_num_q * subclasses_of_ref, "Out of bounds index");
     1420 +   assert(i >= 0 && i <= _max_num_q * subclasses_of_ref(), "Out of bounds index");
1423 1421     int j = i / _max_num_q;
1424 1422     switch (j) {
1425 1423       case 0: return "SoftRef";
1426 1424       case 1: return "WeakRef";
1427 1425       case 2: return "FinalRef";
1428 1426       case 3: return "PhantomRef";
1429 1427     }
1430 1428     ShouldNotReachHere();
1431 1429     return NULL;
1432 1430  }
[... 4 lines elided ...]
1437 1435  }
1438 1436  #endif
1439 1437  
1440 1438  void ReferenceProcessor::verify() {
1441 1439    guarantee(sentinel_ref() != NULL && sentinel_ref()->is_oop(), "Lost _sentinelRef");
1442 1440  }
1443 1441  
1444 1442  #ifndef PRODUCT
1445 1443  void ReferenceProcessor::clear_discovered_references() {
1446 1444    guarantee(!_discovering_refs, "Discovering refs?");
1447      -  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
     1445 +  for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
1448 1446      oop obj = _discoveredSoftRefs[i].head();
1449 1447      while (obj != sentinel_ref()) {
1450 1448        oop next = java_lang_ref_Reference::discovered(obj);
1451 1449        java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
1452 1450        obj = next;
1453 1451      }
1454 1452      _discoveredSoftRefs[i].set_head(sentinel_ref());
1455 1453      _discoveredSoftRefs[i].set_length(0);
1456 1454    }
1457 1455  }
1458 1456  #endif // PRODUCT
    