
src/hotspot/share/gc/shared/referenceProcessor.cpp

rev 49912 : imported patch 8201492-properly-implement-non-contiguous-reference-processing
rev 49913 : imported patch 8201492-stefanj-review


  75   // Verify that we're not currently discovering refs
  76   assert(!_discovering_refs, "nested call?");
  77 
  78   if (check_no_refs) {
  79     // Verify that the discovered lists are empty
  80     verify_no_references_recorded();
  81   }
  82 #endif // ASSERT
  83 
  84   // Someone could have modified the value of the static
  85   // field in the j.l.r.SoftReference class that holds the
  86   // soft reference timestamp clock using reflection or
  87   // Unsafe between GCs. Unconditionally update the static
  88   // field in ReferenceProcessor here so that we use the new
  89   // value during reference discovery.
  90 
  91   _soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
  92   _discovering_refs = true;
  93 }
  94 
  95 ReferenceProcessor::ReferenceProcessor(MemRegion span,
  96                                        bool      mt_processing,
  97                                        uint      mt_processing_degree,
  98                                        bool      mt_discovery,
  99                                        uint      mt_discovery_degree,
 100                                        bool      atomic_discovery,
 101                                        BoolObjectClosure* is_alive_non_header)  :

 102   _discovering_refs(false),
 103   _enqueuing_is_done(false),
 104   _is_alive_non_header(is_alive_non_header),
 105   _processing_is_mt(mt_processing),
 106   _next_id(0)
 107 {
 108   _span = span;

 109   _discovery_is_atomic = atomic_discovery;
 110   _discovery_is_mt     = mt_discovery;
 111   _num_q               = MAX2(1U, mt_processing_degree);
 112   _max_num_q           = MAX2(_num_q, mt_discovery_degree);
 113   _discovered_refs     = NEW_C_HEAP_ARRAY(DiscoveredList,
 114             _max_num_q * number_of_subclasses_of_ref(), mtGC);
 115 
 116   if (_discovered_refs == NULL) {
  117     vm_exit_during_initialization("Could not allocate RefProc Array");
 118   }
 119   _discoveredSoftRefs    = &_discovered_refs[0];
 120   _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
 121   _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
 122   _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
 123 
 124   // Initialize all entries to NULL
 125   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 126     _discovered_refs[i].set_head(NULL);
 127     _discovered_refs[i].set_length(0);
 128   }
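
For orientation: the lists set up above live in one flat C-heap array, partitioned into four segments of _max_num_q lists each, one segment per Reference subclass (Soft, Weak, Final, Phantom). A minimal standalone sketch of that indexing scheme, with stand-in constants rather than the VM's actual declarations:

#include <cstdio>

const unsigned kNumSubclasses = 4;  // Soft, Weak, Final, Phantom
const unsigned kMaxNumQ       = 8;  // stand-in for _max_num_q

// Flat slot of queue q in reference-type segment t, mirroring
// _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q], etc.
unsigned list_index(unsigned t, unsigned q) {
  return t * kMaxNumQ + q;
}

int main() {
  // e.g. weak-ref queue 3 lives at flat slot 1 * 8 + 3 = 11
  printf("weak q3 -> slot %u\n", list_index(1, 3));
  return 0;
}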


 432     bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
 433     if (referent_is_dead &&
 434         !policy->should_clear_reference(iter.obj(), _soft_ref_timestamp_clock)) {
 435       log_develop_trace(gc, ref)("Dropping reference (" INTPTR_FORMAT ": %s"  ") by policy",
 436                                  p2i(iter.obj()), iter.obj()->klass()->internal_name());
 437       // Remove Reference object from list
 438       iter.remove();
 439       // keep the referent around
 440       iter.make_referent_alive();
 441       iter.move_to_next();
 442     } else {
 443       iter.next();
 444     }
 445   }
 446   // Close the reachable set
 447   complete_gc->do_void();
 448   log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " dead Refs out of " SIZE_FORMAT " discovered Refs by policy, from list " INTPTR_FORMAT,
 449                              iter.removed(), iter.processed(), p2i(&refs_list));
 450 }
 451 
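The loop above follows the DiscoveredListIterator protocol: drop the current entry with remove() and advance with move_to_next(), or keep it and advance with next(). A minimal standalone sketch of that unlink-or-advance pattern over a plain singly-linked list (hypothetical Ref type, not the VM's iterator):

#include <cstdio>

struct Ref { int id; bool keep; Ref* next; };

// Walk the list, unlinking entries we decide to drop, keeping the rest.
// Mirrors the remove()/move_to_next() vs. next() split above.
void filter(Ref** head) {
  Ref** link = head;              // link we may rewrite on removal
  for (Ref* r = *head; r != NULL; ) {
    Ref* nxt = r->next;
    if (!r->keep) {
      *link = nxt;                // remove(): unlink current entry
      printf("Dropping ref %d by policy\n", r->id);
    } else {
      link = &r->next;            // next(): advance the rewrite point
    }
    r = nxt;                      // move_to_next()
  }
}

int main() {
  Ref c = {3, true, NULL}, b = {2, false, &c}, a = {1, true, &b};
  Ref* head = &a;
  filter(&head);
  for (Ref* r = head; r != NULL; r = r->next) printf("kept %d\n", r->id);
  return 0;
}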

 452 // Traverse the list and remove any Refs that are not active, or
 453 // whose referents are either alive or NULL.
 454 void
 455 ReferenceProcessor::pp2_work(DiscoveredList&    refs_list,
 456                              BoolObjectClosure* is_alive,
 457                              OopClosure*        keep_alive) {
 458   assert(discovery_is_atomic(), "Error");
 459   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 460   while (iter.has_next()) {
 461     iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
 462     DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
 463     assert(next == NULL, "Should not discover inactive Reference");
 464     if (iter.is_referent_alive()) {
 465       log_develop_trace(gc, ref)("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
 466                                  p2i(iter.obj()), iter.obj()->klass()->internal_name());
 467       // The referent is reachable after all.
 468       // Remove Reference object from list.
 469       iter.remove();
 470       // Update the referent pointer as necessary: Note that this
 471       // should not entail any recursive marking because the


 924     // The reference has already been discovered...
 925     log_develop_trace(gc, ref)("Already discovered reference (" INTPTR_FORMAT ": %s)",
 926                                p2i(obj), obj->klass()->internal_name());
 927   }
 928 }
 929 
 930 #ifndef PRODUCT
 931 // Non-atomic (i.e. concurrent) discovery might allow us
  932 // to observe j.l.References with NULL referents, namely those
 933 // cleared concurrently by mutators during (or after) discovery.
 934 void ReferenceProcessor::verify_referent(oop obj) {
 935   bool da = discovery_is_atomic();
 936   oop referent = java_lang_ref_Reference::referent(obj);
 937   assert(da ? oopDesc::is_oop(referent) : oopDesc::is_oop_or_null(referent),
 938          "Bad referent " INTPTR_FORMAT " found in Reference "
 939          INTPTR_FORMAT " during %satomic discovery ",
 940          p2i(referent), p2i(obj), da ? "" : "non-");
 941 }
 942 #endif
 943 


 944 // We mention two of several possible choices here:
 945 // #0: if the reference object is not in the "originating generation"
  946 //     (or part of the heap being collected, indicated by our "span")
 947 //     we don't treat it specially (i.e. we scan it as we would
 948 //     a normal oop, treating its references as strong references).
 949 //     This means that references can't be discovered unless their
 950 //     referent is also in the same span. This is the simplest,
 951 //     most "local" and most conservative approach, albeit one
 952 //     that may cause weak references to be enqueued least promptly.
 953 //     We call this choice the "ReferenceBasedDiscovery" policy.
 954 // #1: the reference object may be in any generation (span), but if
 955 //     the referent is in the generation (span) being currently collected
 956 //     then we can discover the reference object, provided
 957 //     the object has not already been discovered by
 958 //     a different concurrently running collector (as may be the
 959 //     case, for instance, if the reference object is in CMS and
 960 //     the referent in DefNewGeneration), and provided the processing
 961 //     of this reference object by the current collector will
 962 //     appear atomic to every other collector in the system.
 963 //     (Thus, for instance, a concurrent collector may not
 964 //     discover references in other generations even if the
 965 //     referent is in its own generation). This policy may,
 966 //     in certain cases, enqueue references somewhat sooner than
 967 //     might Policy #0 above, but at marginally increased cost
 968 //     and complexity in processing these references.
  969 //     We call this choice the "ReferentBasedDiscovery" policy.
 970 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
 971   // Make sure we are discovering refs (rather than processing discovered refs).
 972   if (!_discovering_refs || !RegisterReferences) {
 973     return false;
 974   }
 975   // We only discover active references.
 976   oop next = java_lang_ref_Reference::next(obj);
 977   if (next != NULL) {   // Ref is no longer active
 978     return false;
 979   }
 980 
 981   HeapWord* obj_addr = (HeapWord*)obj;
 982   if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
 983       !_span.contains(obj_addr)) {
 984     // Reference is not in the originating generation;
 985     // don't treat it specially (i.e. we want to scan it as a normal
 986     // object with strong references).
 987     return false;
 988   }
 989 
 990   // We only discover references whose referents are not (yet)
 991   // known to be strongly reachable.
 992   if (is_alive_non_header() != NULL) {
 993     verify_referent(obj);
 994     if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {
 995       return false;  // referent is reachable
 996     }
 997   }
 998   if (rt == REF_SOFT) {
 999     // For soft refs we can decide now if these are not
1000     // current candidates for clearing, in which case we
1001     // can mark through them now, rather than delaying that
1002     // to the reference-processing phase. Since all current
1003     // time-stamp policies advance the soft-ref clock only


 1022       // if it's already been discovered it must be on another
1023       // generation's discovered list; so we won't discover it.
1024       return false;
1025     } else {
1026       assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
1027              "Unrecognized policy");
1028       // Check assumption that an object is not potentially
1029       // discovered twice except by concurrent collectors that potentially
1030       // trace the same Reference object twice.
1031       assert(UseConcMarkSweepGC || UseG1GC,
1032              "Only possible with a concurrent marking collector");
1033       return true;
1034     }
1035   }
1036 
1037   if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
1038     verify_referent(obj);
1039     // Discover if and only if EITHER:
1040     // .. reference is in our span, OR
1041     // .. we are an atomic collector and referent is in our span
1042     if (_span.contains(obj_addr) ||
1043         (discovery_is_atomic() &&
1044          _span.contains(java_lang_ref_Reference::referent(obj)))) {
1045       // should_enqueue = true;
1046     } else {
1047       return false;
1048     }
1049   } else {
1050     assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
1051            _span.contains(obj_addr), "code inconsistency");
1052   }
1053 
1054   // Get the right type of discovered queue head.
1055   DiscoveredList* list = get_discovered_list(rt);
1056   if (list == NULL) {
1057     return false;   // nothing special needs to be done
1058   }
1059 
1060   if (_discovery_is_mt) {
1061     add_to_discovered_list_mt(*list, obj, discovered_addr);
1062   } else {
1063     // We do a raw store here: the field will be visited later when processing
1064     // the discovered references.
1065     oop current_head = list->head();
1066     // The last ref must have its discovered field pointing to itself.
1067     oop next_discovered = (current_head != NULL) ? current_head : obj;
1068 
1069     assert(discovered == NULL, "control point invariant");
1070     RawAccess<>::oop_store(discovered_addr, next_discovered);
1071     list->set_head(obj);
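
Worth noting in the raw-store path above: the discovered list is terminated not by NULL but by an entry whose discovered field points to itself, which is what the next_discovered computation preserves. A standalone sketch of that self-terminating head push, with simplified stand-in types:

#include <cstdio>

struct RefObj { int id; RefObj* discovered; };

struct DiscoveredList {
  RefObj* head;
  // Push obj at the head; the last entry's discovered field points at
  // itself rather than NULL, mirroring next_discovered above.
  void push(RefObj* obj) {
    obj->discovered = (head != NULL) ? head : obj;  // self-loop terminates
    head = obj;
  }
};

int main() {
  RefObj a = {1, NULL}, b = {2, NULL};
  DiscoveredList list = { NULL };
  list.push(&a);   // a.discovered == &a: end-of-list self-loop
  list.push(&b);   // b.discovered == &a
  printf("tail self-loop: %d\n", a.discovered == &a);
  return 0;
}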




  75   // Verify that we're not currently discovering refs
  76   assert(!_discovering_refs, "nested call?");
  77 
  78   if (check_no_refs) {
  79     // Verify that the discovered lists are empty
  80     verify_no_references_recorded();
  81   }
  82 #endif // ASSERT
  83 
  84   // Someone could have modified the value of the static
  85   // field in the j.l.r.SoftReference class that holds the
  86   // soft reference timestamp clock using reflection or
  87   // Unsafe between GCs. Unconditionally update the static
  88   // field in ReferenceProcessor here so that we use the new
  89   // value during reference discovery.
  90 
  91   _soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
  92   _discovering_refs = true;
  93 }
  94 
  95 ReferenceProcessor::ReferenceProcessor(BoolObjectClosure* is_subject_to_discovery,
  96                                        bool      mt_processing,
  97                                        uint      mt_processing_degree,
  98                                        bool      mt_discovery,
  99                                        uint      mt_discovery_degree,
 100                                        bool      atomic_discovery,
 101                                        BoolObjectClosure* is_alive_non_header)  :
 102   _is_subject_to_discovery(is_subject_to_discovery),
 103   _discovering_refs(false),
 104   _enqueuing_is_done(false),
 105   _is_alive_non_header(is_alive_non_header),
 106   _processing_is_mt(mt_processing),
 107   _next_id(0)
 108 {
 109   assert(is_subject_to_discovery != NULL, "must be set");
 110 
 111   _discovery_is_atomic = atomic_discovery;
 112   _discovery_is_mt     = mt_discovery;
 113   _num_q               = MAX2(1U, mt_processing_degree);
 114   _max_num_q           = MAX2(_num_q, mt_discovery_degree);
 115   _discovered_refs     = NEW_C_HEAP_ARRAY(DiscoveredList,
 116             _max_num_q * number_of_subclasses_of_ref(), mtGC);
 117 
 118   if (_discovered_refs == NULL) {
  119     vm_exit_during_initialization("Could not allocate RefProc Array");
 120   }
 121   _discoveredSoftRefs    = &_discovered_refs[0];
 122   _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
 123   _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
 124   _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
 125 
 126   // Initialize all entries to NULL
 127   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 128     _discovered_refs[i].set_head(NULL);
 129     _discovered_refs[i].set_length(0);
 130   }
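
The new constructor parameter above is the heart of this patch: the fixed MemRegion _span is replaced by a BoolObjectClosure, so a collector whose heap is not one contiguous region can answer the "is this object subject to discovery?" question itself. A hedged sketch of how a contiguous-heap collector might still supply a span-backed predicate (simplified stand-in types; the patch's actual helper may differ):

#include <cstdio>

// Simplified stand-ins for HeapWord*, oop, MemRegion and BoolObjectClosure.
typedef char* HeapWord;
typedef void* oop;

struct MemRegion {
  HeapWord _start;
  HeapWord _end;
  bool contains(const void* p) const {
    return (HeapWord)p >= _start && (HeapWord)p < _end;
  }
};

struct BoolObjectClosure {
  virtual bool do_object_b(oop obj) = 0;
};

// Span-backed predicate: subject to discovery iff the object lies in the
// contiguous region being collected. A non-contiguous collector would
// instead consult its own region or space structures here.
struct SpanBasedDiscoveryClosure : public BoolObjectClosure {
  MemRegion _span;
  virtual bool do_object_b(oop obj) { return _span.contains(obj); }
};

int main() {
  char heap[64];
  SpanBasedDiscoveryClosure cl;
  cl._span._start = heap;
  cl._span._end   = heap + 32;  // pretend the first half is being collected
  printf("%d %d\n", cl.do_object_b(heap + 8), cl.do_object_b(heap + 40));
  return 0;
}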


 434     bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
 435     if (referent_is_dead &&
 436         !policy->should_clear_reference(iter.obj(), _soft_ref_timestamp_clock)) {
 437       log_develop_trace(gc, ref)("Dropping reference (" INTPTR_FORMAT ": %s"  ") by policy",
 438                                  p2i(iter.obj()), iter.obj()->klass()->internal_name());
 439       // Remove Reference object from list
 440       iter.remove();
 441       // keep the referent around
 442       iter.make_referent_alive();
 443       iter.move_to_next();
 444     } else {
 445       iter.next();
 446     }
 447   }
 448   // Close the reachable set
 449   complete_gc->do_void();
 450   log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " dead Refs out of " SIZE_FORMAT " discovered Refs by policy, from list " INTPTR_FORMAT,
 451                              iter.removed(), iter.processed(), p2i(&refs_list));
 452 }
 453 
 454 void ReferenceProcessor::process_phase2(DiscoveredList&    refs_list,
 455                                         BoolObjectClosure* is_alive,
 456                                         OopClosure*        keep_alive,
 457                                         VoidClosure*       complete_gc) {
 458   if (discovery_is_atomic()) {
 459     // complete_gc is ignored in this case for this phase
 460     pp2_work(refs_list, is_alive, keep_alive);
 461   } else {
 462     assert(complete_gc != NULL, "Error");
 463     pp2_work_concurrent_discovery(refs_list, is_alive,
 464                                   keep_alive, complete_gc);
 465   }
 466 }
 467 // Traverse the list and remove any Refs that are not active, or
 468 // whose referents are either alive or NULL.
 469 void
 470 ReferenceProcessor::pp2_work(DiscoveredList&    refs_list,
 471                              BoolObjectClosure* is_alive,
 472                              OopClosure*        keep_alive) {
 473   assert(discovery_is_atomic(), "Error");
 474   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 475   while (iter.has_next()) {
 476     iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
 477     DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
 478     assert(next == NULL, "Should not discover inactive Reference");
 479     if (iter.is_referent_alive()) {
 480       log_develop_trace(gc, ref)("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
 481                                  p2i(iter.obj()), iter.obj()->klass()->internal_name());
 482       // The referent is reachable after all.
 483       // Remove Reference object from list.
 484       iter.remove();
 485       // Update the referent pointer as necessary: Note that this
 486       // should not entail any recursive marking because the


 939     // The reference has already been discovered...
 940     log_develop_trace(gc, ref)("Already discovered reference (" INTPTR_FORMAT ": %s)",
 941                                p2i(obj), obj->klass()->internal_name());
 942   }
 943 }
 944 
 945 #ifndef PRODUCT
 946 // Non-atomic (i.e. concurrent) discovery might allow us
  947 // to observe j.l.References with NULL referents, namely those
 948 // cleared concurrently by mutators during (or after) discovery.
 949 void ReferenceProcessor::verify_referent(oop obj) {
 950   bool da = discovery_is_atomic();
 951   oop referent = java_lang_ref_Reference::referent(obj);
 952   assert(da ? oopDesc::is_oop(referent) : oopDesc::is_oop_or_null(referent),
 953          "Bad referent " INTPTR_FORMAT " found in Reference "
 954          INTPTR_FORMAT " during %satomic discovery ",
 955          p2i(referent), p2i(obj), da ? "" : "non-");
 956 }
 957 #endif
 958 
 959 bool ReferenceProcessor::is_subject_to_discovery(oop const obj) const {
 960   return _is_subject_to_discovery->do_object_b(obj);
 961 }
 962 
 963 // We mention two of several possible choices here:
 964 // #0: if the reference object is not in the "originating generation"
  965 //     (or part of the heap being collected, indicated by our "span")
 966 //     we don't treat it specially (i.e. we scan it as we would
 967 //     a normal oop, treating its references as strong references).
 968 //     This means that references can't be discovered unless their
 969 //     referent is also in the same span. This is the simplest,
 970 //     most "local" and most conservative approach, albeit one
 971 //     that may cause weak references to be enqueued least promptly.
 972 //     We call this choice the "ReferenceBasedDiscovery" policy.
 973 // #1: the reference object may be in any generation (span), but if
 974 //     the referent is in the generation (span) being currently collected
 975 //     then we can discover the reference object, provided
 976 //     the object has not already been discovered by
 977 //     a different concurrently running collector (as may be the
 978 //     case, for instance, if the reference object is in CMS and
 979 //     the referent in DefNewGeneration), and provided the processing
 980 //     of this reference object by the current collector will
 981 //     appear atomic to every other collector in the system.
 982 //     (Thus, for instance, a concurrent collector may not
 983 //     discover references in other generations even if the
 984 //     referent is in its own generation). This policy may,
 985 //     in certain cases, enqueue references somewhat sooner than
 986 //     might Policy #0 above, but at marginally increased cost
 987 //     and complexity in processing these references.
  988 //     We call this choice the "ReferentBasedDiscovery" policy.
 989 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
 990   // Make sure we are discovering refs (rather than processing discovered refs).
 991   if (!_discovering_refs || !RegisterReferences) {
 992     return false;
 993   }
 994   // We only discover active references.
 995   oop next = java_lang_ref_Reference::next(obj);
 996   if (next != NULL) {   // Ref is no longer active
 997     return false;
 998   }
 999 

1000   if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
1001       !is_subject_to_discovery(obj)) {
1002     // Reference is not in the originating generation;
1003     // don't treat it specially (i.e. we want to scan it as a normal
1004     // object with strong references).
1005     return false;
1006   }
1007 
1008   // We only discover references whose referents are not (yet)
1009   // known to be strongly reachable.
1010   if (is_alive_non_header() != NULL) {
1011     verify_referent(obj);
1012     if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {
1013       return false;  // referent is reachable
1014     }
1015   }
1016   if (rt == REF_SOFT) {
1017     // For soft refs we can decide now if these are not
1018     // current candidates for clearing, in which case we
1019     // can mark through them now, rather than delaying that
1020     // to the reference-processing phase. Since all current
1021     // time-stamp policies advance the soft-ref clock only


 1040       // if it's already been discovered it must be on another
1041       // generation's discovered list; so we won't discover it.
1042       return false;
1043     } else {
1044       assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
1045              "Unrecognized policy");
1046       // Check assumption that an object is not potentially
1047       // discovered twice except by concurrent collectors that potentially
1048       // trace the same Reference object twice.
1049       assert(UseConcMarkSweepGC || UseG1GC,
1050              "Only possible with a concurrent marking collector");
1051       return true;
1052     }
1053   }
1054 
1055   if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
1056     verify_referent(obj);
1057     // Discover if and only if EITHER:
1058     // .. reference is in our span, OR
1059     // .. we are an atomic collector and referent is in our span
1060     if (is_subject_to_discovery(obj) ||
1061         (discovery_is_atomic() &&
1062          is_subject_to_discovery(java_lang_ref_Reference::referent(obj)))) {

1063     } else {
1064       return false;
1065     }
1066   } else {
1067     assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
1068            is_subject_to_discovery(obj), "code inconsistency");
1069   }
1070 
1071   // Get the right type of discovered queue head.
1072   DiscoveredList* list = get_discovered_list(rt);
1073   if (list == NULL) {
1074     return false;   // nothing special needs to be done
1075   }
1076 
1077   if (_discovery_is_mt) {
1078     add_to_discovered_list_mt(*list, obj, discovered_addr);
1079   } else {
1080     // We do a raw store here: the field will be visited later when processing
1081     // the discovered references.
1082     oop current_head = list->head();
1083     // The last ref must have its discovered field pointing to itself.
1084     oop next_discovered = (current_head != NULL) ? current_head : obj;
1085 
1086     assert(discovered == NULL, "control point invariant");
1087     RawAccess<>::oop_store(discovered_addr, next_discovered);
1088     list->set_head(obj);
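
In the multi-threaded branch above, add_to_discovered_list_mt has to tolerate racing discoverers. A common lock-free approach is a compare-and-swap loop on the list head; a standalone sketch under that assumption (hypothetical types, not the VM's actual implementation, which differs in detail):

#include <atomic>
#include <cstdio>

struct RefObj { int id; RefObj* discovered; };

// Lock-free head push: retry the CAS until our snapshot of the head is
// still current. The self-loop again marks the end of the list.
void push_mt(std::atomic<RefObj*>& head, RefObj* obj) {
  RefObj* cur = head.load(std::memory_order_relaxed);
  do {
    obj->discovered = (cur != NULL) ? cur : obj;
  } while (!head.compare_exchange_weak(cur, obj,
                                       std::memory_order_release,
                                       std::memory_order_relaxed));
}

int main() {
  std::atomic<RefObj*> head(NULL);
  RefObj a = {1, NULL}, b = {2, NULL};
  push_mt(head, &a);
  push_mt(head, &b);
  for (RefObj* r = head.load(); ; r = r->discovered) {
    printf("%d\n", r->id);
    if (r->discovered == r) break;  // self-loop: end of list
  }
  return 0;
}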

