< prev index next >

src/hotspot/share/gc/shared/referenceProcessor.cpp

Print this page
rev 49944 : imported patch 8201492-properly-implement-non-contiguous-reference-processing
rev 49945 : imported patch 8201492-stefanj-review


 115   _discovered_refs     = NEW_C_HEAP_ARRAY(DiscoveredList,
 116             _max_num_q * number_of_subclasses_of_ref(), mtGC);
 117 
 118   if (_discovered_refs == NULL) {
 119     vm_exit_during_initialization("Could not allocated RefProc Array");
 120   }
 121   _discoveredSoftRefs    = &_discovered_refs[0];
 122   _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
 123   _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
 124   _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
 125 
 126   // Initialize all entries to NULL
 127   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 128     _discovered_refs[i].set_head(NULL);
 129     _discovered_refs[i].set_length(0);
 130   }
 131 
 132   setup_policy(false /* default soft ref policy */);
 133 }
 134 
// A ReferenceProcessor whose "is subject to discovery" test is a span
// (MemRegion) membership check, used by collectors with a contiguous
// collection area.
SpanReferenceProcessor::SpanReferenceProcessor(MemRegion span,
                                                         bool      mt_processing,
                                                         uint      mt_processing_degree,
                                                         bool      mt_discovery,
                                                         uint      mt_discovery_degree,
                                                         bool      atomic_discovery,
                                                         BoolObjectClosure* is_alive_non_header)  :
  // Taking the address of _span_based_discoverer here is legal even though
  // that member is initialized after the base class runs.
  // NOTE(review): this presumes the ReferenceProcessor constructor only
  // stores the pointer and does not dereference it -- confirm in the base.
  ReferenceProcessor(&_span_based_discoverer,
                     mt_processing,
                     mt_processing_degree,
                     mt_discovery,
                     mt_discovery_degree,
                     atomic_discovery,
                     is_alive_non_header),
  _span_based_discoverer(span) {

}
 152 
 153 #ifndef PRODUCT
 154 void ReferenceProcessor::verify_no_references_recorded() {
 155   guarantee(!_discovering_refs, "Discovering refs?");
 156   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 157     guarantee(_discovered_refs[i].is_empty(),
 158               "Found non-empty discovered list at %u", i);
 159   }
 160 }
 161 #endif
 162 
 163 void ReferenceProcessor::weak_oops_do(OopClosure* f) {
 164   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 165     if (UseCompressedOops) {
 166       f->do_oop((narrowOop*)_discovered_refs[i].adr_head());
 167     } else {
 168       f->do_oop((oop*)_discovered_refs[i].adr_head());
 169     }
 170   }
 171 }
 172 


 957     // The reference has already been discovered...
 958     log_develop_trace(gc, ref)("Already discovered reference (" INTPTR_FORMAT ": %s)",
 959                                p2i(obj), obj->klass()->internal_name());
 960   }
 961 }
 962 
 963 #ifndef PRODUCT
 964 // Non-atomic (i.e. concurrent) discovery might allow us
 965 // to observe j.l.References with NULL referents, being those
 966 // cleared concurrently by mutators during (or after) discovery.
 967 void ReferenceProcessor::verify_referent(oop obj) {
 968   bool da = discovery_is_atomic();
 969   oop referent = java_lang_ref_Reference::referent(obj);
 970   assert(da ? oopDesc::is_oop(referent) : oopDesc::is_oop_or_null(referent),
 971          "Bad referent " INTPTR_FORMAT " found in Reference "
 972          INTPTR_FORMAT " during %satomic discovery ",
 973          p2i(referent), p2i(obj), da ? "" : "non-");
 974 }
 975 #endif
 976 
 977 template <class T>
 978 bool ReferenceProcessor::is_subject_to_discovery(T const obj) const {
 979   return _is_subject_to_discovery->do_object_b(obj);
 980 }
 981 
 982 // We mention two of several possible choices here:
 983 // #0: if the reference object is not in the "originating generation"
 984 //     (or part of the heap being collected, indicated by our "span")
 985 //     we don't treat it specially (i.e. we scan it as we would
 986 //     a normal oop, treating its references as strong references).
 987 //     This means that references can't be discovered unless their
 988 //     referent is also in the same span. This is the simplest,
 989 //     most "local" and most conservative approach, albeit one
 990 //     that may cause weak references to be enqueued least promptly.
 991 //     We call this choice the "ReferenceBasedDiscovery" policy.
 992 // #1: the reference object may be in any generation (span), but if
 993 //     the referent is in the generation (span) being currently collected
 994 //     then we can discover the reference object, provided
 995 //     the object has not already been discovered by
 996 //     a different concurrently running collector (as may be the
 997 //     case, for instance, if the reference object is in CMS and
 998 //     the referent in DefNewGeneration), and provided the processing




 115   _discovered_refs     = NEW_C_HEAP_ARRAY(DiscoveredList,
 116             _max_num_q * number_of_subclasses_of_ref(), mtGC);
 117 
 118   if (_discovered_refs == NULL) {
 119     vm_exit_during_initialization("Could not allocated RefProc Array");
 120   }
 121   _discoveredSoftRefs    = &_discovered_refs[0];
 122   _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
 123   _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
 124   _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
 125 
 126   // Initialize all entries to NULL
 127   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 128     _discovered_refs[i].set_head(NULL);
 129     _discovered_refs[i].set_length(0);
 130   }
 131 
 132   setup_policy(false /* default soft ref policy */);
 133 }
 134 


















#ifndef PRODUCT
// Debug-only invariant check: discovery must not be in progress and all
// discovered lists (one per reference subclass per queue) must be empty.
void ReferenceProcessor::verify_no_references_recorded() {
  guarantee(!_discovering_refs, "Discovering refs?");
  for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
    guarantee(_discovered_refs[i].is_empty(),
              "Found non-empty discovered list at %u", i);
  }
}
#endif
 144 
// Apply the closure f to the head pointer of every discovered list.
// The head slot is passed as narrowOop* or oop* depending on whether
// compressed oops are in use.
void ReferenceProcessor::weak_oops_do(OopClosure* f) {
  for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
    if (UseCompressedOops) {
      f->do_oop((narrowOop*)_discovered_refs[i].adr_head());
    } else {
      f->do_oop((oop*)_discovered_refs[i].adr_head());
    }
  }
}
 154 


 939     // The reference has already been discovered...
 940     log_develop_trace(gc, ref)("Already discovered reference (" INTPTR_FORMAT ": %s)",
 941                                p2i(obj), obj->klass()->internal_name());
 942   }
 943 }
 944 
#ifndef PRODUCT
// Non-atomic (i.e. concurrent) discovery might allow us
// to observe j.l.References with NULL referents, being those
// cleared concurrently by mutators during (or after) discovery.
void ReferenceProcessor::verify_referent(oop obj) {
  bool da = discovery_is_atomic();  // atomic discovery => referent must not be NULL
  oop referent = java_lang_ref_Reference::referent(obj);
  assert(da ? oopDesc::is_oop(referent) : oopDesc::is_oop_or_null(referent),
         "Bad referent " INTPTR_FORMAT " found in Reference "
         INTPTR_FORMAT " during %satomic discovery ",
         p2i(referent), p2i(obj), da ? "" : "non-");
}
#endif
 958 
 959 bool ReferenceProcessor::is_subject_to_discovery(oop const obj) const {

 960   return _is_subject_to_discovery->do_object_b(obj);
 961 }
 962 
 963 // We mention two of several possible choices here:
 964 // #0: if the reference object is not in the "originating generation"
 965 //     (or part of the heap being collected, indicated by our "span")
 966 //     we don't treat it specially (i.e. we scan it as we would
 967 //     a normal oop, treating its references as strong references).
 968 //     This means that references can't be discovered unless their
 969 //     referent is also in the same span. This is the simplest,
 970 //     most "local" and most conservative approach, albeit one
 971 //     that may cause weak references to be enqueued least promptly.
 972 //     We call this choice the "ReferenceBasedDiscovery" policy.
 973 // #1: the reference object may be in any generation (span), but if
 974 //     the referent is in the generation (span) being currently collected
 975 //     then we can discover the reference object, provided
 976 //     the object has not already been discovered by
 977 //     a different concurrently running collector (as may be the
 978 //     case, for instance, if the reference object is in CMS and
 979 //     the referent in DefNewGeneration), and provided the processing


< prev index next >