--- old/src/hotspot/share/gc/shared/referenceProcessor.cpp	2018-04-17 15:16:33.537882111 +0200
+++ new/src/hotspot/share/gc/shared/referenceProcessor.cpp	2018-04-17 15:16:33.261873620 +0200
@@ -92,20 +92,22 @@
   _discovering_refs = true;
 }
 
-ReferenceProcessor::ReferenceProcessor(MemRegion span,
+ReferenceProcessor::ReferenceProcessor(BoolObjectClosure* is_subject_to_discovery,
                                        bool      mt_processing,
                                        uint      mt_processing_degree,
                                        bool      mt_discovery,
                                        uint      mt_discovery_degree,
                                        bool      atomic_discovery,
                                        BoolObjectClosure* is_alive_non_header) :
+  _is_subject_to_discovery(is_subject_to_discovery),
   _discovering_refs(false),
   _enqueuing_is_done(false),
   _is_alive_non_header(is_alive_non_header),
   _processing_is_mt(mt_processing),
   _next_id(0)
 {
-  _span = span;
+  assert(is_subject_to_discovery != NULL, "must be set");
+
   _discovery_is_atomic = atomic_discovery;
   _discovery_is_mt     = mt_discovery;
   _num_q               = MAX2(1U, mt_processing_degree);
@@ -130,6 +132,24 @@
   setup_policy(false /* default soft ref policy */);
 }
 
+SpanReferenceProcessor::SpanReferenceProcessor(MemRegion span,
+                                               bool mt_processing,
+                                               uint mt_processing_degree,
+                                               bool mt_discovery,
+                                               uint mt_discovery_degree,
+                                               bool atomic_discovery,
+                                               BoolObjectClosure* is_alive_non_header) :
+  ReferenceProcessor(&_span_based_discoverer,
+                     mt_processing,
+                     mt_processing_degree,
+                     mt_discovery,
+                     mt_discovery_degree,
+                     atomic_discovery,
+                     is_alive_non_header),
+  _span_based_discoverer(span) {
+
+}
+
 #ifndef PRODUCT
 void ReferenceProcessor::verify_no_references_recorded() {
   guarantee(!_discovering_refs, "Discovering refs?");
@@ -449,6 +469,19 @@
                              iter.removed(), iter.processed(), p2i(&refs_list));
 }
 
+void ReferenceProcessor::process_phase2(DiscoveredList&    refs_list,
+                                        BoolObjectClosure* is_alive,
+                                        OopClosure*        keep_alive,
+                                        VoidClosure*       complete_gc) {
+  if (discovery_is_atomic()) {
+    // complete_gc is ignored in this case for this phase
+    pp2_work(refs_list, is_alive, keep_alive);
+  } else {
+    assert(complete_gc != NULL, "Error");
+    pp2_work_concurrent_discovery(refs_list, is_alive,
+                                  keep_alive, complete_gc);
+  }
+}
 // Traverse the list and remove any Refs that are not active, or
 // whose referents are either alive or NULL.
 void
@@ -941,6 +974,11 @@
 }
 #endif
 
+template <typename T>
+bool ReferenceProcessor::is_subject_to_discovery(T const obj) const {
+  return _is_subject_to_discovery->do_object_b(obj);
+}
+
 // We mention two of several possible choices here:
 // #0: if the reference object is not in the "originating generation"
 //     (or part of the heap being collected, indicated by our "span"
@@ -978,9 +1016,8 @@
     return false;
   }
 
-  HeapWord* obj_addr = (HeapWord*)obj;
   if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
-      !_span.contains(obj_addr)) {
+      !is_subject_to_discovery(obj)) {
     // Reference is not in the originating generation;
     // don't treat it specially (i.e. we want to scan it as a normal
    // object with strong references).
@@ -1039,16 +1076,15 @@
     // Discover if and only if EITHER:
     // .. reference is in our span, OR
     // .. we are an atomic collector and referent is in our span
-    if (_span.contains(obj_addr) ||
+    if (is_subject_to_discovery(obj) ||
        (discovery_is_atomic() &&
-        _span.contains(java_lang_ref_Reference::referent(obj)))) {
-      // should_enqueue = true;
+        is_subject_to_discovery(java_lang_ref_Reference::referent(obj)))) {
     } else {
       return false;
     }
   } else {
     assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
-           _span.contains(obj_addr), "code inconsistency");
+           is_subject_to_discovery(obj), "code inconsistency");
   }
 
   // Get the right type of discovered queue head.