< prev index next >
src/hotspot/share/gc/shared/referenceProcessor.cpp
Print this page
rev 49753 : imported patch 8201487-do-not-rebalance-with-serial-processing
rev 49758 : imported patch 8201492-properly-implement-non-contiguous-reference-processing
*** 90,113 ****
_soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
_discovering_refs = true;
}
! ReferenceProcessor::ReferenceProcessor(MemRegion span,
bool mt_processing,
uint mt_processing_degree,
bool mt_discovery,
uint mt_discovery_degree,
bool atomic_discovery,
BoolObjectClosure* is_alive_non_header) :
_discovering_refs(false),
_enqueuing_is_done(false),
_is_alive_non_header(is_alive_non_header),
_processing_is_mt(mt_processing),
_next_id(0)
{
! _span = span;
_discovery_is_atomic = atomic_discovery;
_discovery_is_mt = mt_discovery;
_num_q = MAX2(1U, mt_processing_degree);
_max_num_q = MAX2(_num_q, mt_discovery_degree);
_discovered_refs = NEW_C_HEAP_ARRAY(DiscoveredList,
--- 90,115 ----
_soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
_discovering_refs = true;
}
! ReferenceProcessor::ReferenceProcessor(BoolObjectClosure* is_subject_to_discovery,
bool mt_processing,
uint mt_processing_degree,
bool mt_discovery,
uint mt_discovery_degree,
bool atomic_discovery,
BoolObjectClosure* is_alive_non_header) :
+ _is_subject_to_discovery(is_subject_to_discovery),
_discovering_refs(false),
_enqueuing_is_done(false),
_is_alive_non_header(is_alive_non_header),
_processing_is_mt(mt_processing),
_next_id(0)
{
! assert(is_subject_to_discovery != NULL, "must be set");
!
_discovery_is_atomic = atomic_discovery;
_discovery_is_mt = mt_discovery;
_num_q = MAX2(1U, mt_processing_degree);
_max_num_q = MAX2(_num_q, mt_discovery_degree);
_discovered_refs = NEW_C_HEAP_ARRAY(DiscoveredList,
*** 128,137 ****
--- 130,157 ----
}
setup_policy(false /* default soft ref policy */);
}
+ // Convenience subclass of ReferenceProcessor whose "subject to discovery"
+ // predicate is containment in a fixed heap span (MemRegion), preserving the
+ // classic span-based discovery behavior for collectors that use it.
+ SpanReferenceProcessor::SpanReferenceProcessor(MemRegion span,
+ bool mt_processing,
+ uint mt_processing_degree,
+ bool mt_discovery,
+ uint mt_discovery_degree,
+ bool atomic_discovery,
+ BoolObjectClosure* is_alive_non_header) :
+ // Passing &_span_based_discoverer to the base class before the member is
+ // constructed is safe: the base constructor only stores (and NULL-checks)
+ // the pointer; the closure is not invoked during construction.
+ ReferenceProcessor(&_span_based_discoverer,
+ mt_processing,
+ mt_processing_degree,
+ mt_discovery,
+ mt_discovery_degree,
+ atomic_discovery,
+ is_alive_non_header),
+ _span_based_discoverer(span) {
+
+ }
+
#ifndef PRODUCT
void ReferenceProcessor::verify_no_references_recorded() {
guarantee(!_discovering_refs, "Discovering refs?");
for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
guarantee(_discovered_refs[i].is_empty(),
*** 447,456 ****
--- 467,489 ----
complete_gc->do_void();
log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " dead Refs out of " SIZE_FORMAT " discovered Refs by policy, from list " INTPTR_FORMAT,
iter.removed(), iter.processed(), p2i(&refs_list));
}
+ // Phase 2 of reference processing: traverse refs_list and drop any Refs that
+ // are no longer active, or whose referents are alive or NULL, dispatching to
+ // the worker variant that matches the discovery mode.
+ void ReferenceProcessor::process_phase2(DiscoveredList& refs_list,
+ BoolObjectClosure* is_alive,
+ OopClosure* keep_alive,
+ VoidClosure* complete_gc) {
+ if (discovery_is_atomic()) {
+ // complete_gc is ignored in this case for this phase
+ pp2_work(refs_list, is_alive, keep_alive);
+ } else {
+ // Non-atomic (concurrent) discovery requires a complete_gc closure to
+ // finish any transitive marking triggered by keep_alive.
+ assert(complete_gc != NULL, "Error");
+ pp2_work_concurrent_discovery(refs_list, is_alive,
+ keep_alive, complete_gc);
+ }
+ }
// Traverse the list and remove any Refs that are not active, or
// whose referents are either alive or NULL.
void
ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
BoolObjectClosure* is_alive,
*** 939,948 ****
--- 972,986 ----
INTPTR_FORMAT " during %satomic discovery ",
p2i(referent), p2i(obj), da ? "" : "non-");
}
#endif
+ // Returns true iff obj should be considered for reference discovery by this
+ // processor. Delegates to the configured BoolObjectClosure predicate (e.g.
+ // a span-based discoverer, or a collector-specific closure), which replaces
+ // the former hard-coded _span.contains() test.
+ template <class T>
+ bool ReferenceProcessor::is_subject_to_discovery(T const obj) const {
+ return _is_subject_to_discovery->do_object_b(obj);
+ }
+
// We mention two of several possible choices here:
// #0: if the reference object is not in the "originating generation"
// (or part of the heap being collected, indicated by our "span"
// we don't treat it specially (i.e. we scan it as we would
// a normal oop, treating its references as strong references).
*** 976,988 ****
oop next = java_lang_ref_Reference::next(obj);
if (next != NULL) { // Ref is no longer active
return false;
}
- HeapWord* obj_addr = (HeapWord*)obj;
if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
! !_span.contains(obj_addr)) {
// Reference is not in the originating generation;
// don't treat it specially (i.e. we want to scan it as a normal
// object with strong references).
return false;
}
--- 1014,1025 ----
oop next = java_lang_ref_Reference::next(obj);
if (next != NULL) { // Ref is no longer active
return false;
}
if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
! !is_subject_to_discovery(obj)) {
// Reference is not in the originating generation;
// don't treat it specially (i.e. we want to scan it as a normal
// object with strong references).
return false;
}
*** 1037,1056 ****
if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
verify_referent(obj);
// Discover if and only if EITHER:
// .. reference is in our span, OR
// .. we are an atomic collector and referent is in our span
! if (_span.contains(obj_addr) ||
(discovery_is_atomic() &&
! _span.contains(java_lang_ref_Reference::referent(obj)))) {
! // should_enqueue = true;
} else {
return false;
}
} else {
assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
! _span.contains(obj_addr), "code inconsistency");
}
// Get the right type of discovered queue head.
DiscoveredList* list = get_discovered_list(rt);
if (list == NULL) {
--- 1074,1092 ----
if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
verify_referent(obj);
// Discover if and only if EITHER:
// .. reference is in our span, OR
// .. we are an atomic collector and referent is in our span
! if (is_subject_to_discovery(obj) ||
(discovery_is_atomic() &&
! is_subject_to_discovery(java_lang_ref_Reference::referent(obj)))) {
} else {
return false;
}
} else {
assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
! is_subject_to_discovery(obj), "code inconsistency");
}
// Get the right type of discovered queue head.
DiscoveredList* list = get_discovered_list(rt);
if (list == NULL) {
< prev index next >