src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp
Print this page
rev 6719 : imported patch fast-reclaim-main-patch
rev 6720 : imported patch fast-reclaim-alt1
rev 6721 : [mq]: fixes1
*** 2179,2188 ****
--- 2179,2189 ----
G1StringDedup::stop();
}
}
void G1CollectedHeap::clear_humongous_is_live_table() {
+ guarantee(G1ReclaimDeadHumongousObjectsAtYoungGC, "Should only be called if true");
_humongous_is_live.clear();
}
size_t G1CollectedHeap::conservative_max_heap_alignment() {
return HeapRegion::max_region_size();
*** 3690,3703 ****
if (G1SummarizeRSetStats && (G1SummarizeRSetStatsPeriod > 0) &&
(total_collections() % G1SummarizeRSetStatsPeriod == 0)) {
g1_rem_set()->print_periodic_summary_info("Before GC RS summary");
}
-
- if (G1ReclaimDeadHumongousObjectsAtYoungGC) {
- clear_humongous_is_live_table();
- }
}
void G1CollectedHeap::gc_epilogue(bool full /* Ignored */) {
if (G1SummarizeRSetStats &&
--- 3691,3700 ----
*** 3773,3783 ****
size_t G1CollectedHeap::cards_scanned() {
return g1_rem_set()->cardsScanned();
}
! bool G1CollectedHeap::humongous_region_is_always_live(HeapRegion* region) {
assert(region->startsHumongous(), "Must start a humongous object");
return oop(region->bottom())->is_objArray() || !region->rem_set()->is_empty();
}
class RegisterHumongousWithInCSetFastTestClosure : public HeapRegionClosure {
--- 3770,3781 ----
size_t G1CollectedHeap::cards_scanned() {
return g1_rem_set()->cardsScanned();
}
! bool G1CollectedHeap::humongous_region_is_always_live(uint index) {
! HeapRegion* region = region_at(index);
assert(region->startsHumongous(), "Must start a humongous object");
return oop(region->bottom())->is_objArray() || !region->rem_set()->is_empty();
}
class RegisterHumongousWithInCSetFastTestClosure : public HeapRegionClosure {
*** 3792,3807 ****
if (!r->startsHumongous()) {
return false;
}
G1CollectedHeap* g1h = G1CollectedHeap::heap();
! bool is_candidate = !g1h->humongous_region_is_always_live(r);
if (is_candidate) {
! // Do not even try to reclaim a humongous object that we already know will
! // not be treated as live later. A young collection will not decrease the
! // amount of remembered set entries for that region.
! g1h->register_humongous_region_with_in_cset_fast_test(r->hrs_index());
_candidate_humongous++;
}
_total_humongous++;
return false;
--- 3790,3806 ----
if (!r->startsHumongous()) {
return false;
}
G1CollectedHeap* g1h = G1CollectedHeap::heap();
! uint region_idx = r->hrs_index();
! bool is_candidate = !g1h->humongous_region_is_always_live(region_idx);
! // is_candidate already filters out humongous regions with some remembered set.
! // This will not lead to humongous objects that we mistakenly keep alive because
! // during young collection the remembered sets will only be added to.
if (is_candidate) {
! g1h->register_humongous_region_with_in_cset_fast_test(region_idx);
_candidate_humongous++;
}
_total_humongous++;
return false;
*** 3810,3824 ****
--- 3809,3832 ----
size_t total_humongous() const { return _total_humongous; }
size_t candidate_humongous() const { return _candidate_humongous; }
};
void G1CollectedHeap::register_humongous_regions_with_in_cset_fast_test() {
+ if (!G1ReclaimDeadHumongousObjectsAtYoungGC) {
+ g1_policy()->phase_times()->record_fast_reclaim_humongous_stats(0, 0);
+ return;
+ }
+
RegisterHumongousWithInCSetFastTestClosure cl;
heap_region_iterate(&cl);
g1_policy()->phase_times()->record_fast_reclaim_humongous_stats(cl.total_humongous(),
cl.candidate_humongous());
_has_humongous_reclaim_candidates = cl.candidate_humongous() > 0;
+
+ if (_has_humongous_reclaim_candidates) {
+ clear_humongous_is_live_table();
+ }
}
void
G1CollectedHeap::setup_surviving_young_words() {
assert(_surviving_young_words == NULL, "pre-condition");
*** 4104,4116 ****
g1_policy()->print_collection_set(g1_policy()->inc_cset_head(), gclog_or_tty);
#endif // YOUNG_LIST_VERBOSE
g1_policy()->finalize_cset(target_pause_time_ms, evacuation_info);
- if (G1ReclaimDeadHumongousObjectsAtYoungGC) {
register_humongous_regions_with_in_cset_fast_test();
- }
_cm->note_start_of_gc();
// We should not verify the per-thread SATB buffers given that
// we have not filtered them yet (we'll do so during the
// GC). We also call this after finalize_cset() to
--- 4112,4122 ----
*** 4158,4170 ****
false /* verify_enqueued_buffers */,
true /* verify_thread_buffers */,
true /* verify_fingers */);
free_collection_set(g1_policy()->collection_set(), evacuation_info);
! if (G1ReclaimDeadHumongousObjectsAtYoungGC && _has_humongous_reclaim_candidates) {
eagerly_reclaim_humongous_regions();
! }
g1_policy()->clear_collection_set();
cleanup_surviving_young_words();
// Start a new incremental collection set for the next pause.
--- 4164,4176 ----
false /* verify_enqueued_buffers */,
true /* verify_thread_buffers */,
true /* verify_fingers */);
free_collection_set(g1_policy()->collection_set(), evacuation_info);
!
eagerly_reclaim_humongous_regions();
!
g1_policy()->clear_collection_set();
cleanup_surviving_young_words();
// Start a new incremental collection set for the next pause.
*** 4660,4693 ****
}
oop obj = oopDesc::decode_heap_oop_not_null(heap_oop);
assert(_worker_id == _par_scan_state->queue_num(), "sanity");
- bool needs_marking = true;
! if (_g1->is_in_cset_or_humongous(obj)) {
oop forwardee;
if (obj->is_forwarded()) {
forwardee = obj->forwardee();
} else {
forwardee = _par_scan_state->copy_to_survivor_space(obj);
}
! if (forwardee != NULL) {
oopDesc::encode_store_heap_oop(p, forwardee);
if (do_mark_object != G1MarkNone && forwardee != obj) {
// If the object is self-forwarded we don't need to explicitly
// mark it, the evacuation failure protocol will do so.
mark_forwarded_object(obj, forwardee);
}
if (barrier == G1BarrierKlass) {
do_klass_barrier(p, forwardee);
}
! needs_marking = false;
! }
}
- if (needs_marking) {
// The object is not in collection set. If we're a root scanning
// closure during an initial mark pause then attempt to mark the object.
if (do_mark_object == G1MarkFromRoot) {
mark_object(obj);
}
--- 4666,4700 ----
}
oop obj = oopDesc::decode_heap_oop_not_null(heap_oop);
assert(_worker_id == _par_scan_state->queue_num(), "sanity");
! G1FastCSetBiasedMappedArray::in_cset_state_t state = _g1->in_cset_state(obj);
!
! if (state == G1FastCSetBiasedMappedArray::InCSet) {
oop forwardee;
if (obj->is_forwarded()) {
forwardee = obj->forwardee();
} else {
forwardee = _par_scan_state->copy_to_survivor_space(obj);
}
! assert(forwardee != NULL, "forwardee should not be NULL");
oopDesc::encode_store_heap_oop(p, forwardee);
if (do_mark_object != G1MarkNone && forwardee != obj) {
// If the object is self-forwarded we don't need to explicitly
// mark it, the evacuation failure protocol will do so.
mark_forwarded_object(obj, forwardee);
}
if (barrier == G1BarrierKlass) {
do_klass_barrier(p, forwardee);
}
! } else {
! if (state == G1FastCSetBiasedMappedArray::IsHumongous) {
! _g1->set_humongous_is_live(obj);
}
// The object is not in collection set. If we're a root scanning
// closure during an initial mark pause then attempt to mark the object.
if (do_mark_object == G1MarkFromRoot) {
mark_object(obj);
}
*** 5510,5527 ****
G1KeepAliveClosure(G1CollectedHeap* g1) : _g1(g1) {}
void do_oop(narrowOop* p) { guarantee(false, "Not needed"); }
void do_oop(oop* p) {
oop obj = *p;
! if (obj == NULL || !_g1->is_in_cset_or_humongous(obj)) {
return;
}
! if (_g1->is_in_cset(obj)) {
assert( obj->is_forwarded(), "invariant" );
*p = obj->forwardee();
} else {
assert(!obj->is_forwarded(), "invariant" );
_g1->set_humongous_is_live(obj);
}
}
};
--- 5517,5537 ----
G1KeepAliveClosure(G1CollectedHeap* g1) : _g1(g1) {}
void do_oop(narrowOop* p) { guarantee(false, "Not needed"); }
void do_oop(oop* p) {
oop obj = *p;
! G1FastCSetBiasedMappedArray::in_cset_state_t cset_state = _g1->in_cset_state(obj);
! if (obj == NULL || cset_state == G1FastCSetBiasedMappedArray::InNeither) {
return;
}
! if (cset_state == G1FastCSetBiasedMappedArray::InCSet) {
assert( obj->is_forwarded(), "invariant" );
*p = obj->forwardee();
} else {
assert(!obj->is_forwarded(), "invariant" );
+ assert(cset_state == G1FastCSetBiasedMappedArray::IsHumongous,
+ err_msg("Only allowed InCSet state is IsHumongous, but is %d", cset_state));
_g1->set_humongous_is_live(obj);
}
}
};
*** 6548,6568 ****
// - they would also pose considerable effort for cleaning up the the remembered
// sets.
// While this cleanup is not strictly necessary to be done (or done instantly),
// given that their occurrence is very low, this saves us this additional
// complexity.
! if (g1h->humongous_is_live(r->hrs_index()) ||
! g1h->humongous_region_is_always_live(r)) {
if (G1TraceReclaimDeadHumongousObjectsAtYoungGC) {
gclog_or_tty->print_cr("Live humongous %d region %d with remset "SIZE_FORMAT" code roots "SIZE_FORMAT" is dead-bitmap %d live-other %d obj array %d",
r->isHumongous(),
! r->hrs_index(),
r->rem_set()->occupied(),
r->rem_set()->strong_code_roots_list_length(),
g1h->mark_in_progress() && !g1h->g1_policy()->during_initial_mark_pause(),
! g1h->humongous_is_live(r->hrs_index()),
oop(r->bottom())->is_objArray()
);
}
return false;
--- 6558,6579 ----
// - they would also pose considerable effort for cleaning up the the remembered
// sets.
// While this cleanup is not strictly necessary to be done (or done instantly),
// given that their occurrence is very low, this saves us this additional
// complexity.
! uint region_idx = r->hrs_index();
! if (g1h->humongous_is_live(region_idx) ||
! g1h->humongous_region_is_always_live(region_idx)) {
if (G1TraceReclaimDeadHumongousObjectsAtYoungGC) {
gclog_or_tty->print_cr("Live humongous %d region %d with remset "SIZE_FORMAT" code roots "SIZE_FORMAT" is dead-bitmap %d live-other %d obj array %d",
r->isHumongous(),
! region_idx,
r->rem_set()->occupied(),
r->rem_set()->strong_code_roots_list_length(),
g1h->mark_in_progress() && !g1h->g1_policy()->during_initial_mark_pause(),
! g1h->humongous_is_live(region_idx),
oop(r->bottom())->is_objArray()
);
}
return false;
*** 6574,6589 ****
if (G1TraceReclaimDeadHumongousObjectsAtYoungGC) {
gclog_or_tty->print_cr("Reclaim humongous region %d start "PTR_FORMAT" region %d length "UINT32_FORMAT" with remset "SIZE_FORMAT" code roots "SIZE_FORMAT" is dead-bitmap %d live-other %d obj array %d",
r->isHumongous(),
r->bottom(),
! r->hrs_index(),
r->region_num(),
r->rem_set()->occupied(),
r->rem_set()->strong_code_roots_list_length(),
g1h->mark_in_progress() && !g1h->g1_policy()->during_initial_mark_pause(),
! g1h->humongous_is_live(r->hrs_index()),
oop(r->bottom())->is_objArray()
);
}
_freed_bytes += r->used();
r->set_containing_set(NULL);
--- 6585,6600 ----
if (G1TraceReclaimDeadHumongousObjectsAtYoungGC) {
gclog_or_tty->print_cr("Reclaim humongous region %d start "PTR_FORMAT" region %d length "UINT32_FORMAT" with remset "SIZE_FORMAT" code roots "SIZE_FORMAT" is dead-bitmap %d live-other %d obj array %d",
r->isHumongous(),
r->bottom(),
! region_idx,
r->region_num(),
r->rem_set()->occupied(),
r->rem_set()->strong_code_roots_list_length(),
g1h->mark_in_progress() && !g1h->g1_policy()->during_initial_mark_pause(),
! g1h->humongous_is_live(region_idx),
oop(r->bottom())->is_objArray()
);
}
_freed_bytes += r->used();
r->set_containing_set(NULL);
*** 6606,6617 ****
}
};
void G1CollectedHeap::eagerly_reclaim_humongous_regions() {
assert_at_safepoint(true);
! guarantee(G1ReclaimDeadHumongousObjectsAtYoungGC, "Feature must be enabled");
! guarantee(_has_humongous_reclaim_candidates, "Should not reach here if no candidates for eager reclaim were found.");
double start_time = os::elapsedTime();
FreeRegionList local_cleanup_list("Local Humongous Cleanup List");
--- 6617,6631 ----
}
};
void G1CollectedHeap::eagerly_reclaim_humongous_regions() {
assert_at_safepoint(true);
!
! if (!G1ReclaimDeadHumongousObjectsAtYoungGC || !_has_humongous_reclaim_candidates) {
! g1_policy()->phase_times()->record_fast_reclaim_humongous_time_ms(0.0, 0);
! return;
! }
double start_time = os::elapsedTime();
FreeRegionList local_cleanup_list("Local Humongous Cleanup List");