< prev index next >
src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp
Print this page
rev 59271 : 8240870: Shenandoah: merge evac and update phases
Reviewed-by: XXX
@@ -922,38 +922,93 @@
// Out of memory
return NULL;
}
+// Marking stack used by the merged evac-update pass to drive transitive
+// object-graph scanning without native-stack recursion.
+typedef Stack<oop, mtGC> ShenandoahEvacUpdateStack;
+
+// Closure applied to the reference fields of an evacuated object. Each slot
+// is evacuated/updated via evac_update_with_forwarded(), and the resulting
+// forwardee is pushed on the stack so that its own contents are scanned in
+// turn (see ShenandoahHeap::evacuate_object_recursively).
+class ShenandoahEvacuateContentsOopClosure : public OopIterateClosure {
+private:
+ ShenandoahHeap* const _heap;
+ ShenandoahEvacUpdateStack* const _stack;
+
+ template<class T>
+ void do_work(T* p) {
+ oop fwd = _heap->evac_update_with_forwarded(p);
+ // NOTE(review): every non-NULL forwardee is pushed without a visited
+ // check; termination presumably relies on evac_update_with_forwarded()
+ // returning NULL for slots needing no further processing — confirm.
+ if (fwd != NULL) {
+ _stack->push(fwd);
+ }
+ }
+
+public:
+ ShenandoahEvacuateContentsOopClosure(ShenandoahEvacUpdateStack* stack) : _heap(ShenandoahHeap::heap()), _stack(stack) {}
+
+ void do_oop(oop* p) { do_work(p); }
+ void do_oop(narrowOop* p) { do_work(p); }
+
+ // No metadata here: Klass/CLD references are handled by the root and
+ // class-unloading machinery, not by per-object content scans.
+ virtual bool do_metadata() { return false; }
+ virtual void do_klass(Klass *k) { }
+ virtual void do_cld(ClassLoaderData *cld) { }
+};
+
+// Evacuate/update 'obj' (loaded from 'load_addr', which may be NULL or
+// out-of-heap for root locations), then transitively evacuate/update every
+// object reachable from its forwardee, using an explicit stack instead of
+// native recursion. Returns the forwardee of 'obj'.
+template<class T>
+oop ShenandoahHeap::evacuate_object_recursively(oop obj, T* load_addr, Thread* thread) {
+ // NOTE(review): this assert is vacuously true — when load_addr == NULL the
+ // second disjunct !is_in(NULL) holds. State the intended invariant; confirm.
+ assert(load_addr != NULL || !is_in(load_addr), "Should know the on-heap address.");
+
+ oop fwd = obj;
+ if (in_collection_set(obj)) {
+ fwd = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
+ if (obj == fwd) {
+ // Not yet forwarded: evacuate it ourselves. evacuate_object() may lose
+ // the race and hand back another thread's copy.
+ fwd = evacuate_object(obj, thread);
+ }
+ // TODO: note, we also filter out-of-heap stores
+ if (load_addr != NULL && fwd != obj && is_in(load_addr)) {
+ ShenandoahHeap::cas_oop(fwd, load_addr, obj);
+ }
+ }
+
+ // Use the dedicated stack typedef, consistent with
+ // ShenandoahEvacuateContentsOopClosure's declaration.
+ ShenandoahEvacUpdateStack stack;
+
+ ShenandoahEvacuateContentsOopClosure cl(&stack);
+ fwd->oop_iterate(&cl);
+
+ while (!stack.is_empty()) {
+ // Renamed from 'obj' to avoid shadowing the parameter.
+ oop cur = stack.pop();
+ cur->oop_iterate(&cl);
+ }
+
+ return fwd;
+}
+
// Per-object closure for the merged evac-update pass. Objects iterated here
// are never in the collection set (cset regions are skipped by the caller),
// so only their reference fields need evacuating/updating.
class ShenandoahConcurrentEvacuateRegionObjectClosure : public ObjectClosure {
private:
 ShenandoahHeap* const _heap;
 Thread* const _thread;
public:
 ShenandoahConcurrentEvacuateRegionObjectClosure(ShenandoahHeap* heap) :
 _heap(heap), _thread(Thread::current()) {}
 void do_object(oop p) {
- shenandoah_assert_marked(NULL, p);
- if (!p->is_forwarded()) {
- _heap->evacuate_object(p, _thread);
- }
+ shenandoah_assert_not_in_cset(NULL, p);
+
+ // fine to pass NULL as source address here, since the object is never in collection set
+ _heap->evacuate_object_recursively(p, (oop*)NULL, _thread);
 }
};
class ShenandoahEvacuationTask : public AbstractGangTask {
private:
ShenandoahHeap* const _sh;
- ShenandoahCollectionSet* const _cs;
+ ShenandoahRegionIterator* const _regions;
bool _concurrent;
public:
ShenandoahEvacuationTask(ShenandoahHeap* sh,
- ShenandoahCollectionSet* cs,
+ ShenandoahRegionIterator* regions,
bool concurrent) :
AbstractGangTask("Parallel Evacuation Task"),
_sh(sh),
- _cs(cs),
+ _regions(regions),
_concurrent(concurrent)
{}
void work(uint worker_id) {
if (_concurrent) {
@@ -969,22 +1024,35 @@
}
private:
 // Walk regions from the shared iterator, evacuating/updating marked objects
 // below each region's update watermark, then reset the watermark.
 void do_work() {
 ShenandoahConcurrentEvacuateRegionObjectClosure cl(_sh);
- ShenandoahHeapRegion* r;
- while ((r =_cs->claim_next()) != NULL) {
- assert(r->has_live(), "Region " SIZE_FORMAT " should have been reclaimed early", r->index());
- _sh->marked_object_iterate(r, &cl);
+ ShenandoahHeapRegion *r = _regions->next();
+ ShenandoahMarkingContext *const ctx = _sh->complete_marking_context();
+ while (r != NULL) {
+ HeapWord* update_watermark = r->get_update_watermark();
+ assert (update_watermark >= r->bottom(), "sanity");
+ if (r->is_humongous()) {
+ // Humongous objects are processed once, from their start region only.
+ if (r->is_humongous_start()) {
+ HeapWord* bottom = r->bottom();
+ oop obj = oop(bottom);
+ if (update_watermark > bottom && ctx->is_marked(obj)) {
+ cl.do_object(obj);
+ }
+ }
+ } else if (r->is_active() && !r->is_cset()) {
+ // Cset regions are skipped here: their live objects get evacuated
+ // recursively when references to them are found in scanned regions.
+ _sh->marked_object_iterate(r, &cl, update_watermark);
+ }
+ r->set_update_watermark(r->bottom());
+ // NOTE(review): pacing progress is reported for skipped (inactive/cset)
+ // regions too — presumably intentional; confirm against pacer budget.
 if (ShenandoahPacing) {
- _sh->pacer()->report_evac(r->used() >> LogHeapWordSize);
+ _sh->pacer()->report_evac_update(pointer_delta(update_watermark, r->bottom()));
 }
-
 if (_sh->check_cancelled_gc_and_yield(_concurrent)) {
 break;
 }
+ r = _regions->next();
 }
 }
};
void ShenandoahHeap::trash_cset_regions() {
@@ -1568,16 +1636,15 @@
if (!is_degenerated_gc_in_progress()) {
if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
ShenandoahCodeRoots::arm_nmethods();
}
- evacuate_and_update_roots();
}
- if (ShenandoahPacing) {
- pacer()->setup_for_evac();
- }
+ // Need to evac all roots, because we would not visit them in collection set
+ // with evac-update otherwise.
+ evacuate_and_update_roots();
if (ShenandoahVerify) {
ShenandoahRootVerifier::RootTypes types = ShenandoahRootVerifier::None;
if (ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
types = ShenandoahRootVerifier::combine(ShenandoahRootVerifier::JNIHandleRoots, ShenandoahRootVerifier::WeakRoots);
@@ -1589,10 +1656,15 @@
types = ShenandoahRootVerifier::combine(types, ShenandoahRootVerifier::CodeRoots);
}
verifier()->verify_roots_no_forwarded_except(types);
verifier()->verify_during_evacuation();
}
+
+ if (ShenandoahPacing) {
+ pacer()->setup_for_evac_update(); // TODO: Really?
+ }
+
} else {
if (ShenandoahVerify) {
verifier()->verify_after_concmark();
}
@@ -1615,24 +1687,20 @@
rp->verify_no_references_recorded();
}
}
}
-void ShenandoahHeap::op_conc_evac() {
- ShenandoahEvacuationTask task(this, _collection_set, true);
+// Concurrent part of the merged evacuate-and-update-refs pass over all regions.
+void ShenandoahHeap::op_conc_evac_update() {
+ ShenandoahEvacuationTask task(this, &_update_refs_iterator, true);
 workers()->run_task(&task);
}
-void ShenandoahHeap::op_stw_evac() {
- ShenandoahEvacuationTask task(this, _collection_set, false);
+// Stop-the-world variant, used to finish leftover work at the final pause.
+void ShenandoahHeap::op_stw_evac_update() {
+ ShenandoahEvacuationTask task(this, &_update_refs_iterator, false);
 workers()->run_task(&task);
}
-void ShenandoahHeap::op_updaterefs() {
- update_heap_references(true);
-}
-
// Recycle regions trashed earlier in the cycle back into the free set.
void ShenandoahHeap::op_cleanup_early() {
 free_set()->recycle_trash();
}
void ShenandoahHeap::op_cleanup_complete() {
@@ -1710,11 +1778,11 @@
_dead_counter ++;
}
} else if (_evac_in_progress && _heap->in_collection_set(obj)) {
oop resolved = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
if (resolved == obj) {
- resolved = _heap->evacuate_object(obj, _thread);
+ resolved = _heap->evacuate_object_recursively(obj, p, _thread);
}
Atomic::cmpxchg(p, obj, resolved);
assert(_heap->cancelled_gc() ||
_mark_context->is_marked(resolved) && !_heap->in_collection_set(resolved),
"Sanity");
@@ -1959,15 +2027,22 @@
ShenandoahCodeRoots::disarm_nmethods();
}
op_cleanup_early();
- case _degenerated_evac:
+ case _degenerated_evac_update:
// If heuristics thinks we should do the cycle, this flag would be set,
// and we can do evacuation. Otherwise, it would be the shortcut cycle.
if (is_evacuation_in_progress()) {
+ if (point == _degenerated_evac_update) {
+ // TODO: Cannot handle it properly yet, Full GC.
+ cancel_gc(GCCause::_shenandoah_upgrade_to_full_gc);
+ op_degenerated_fail();
+ return;
+ }
+
// Degeneration under oom-evac protocol might have left some objects in
// collection set un-evacuated. Restart evacuation from the beginning to
// capture all objects. For all the objects that are already evacuated,
// it would be a simple check, which is supposed to be fast. This is also
// safe to do even without degeneration, as CSet iterator is at beginning
@@ -1994,30 +2069,19 @@
}
collection_set()->clear_current_index();
}
- op_stw_evac();
- if (cancelled_gc()) {
- op_degenerated_fail();
- return;
- }
- }
-
- // If heuristics thinks we should do the cycle, this flag would be set,
- // and we need to do update-refs. Otherwise, it would be the shortcut cycle.
- if (has_forwarded_objects()) {
- op_init_updaterefs();
+ op_init_evac_update();
if (cancelled_gc()) {
op_degenerated_fail();
return;
}
}
- case _degenerated_updaterefs:
if (has_forwarded_objects()) {
- op_final_updaterefs();
+ op_final_evac_update();
if (cancelled_gc()) {
op_degenerated_fail();
return;
}
}
@@ -2435,86 +2499,31 @@
guarantee(ShenandoahVerify, "Should be enabled");
assert (_verifier != NULL, "sanity");
return _verifier;
}
-template<class T>
-class ShenandoahUpdateHeapRefsTask : public AbstractGangTask {
-private:
- T cl;
- ShenandoahHeap* _heap;
- ShenandoahRegionIterator* _regions;
- bool _concurrent;
-public:
- ShenandoahUpdateHeapRefsTask(ShenandoahRegionIterator* regions, bool concurrent) :
- AbstractGangTask("Concurrent Update References Task"),
- cl(T()),
- _heap(ShenandoahHeap::heap()),
- _regions(regions),
- _concurrent(concurrent) {
- }
-
- void work(uint worker_id) {
- if (_concurrent) {
- ShenandoahConcurrentWorkerSession worker_session(worker_id);
- ShenandoahSuspendibleThreadSetJoiner stsj(ShenandoahSuspendibleWorkers);
- do_work();
- } else {
- ShenandoahParallelWorkerSession worker_session(worker_id);
- do_work();
- }
- }
-
-private:
- void do_work() {
- ShenandoahHeapRegion* r = _regions->next();
- ShenandoahMarkingContext* const ctx = _heap->complete_marking_context();
- while (r != NULL) {
- HeapWord* update_watermark = r->get_update_watermark();
- assert (update_watermark >= r->bottom(), "sanity");
- if (r->is_active() && !r->is_cset()) {
- _heap->marked_object_oop_iterate(r, &cl, update_watermark);
- }
- if (ShenandoahPacing) {
- _heap->pacer()->report_updaterefs(pointer_delta(update_watermark, r->bottom()));
- }
- if (_heap->check_cancelled_gc_and_yield(_concurrent)) {
- return;
- }
- r = _regions->next();
- }
- }
-};
-
-void ShenandoahHeap::update_heap_references(bool concurrent) {
- ShenandoahUpdateHeapRefsTask<ShenandoahUpdateHeapRefsClosure> task(&_update_refs_iterator, concurrent);
- workers()->run_task(&task);
-}
-
-void ShenandoahHeap::op_init_updaterefs() {
+void ShenandoahHeap::op_init_evac_update() {
assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "must be at safepoint");
- set_evacuation_in_progress(false);
-
{
- ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_update_refs_retire_gclabs);
+ ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_evac_update_retire_gclabs);
retire_and_reset_gclabs();
}
if (ShenandoahVerify) {
- if (!is_degenerated_gc_in_progress()) {
+ if (!is_degenerated_gc_in_progress() && !cancelled_gc()) {
verifier()->verify_roots_in_to_space_except(ShenandoahRootVerifier::ThreadRoots);
}
- verifier()->verify_before_updaterefs();
+ verifier()->verify_before_evac_update();
}
set_update_refs_in_progress(true);
_update_refs_iterator.reset();
if (ShenandoahPacing) {
- pacer()->setup_for_updaterefs();
+ pacer()->setup_for_evac_update();
}
}
class ShenandoahFinalUpdateRefsUpdateRegionStateClosure : public ShenandoahHeapRegionClosure {
private:
@@ -2543,72 +2552,74 @@
}
bool is_thread_safe() { return true; }
};
-void ShenandoahHeap::op_final_updaterefs() {
+// Safepoint op closing the merged evac-update pass: finish leftover work,
+// update roots, retire the collection set, and rebuild the free set.
+void ShenandoahHeap::op_final_evac_update() {
 assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "must be at safepoint");
+ set_evacuation_in_progress(false);
+
 finish_concurrent_unloading();
 // Check if there is left-over work, and finish it
 if (_update_refs_iterator.has_next()) {
- ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_update_refs_finish_work);
+ ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_evac_update_finish_work);
 // Finish updating references where we left off.
 clear_cancelled_gc();
- update_heap_references(false);
+ op_stw_evac_update();
 }
 // Clear cancelled GC, if set. On cancellation path, the block before would handle
 // everything. On degenerated paths, cancelled gc would not be set anyway.
 if (cancelled_gc()) {
 clear_cancelled_gc();
 }
 assert(!cancelled_gc(), "Should have been done right before");
+ // NOTE(review): the added !cancelled_gc() check is redundant — the assert
+ // two lines above already guarantees it in debug builds, and the preceding
+ // block clears it unconditionally in product builds. Confirm intent.
- if (ShenandoahVerify && !is_degenerated_gc_in_progress()) {
+ if (ShenandoahVerify && !is_degenerated_gc_in_progress() && !cancelled_gc()) {
 verifier()->verify_roots_in_to_space_except(ShenandoahRootVerifier::ThreadRoots);
 }
 if (is_degenerated_gc_in_progress()) {
 concurrent_mark()->update_roots(ShenandoahPhaseTimings::degen_gc_update_roots);
 } else {
- concurrent_mark()->update_thread_roots(ShenandoahPhaseTimings::final_update_refs_roots);
+ concurrent_mark()->update_thread_roots(ShenandoahPhaseTimings::final_evac_update_roots);
 }
 // Has to be done before cset is clear
 if (ShenandoahVerify) {
 verifier()->verify_roots_in_to_space();
 }
 {
- ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_update_refs_update_region_states);
+ ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_evac_update_update_region_states);
 ShenandoahFinalUpdateRefsUpdateRegionStateClosure cl;
 parallel_heap_region_iterate(&cl);
 assert_pinned_region_status();
 }
 {
- ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_update_refs_trash_cset);
+ ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_evac_update_trash_cset);
 trash_cset_regions();
 }
 set_has_forwarded_objects(false);
 set_update_refs_in_progress(false);
 if (ShenandoahVerify) {
- verifier()->verify_after_updaterefs();
+ verifier()->verify_after_evac_update();
 }
 if (VerifyAfterGC) {
 Universe::verify();
 }
 {
- ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_update_refs_rebuild_freeset);
+ ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_evac_update_rebuild_freeset);
 ShenandoahHeapLocker locker(lock());
 _free_set->rebuild();
 }
}
@@ -2708,25 +2719,25 @@
try_inject_alloc_failure();
VM_ShenandoahFinalMarkStartEvac op;
VMThread::execute(&op); // jump to entry_final_mark under safepoint
}
-void ShenandoahHeap::vmop_entry_init_updaterefs() {
+// Schedules the Init Evac-Update pause on the VM thread.
+void ShenandoahHeap::vmop_entry_init_evac_update() {
 TraceCollectorStats tcs(monitoring_support()->stw_collection_counters());
- ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::init_update_refs_gross);
+ ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::init_evac_update_gross);
 try_inject_alloc_failure();
- VM_ShenandoahInitUpdateRefs op;
+ VM_ShenandoahInitEvacUpdate op;
 VMThread::execute(&op);
}
-void ShenandoahHeap::vmop_entry_final_updaterefs() {
+// Schedules the Final Evac-Update pause on the VM thread.
+void ShenandoahHeap::vmop_entry_final_evac_update() {
 TraceCollectorStats tcs(monitoring_support()->stw_collection_counters());
- ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_update_refs_gross);
+ ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_evac_update_gross);
 try_inject_alloc_failure();
- VM_ShenandoahFinalUpdateRefs op;
+ VM_ShenandoahFinalEvacUpdate op;
 VMThread::execute(&op);
}
void ShenandoahHeap::vmop_entry_full(GCCause::Cause cause) {
TraceCollectorStats tcs(monitoring_support()->full_stw_collection_counters());
@@ -2767,30 +2778,30 @@
"final marking");
op_final_mark();
}
-void ShenandoahHeap::entry_init_updaterefs() {
- static const char* msg = "Pause Init Update Refs";
- ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::init_update_refs);
+// Pause entry: logging/timing wrapper around op_init_evac_update().
+void ShenandoahHeap::entry_init_evac_update() {
+ static const char* msg = "Pause Init Evac-Update";
+ ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::init_evac_update);
 EventMark em("%s", msg);
 // No workers used in this phase, no setup required
- op_init_updaterefs();
+ op_init_evac_update();
}
-void ShenandoahHeap::entry_final_updaterefs() {
- static const char* msg = "Pause Final Update Refs";
- ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::final_update_refs);
+// Pause entry: logging/timing/worker-scope wrapper around op_final_evac_update().
+void ShenandoahHeap::entry_final_evac_update() {
+ static const char* msg = "Pause Final Evac-Update";
+ ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::final_evac_update);
 EventMark em("%s", msg);
 ShenandoahWorkerScope scope(workers(),
+ // NOTE(review): worker policy still uses the old final_update_ref sizing
+ // while the phase was renamed — presumably deliberate reuse; confirm.
 ShenandoahWorkerPolicy::calc_workers_for_final_update_ref(),
- "final reference update");
+ "final evac-update");
- op_final_updaterefs();
+ op_final_evac_update();
}
void ShenandoahHeap::entry_full(GCCause::Cause cause) {
static const char* msg = "Pause Full";
ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::full_gc, true /* log_heap_usage */);
@@ -2831,36 +2842,23 @@
try_inject_alloc_failure();
op_mark();
}
+// Concurrent entry for the merged evac-update phase; replaces the previously
+// separate entry_evac() and entry_updaterefs() entries.
-void ShenandoahHeap::entry_evac() {
+void ShenandoahHeap::entry_evac_update() {
 TraceCollectorStats tcs(monitoring_support()->concurrent_collection_counters());
- static const char* msg = "Concurrent evacuation";
- ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_evac);
- EventMark em("%s", msg);
-
- ShenandoahWorkerScope scope(workers(),
- ShenandoahWorkerPolicy::calc_workers_for_conc_evac(),
- "concurrent evacuation");
-
- try_inject_alloc_failure();
- op_conc_evac();
-}
-
-void ShenandoahHeap::entry_updaterefs() {
- static const char* msg = "Concurrent update references";
- ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_update_refs);
+ static const char* msg = "Concurrent evac-update";
+ ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_evac_update);
 EventMark em("%s", msg);
 ShenandoahWorkerScope scope(workers(),
- ShenandoahWorkerPolicy::calc_workers_for_conc_update_ref(),
- "concurrent reference update");
+ ShenandoahWorkerPolicy::calc_workers_for_conc_evac_update(),
+ "concurrent evacuation/update");
 try_inject_alloc_failure();
- op_updaterefs();
+ op_conc_evac_update();
}
void ShenandoahHeap::entry_weak_roots() {
static const char* msg = "Concurrent weak roots";
ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_weak_roots);
@@ -3093,14 +3091,12 @@
return "Pause Degenerated GC (<UNSET>)";
case _degenerated_outside_cycle:
return "Pause Degenerated GC (Outside of Cycle)";
case _degenerated_mark:
return "Pause Degenerated GC (Mark)";
- case _degenerated_evac:
- return "Pause Degenerated GC (Evacuation)";
- case _degenerated_updaterefs:
- return "Pause Degenerated GC (Update Refs)";
+ case _degenerated_evac_update:
+ return "Pause Degenerated GC (Evac Update)";
default:
ShouldNotReachHere();
return "ERROR";
}
}
< prev index next >