src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp

rev 50076 : Fold Partial GC into Traversal GC

*** 43,53 ****
  #include "gc/shenandoah/shenandoahHeapRegionSet.hpp"
  #include "gc/shenandoah/shenandoahMarkCompact.hpp"
  #include "gc/shenandoah/shenandoahMemoryPool.hpp"
  #include "gc/shenandoah/shenandoahMonitoringSupport.hpp"
  #include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
- #include "gc/shenandoah/shenandoahPartialGC.hpp"
  #include "gc/shenandoah/shenandoahPacer.hpp"
  #include "gc/shenandoah/shenandoahPacer.inline.hpp"
  #include "gc/shenandoah/shenandoahRootProcessor.hpp"
  #include "gc/shenandoah/shenandoahStringDedup.hpp"
  #include "gc/shenandoah/shenandoahUtils.hpp"
--- 43,52 ----
*** 200,219 ****
  }

  assert((((size_t) base()) & ShenandoahHeapRegion::region_size_bytes_mask()) == 0,
         "misaligned heap: "PTR_FORMAT, p2i(base()));

- LogTarget(Trace, gc, region) lt;
- if (lt.is_enabled()) {
-   ResourceMark rm;
-   LogStream ls(lt);
-   log_trace(gc, region)("All Regions");
-   print_heap_regions_on(&ls);
-   log_trace(gc, region)("Free Regions");
-   _free_set->print_on(&ls);
- }
-
  // The call below uses stuff (the SATB* things) that are in G1, but probably
  // belong into a shared location.
  ShenandoahBarrierSet::satb_mark_queue_set().initialize(SATB_Q_CBL_mon,
                                                         SATB_Q_FL_lock,
                                                         20 /*G1SATBProcessCompletedThreshold */,
--- 199,208 ----
*** 305,318 ****
    _connection_matrix = new ShenandoahConnectionMatrix(_num_regions);
  } else {
    _connection_matrix = NULL;
  }

- _partial_gc = _shenandoah_policy->can_do_partial_gc() ?
-               new ShenandoahPartialGC(this, _num_regions) :
-               NULL;
-
  _traversal_gc = _shenandoah_policy->can_do_traversal_gc() ?
                  new ShenandoahTraversalGC(this, _num_regions) :
                  NULL;

  _monitoring_support = new ShenandoahMonitoringSupport(this);
--- 294,303 ----
*** 327,336 ****
--- 312,331 ----
  _concurrent_gc_thread = new ShenandoahConcurrentThread();

  ShenandoahCodeRoots::initialize();

+ LogTarget(Trace, gc, region) lt;
+ if (lt.is_enabled()) {
+   ResourceMark rm;
+   LogStream ls(lt);
+   log_trace(gc, region)("All Regions");
+   print_heap_regions_on(&ls);
+   log_trace(gc, region)("Free Regions");
+   _free_set->print_on(&ls);
+ }
+
  log_info(gc, init)("Safepointing mechanism: %s",
                     SafepointMechanism::uses_thread_local_poll() ? "thread-local poll" :
                     (SafepointMechanism::uses_global_page_poll() ? "global-page poll" : "unknown"));

  return JNI_OK;
*** 359,368 ****
--- 354,364 ----
    _pacer(NULL),
    _used_at_last_gc(0),
    _alloc_seq_at_last_gc_start(0),
    _alloc_seq_at_last_gc_end(0),
    _safepoint_workers(NULL),
+   _gc_cycle_mode(),
  #ifdef ASSERT
    _heap_expansion_count(0),
  #endif
    _gc_timer(new (ResourceObj::C_HEAP, mtGC) ConcurrentGCTimer()),
    _phase_timings(NULL),
*** 428,437 ****
--- 424,478 ----
    ShenandoahResetNextBitmapTask task(region_iterator());
    _workers->run_task(&task);
  }

+ class ShenandoahResetNextBitmapTraversalTask : public AbstractGangTask {
+ private:
+   ShenandoahRegionIterator _regions;
+
+ public:
+   ShenandoahResetNextBitmapTraversalTask(ShenandoahRegionIterator regions) :
+     AbstractGangTask("Parallel Reset Bitmap Task for Traversal"),
+     _regions(regions) {
+   }
+
+   void work(uint worker_id) {
+     ShenandoahHeap* heap = ShenandoahHeap::heap();
+     ShenandoahHeapRegionSet* traversal_set = heap->traversal_gc()->traversal_set();
+     ShenandoahHeapRegion* region = _regions.next();
+     while (region != NULL) {
+       if (heap->is_bitmap_slice_committed(region)) {
+         if (traversal_set->is_in(region) && !region->is_trash()) {
+           ShenandoahHeapLocker locker(heap->lock());
+           HeapWord* bottom = region->bottom();
+           HeapWord* top = heap->next_top_at_mark_start(bottom);
+           assert(top <= region->top(),
+                  "TAMS must smaller/equals than top: TAMS: "PTR_FORMAT", top: "PTR_FORMAT,
+                  p2i(top), p2i(region->top()));
+           if (top > bottom) {
+             heap->complete_mark_bit_map()->copy_from(heap->next_mark_bit_map(), MemRegion(bottom, top));
+             heap->set_complete_top_at_mark_start(bottom, top);
+             heap->next_mark_bit_map()->clear_range_large(MemRegion(bottom, top));
+             heap->set_next_top_at_mark_start(bottom, bottom);
+           }
+         }
+         assert(heap->is_next_bitmap_clear_range(region->bottom(), region->end()),
+                "need clear next bitmap");
+       }
+       region = _regions.next();
+     }
+   }
+ };
+
+ void ShenandoahHeap::reset_next_mark_bitmap_traversal() {
+   assert_gc_workers(_workers->active_workers());
+
+   ShenandoahResetNextBitmapTraversalTask task(region_iterator());
+   _workers->run_task(&task);
+ }
+
  bool ShenandoahHeap::is_next_bitmap_clear() {
    for (size_t idx = 0; idx < _num_regions; idx++) {
      ShenandoahHeapRegion* r = get_region(idx);
      if (is_bitmap_slice_committed(r) && !is_next_bitmap_clear_range(r->bottom(), r->end())) {
        return false;
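For orientation, here is a minimal standalone sketch (not HotSpot code) of the bitmap hand-over the new ShenandoahResetNextBitmapTraversalTask performs per region: the marks accumulated below TAMS in the "next" bitmap become the "complete" bitmap, the "next" bitmap range is cleared, and next-TAMS is reset to bottom so the following cycle starts empty. The region layout and bitmap type below are simplified stand-ins.

    #include <cassert>
    #include <cstddef>
    #include <vector>

    struct RegionBitmaps {
      std::vector<bool> next;      // marks produced by the cycle that just finished
      std::vector<bool> complete;  // marks the rest of the runtime may rely on
      size_t tams;                 // top-at-mark-start, as a word index into the region
    };

    // Mirrors the per-region work: copy the [0, tams) range from 'next' into
    // 'complete', clear that range in 'next', and reset TAMS for the next cycle.
    void hand_over_marks(RegionBitmaps& r) {
      assert(r.tams <= r.next.size() && r.next.size() == r.complete.size());
      for (size_t i = 0; i < r.tams; i++) {
        r.complete[i] = r.next[i];
        r.next[i] = false;
      }
      r.tams = 0;  // corresponds to set_next_top_at_mark_start(bottom, bottom)
    }

    int main() {
      RegionBitmaps r{std::vector<bool>(8, false), std::vector<bool>(8, false), 4};
      r.next[1] = true;   // objects marked live below TAMS
      r.next[3] = true;
      hand_over_marks(r);
      assert(r.complete[1] && r.complete[3] && !r.next[1] && !r.next[3]);
      return 0;
    }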
*** 458,468 ****
  st->print("Status: ");
  if (has_forwarded_objects()) st->print("has forwarded objects, ");
  if (is_concurrent_mark_in_progress()) st->print("marking, ");
  if (is_evacuation_in_progress()) st->print("evacuating, ");
  if (is_update_refs_in_progress()) st->print("updating refs, ");
- if (is_concurrent_partial_in_progress()) st->print("partial, ");
  if (is_concurrent_traversal_in_progress()) st->print("traversal, ");
  if (is_degenerated_gc_in_progress()) st->print("degenerated gc, ");
  if (is_full_gc_in_progress()) st->print("full gc, ");
  if (is_full_gc_move_in_progress()) st->print("full gc move, ");
--- 499,508 ----
*** 798,809 ****
      oop obj = CompressedOops::decode_not_null(o);
      if (_heap->in_collection_set(obj)) {
        shenandoah_assert_marked_complete(p, obj);
        oop resolved = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
        if (oopDesc::unsafe_equals(resolved, obj)) {
!         bool evac;
!         resolved = _heap->evacuate_object(obj, _thread, evac);
        }
        RawAccess<OOP_NOT_NULL>::oop_store(p, resolved);
      }
    }
  }
--- 838,848 ----
      oop obj = CompressedOops::decode_not_null(o);
      if (_heap->in_collection_set(obj)) {
        shenandoah_assert_marked_complete(p, obj);
        oop resolved = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
        if (oopDesc::unsafe_equals(resolved, obj)) {
!         resolved = _heap->evacuate_object(obj, _thread);
        }
        RawAccess<OOP_NOT_NULL>::oop_store(p, resolved);
      }
    }
  }
*** 833,844 ****
    if (! CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      if (_heap->in_collection_set(obj)) {
        oop resolved = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
        if (oopDesc::unsafe_equals(resolved, obj)) {
!         bool evac;
!         _heap->evacuate_object(obj, _thread, evac);
        }
      }
    }
  }
--- 872,882 ----
    if (! CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      if (_heap->in_collection_set(obj)) {
        oop resolved = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
        if (oopDesc::unsafe_equals(resolved, obj)) {
!         _heap->evacuate_object(obj, _thread);
        }
      }
    }
  }
*** 860,871 ****
      _heap(heap), _thread(Thread::current()) {}

    void do_object(oop p) {
      shenandoah_assert_marked_complete(NULL, p);
      if (oopDesc::unsafe_equals(p, ShenandoahBarrierSet::resolve_forwarded_not_null(p))) {
!       bool evac;
!       _heap->evacuate_object(p, _thread, evac);
      }
    }
  };

  class ShenandoahParallelEvacuationTask : public AbstractGangTask {
--- 898,908 ----
      _heap(heap), _thread(Thread::current()) {}

    void do_object(oop p) {
      shenandoah_assert_marked_complete(NULL, p);
      if (oopDesc::unsafe_equals(p, ShenandoahBarrierSet::resolve_forwarded_not_null(p))) {
!       _heap->evacuate_object(p, _thread);
      }
    }
  };

  class ShenandoahParallelEvacuationTask : public AbstractGangTask {
*** 1611,1637 ****
    // Allocations happen during bitmap cleanup, record peak after the phase:
    shenandoahPolicy()->record_peak_occupancy();
  }

! void ShenandoahHeap::op_preclean() {
!   concurrentMark()->preclean_weak_refs();
!
!   // Allocations happen during concurrent preclean, record peak after the phase:
!   shenandoahPolicy()->record_peak_occupancy();
! }
!
! void ShenandoahHeap::op_init_partial() {
!   partial_gc()->init_partial_collection();
! }
!
! void ShenandoahHeap::op_partial() {
!   partial_gc()->concurrent_partial_collection();
  }

! void ShenandoahHeap::op_final_partial() {
!   partial_gc()->final_partial_collection();
  }

  void ShenandoahHeap::op_init_traversal() {
    traversal_gc()->init_traversal_collection();
  }
--- 1648,1675 ----
    // Allocations happen during bitmap cleanup, record peak after the phase:
    shenandoahPolicy()->record_peak_occupancy();
  }

! void ShenandoahHeap::op_cleanup_traversal() {
!   {
!     ShenandoahGCPhase phase_reset(ShenandoahPhaseTimings::conc_cleanup_reset_bitmaps);
!     reset_next_mark_bitmap_traversal();
!   }
!   op_cleanup();
!
!   // Allocations happen during bitmap cleanup, record peak after the phase:
!   shenandoahPolicy()->record_peak_occupancy();
  }

! void ShenandoahHeap::op_preclean() {
!   concurrentMark()->preclean_weak_refs();
!
!   // Allocations happen during concurrent preclean, record peak after the phase:
!   shenandoahPolicy()->record_peak_occupancy();
  }

  void ShenandoahHeap::op_init_traversal() {
    traversal_gc()->init_traversal_collection();
  }
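The new op_cleanup_traversal() wraps the bitmap reset in its own timing phase before delegating to the regular cleanup. Below is a standalone sketch (illustrative only, not the HotSpot types) of that scoped-phase pattern, with an RAII timer standing in for ShenandoahGCPhase and empty placeholder functions for the two cleanup steps.

    #include <chrono>
    #include <cstdio>

    // RAII guard: everything inside its scope is attributed to one timing bucket.
    class ScopedPhase {
      const char* _name;
      std::chrono::steady_clock::time_point _start;
    public:
      explicit ScopedPhase(const char* name)
        : _name(name), _start(std::chrono::steady_clock::now()) {}
      ~ScopedPhase() {
        long long us = std::chrono::duration_cast<std::chrono::microseconds>(
            std::chrono::steady_clock::now() - _start).count();
        printf("%s: %lld us\n", _name, us);
      }
    };

    void reset_next_mark_bitmap_traversal() { /* parallel bitmap reset would go here */ }
    void op_cleanup()                       { /* regular concurrent cleanup */ }

    void op_cleanup_traversal() {
      {
        ScopedPhase phase_reset("conc_cleanup_reset_bitmaps");  // timed sub-phase
        reset_next_mark_bitmap_traversal();
      }
      op_cleanup();
    }

    int main() { op_cleanup_traversal(); return 0; }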
*** 1656,1666 ****
    clear_cancelled_concgc();

    size_t used_before = used();

    switch (point) {
-   case _degenerated_partial:
    case _degenerated_evac:
      // Not possible to degenerate from here, upgrade to Full GC right away.
      cancel_concgc(GCCause::_shenandoah_upgrade_to_full_gc);
      op_degenerated_fail();
      return;
--- 1694,1703 ----
*** 1678,1688 ****
          r->make_regular_bypass();
        }
        collection_set()->clear();
      }
      op_final_traversal();
!     op_cleanup_bitmaps();
      return;

    case _degenerated_outside_cycle:
      if (shenandoahPolicy()->can_do_traversal_gc()) {
        // Not possible to degenerate from here, upgrade to Full GC right away.
--- 1715,1725 ----
          r->make_regular_bypass();
        }
        collection_set()->clear();
      }
      op_final_traversal();
!     op_cleanup_traversal();
      return;

    case _degenerated_outside_cycle:
      if (shenandoahPolicy()->can_do_traversal_gc()) {
        // Not possible to degenerate from here, upgrade to Full GC right away.
*** 1818,1833 ****
  void ShenandoahHeap::set_concurrent_mark_in_progress(bool in_progress) {
    set_gc_state_mask(MARKING, in_progress);
    ShenandoahBarrierSet::satb_mark_queue_set().set_active_all_threads(in_progress, !in_progress);
  }

- void ShenandoahHeap::set_concurrent_partial_in_progress(bool in_progress) {
-
-   set_gc_state_mask(PARTIAL | HAS_FORWARDED, in_progress);
-   ShenandoahBarrierSet::satb_mark_queue_set().set_active_all_threads(in_progress, !in_progress);
- }
-
  void ShenandoahHeap::set_concurrent_traversal_in_progress(bool in_progress) {
    set_gc_state_mask(TRAVERSAL | HAS_FORWARDED, in_progress);
    ShenandoahBarrierSet::satb_mark_queue_set().set_active_all_threads(in_progress, !in_progress);
  }
--- 1855,1864 ----
*** 2195,2208 ****
  ShenandoahConnectionMatrix* ShenandoahHeap::connection_matrix() const {
    return _connection_matrix;
  }

- ShenandoahPartialGC* ShenandoahHeap::partial_gc() {
-   return _partial_gc;
- }
-
  ShenandoahTraversalGC* ShenandoahHeap::traversal_gc() {
    return _traversal_gc;
  }

  ShenandoahVerifier* ShenandoahHeap::verifier() {
--- 2226,2235 ----
*** 2511,2540 ****
    try_inject_alloc_failure();
    VM_ShenandoahFinalUpdateRefs op;
    VMThread::execute(&op);
  }

- void ShenandoahHeap::vmop_entry_init_partial() {
-   TraceCollectorStats tcs(monitoring_support()->stw_collection_counters());
-   ShenandoahGCPhase total(ShenandoahPhaseTimings::total_pause_gross);
-   ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_partial_gc_gross);
-
-   try_inject_alloc_failure();
-   VM_ShenandoahInitPartialGC op;
-   VMThread::execute(&op);
- }
-
- void ShenandoahHeap::vmop_entry_final_partial() {
-   TraceCollectorStats tcs(monitoring_support()->stw_collection_counters());
-   ShenandoahGCPhase total(ShenandoahPhaseTimings::total_pause_gross);
-   ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_partial_gc_gross);
-
-   try_inject_alloc_failure();
-   VM_ShenandoahFinalPartialGC op;
-   VMThread::execute(&op);
- }
-
  void ShenandoahHeap::vmop_entry_init_traversal() {
    TraceCollectorStats tcs(monitoring_support()->stw_collection_counters());
    ShenandoahGCPhase total(ShenandoahPhaseTimings::total_pause_gross);
    ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_traversal_gc_gross);
--- 2538,2547 ----
*** 2639,2674 ****
    ShenandoahWorkerScope scope(workers(), ShenandoahWorkerPolicy::calc_workers_for_final_update_ref());

    op_final_updaterefs();
  }

- void ShenandoahHeap::entry_init_partial() {
-   ShenandoahGCPhase total_phase(ShenandoahPhaseTimings::total_pause);
-   ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_partial_gc);
-
-   static const char* msg = "Pause Init Partial";
-   GCTraceTime(Info, gc) time(msg, gc_timer());
-   EventMark em("%s", msg);
-
-   ShenandoahWorkerScope scope(workers(), ShenandoahWorkerPolicy::calc_workers_for_stw_partial());
-
-   op_init_partial();
- }
-
- void ShenandoahHeap::entry_final_partial() {
-   ShenandoahGCPhase total_phase(ShenandoahPhaseTimings::total_pause);
-   ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_partial_gc);
-
-   static const char* msg = "Pause Final Partial";
-   GCTraceTime(Info, gc) time(msg, gc_timer());
-   EventMark em("%s", msg);
-
-   ShenandoahWorkerScope scope(workers(), ShenandoahWorkerPolicy::calc_workers_for_stw_partial());
-
-   op_final_partial();
- }
-
  void ShenandoahHeap::entry_init_traversal() {
    ShenandoahGCPhase total_phase(ShenandoahPhaseTimings::total_pause);
    ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_traversal_gc);

    static const char* msg = "Pause Init Traversal";
--- 2646,2655 ----
*** 2775,2784 ****
--- 2756,2778 ----
    try_inject_alloc_failure();
    op_cleanup();
  }

+ void ShenandoahHeap::entry_cleanup_traversal() {
+   ShenandoahGCPhase phase(ShenandoahPhaseTimings::conc_cleanup);
+
+   static const char* msg = "Concurrent cleanup";
+   GCTraceTime(Info, gc) time(msg, gc_timer(), GCCause::_no_gc, true);
+   EventMark em("%s", msg);
+
+   ShenandoahWorkerScope scope(workers(), ShenandoahWorkerPolicy::calc_workers_for_conc_traversal());
+
+   try_inject_alloc_failure();
+   op_cleanup_traversal();
+ }
+
  void ShenandoahHeap::entry_cleanup_bitmaps() {
    ShenandoahGCPhase phase(ShenandoahPhaseTimings::conc_cleanup);

    static const char* msg = "Concurrent cleanup";
    GCTraceTime(Info, gc) time(msg, gc_timer(), GCCause::_no_gc, true);
*** 2803,2831 ****
    try_inject_alloc_failure();
    op_preclean();
  }
  }

- void ShenandoahHeap::entry_partial() {
-   static const char* msg = "Concurrent partial";
-   GCTraceTime(Info, gc) time(msg, gc_timer(), GCCause::_no_gc, true);
-   EventMark em("%s", msg);
-
-   TraceCollectorStats tcs(monitoring_support()->concurrent_collection_counters());
-
-   ShenandoahWorkerScope scope(workers(), ShenandoahWorkerPolicy::calc_workers_for_conc_partial());
-
-   try_inject_alloc_failure();
-   op_partial();
- }
-
  void ShenandoahHeap::entry_traversal() {
    static const char* msg = "Concurrent traversal";
    GCTraceTime(Info, gc) time(msg, gc_timer(), GCCause::_no_gc, true);
    EventMark em("%s", msg);

!   TraceCollectorStats tcs(monitoring_support()->concurrent_collection_counters());

    ShenandoahWorkerScope scope(workers(), ShenandoahWorkerPolicy::calc_workers_for_conc_traversal());

    try_inject_alloc_failure();
    op_traversal();
--- 2797,2813 ----
    try_inject_alloc_failure();
    op_preclean();
  }
  }

  void ShenandoahHeap::entry_traversal() {
    static const char* msg = "Concurrent traversal";
    GCTraceTime(Info, gc) time(msg, gc_timer(), GCCause::_no_gc, true);
    EventMark em("%s", msg);

!   TraceCollectorStats tcs(is_minor_gc() ? monitoring_support()->partial_collection_counters()
!                                         : monitoring_support()->concurrent_collection_counters());

    ShenandoahWorkerScope scope(workers(), ShenandoahWorkerPolicy::calc_workers_for_conc_traversal());

    try_inject_alloc_failure();
    op_traversal();
*** 2901,2908 ****
--- 2883,2902 ----
      }
      r = regions.next();
    }
  }

+ bool ShenandoahHeap::is_minor_gc() const {
+   return _gc_cycle_mode.get() == MINOR;
+ }
+
+ bool ShenandoahHeap::is_major_gc() const {
+   return _gc_cycle_mode.get() == MAJOR;
+ }
+
+ void ShenandoahHeap::set_cycle_mode(GCCycleMode gc_cycle_mode) {
+   _gc_cycle_mode.set(gc_cycle_mode);
+ }
+
  char ShenandoahHeap::gc_state() const {
    return _gc_state.raw_value();
  }
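The new accessors expose the GC cycle mode that entry_traversal() consults when choosing between the partial and concurrent collection counters. A standalone sketch (illustrative only) of that flow follows; a plain enum field stands in for the shared flag object the patch reads with _gc_cycle_mode.get()/set(), and the counter groups are reduced to strings.

    #include <cstdio>

    enum GCCycleMode { NONE, MINOR, MAJOR };

    class HeapModel {
      GCCycleMode _gc_cycle_mode = NONE;  // stand-in for the shared cycle-mode flag
    public:
      void set_cycle_mode(GCCycleMode m) { _gc_cycle_mode = m; }
      bool is_minor_gc() const { return _gc_cycle_mode == MINOR; }
      bool is_major_gc() const { return _gc_cycle_mode == MAJOR; }

      void entry_traversal() {
        // Mirrors the counter selection in the patched entry_traversal().
        const char* counters = is_minor_gc() ? "partial_collection_counters"
                                             : "concurrent_collection_counters";
        printf("Concurrent traversal using %s\n", counters);
      }
    };

    int main() {
      HeapModel heap;
      heap.set_cycle_mode(MINOR);
      heap.entry_traversal();   // minor cycle -> partial collection counters
      heap.set_cycle_mode(MAJOR);
      heap.entry_traversal();   // major cycle -> concurrent collection counters
      return 0;
    }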