< prev index next >

src/share/vm/gc/g1/g1CollectedHeap.cpp

Print this page
rev 11777 : [mq]: gcinterface.patch
rev 11778 : [mq]: service.patch


1979   G1StringDedup::initialize();
1980 
1981   _preserved_marks_set.init(ParallelGCThreads);
1982 
1983   _collection_set.initialize(max_regions());
1984 
1985   return JNI_OK;
1986 }
1987 
// Shut down G1's concurrent background threads during VM exit so they no
// longer touch resources (e.g. the logging subsystem) that are destroyed
// on the shutdown path.
void G1CollectedHeap::stop() {
  // Stop all concurrent threads. We do this to make sure these threads
  // do not continue to execute and access resources (e.g. logging)
  // that are destroyed during shutdown.
  _cg1r->stop();     // concurrent refinement
  _cmThread->stop(); // concurrent mark thread
  if (G1StringDedup::is_enabled()) {
    G1StringDedup::stop(); // string dedup thread only exists when enabled
  }
}
1998 
// A conservative upper bound on the heap alignment G1 may require:
// the maximum region size is safe regardless of which region size
// ergonomics later selects.
size_t G1CollectedHeap::conservative_max_heap_alignment() {
  return HeapRegion::max_region_size();
}
2002 
// Second-phase initialization, run after the heap itself is set up.
// Currently only wires up the reference processors.
void G1CollectedHeap::post_initialize() {
  ref_processing_init();
}
2006 
2007 void G1CollectedHeap::ref_processing_init() {
2008   // Reference processing in G1 currently works as follows:
2009   //
2010   // * There are two reference processor instances. One is
2011   //   used to record and process discovered references
2012   //   during concurrent marking; the other is used to
2013   //   record and process references during STW pauses
2014   //   (both full and incremental).
2015   // * Both ref processors need to 'span' the entire heap as
2016   //   the regions in the collection set may be dotted around.
2017   //
2018   // * For the concurrent marking ref processor:
2019   //   * Reference discovery is enabled at initial marking.
2020   //   * Reference discovery is disabled and the discovered
2021   //     references processed etc during remarking.
2022   //   * Reference discovery is MT (see below).


2675   return G1HeapSummary(heap_summary, heap_used, eden_used_bytes,
2676                        eden_capacity_bytes, survivor_used_bytes, num_regions());
2677 }
2678 
// Packages the per-collection evacuation allocation statistics from
// 'stats' into a G1EvacSummary value for tracing/event consumers.
// NOTE: the arguments are positional and must stay in the order the
// G1EvacSummary constructor expects.
G1EvacSummary G1CollectedHeap::create_g1_evac_summary(G1EvacStats* stats) {
  return G1EvacSummary(stats->allocated(), stats->wasted(), stats->undo_wasted(),
                       stats->unused(), stats->used(), stats->region_end_waste(),
                       stats->regions_filled(), stats->direct_allocated(),
                       stats->failure_used(), stats->failure_waste());
}
2685 
// Reports heap and metaspace occupancy summaries to the given GC tracer.
// 'when' tags the report (e.g. before vs. after a collection).
void G1CollectedHeap::trace_heap(GCWhen::Type when, const GCTracer* gc_tracer) {
  const G1HeapSummary& heap_summary = create_g1_heap_summary();
  gc_tracer->report_gc_heap_summary(when, heap_summary);

  const MetaspaceSummary& metaspace_summary = create_metaspace_summary();
  gc_tracer->report_metaspace_summary(when, metaspace_summary);
}
2693 
2694 G1CollectedHeap* G1CollectedHeap::heap() {
2695   CollectedHeap* heap = Universe::heap();
2696   assert(heap != NULL, "Uninitialized access to G1CollectedHeap::heap()");
2697   assert(heap->kind() == CollectedHeap::G1CollectedHeap, "Not a G1CollectedHeap");
2698   return (G1CollectedHeap*)heap;
2699 }
2700 
// Per-GC setup run at the start of a collection.
// 'full' is ignored: G1 uses the same prologue for young and full GCs.
void G1CollectedHeap::gc_prologue(bool full /* Ignored */) {
  // always_do_update_barrier = false;
  assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
  // Fill TLAB's and such
  accumulate_statistics_all_tlabs();
  // 'true' presumably asks for TLAB retirement so the heap is parsable
  // during the GC -- confirm against CollectedHeap::ensure_parsability.
  ensure_parsability(true);

  g1_rem_set()->print_periodic_summary_info("Before GC RS summary", total_collections());
}
2710 
2711 void G1CollectedHeap::gc_epilogue(bool full) {
2712   // we are at the end of the GC. Total collections has already been increased.
2713   g1_rem_set()->print_periodic_summary_info("After GC RS summary", total_collections() - 1);
2714 
2715   // FIXME: what is this about?




1979   G1StringDedup::initialize();
1980 
1981   _preserved_marks_set.init(ParallelGCThreads);
1982 
1983   _collection_set.initialize(max_regions());
1984 
1985   return JNI_OK;
1986 }
1987 
// Shut down G1's concurrent background threads during VM exit so they no
// longer touch resources (e.g. the logging subsystem) that are destroyed
// on the shutdown path.
void G1CollectedHeap::stop() {
  // Stop all concurrent threads. We do this to make sure these threads
  // do not continue to execute and access resources (e.g. logging)
  // that are destroyed during shutdown.
  _cg1r->stop();     // concurrent refinement
  _cmThread->stop(); // concurrent mark thread
  if (G1StringDedup::is_enabled()) {
    G1StringDedup::stop(); // string dedup thread only exists when enabled
  }
}
1998 




// Second-phase initialization, run after the heap itself is set up.
// Currently only wires up the reference processors.
void G1CollectedHeap::post_initialize() {
  ref_processing_init();
}
2002 
2003 void G1CollectedHeap::ref_processing_init() {
2004   // Reference processing in G1 currently works as follows:
2005   //
2006   // * There are two reference processor instances. One is
2007   //   used to record and process discovered references
2008   //   during concurrent marking; the other is used to
2009   //   record and process references during STW pauses
2010   //   (both full and incremental).
2011   // * Both ref processors need to 'span' the entire heap as
2012   //   the regions in the collection set may be dotted around.
2013   //
2014   // * For the concurrent marking ref processor:
2015   //   * Reference discovery is enabled at initial marking.
2016   //   * Reference discovery is disabled and the discovered
2017   //     references processed etc during remarking.
2018   //   * Reference discovery is MT (see below).


2671   return G1HeapSummary(heap_summary, heap_used, eden_used_bytes,
2672                        eden_capacity_bytes, survivor_used_bytes, num_regions());
2673 }
2674 
// Packages the per-collection evacuation allocation statistics from
// 'stats' into a G1EvacSummary value for tracing/event consumers.
// NOTE: the arguments are positional and must stay in the order the
// G1EvacSummary constructor expects.
G1EvacSummary G1CollectedHeap::create_g1_evac_summary(G1EvacStats* stats) {
  return G1EvacSummary(stats->allocated(), stats->wasted(), stats->undo_wasted(),
                       stats->unused(), stats->used(), stats->region_end_waste(),
                       stats->regions_filled(), stats->direct_allocated(),
                       stats->failure_used(), stats->failure_waste());
}
2681 
// Reports heap and metaspace occupancy summaries to the given GC tracer.
// 'when' tags the report (e.g. before vs. after a collection).
void G1CollectedHeap::trace_heap(GCWhen::Type when, const GCTracer* gc_tracer) {
  const G1HeapSummary& heap_summary = create_g1_heap_summary();
  gc_tracer->report_gc_heap_summary(when, heap_summary);

  const MetaspaceSummary& metaspace_summary = create_metaspace_summary();
  gc_tracer->report_metaspace_summary(when, metaspace_summary);
}
2689 
2690 G1CollectedHeap* G1CollectedHeap::heap() {
2691   CollectedHeap* heap = GC::gc()->heap();
2692   assert(heap != NULL, "Uninitialized access to G1CollectedHeap::heap()");
2693   assert(heap->kind() == CollectedHeap::G1CollectedHeap, "Not a G1CollectedHeap");
2694   return (G1CollectedHeap*)heap;
2695 }
2696 
// Per-GC setup run at the start of a collection.
// 'full' is ignored: G1 uses the same prologue for young and full GCs.
void G1CollectedHeap::gc_prologue(bool full /* Ignored */) {
  // always_do_update_barrier = false;
  assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
  // Fill TLAB's and such
  accumulate_statistics_all_tlabs();
  // 'true' presumably asks for TLAB retirement so the heap is parsable
  // during the GC -- confirm against CollectedHeap::ensure_parsability.
  ensure_parsability(true);

  g1_rem_set()->print_periodic_summary_info("Before GC RS summary", total_collections());
}
2706 
2707 void G1CollectedHeap::gc_epilogue(bool full) {
2708   // we are at the end of the GC. Total collections has already been increased.
2709   g1_rem_set()->print_periodic_summary_info("After GC RS summary", total_collections() - 1);
2710 
2711   // FIXME: what is this about?


< prev index next >