< prev index next >

src/share/vm/gc/g1/g1CollectedHeap.cpp

Print this page
rev 12310 : [mq]: gcinterface.patch


1979   G1StringDedup::initialize();
1980 
1981   _preserved_marks_set.init(ParallelGCThreads);
1982 
1983   _collection_set.initialize(max_regions());
1984 
1985   return JNI_OK;
1986 }
1987 
1988 void G1CollectedHeap::stop() {
1989   // Stop all concurrent threads. We do this to make sure these threads
1990   // do not continue to execute and access resources (e.g. logging)
1991   // that are destroyed during shutdown.
     // NOTE(review): from the names, _cg1r looks like the concurrent refinement
     // control and _cmThread the concurrent mark thread -- confirm against
     // their declarations before relying on this.
1992   _cg1r->stop();
1993   _cmThread->stop();
     // String deduplication runs its own thread(s); only shut them down when
     // the feature was actually enabled.
1994   if (G1StringDedup::is_enabled()) {
1995     G1StringDedup::stop();
1996   }
1997 }
1998 
1999 size_t G1CollectedHeap::conservative_max_heap_alignment() {
2000   return HeapRegion::max_region_size();
2001 }
2002 
2003 void G1CollectedHeap::post_initialize() {
     // Second-phase initialization: set up reference processing (the policy is
     // described in detail in ref_processing_init() itself).
2004   ref_processing_init();
2005 }
2006 
2007 void G1CollectedHeap::ref_processing_init() {
2008   // Reference processing in G1 currently works as follows:
2009   //
2010   // * There are two reference processor instances. One is
2011   //   used to record and process discovered references
2012   //   during concurrent marking; the other is used to
2013   //   record and process references during STW pauses
2014   //   (both full and incremental).
2015   // * Both ref processors need to 'span' the entire heap as
2016   //   the regions in the collection set may be dotted around.
2017   //
2018   // * For the concurrent marking ref processor:
2019   //   * Reference discovery is enabled at initial marking.
2020   //   * Reference discovery is disabled and the discovered
2021   //     references processed etc during remarking.
2022   //   * Reference discovery is MT (see below).


2672   return G1HeapSummary(heap_summary, heap_used, eden_used_bytes,
2673                        eden_capacity_bytes, survivor_used_bytes, num_regions());
2674 }
2675 
2676 G1EvacSummary G1CollectedHeap::create_g1_evac_summary(G1EvacStats* stats) {
     // Snapshot the evacuation allocation statistics from 'stats' into a
     // G1EvacSummary value object (presumably consumed by event/trace
     // reporting -- confirm against callers).
2677   return G1EvacSummary(stats->allocated(), stats->wasted(), stats->undo_wasted(),
2678                        stats->unused(), stats->used(), stats->region_end_waste(),
2679                        stats->regions_filled(), stats->direct_allocated(),
2680                        stats->failure_used(), stats->failure_waste());
2681 }
2682 
2683 void G1CollectedHeap::trace_heap(GCWhen::Type when, const GCTracer* gc_tracer) {
     // Report a heap summary and a metaspace summary to the tracer for the
     // given point in the GC cycle ('when').
2684   const G1HeapSummary& heap_summary = create_g1_heap_summary();
2685   gc_tracer->report_gc_heap_summary(when, heap_summary);
2686 
2687   const MetaspaceSummary& metaspace_summary = create_metaspace_summary();
2688   gc_tracer->report_metaspace_summary(when, metaspace_summary);
2689 }
2690 
2691 G1CollectedHeap* G1CollectedHeap::heap() {
2692   CollectedHeap* heap = Universe::heap();
2693   assert(heap != NULL, "Uninitialized access to G1CollectedHeap::heap()");
2694   assert(heap->kind() == CollectedHeap::G1CollectedHeap, "Not a G1CollectedHeap");
2695   return (G1CollectedHeap*)heap;
2696 }
2697 
2698 void G1CollectedHeap::gc_prologue(bool full /* Ignored */) {
     // Per-collection setup: G1 does the same prologue work regardless of
     // whether this is a full collection, hence 'full' is ignored.
2699   // always_do_update_barrier = false;
2700   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
2701   // Fill TLAB's and such
2702   accumulate_statistics_all_tlabs();
2703   ensure_parsability(true);
2704 
     // Emit a remembered-set summary keyed to the upcoming collection count.
2705   g1_rem_set()->print_periodic_summary_info("Before GC RS summary", total_collections());
2706 }
2707 
2708 void G1CollectedHeap::gc_epilogue(bool full) {
2709   // we are at the end of the GC. Total collections has already been increased.
2710   g1_rem_set()->print_periodic_summary_info("After GC RS summary", total_collections() - 1);
2711 
2712   // FIXME: what is this about?




1979   G1StringDedup::initialize();
1980 
1981   _preserved_marks_set.init(ParallelGCThreads);
1982 
1983   _collection_set.initialize(max_regions());
1984 
1985   return JNI_OK;
1986 }
1987 
1988 void G1CollectedHeap::stop() {
1989   // Stop all concurrent threads. We do this to make sure these threads
1990   // do not continue to execute and access resources (e.g. logging)
1991   // that are destroyed during shutdown.
     // NOTE(review): from the names, _cg1r looks like the concurrent refinement
     // control and _cmThread the concurrent mark thread -- confirm against
     // their declarations before relying on this.
1992   _cg1r->stop();
1993   _cmThread->stop();
     // String deduplication runs its own thread(s); only shut them down when
     // the feature was actually enabled.
1994   if (G1StringDedup::is_enabled()) {
1995     G1StringDedup::stop();
1996   }
1997 }
1998 




1999 void G1CollectedHeap::post_initialize() {
     // Second-phase initialization: set up reference processing (the policy is
     // described in detail in ref_processing_init() itself).
2000   ref_processing_init();
2001 }
2002 
2003 void G1CollectedHeap::ref_processing_init() {
2004   // Reference processing in G1 currently works as follows:
2005   //
2006   // * There are two reference processor instances. One is
2007   //   used to record and process discovered references
2008   //   during concurrent marking; the other is used to
2009   //   record and process references during STW pauses
2010   //   (both full and incremental).
2011   // * Both ref processors need to 'span' the entire heap as
2012   //   the regions in the collection set may be dotted around.
2013   //
2014   // * For the concurrent marking ref processor:
2015   //   * Reference discovery is enabled at initial marking.
2016   //   * Reference discovery is disabled and the discovered
2017   //     references processed etc during remarking.
2018   //   * Reference discovery is MT (see below).


2668   return G1HeapSummary(heap_summary, heap_used, eden_used_bytes,
2669                        eden_capacity_bytes, survivor_used_bytes, num_regions());
2670 }
2671 
2672 G1EvacSummary G1CollectedHeap::create_g1_evac_summary(G1EvacStats* stats) {
     // Snapshot the evacuation allocation statistics from 'stats' into a
     // G1EvacSummary value object (presumably consumed by event/trace
     // reporting -- confirm against callers).
2673   return G1EvacSummary(stats->allocated(), stats->wasted(), stats->undo_wasted(),
2674                        stats->unused(), stats->used(), stats->region_end_waste(),
2675                        stats->regions_filled(), stats->direct_allocated(),
2676                        stats->failure_used(), stats->failure_waste());
2677 }
2678 
2679 void G1CollectedHeap::trace_heap(GCWhen::Type when, const GCTracer* gc_tracer) {
     // Report a heap summary and a metaspace summary to the tracer for the
     // given point in the GC cycle ('when').
2680   const G1HeapSummary& heap_summary = create_g1_heap_summary();
2681   gc_tracer->report_gc_heap_summary(when, heap_summary);
2682 
2683   const MetaspaceSummary& metaspace_summary = create_metaspace_summary();
2684   gc_tracer->report_metaspace_summary(when, metaspace_summary);
2685 }
2686 
2687 G1CollectedHeap* G1CollectedHeap::heap() {
2688   CollectedHeap* heap = GC::gc()->heap();
2689   assert(heap != NULL, "Uninitialized access to G1CollectedHeap::heap()");
2690   assert(heap->kind() == CollectedHeap::G1CollectedHeap, "Not a G1CollectedHeap");
2691   return (G1CollectedHeap*)heap;
2692 }
2693 
2694 void G1CollectedHeap::gc_prologue(bool full /* Ignored */) {
     // Per-collection setup: G1 does the same prologue work regardless of
     // whether this is a full collection, hence 'full' is ignored.
2695   // always_do_update_barrier = false;
2696   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
2697   // Fill TLAB's and such
2698   accumulate_statistics_all_tlabs();
2699   ensure_parsability(true);
2700 
     // Emit a remembered-set summary keyed to the upcoming collection count.
2701   g1_rem_set()->print_periodic_summary_info("Before GC RS summary", total_collections());
2702 }
2703 
2704 void G1CollectedHeap::gc_epilogue(bool full) {
2705   // we are at the end of the GC. Total collections has already been increased.
2706   g1_rem_set()->print_periodic_summary_info("After GC RS summary", total_collections() - 1);
2707 
2708   // FIXME: what is this about?


< prev index next >