1284 // set between the last GC or pause and now. We need to clear the
1285 // incremental collection set and then start rebuilding it afresh
1286 // after this full GC.
1287 abandon_collection_set(g1_policy()->inc_cset_head());
1288 g1_policy()->clear_incremental_cset();
1289 g1_policy()->stop_incremental_cset_building();
1290
1291 tear_down_region_sets(false /* free_list_only */);
1292 g1_policy()->set_gcs_are_young(true);
1293
1294 // See the comments in g1CollectedHeap.hpp and
1295 // G1CollectedHeap::ref_processing_init() about
1296 // how reference processing currently works in G1.
1297
1298 // Temporarily make discovery by the STW ref processor single threaded (non-MT).
1299 ReferenceProcessorMTDiscoveryMutator stw_rp_disc_ser(ref_processor_stw(), false);
1300
1301 // Temporarily clear the STW ref processor's _is_alive_non_header field.
1302 ReferenceProcessorIsAliveMutator stw_rp_is_alive_null(ref_processor_stw(), NULL);
1303
1304 ref_processor_stw()->enable_discovery(true /*verify_disabled*/, true /*verify_no_refs*/);
1305 ref_processor_stw()->setup_policy(do_clear_all_soft_refs);
1306
1307 // Do collection work
1308 {
1309 HandleMark hm; // Discard invalid handles created during gc
1310 G1MarkSweep::invoke_at_safepoint(ref_processor_stw(), do_clear_all_soft_refs);
1311 }
1312
1313 assert(num_free_regions() == 0, "we should not have added any free regions");
1314 rebuild_region_sets(false /* free_list_only */);
1315
1316 // Enqueue any discovered reference objects that have
1317 // not been removed from the discovered lists.
1318 ref_processor_stw()->enqueue_discovered_references();
1319
1320 COMPILER2_PRESENT(DerivedPointerTable::update_pointers());
1321
1322 MemoryService::track_memory_usage();
1323
1324 assert(!ref_processor_stw()->discovery_enabled(), "Postcondition");
3733 // it will be set.
3734
3735 { // Call to jvmpi::post_class_unload_events must occur outside of active GC
3736 IsGCActiveMark x;
3737
3738 gc_prologue(false);
3739 increment_total_collections(false /* full gc */);
3740 increment_gc_time_stamp();
3741
3742 verify_before_gc();
3743
3744 check_bitmaps("GC Start");
3745
3746 COMPILER2_PRESENT(DerivedPointerTable::clear());
3747
3748 // Please see comment in g1CollectedHeap.hpp and
3749 // G1CollectedHeap::ref_processing_init() to see how
3750 // reference processing currently works in G1.
3751
3752 // Enable discovery in the STW reference processor
3753 ref_processor_stw()->enable_discovery(true /*verify_disabled*/,
3754 true /*verify_no_refs*/);
3755
3756 {
3757 // We want to temporarily turn off discovery by the
3758 // CM ref processor, if necessary, and turn it back
3759 // on again later if we do. Using a scoped
3760 // NoRefDiscovery object will do this.
3761 NoRefDiscovery no_cm_discovery(ref_processor_cm());
3762
3763 // Forget the current alloc region (we might even choose it to be part
3764 // of the collection set!).
3765 _allocator->release_mutator_alloc_region();
3766
3767 // We should call this after we retire the mutator alloc
3768 // region(s) so that all the ALLOC / RETIRE events are generated
3769 // before the start GC event.
3770 _hr_printer.start_gc(false /* full */, (size_t) total_collections());
3771
3772 // This timing is only used by the ergonomics to handle our pause target.
3773 // It is unclear why this should not include the full pause. We will
3774 // investigate this in CR 7178365.
|
1284 // set between the last GC or pause and now. We need to clear the
1285 // incremental collection set and then start rebuilding it afresh
1286 // after this full GC.
1287 abandon_collection_set(g1_policy()->inc_cset_head());
1288 g1_policy()->clear_incremental_cset();
1289 g1_policy()->stop_incremental_cset_building();
1290
1291 tear_down_region_sets(false /* free_list_only */);
1292 g1_policy()->set_gcs_are_young(true);
1293
1294 // See the comments in g1CollectedHeap.hpp and
1295 // G1CollectedHeap::ref_processing_init() about
1296 // how reference processing currently works in G1.
1297
1298 // Temporarily make discovery by the STW ref processor single threaded (non-MT).
1299 ReferenceProcessorMTDiscoveryMutator stw_rp_disc_ser(ref_processor_stw(), false);
1300
1301 // Temporarily clear the STW ref processor's _is_alive_non_header field.
1302 ReferenceProcessorIsAliveMutator stw_rp_is_alive_null(ref_processor_stw(), NULL);
1303
1304 ref_processor_stw()->enable_discovery();
1305 ref_processor_stw()->setup_policy(do_clear_all_soft_refs);
1306
1307 // Do collection work
1308 {
1309 HandleMark hm; // Discard invalid handles created during gc
1310 G1MarkSweep::invoke_at_safepoint(ref_processor_stw(), do_clear_all_soft_refs);
1311 }
1312
1313 assert(num_free_regions() == 0, "we should not have added any free regions");
1314 rebuild_region_sets(false /* free_list_only */);
1315
1316 // Enqueue any discovered reference objects that have
1317 // not been removed from the discovered lists.
1318 ref_processor_stw()->enqueue_discovered_references();
1319
1320 COMPILER2_PRESENT(DerivedPointerTable::update_pointers());
1321
1322 MemoryService::track_memory_usage();
1323
1324 assert(!ref_processor_stw()->discovery_enabled(), "Postcondition");
3733 // it will be set.
3734
3735 { // Call to jvmpi::post_class_unload_events must occur outside of active GC
3736 IsGCActiveMark x;
3737
3738 gc_prologue(false);
3739 increment_total_collections(false /* full gc */);
3740 increment_gc_time_stamp();
3741
3742 verify_before_gc();
3743
3744 check_bitmaps("GC Start");
3745
3746 COMPILER2_PRESENT(DerivedPointerTable::clear());
3747
3748 // Please see comment in g1CollectedHeap.hpp and
3749 // G1CollectedHeap::ref_processing_init() to see how
3750 // reference processing currently works in G1.
3751
3752 // Enable discovery in the STW reference processor
3753 ref_processor_stw()->enable_discovery();
3754
3755 {
3756 // We want to temporarily turn off discovery by the
3757 // CM ref processor, if necessary, and turn it back
3758 // on again later if we do. Using a scoped
3759 // NoRefDiscovery object will do this.
3760 NoRefDiscovery no_cm_discovery(ref_processor_cm());
3761
3762 // Forget the current alloc region (we might even choose it to be part
3763 // of the collection set!).
3764 _allocator->release_mutator_alloc_region();
3765
3766 // We should call this after we retire the mutator alloc
3767 // region(s) so that all the ALLOC / RETIRE events are generated
3768 // before the start GC event.
3769 _hr_printer.start_gc(false /* full */, (size_t) total_collections());
3770
3771 // This timing is only used by the ergonomics to handle our pause target.
3772 // It is unclear why this should not include the full pause. We will
3773 // investigate this in CR 7178365.
|