// Quiesces the heap ahead of a full collection: retires the mutator and GC
// allocation regions, clears HRRS scratch state, and abandons the incremental
// collection set so it can be rebuilt from scratch after the full GC.
1115 void G1CollectedHeap::prepare_heap_for_full_collection() {
1116 // Make sure we'll choose a new allocation region afterwards.
1117 _allocator->release_mutator_alloc_region();
1118 _allocator->abandon_gc_alloc_regions();
1119 g1_rem_set()->cleanupHRRS();
1120
1121 // We may have added regions to the current incremental collection
1122 // set between the last GC or pause and now. We need to clear the
1123 // incremental collection set and then start rebuilding it afresh
1124 // after this full GC.
1125 abandon_collection_set(collection_set());
1126
// false => tear down all region sets, not just the free list; they are
// rebuilt afterwards in prepare_heap_for_mutators().
1127 tear_down_region_sets(false /* free_list_only */);
// Reset the collector state flag; the name suggests the next pauses run in
// young-only mode -- confirm against collector_state() semantics.
1128 collector_state()->set_gcs_are_young(true);
1129 }
1130
// Sanity checks performed immediately before a full collection: a
// user-requested GC cause must coincide with an explicit GC, the cached
// used() accounting must match a full recalculation, and the heap verifier
// runs in full-GC mode.
1131 void G1CollectedHeap::verify_before_full_collection(bool explicit_gc) {
1132 assert(!GCCause::is_user_requested_gc(gc_cause()) || explicit_gc, "invariant");
1133 assert(used() == recalculate_used(), "Should be equal");
1134 _verifier->verify_region_sets_optional();
// Pass the verification type so the verifier applies full-GC expectations,
// consistent with the G1HeapVerifier API used elsewhere in this file.
1135 _verifier->verify_before_gc(G1HeapVerifier::G1VerifyFull);
1136 _verifier->check_bitmaps("Full GC Start");
1137 }
1138
// Restores the invariants mutators depend on after a full collection:
// purges unloaded class-loader metadata, rebuilds the region sets and strong
// code root lists, starts a fresh incremental collection set, and re-enables
// mutator allocation.
1139 void G1CollectedHeap::prepare_heap_for_mutators() {
1140 // Delete metaspaces for unloaded class loaders and clean up loader_data graph
1141 ClassLoaderDataGraph::purge();
1142 MetaspaceAux::verify_metrics();
1143
1144 // Prepare heap for normal collections.
1145 assert(num_free_regions() == 0, "we should not have added any free regions");
// false => rebuild all region sets, not just the free list (mirrors the
// tear_down_region_sets(false) call made before the full collection).
1146 rebuild_region_sets(false /* free_list_only */);
1147 abort_refinement();
1148 resize_if_necessary_after_full_collection();
1149
1150 // Rebuild the strong code root lists for each region
1151 rebuild_strong_code_roots();
1152
1153 // Start a new incremental collection set for the next pause
1154 start_new_collection_set();
1155
// Re-enable mutator allocation (released in prepare_heap_for_full_collection).
1156 _allocator->init_mutator_alloc_region();
1157
1158 // Post collection state updates.
1159 MetaspaceGC::compute_new_size();
1160 }
1161
// Discards all pending card-refinement work: resets the hot card cache (when
// it is enabled) and abandons the logged dirty-card buffers, leaving the
// dirty card queue set empty.
1162 void G1CollectedHeap::abort_refinement() {
1163 if (_hot_card_cache->use_cache()) {
1164 _hot_card_cache->reset_hot_cache();
1165 }
1166
1167 // Discard all remembered set updates.
1168 JavaThread::dirty_card_queue_set().abandon_logs();
1169 assert(dirty_card_queue_set().completed_buffers_num() == 0, "DCQS should be empty");
1170 }
1171
// Post-full-collection verification: checks GC time stamps, region sets, the
// heap contents (in full-GC mode), and the marking bitmaps, and asserts that
// no young regions remain.
1172 void G1CollectedHeap::verify_after_full_collection() {
1173 check_gc_time_stamps();
1174 _hrm.verify_optional();
1175 _verifier->verify_region_sets_optional();
// Pass the verification type so the verifier applies full-GC expectations,
// consistent with the G1HeapVerifier API used elsewhere in this file.
1176 _verifier->verify_after_gc(G1HeapVerifier::G1VerifyFull);
1177 // Clear the previous marking bitmap, if needed for bitmap verification.
1178 // Note we cannot do this when we clear the next marking bitmap in
1179 // G1ConcurrentMark::abort() above since VerifyDuringGC verifies the
1180 // objects marked during a full GC against the previous bitmap.
1181 // But we need to clear it before calling check_bitmaps below since
1182 // the full GC has compacted objects and updated TAMS but not updated
1183 // the prev bitmap.
1184 if (G1VerifyBitmaps) {
1185 GCTraceTime(Debug, gc)("Clear Bitmap for Verification");
1186 _cm->clear_prev_bitmap(workers());
1187 }
1188 _verifier->check_bitmaps("Full GC End");
1189
1190 // At this point there should be no regions in the
1191 // entire heap tagged as young.
1192 assert(check_young_list_empty(), "young list should be empty at this point");
1193
1194 // Note: since we've just done a full GC, concurrent
1195 // marking is no longer active. Therefore we need not
1196 // re-enable reference discovery for the CM ref processor.
2967 }
2968
2969 G1HeapTransition heap_transition(this);
2970 size_t heap_used_bytes_before_gc = used();
2971
2972 // Don't dynamically change the number of GC threads this early. A value of
2973 // 0 is used to indicate serial work. When parallel work is done,
2974 // it will be set.
2975
2976 { // Call to jvmpi::post_class_unload_events must occur outside of active GC
2977 IsGCActiveMark x;
2978
2979 gc_prologue(false);
2980
2981 if (VerifyRememberedSets) {
2982 log_info(gc, verify)("[Verifying RemSets before GC]");
2983 VerifyRegionRemSetClosure v_cl;
2984 heap_region_iterate(&v_cl);
2985 }
2986
2987 _verifier->verify_before_gc();
2988
2989 _verifier->check_bitmaps("GC Start");
2990
2991 #if COMPILER2_OR_JVMCI
2992 DerivedPointerTable::clear();
2993 #endif
2994
2995 // Please see comment in g1CollectedHeap.hpp and
2996 // G1CollectedHeap::ref_processing_init() to see how
2997 // reference processing currently works in G1.
2998
2999 // Enable discovery in the STW reference processor
3000 if (g1_policy()->should_process_references()) {
3001 ref_processor_stw()->enable_discovery();
3002 } else {
3003 ref_processor_stw()->disable_discovery();
3004 }
3005
3006 {
3007 // We want to temporarily turn off discovery by the
3127 // has just got initialized after the previous CSet was freed.
3128 _cm->verify_no_cset_oops();
3129
3130 // This timing is only used by the ergonomics to handle our pause target.
3131 // It is unclear why this should not include the full pause. We will
3132 // investigate this in CR 7178365.
3133 double sample_end_time_sec = os::elapsedTime();
3134 double pause_time_ms = (sample_end_time_sec - sample_start_time_sec) * MILLIUNITS;
3135 size_t total_cards_scanned = g1_policy()->phase_times()->sum_thread_work_items(G1GCPhaseTimes::ScanRS, G1GCPhaseTimes::ScanRSScannedCards);
3136 g1_policy()->record_collection_pause_end(pause_time_ms, total_cards_scanned, heap_used_bytes_before_gc);
3137
3138 evacuation_info.set_collectionset_used_before(collection_set()->bytes_used_before());
3139 evacuation_info.set_bytes_copied(g1_policy()->bytes_copied_during_gc());
3140
3141 if (VerifyRememberedSets) {
3142 log_info(gc, verify)("[Verifying RemSets after GC]");
3143 VerifyRegionRemSetClosure v_cl;
3144 heap_region_iterate(&v_cl);
3145 }
3146
3147 _verifier->verify_after_gc();
3148 _verifier->check_bitmaps("GC End");
3149
3150 assert(!ref_processor_stw()->discovery_enabled(), "Postcondition");
3151 ref_processor_stw()->verify_no_references_recorded();
3152
3153 // CM reference discovery will be re-enabled if necessary.
3154 }
3155
3156 #ifdef TRACESPINNING
3157 ParallelTaskTerminator::print_termination_counts();
3158 #endif
3159
3160 gc_epilogue(false);
3161 }
3162
3163 // Print the remainder of the GC log output.
3164 if (evacuation_failed()) {
3165 log_info(gc)("To-space exhausted");
3166 }
3167
|
// Quiesces the heap ahead of a full collection: retires the mutator and GC
// allocation regions, clears HRRS scratch state, and abandons the incremental
// collection set so it can be rebuilt from scratch after the full GC.
1115 void G1CollectedHeap::prepare_heap_for_full_collection() {
1116 // Make sure we'll choose a new allocation region afterwards.
1117 _allocator->release_mutator_alloc_region();
1118 _allocator->abandon_gc_alloc_regions();
1119 g1_rem_set()->cleanupHRRS();
1120
1121 // We may have added regions to the current incremental collection
1122 // set between the last GC or pause and now. We need to clear the
1123 // incremental collection set and then start rebuilding it afresh
1124 // after this full GC.
1125 abandon_collection_set(collection_set());
1126
// false => tear down all region sets, not just the free list; they are
// rebuilt afterwards in prepare_heap_for_mutators().
1127 tear_down_region_sets(false /* free_list_only */);
// Reset the collector state flag; the name suggests the next pauses run in
// young-only mode -- confirm against collector_state() semantics.
1128 collector_state()->set_gcs_are_young(true);
1129 }
1130
// Sanity checks performed immediately before a full collection: a
// user-requested GC cause must coincide with an explicit GC, the cached
// used() accounting must match a full recalculation, and the heap verifier
// runs in full-GC mode (G1VerifyFull).
1131 void G1CollectedHeap::verify_before_full_collection(bool explicit_gc) {
1132 assert(!GCCause::is_user_requested_gc(gc_cause()) || explicit_gc, "invariant");
1133 assert(used() == recalculate_used(), "Should be equal");
1134 _verifier->verify_region_sets_optional();
1135 _verifier->verify_before_gc(G1HeapVerifier::G1VerifyFull);
1136 _verifier->check_bitmaps("Full GC Start");
1137 }
1138
// Restores the invariants mutators depend on after a full collection:
// purges unloaded class-loader metadata, rebuilds the region sets and strong
// code root lists, starts a fresh incremental collection set, and re-enables
// mutator allocation.
1139 void G1CollectedHeap::prepare_heap_for_mutators() {
1140 // Delete metaspaces for unloaded class loaders and clean up loader_data graph
1141 ClassLoaderDataGraph::purge();
1142 MetaspaceAux::verify_metrics();
1143
1144 // Prepare heap for normal collections.
1145 assert(num_free_regions() == 0, "we should not have added any free regions");
// false => rebuild all region sets, not just the free list (mirrors the
// tear_down_region_sets(false) call made before the full collection).
1146 rebuild_region_sets(false /* free_list_only */);
1147 abort_refinement();
1148 resize_if_necessary_after_full_collection();
1149
1150 // Rebuild the strong code root lists for each region
1151 rebuild_strong_code_roots();
1152
1153 // Start a new incremental collection set for the next pause
1154 start_new_collection_set();
1155
// Re-enable mutator allocation (released in prepare_heap_for_full_collection).
1156 _allocator->init_mutator_alloc_region();
1157
1158 // Post collection state updates.
1159 MetaspaceGC::compute_new_size();
1160 }
1161
// Discards all pending card-refinement work: resets the hot card cache (when
// it is enabled) and abandons the logged dirty-card buffers, leaving the
// dirty card queue set empty.
1162 void G1CollectedHeap::abort_refinement() {
1163 if (_hot_card_cache->use_cache()) {
1164 _hot_card_cache->reset_hot_cache();
1165 }
1166
1167 // Discard all remembered set updates.
1168 JavaThread::dirty_card_queue_set().abandon_logs();
1169 assert(dirty_card_queue_set().completed_buffers_num() == 0, "DCQS should be empty");
1170 }
1171
// Post-full-collection verification: checks GC time stamps, region sets, the
// heap contents (in full-GC mode, G1VerifyFull), and the marking bitmaps, and
// asserts that no young regions remain.
1172 void G1CollectedHeap::verify_after_full_collection() {
1173 check_gc_time_stamps();
1174 _hrm.verify_optional();
1175 _verifier->verify_region_sets_optional();
1176 _verifier->verify_after_gc(G1HeapVerifier::G1VerifyFull);
1177 // Clear the previous marking bitmap, if needed for bitmap verification.
1178 // Note we cannot do this when we clear the next marking bitmap in
1179 // G1ConcurrentMark::abort() above since VerifyDuringGC verifies the
1180 // objects marked during a full GC against the previous bitmap.
1181 // But we need to clear it before calling check_bitmaps below since
1182 // the full GC has compacted objects and updated TAMS but not updated
1183 // the prev bitmap.
1184 if (G1VerifyBitmaps) {
1185 GCTraceTime(Debug, gc)("Clear Bitmap for Verification");
1186 _cm->clear_prev_bitmap(workers());
1187 }
1188 _verifier->check_bitmaps("Full GC End");
1189
1190 // At this point there should be no regions in the
1191 // entire heap tagged as young.
1192 assert(check_young_list_empty(), "young list should be empty at this point");
1193
1194 // Note: since we've just done a full GC, concurrent
1195 // marking is no longer active. Therefore we need not
1196 // re-enable reference discovery for the CM ref processor.
2967 }
2968
2969 G1HeapTransition heap_transition(this);
2970 size_t heap_used_bytes_before_gc = used();
2971
2972 // Don't dynamically change the number of GC threads this early. A value of
2973 // 0 is used to indicate serial work. When parallel work is done,
2974 // it will be set.
2975
2976 { // Call to jvmpi::post_class_unload_events must occur outside of active GC
2977 IsGCActiveMark x;
2978
2979 gc_prologue(false);
2980
2981 if (VerifyRememberedSets) {
2982 log_info(gc, verify)("[Verifying RemSets before GC]");
2983 VerifyRegionRemSetClosure v_cl;
2984 heap_region_iterate(&v_cl);
2985 }
2986
2987 _verifier->verify_before_gc(collector_state()->yc_type() == Mixed ? G1HeapVerifier::G1VerifyMixed : G1HeapVerifier::G1VerifyYoung);
2988
2989 _verifier->check_bitmaps("GC Start");
2990
2991 #if COMPILER2_OR_JVMCI
2992 DerivedPointerTable::clear();
2993 #endif
2994
2995 // Please see comment in g1CollectedHeap.hpp and
2996 // G1CollectedHeap::ref_processing_init() to see how
2997 // reference processing currently works in G1.
2998
2999 // Enable discovery in the STW reference processor
3000 if (g1_policy()->should_process_references()) {
3001 ref_processor_stw()->enable_discovery();
3002 } else {
3003 ref_processor_stw()->disable_discovery();
3004 }
3005
3006 {
3007 // We want to temporarily turn off discovery by the
3127 // has just got initialized after the previous CSet was freed.
3128 _cm->verify_no_cset_oops();
3129
3130 // This timing is only used by the ergonomics to handle our pause target.
3131 // It is unclear why this should not include the full pause. We will
3132 // investigate this in CR 7178365.
3133 double sample_end_time_sec = os::elapsedTime();
3134 double pause_time_ms = (sample_end_time_sec - sample_start_time_sec) * MILLIUNITS;
3135 size_t total_cards_scanned = g1_policy()->phase_times()->sum_thread_work_items(G1GCPhaseTimes::ScanRS, G1GCPhaseTimes::ScanRSScannedCards);
3136 g1_policy()->record_collection_pause_end(pause_time_ms, total_cards_scanned, heap_used_bytes_before_gc);
3137
3138 evacuation_info.set_collectionset_used_before(collection_set()->bytes_used_before());
3139 evacuation_info.set_bytes_copied(g1_policy()->bytes_copied_during_gc());
3140
3141 if (VerifyRememberedSets) {
3142 log_info(gc, verify)("[Verifying RemSets after GC]");
3143 VerifyRegionRemSetClosure v_cl;
3144 heap_region_iterate(&v_cl);
3145 }
3146
3147 _verifier->verify_after_gc(collector_state()->yc_type() == Mixed ? G1HeapVerifier::G1VerifyMixed : G1HeapVerifier::G1VerifyYoung);
3148 _verifier->check_bitmaps("GC End");
3149
3150 assert(!ref_processor_stw()->discovery_enabled(), "Postcondition");
3151 ref_processor_stw()->verify_no_references_recorded();
3152
3153 // CM reference discovery will be re-enabled if necessary.
3154 }
3155
3156 #ifdef TRACESPINNING
3157 ParallelTaskTerminator::print_termination_counts();
3158 #endif
3159
3160 gc_epilogue(false);
3161 }
3162
3163 // Print the remainder of the GC log output.
3164 if (evacuation_failed()) {
3165 log_info(gc)("To-space exhausted");
3166 }
3167
|