// NOTE(review): this is a numbered listing (original lines 240-284) of the
// interior of a larger full-collection routine; the enclosing function starts
// before and ends after what is visible here, so only comments are added.
240 PSCardTable* card_table = heap->card_table();
241 MemRegion old_mr = heap->old_gen()->reserved();
// If the young gen is empty, old-gen cards are simply cleared; otherwise the
// whole old-gen region is invalidated (dirtied) so its cards get rescanned.
242 if (young_gen_empty) {
243 card_table->clear(MemRegion(old_mr.start(), old_mr.end()));
244 } else {
245 card_table->invalidate(MemRegion(old_mr.start(), old_mr.end()));
246 }
247
248 // Delete metaspaces for unloaded class loaders and clean up loader_data graph
249 ClassLoaderDataGraph::purge();
250 MetaspaceUtils::verify_metrics();
251
// Post-GC fixups for subsystems that cached state across the collection.
252 BiasedLocking::restore_marks();
253 CodeCache::gc_epilogue();
254 JvmtiExport::gc_epilogue();
255
// Derived pointers only exist for C2/JVMCI-compiled code.
256 #if COMPILER2_OR_JVMCI
257 DerivedPointerTable::update_pointers();
258 #endif
259
// Explicit reference-enqueue phase: timed via ReferenceProcessorPhaseTimes and
// reported by print_enqueue_phase(). (The revision after the '|' marker drops
// this phase and asserts discovery was disabled instead.)
260 ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_queues());
261
262 ref_processor()->enqueue_discovered_references(NULL, &pt);
263
264 pt.print_enqueue_phase();
265
266 // Update time of last GC
267 reset_millis_since_last_gc();
268
269 // Let the size policy know we're done
270 size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
271
// Adaptive-size ergonomics: logging plus (further below, truncated in this
// view) promo-stat updates and generation resizing.
272 if (UseAdaptiveSizePolicy) {
273
274 log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
275 log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
276 old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
277
278 // Don't check if the size_policy is ready here. Let
279 // the size_policy check that internally.
280 if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
281 AdaptiveSizePolicy::should_update_promo_stats(gc_cause)) {
282 // Swap the survivor spaces if from_space is empty. The
283 // resize_young_gen() called below is normally used after
284 // a successful young GC and swapping of survivor spaces;
|
// NOTE(review): second revision of the same hunk (original lines 240-280),
// again the interior of a larger full-collection routine — the enclosing
// function is not visible here, so only comments are added.
240 PSCardTable* card_table = heap->card_table();
241 MemRegion old_mr = heap->old_gen()->reserved();
// If the young gen is empty, old-gen cards are simply cleared; otherwise the
// whole old-gen region is invalidated (dirtied) so its cards get rescanned.
242 if (young_gen_empty) {
243 card_table->clear(MemRegion(old_mr.start(), old_mr.end()));
244 } else {
245 card_table->invalidate(MemRegion(old_mr.start(), old_mr.end()));
246 }
247
248 // Delete metaspaces for unloaded class loaders and clean up loader_data graph
249 ClassLoaderDataGraph::purge();
250 MetaspaceUtils::verify_metrics();
251
// Post-GC fixups for subsystems that cached state across the collection.
252 BiasedLocking::restore_marks();
253 CodeCache::gc_epilogue();
254 JvmtiExport::gc_epilogue();
255
// Derived pointers only exist for C2/JVMCI-compiled code.
256 #if COMPILER2_OR_JVMCI
257 DerivedPointerTable::update_pointers();
258 #endif
259
// This revision replaces the explicit enqueue phase (lines 260-264 of the
// revision before the '|' marker) with a sanity check that reference
// discovery has already been turned off earlier in the collection.
260 assert(!ref_processor()->discovery_enabled(), "Should have been disabled earlier");
261
262 // Update time of last GC
263 reset_millis_since_last_gc();
264
265 // Let the size policy know we're done
266 size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
267
// Adaptive-size ergonomics: logging plus (further below, truncated in this
// view) promo-stat updates and generation resizing.
268 if (UseAdaptiveSizePolicy) {
269
270 log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
271 log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
272 old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
273
274 // Don't check if the size_policy is ready here. Let
275 // the size_policy check that internally.
276 if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
277 AdaptiveSizePolicy::should_update_promo_stats(gc_cause)) {
278 // Swap the survivor spaces if from_space is empty. The
279 // resize_young_gen() called below is normally used after
280 // a successful young GC and swapping of survivor spaces;