
src/share/vm/gc/parallel/psMarkSweep.cpp

rev 13071 : [mq]: webrev.1
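
The single change in this hunk is at line 256: the call now passes the collector's _gc_timer to ReferenceProcessor::enqueue_discovered_references() alongside the NULL task executor, presumably so that reference enqueueing can be reported as a timed phase of the full collection. A minimal sketch of the matching declaration the callee would need follows; it is hypothetical, since the real declaration lives in referenceProcessor.hpp and is not part of this hunk, and the AbstractRefProcTaskExecutor* and GCTimer* parameter types are assumptions based on HotSpot code of this era:

    // Hypothetical sketch, not taken from this webrev:
    // task_executor == NULL means the enqueue work runs serially;
    // gc_timer lets the callee time the enqueue phase for GC logging.
    void enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor = NULL,
                                       GCTimer*                     gc_timer = NULL);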


 236     ModRefBarrierSet* modBS = barrier_set_cast<ModRefBarrierSet>(heap->barrier_set());
 237     MemRegion old_mr = heap->old_gen()->reserved();
 238     if (young_gen_empty) {
 239       modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
 240     } else {
 241       modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
 242     }
 243
 244     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 245     ClassLoaderDataGraph::purge();
 246     MetaspaceAux::verify_metrics();
 247
 248     BiasedLocking::restore_marks();
 249     CodeCache::gc_epilogue();
 250     JvmtiExport::gc_epilogue();
 251
 252 #if defined(COMPILER2) || INCLUDE_JVMCI
 253     DerivedPointerTable::update_pointers();
 254 #endif
 255
-256     ref_processor()->enqueue_discovered_references(NULL);
+256     ref_processor()->enqueue_discovered_references(NULL, _gc_timer);
 257
 258     // Update time of last GC
 259     reset_millis_since_last_gc();
 260
 261     // Let the size policy know we're done
 262     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
 263
 264     if (UseAdaptiveSizePolicy) {
 265
 266      log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
 267      log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
 268                          old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
 269
 270       // Don't check if the size_policy is ready here.  Let
 271       // the size_policy check that internally.
 272       if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
 273           AdaptiveSizePolicy::should_update_promo_stats(gc_cause)) {
 274         // Swap the survivor spaces if from_space is empty. The
 275         // resize_young_gen() called below is normally used after
 276         // a successful young GC and swapping of survivor spaces;