
src/share/vm/gc/parallel/psMarkSweep.cpp

rev 13329 : [mq]: webrev.1
rev 13331 : imported patch webrev.3b
rev 13332 : [mq]: webrev.4


 237     ModRefBarrierSet* modBS = barrier_set_cast<ModRefBarrierSet>(heap->barrier_set());
 238     MemRegion old_mr = heap->old_gen()->reserved();
 239     if (young_gen_empty) {
 240       modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
 241     } else {
 242       modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
 243     }
 244 
 245     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 246     ClassLoaderDataGraph::purge();
 247     MetaspaceAux::verify_metrics();
 248 
 249     BiasedLocking::restore_marks();
 250     CodeCache::gc_epilogue();
 251     JvmtiExport::gc_epilogue();
 252 
 253 #if defined(COMPILER2) || INCLUDE_JVMCI
 254     DerivedPointerTable::update_pointers();
 255 #endif
 256 
 257     ref_processor()->enqueue_discovered_references(NULL);




 258 
 259     // Update time of last GC
 260     reset_millis_since_last_gc();
 261 
 262     // Let the size policy know we're done
 263     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
 264 
 265     if (UseAdaptiveSizePolicy) {
 266 
 267      log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
 268      log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
 269                          old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
 270 
 271       // Don't check if the size_policy is ready here.  Let
 272       // the size_policy check that internally.
 273       if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
 274           AdaptiveSizePolicy::should_update_promo_stats(gc_cause)) {
 275         // Swap the survivor spaces if from_space is empty. The
 276         // resize_young_gen() called below is normally used after
 277         // a successful young GC and swapping of survivor spaces;
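
The clear()/invalidate() split at the top of this hunk encodes a card-table invariant: a full collection that leaves the young generation empty cannot leave any old-to-young pointers behind, so the cards covering the old generation can simply be wiped clean; otherwise every card must be dirtied so the next young GC conservatively rescans the old generation. A minimal, self-contained sketch of that decision (hypothetical types and card values, not HotSpot's ModRefBarrierSet API):

    #include <cstdint>
    #include <cstddef>
    #include <cstring>

    // Hypothetical card values for illustration; a card-marking barrier set
    // keeps one byte per fixed-size chunk ("card") of the old generation.
    enum : std::uint8_t { clean_card = 0xff, dirty_card = 0x00 };

    struct CardTableSketch {
      std::uint8_t* cards;
      std::size_t   ncards;
      // Young gen is empty: no old->young pointers can exist, so every
      // card over the old generation may be marked clean.
      void clear()      { std::memset(cards, clean_card, ncards); }
      // Young gen still holds objects: the card state can no longer be
      // trusted, so dirty everything and let the next young GC rescan.
      void invalidate() { std::memset(cards, dirty_card, ncards); }
    };

    void full_gc_epilogue(CardTableSketch& bs, bool young_gen_empty) {
      if (young_gen_empty) {
        bs.clear();
      } else {
        bs.invalidate();
      }
    }
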


 511     Threads::oops_do(mark_and_push_closure(), &each_active_code_blob);
 512     ObjectSynchronizer::oops_do(mark_and_push_closure());
 513     FlatProfiler::oops_do(mark_and_push_closure());
 514     Management::oops_do(mark_and_push_closure());
 515     JvmtiExport::oops_do(mark_and_push_closure());
 516     SystemDictionary::always_strong_oops_do(mark_and_push_closure());
 517     ClassLoaderDataGraph::always_strong_cld_do(follow_cld_closure());
 518     // Do not treat nmethods as strong roots for mark/sweep, since we can unload them.
 519     //CodeCache::scavenge_root_nmethods_do(CodeBlobToOopClosure(mark_and_push_closure()));
 520     AOTLoader::oops_do(mark_and_push_closure());
 521   }
 522 
 523   // Flush marking stack.
 524   follow_stack();
 525 
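
The root scans above all take the same mark_and_push_closure(): each oops_do() call only pushes newly discovered objects onto the marking stack, and follow_stack() then drains the stack to compute the transitive closure, so no single root scan has to recurse. A toy worklist version of that split (hypothetical object model, not HotSpot types):

    #include <vector>

    struct Obj {
      bool marked = false;
      std::vector<Obj*> fields;
    };

    void mark_from_roots(const std::vector<Obj*>& roots) {
      std::vector<Obj*> marking_stack;
      // Equivalent of mark_and_push_closure(): mark once, defer the children.
      auto mark_and_push = [&](Obj* o) {
        if (o != nullptr && !o->marked) {
          o->marked = true;
          marking_stack.push_back(o);
        }
      };
      for (Obj* r : roots) mark_and_push(r);  // the *::oops_do(...) calls
      while (!marking_stack.empty()) {        // follow_stack()
        Obj* o = marking_stack.back();
        marking_stack.pop_back();
        for (Obj* f : o->fields) mark_and_push(f);
      }
    }
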
 526   // Process reference objects found during marking
 527   {
 528     GCTraceTime(Debug, gc, phases) t("Reference Processing", _gc_timer);
 529 
 530     ref_processor()->setup_policy(clear_all_softrefs);

 531     const ReferenceProcessorStats& stats =
 532       ref_processor()->process_discovered_references(
 533         is_alive_closure(), mark_and_push_closure(), follow_stack_closure(), NULL, _gc_timer);
 534     gc_tracer()->report_gc_reference_stats(stats);

 535   }
 536 
 537   // This is the point where the entire marking should have completed.
 538   assert(_marking_stack.is_empty(), "Marking should have completed");
 539 
 540   {
 541     GCTraceTime(Debug, gc, phases) t("Class Unloading", _gc_timer);
 542 
 543     // Unload classes and purge the SystemDictionary.
 544     bool purged_class = SystemDictionary::do_unloading(is_alive_closure(), _gc_timer);
 545 
 546     // Unload nmethods.
 547     CodeCache::do_unloading(is_alive_closure(), purged_class);
 548 
 549     // Prune dead klasses from subklass/sibling/implementor lists.
 550     Klass::clean_weak_klass_links(is_alive_closure());
 551   }
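
Note the ordering dependency inside this block: SystemDictionary::do_unloading() decides which classes are dead and reports whether any were purged, that flag then gates the work CodeCache::do_unloading() does to flush nmethods compiled against dead classes, and Klass::clean_weak_klass_links() prunes the subklass/sibling/implementor chains only after both answers are known.
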
 552 
 553   {
 554     GCTraceTime(Debug, gc, phases) t("Scrub String Table", _gc_timer);




 237     ModRefBarrierSet* modBS = barrier_set_cast<ModRefBarrierSet>(heap->barrier_set());
 238     MemRegion old_mr = heap->old_gen()->reserved();
 239     if (young_gen_empty) {
 240       modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
 241     } else {
 242       modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
 243     }
 244 
 245     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 246     ClassLoaderDataGraph::purge();
 247     MetaspaceAux::verify_metrics();
 248 
 249     BiasedLocking::restore_marks();
 250     CodeCache::gc_epilogue();
 251     JvmtiExport::gc_epilogue();
 252 
 253 #if defined(COMPILER2) || INCLUDE_JVMCI
 254     DerivedPointerTable::update_pointers();
 255 #endif
 256 
 257     ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_q());
 258 
 259     ref_processor()->enqueue_discovered_references(NULL, &pt);
 260 
 261     pt.print_enqueue_phase();
 262 
 263     // Update time of last GC
 264     reset_millis_since_last_gc();
 265 
 266     // Let the size policy know we're done
 267     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
 268 
 269     if (UseAdaptiveSizePolicy) {
 270 
 271      log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
 272      log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
 273                          old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
 274 
 275       // Don't check if the size_policy is ready here.  Let
 276       // the size_policy check that internally.
 277       if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
 278           AdaptiveSizePolicy::should_update_promo_stats(gc_cause)) {
 279         // Swap the survivor spaces if from_space is empty. The
 280         // resize_young_gen() called below is normally used after
 281         // a successful young GC and swapping of survivor spaces;
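
The substantive change in this hunk is the enqueue step: enqueue_discovered_references() now takes a ReferenceProcessorPhaseTimes alongside the pending-list head (still NULL here), and the caller logs the enqueue timings explicitly. Condensed to just the changed lines, with all names exactly as they appear in the patch:

    // Phase-times bookkeeping is sized to the reference processor's queue
    // count and wraps the existing _gc_timer.
    ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_q());
    ref_processor()->enqueue_discovered_references(NULL, &pt);  // was (NULL)
    pt.print_enqueue_phase();  // enqueue timings now logged by the caller
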


 515     Threads::oops_do(mark_and_push_closure(), &each_active_code_blob);
 516     ObjectSynchronizer::oops_do(mark_and_push_closure());
 517     FlatProfiler::oops_do(mark_and_push_closure());
 518     Management::oops_do(mark_and_push_closure());
 519     JvmtiExport::oops_do(mark_and_push_closure());
 520     SystemDictionary::always_strong_oops_do(mark_and_push_closure());
 521     ClassLoaderDataGraph::always_strong_cld_do(follow_cld_closure());
 522     // Do not treat nmethods as strong roots for mark/sweep, since we can unload them.
 523     //CodeCache::scavenge_root_nmethods_do(CodeBlobToOopClosure(mark_and_push_closure()));
 524     AOTLoader::oops_do(mark_and_push_closure());
 525   }
 526 
 527   // Flush marking stack.
 528   follow_stack();
 529 
 530   // Process reference objects found during marking
 531   {
 532     GCTraceTime(Debug, gc, phases) t("Reference Processing", _gc_timer);
 533 
 534     ref_processor()->setup_policy(clear_all_softrefs);
 535     ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_q());
 536     const ReferenceProcessorStats& stats =
 537       ref_processor()->process_discovered_references(
 538         is_alive_closure(), mark_and_push_closure(), follow_stack_closure(), NULL, &pt);
 539     gc_tracer()->report_gc_reference_stats(stats);
 540     pt.print_all_references();
 541   }
 542 
 543   // This is the point where the entire marking should have completed.
 544   assert(_marking_stack.is_empty(), "Marking should have completed");
 545 
 546   {
 547     GCTraceTime(Debug, gc, phases) t("Class Unloading", _gc_timer);
 548 
 549     // Unload classes and purge the SystemDictionary.
 550     bool purged_class = SystemDictionary::do_unloading(is_alive_closure(), _gc_timer);
 551 
 552     // Unload nmethods.
 553     CodeCache::do_unloading(is_alive_closure(), purged_class);
 554 
 555     // Prune dead klasses from subklass/sibling/implementor lists.
 556     Klass::clean_weak_klass_links(is_alive_closure());
 557   }
 558 
 559   {
 560     GCTraceTime(Debug, gc, phases) t("Scrub String Table", _gc_timer);
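
The marking-phase hunk applies the same pattern at its second call site: the trailing _gc_timer argument of process_discovered_references() is replaced by a ReferenceProcessorPhaseTimes built from that timer plus the queue count, and pt.print_all_references() makes the per-phase logging an explicit caller responsibility. Condensed, again using only names from the patch:

    ref_processor()->setup_policy(clear_all_softrefs);
    ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_q());
    const ReferenceProcessorStats& stats =
      ref_processor()->process_discovered_references(
        is_alive_closure(), mark_and_push_closure(), follow_stack_closure(),
        NULL, &pt);                      // &pt replaces the old _gc_timer argument
    gc_tracer()->report_gc_reference_stats(stats);
    pt.print_all_references();           // explicit per-phase timing log
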

