
src/hotspot/share/gc/shared/genCollectedHeap.cpp

rev 54012 : 8215221: Serial GC misreports young GC time
Reviewed-by: kbarrett, manc
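The misreport named in the bug comes from timer scoping in do_collection: in the old version below, one GCTraceTime (labeled "Pause Young" when a young collection runs) is opened before the young collection and stays open across any full collection performed later in the same call, so old-generation work is charged to the young pause. A minimal sketch of the hazard, assuming a hypothetical PhaseTimer in place of HotSpot's GCTraceTime; collect_young and collect_old are illustrative stand-ins, not HotSpot functions:

#include <chrono>
#include <cstdio>

// Hypothetical RAII timer standing in for GCTraceTime; it prints the
// elapsed time for the named pause when it goes out of scope.
struct PhaseTimer {
  const char* name;
  std::chrono::steady_clock::time_point start;
  explicit PhaseTimer(const char* n)
    : name(n), start(std::chrono::steady_clock::now()) {}
  ~PhaseTimer() {
    auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(
                std::chrono::steady_clock::now() - start).count();
    std::printf("%s: %lld ms\n", name, static_cast<long long>(ms));
  }
};

void collect_young() { /* young-generation work elided */ }
void collect_old()   { /* old-generation work elided */ }

// Old shape of do_collection: one timer spans both phases, so when a
// full collection follows the young one, its time is reported as part
// of "Pause Young".
void do_collection_old_shape(bool need_full) {
  PhaseTimer t("Pause Young");   // opened before the young collection
  collect_young();
  if (need_full) {
    collect_old();               // misattributed to the young pause
  }
}                                // timer only stops here

int main() {
  do_collection_old_shape(true);
  return 0;
}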

--- old/src/hotspot/share/gc/shared/genCollectedHeap.cpp ---

 540 void GenCollectedHeap::do_collection(bool           full,
 541                                      bool           clear_all_soft_refs,
 542                                      size_t         size,
 543                                      bool           is_tlab,
 544                                      GenerationType max_generation) {
 545   ResourceMark rm;
 546   DEBUG_ONLY(Thread* my_thread = Thread::current();)
 547 
 548   assert(SafepointSynchronize::is_at_safepoint(), "should be at safepoint");
 549   assert(my_thread->is_VM_thread() ||
 550          my_thread->is_ConcurrentGC_thread(),
 551          "incorrect thread type capability");
 552   assert(Heap_lock->is_locked(),
 553          "the requesting thread should have the Heap_lock");
 554   guarantee(!is_gc_active(), "collection is not reentrant");
 555 
 556   if (GCLocker::check_active_before_gc()) {
 557     return; // GC is disabled (e.g. JNI GetXXXCritical operation)
 558   }
 559 
 560   GCIdMark gc_id_mark;
 561 
 562   const bool do_clear_all_soft_refs = clear_all_soft_refs ||
 563                           soft_ref_policy()->should_clear_all_soft_refs();
 564 
 565   ClearedAllSoftRefs casr(do_clear_all_soft_refs, soft_ref_policy());
 566 
 567   const size_t metadata_prev_used = MetaspaceUtils::used_bytes();
 568 
 569   print_heap_before_gc();
 570 
 571   {
 572     FlagSetting fl(_is_gc_active, true);
 573 
 574     bool complete = full && (max_generation == OldGen);
 575     bool old_collects_young = complete && !ScavengeBeforeFullGC;
 576     bool do_young_collection = !old_collects_young && _young_gen->should_collect(full, size, is_tlab);
 577 
 578     FormatBuffer<> gc_string("%s", "Pause ");
 579     if (do_young_collection) {
 580       gc_string.append("Young");
 581     } else {
 582       gc_string.append("Full");
 583     }
 584 
 585     GCTraceCPUTime tcpu;
 586     GCTraceTime(Info, gc) t(gc_string, NULL, gc_cause(), true);
 587 
 588     gc_prologue(complete);
 589     increment_total_collections(complete);
 590 
 591     size_t young_prev_used = _young_gen->used();
 592     size_t old_prev_used = _old_gen->used();
 593 
 594     bool run_verification = total_collections() >= VerifyGCStartAt;
 595 
 596     bool prepared_for_verification = false;
 597     bool collected_old = false;
 598 
 599     if (do_young_collection) {
 600       if (run_verification && VerifyGCLevel <= 0 && VerifyBeforeGC) {
 601         prepare_for_verify();
 602         prepared_for_verification = true;
 603       }
 604 
 605       collect_generation(_young_gen,
 606                          full,
 607                          size,
 608                          is_tlab,
 609                          run_verification && VerifyGCLevel <= 0,
 610                          do_clear_all_soft_refs,
 611                          false);
 612 
 613       if (size > 0 && (!is_tlab || _young_gen->supports_tlab_allocation()) &&
 614           size * HeapWordSize <= _young_gen->unsafe_max_alloc_nogc()) {
 615         // Allocation request was met by young GC.
 616         size = 0;
 617       }
 618     }
 619 
 620     bool must_restore_marks_for_biased_locking = false;
 621 
 622     if (max_generation == OldGen && _old_gen->should_collect(full, size, is_tlab)) {
 623       if (!complete) {
 624         // The full_collections increment was missed above.
 625         increment_total_full_collections();
 626       }
 627 
 628       if (!prepared_for_verification && run_verification &&
 629           VerifyGCLevel <= 1 && VerifyBeforeGC) {
 630         prepare_for_verify();
 631       }
 632 
 633       if (do_young_collection) {
 634         // We did a young GC. Need a new GC id for the old GC.
 635         GCIdMark gc_id_mark;
 636         GCTraceTime(Info, gc) t("Pause Full", NULL, gc_cause(), true);
 637         collect_generation(_old_gen, full, size, is_tlab, run_verification && VerifyGCLevel <= 1, do_clear_all_soft_refs, true);
 638       } else {
 639         // No young GC done. Use the same GC id as was set up earlier in this method.
 640         collect_generation(_old_gen, full, size, is_tlab, run_verification && VerifyGCLevel <= 1, do_clear_all_soft_refs, true);
 641       }
 642 
 643       must_restore_marks_for_biased_locking = true;
 644       collected_old = true;
 645     }
 646 
 647     // Update "complete" boolean wrt what actually transpired --
 648     // for instance, a promotion failure could have led to
 649     // a whole heap collection.
 650     complete = complete || collected_old;
 651 
 652     // Adjust generation sizes.
 653     if (collected_old) {
 654       _old_gen->compute_new_size();
 655     }
 656     _young_gen->compute_new_size();
 657 
 658     if (complete) {
 659       // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 660       ClassLoaderDataGraph::purge();
 661       MetaspaceUtils::verify_metrics();
 662       // Resize the metaspace capacity after full collections
 663       MetaspaceGC::compute_new_size();
 664       update_full_collections_completed();
 665     }
 666 
 667     print_heap_change(young_prev_used, old_prev_used);
 668     MetaspaceUtils::print_metaspace_change(metadata_prev_used);
 669 
 670     // Track memory usage and detect low memory after GC finishes
 671     MemoryService::track_memory_usage();
 672 
 673     gc_epilogue(complete);
 674 
 675     if (must_restore_marks_for_biased_locking) {
 676       BiasedLocking::restore_marks();
 677     }
 678   }
 679 
 680   print_heap_after_gc();
 681 
 682 #ifdef TRACESPINNING
 683   ParallelTaskTerminator::print_termination_counts();
 684 #endif
 685 }
 686 
 687 void GenCollectedHeap::register_nmethod(nmethod* nm) {
 688   CodeCache::register_scavenge_root_nmethod(nm);
 689 }
 690 
 691 void GenCollectedHeap::verify_nmethod(nmethod* nm) {
 692   CodeCache::verify_scavenge_root_nmethod(nm);
 693 }
 694 
 695 HeapWord* GenCollectedHeap::satisfy_failed_allocation(size_t size, bool is_tlab) {
 696   GCCauseSetter x(this, GCCause::_allocation_failure);
 697   HeapWord* result = NULL;
 698 
 699   assert(size != 0, "Precondition violated");
 700   if (GCLocker::is_active_and_needs_gc()) {
 701     // GC locker is active; instead of a collection we will attempt
 702     // to expand the heap, if there's room for expansion.
 703     if (!is_maximal_no_gc()) {
 704       result = expand_heap_and_allocate(size, is_tlab);

--- new/src/hotspot/share/gc/shared/genCollectedHeap.cpp ---

 540 void GenCollectedHeap::do_collection(bool           full,
 541                                      bool           clear_all_soft_refs,
 542                                      size_t         size,
 543                                      bool           is_tlab,
 544                                      GenerationType max_generation) {
 545   ResourceMark rm;
 546   DEBUG_ONLY(Thread* my_thread = Thread::current();)
 547 
 548   assert(SafepointSynchronize::is_at_safepoint(), "should be at safepoint");
 549   assert(my_thread->is_VM_thread() ||
 550          my_thread->is_ConcurrentGC_thread(),
 551          "incorrect thread type capability");
 552   assert(Heap_lock->is_locked(),
 553          "the requesting thread should have the Heap_lock");
 554   guarantee(!is_gc_active(), "collection is not reentrant");
 555 
 556   if (GCLocker::check_active_before_gc()) {
 557     return; // GC is disabled (e.g. JNI GetXXXCritical operation)
 558   }
 559 
 560   const bool do_clear_all_soft_refs = clear_all_soft_refs ||
 561                           soft_ref_policy()->should_clear_all_soft_refs();
 562 
 563   ClearedAllSoftRefs casr(do_clear_all_soft_refs, soft_ref_policy());
 564 
 565   const size_t metadata_prev_used = MetaspaceUtils::used_bytes();
 566 
 567 
 568   FlagSetting fl(_is_gc_active, true);
 569 
 570   bool complete = full && (max_generation == OldGen);
 571   bool old_collects_young = complete && !ScavengeBeforeFullGC;
 572   bool do_young_collection = !old_collects_young && _young_gen->should_collect(full, size, is_tlab);
 573 
 574   size_t young_prev_used = _young_gen->used();
 575   size_t old_prev_used = _old_gen->used();
 576 
 577   bool run_verification = total_collections() >= VerifyGCStartAt;
 578   bool prepared_for_verification = false;
 579   bool do_full_collection = false;
 580 
 581   if (do_young_collection) {
 582     GCIdMark gc_id_mark;
 583     GCTraceCPUTime tcpu;
 584     GCTraceTime(Info, gc) t("Pause Young", NULL, gc_cause(), true);
 585 
 586     print_heap_before_gc();
 587 
 588     if (run_verification && VerifyGCLevel <= 0 && VerifyBeforeGC) {
 589       prepare_for_verify();
 590       prepared_for_verification = true;
 591     }
 592 
 593     gc_prologue(complete);
 594     increment_total_collections(complete);
 595 
 596     collect_generation(_young_gen,
 597                        full,
 598                        size,
 599                        is_tlab,
 600                        run_verification && VerifyGCLevel <= 0,
 601                        do_clear_all_soft_refs,
 602                        false);
 603 
 604     if (size > 0 && (!is_tlab || _young_gen->supports_tlab_allocation()) &&
 605         size * HeapWordSize <= _young_gen->unsafe_max_alloc_nogc()) {
 606       // Allocation request was met by young GC.
 607       size = 0;
 608     }
 609 
 610     // Ask if the young collection was enough. If so, do the final steps for the young
 611     // collection and fall through to the end.
 612     do_full_collection = should_do_full_collection(size, full, is_tlab, max_generation);
 613     if (!do_full_collection) {
 614       // Adjust generation sizes.
 615       _young_gen->compute_new_size();
 616 
 617       print_heap_change(young_prev_used, old_prev_used);
 618       MetaspaceUtils::print_metaspace_change(metadata_prev_used);
 619 
 620       // Track memory usage and detect low memory after GC finishes
 621       MemoryService::track_memory_usage();
 622 
 623       gc_epilogue(complete);
 624     }
 625 
 626     print_heap_after_gc();
 627 
 628   } else {
 629     // No young collection, ask if we need to perform Full collection.
 630     do_full_collection = should_do_full_collection(size, full, is_tlab, max_generation);
 631   }
 632 
 633   if (do_full_collection) {
 634     GCIdMark gc_id_mark;
 635     GCTraceCPUTime tcpu;
 636     GCTraceTime(Info, gc) t("Pause Full", NULL, gc_cause(), true);
 637 
 638     print_heap_before_gc();
 639 
 640     if (!prepared_for_verification && run_verification &&
 641         VerifyGCLevel <= 1 && VerifyBeforeGC) {
 642       prepare_for_verify();
 643     }
 644 
 645     if (!do_young_collection) {
 646       gc_prologue(complete);
 647       increment_total_collections(complete);
 648     }
 649 
 650     // Accounting quirk: total full collections would be incremented when "complete"
 651     // is set, by calling increment_total_collections above. However, we also need to
 652     // account for Full collections that had "complete" unset.
 653     if (!complete) {
 654       increment_total_full_collections();
 655     }
 656 
 657     collect_generation(_old_gen,
 658                        full,
 659                        size,
 660                        is_tlab,
 661                        run_verification && VerifyGCLevel <= 1,
 662                        do_clear_all_soft_refs,
 663                        true);
 664 
 665     // Adjust generation sizes.
 666     _old_gen->compute_new_size();
 667     _young_gen->compute_new_size();
 668 
 669     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 670     ClassLoaderDataGraph::purge();
 671     MetaspaceUtils::verify_metrics();
 672     // Resize the metaspace capacity after full collections
 673     MetaspaceGC::compute_new_size();
 674     update_full_collections_completed();
 675 
 676     print_heap_change(young_prev_used, old_prev_used);
 677     MetaspaceUtils::print_metaspace_change(metadata_prev_used);
 678 
 679     // Track memory usage and detect low memory after GC finishes
 680     MemoryService::track_memory_usage();
 681 
 682     gc_epilogue(complete);
 683 
 684     BiasedLocking::restore_marks();
 685 
 686     print_heap_after_gc();
 687   }
 688 
 689 #ifdef TRACESPINNING
 690   ParallelTaskTerminator::print_termination_counts();
 691 #endif
 692 }
 693 
 694 bool GenCollectedHeap::should_do_full_collection(size_t size, bool full, bool is_tlab,
 695                                                  GenCollectedHeap::GenerationType max_gen) const {
 696   return max_gen == OldGen && _old_gen->should_collect(full, size, is_tlab);
 697 }
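
The restructured do_collection above closes the "Pause Young" GCIdMark/GCTraceTime scope before consulting should_do_full_collection, and any full collection then runs under its own "Pause Full" scope, so each pause reports only its own time. Continuing the sketch from the top of the page (PhaseTimer, collect_young and collect_old as defined there; needs_full_collection is a hypothetical stand-in for should_do_full_collection):

// New shape: each phase gets its own RAII timer scope, mirroring the
// separate GCIdMark/GCTraceTime blocks in the new do_collection.
bool needs_full_collection() { return true; /* placeholder policy */ }

void do_collection_new_shape() {
  {
    PhaseTimer t("Pause Young");
    collect_young();
  }                               // young pause time reported here
  if (needs_full_collection()) {
    PhaseTimer t("Pause Full");   // separate scope for the full pause
    collect_old();                // old-gen time reported as "Pause Full"
  }
}

As the comment at lines 610-613 of the new version notes, the real change runs the young collection's final steps only when no full collection follows; the sketch omits that detail.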
 698 
 699 void GenCollectedHeap::register_nmethod(nmethod* nm) {
 700   CodeCache::register_scavenge_root_nmethod(nm);
 701 }
 702 
 703 void GenCollectedHeap::verify_nmethod(nmethod* nm) {
 704   CodeCache::verify_scavenge_root_nmethod(nm);
 705 }
 706 
 707 HeapWord* GenCollectedHeap::satisfy_failed_allocation(size_t size, bool is_tlab) {
 708   GCCauseSetter x(this, GCCause::_allocation_failure);
 709   HeapWord* result = NULL;
 710 
 711   assert(size != 0, "Precondition violated");
 712   if (GCLocker::is_active_and_needs_gc()) {
 713     // GC locker is active; instead of a collection we will attempt
 714     // to expand the heap, if there's room for expansion.
 715     if (!is_maximal_no_gc()) {
 716       result = expand_heap_and_allocate(size, is_tlab);