< prev index next >

src/hotspot/share/gc/shared/genCollectedHeap.cpp

Print this page
rev 55927 : 8227224: GenCollectedHeap: add subspace transitions for young gen for gc+heap=info log lines
Reviewed-by:


 // Finish heap setup that must run after both generations exist:
 // reference processing, size-policy seeding, mark-sweep and
 // scavengable-nmethod bookkeeping.
 189 void GenCollectedHeap::post_initialize() {
 190   CollectedHeap::post_initialize();
 191   ref_processing_init();
 192 
 // Unchecked downcast: assumes the young generation is a DefNewGeneration
 // (or subclass) -- TODO confirm no other young-gen type reaches here.
 193   DefNewGeneration* def_new_gen = (DefNewGeneration*)_young_gen;
 194 
 // Seed the adaptive size policy from eden/from capacities of the young
 // gen plus the old gen's capacity.
 195   initialize_size_policy(def_new_gen->eden()->capacity(),
 196                          _old_gen->capacity(),
 197                          def_new_gen->from()->capacity());
 198 
 199   MarkSweep::initialize();
 200 
 // Register the predicate used to decide which nmethods are scavengable.
 201   ScavengableNMethods::initialize(&_is_scavengable);
 202 }
 203 
 // Initialize the per-generation reference processors, young first then old.
 204 void GenCollectedHeap::ref_processing_init() {
 205   _young_gen->ref_processor_init();
 206   _old_gen->ref_processor_init();
 207 }
 208 













 // Accessors for the creation-time specifications of the two generations.
 209 GenerationSpec* GenCollectedHeap::young_gen_spec() const {
 210   return _young_gen_spec;
 211 }
 212 
 213 GenerationSpec* GenCollectedHeap::old_gen_spec() const {
 214   return _old_gen_spec;
 215 }
 216 
 217 size_t GenCollectedHeap::capacity() const {
 218   return _young_gen->capacity() + _old_gen->capacity();
 219 }
 220 
 221 size_t GenCollectedHeap::used() const {
 222   return _young_gen->used() + _old_gen->used();
 223 }
 224 
 // Record the currently-used region of each generation.
 // NOTE(review): old is saved before young -- confirm whether any caller
 // depends on this order before changing it.
 225 void GenCollectedHeap::save_used_regions() {
 226   _old_gen->save_used_region();
 227   _young_gen->save_used_region();
 228 }


 564          "incorrect thread type capability");
 565   assert(Heap_lock->is_locked(),
 566          "the requesting thread should have the Heap_lock");
 567   guarantee(!is_gc_active(), "collection is not reentrant");
 568 
 569   if (GCLocker::check_active_before_gc()) {
 570     return; // GC is disabled (e.g. JNI GetXXXCritical operation)
 571   }
 572 
 573   const bool do_clear_all_soft_refs = clear_all_soft_refs ||
 574                           soft_ref_policy()->should_clear_all_soft_refs();
 575 
 576   ClearedAllSoftRefs casr(do_clear_all_soft_refs, soft_ref_policy());
 577 
 578   FlagSetting fl(_is_gc_active, true);
 579 
 580   bool complete = full && (max_generation == OldGen);
 581   bool old_collects_young = complete && !ScavengeBeforeFullGC;
 582   bool do_young_collection = !old_collects_young && _young_gen->should_collect(full, size, is_tlab);
 583 
 584   size_t young_prev_used = _young_gen->used();
 585   size_t old_prev_used = _old_gen->used();
 586   const metaspace::MetaspaceSizesSnapshot prev_meta_sizes;
 587 
 588   bool run_verification = total_collections() >= VerifyGCStartAt;
 589   bool prepared_for_verification = false;
 590   bool do_full_collection = false;
 591 
 592   if (do_young_collection) {
 593     GCIdMark gc_id_mark;
 594     GCTraceCPUTime tcpu;
 595     GCTraceTime(Info, gc) t("Pause Young", NULL, gc_cause(), true);
 596 
 597     print_heap_before_gc();
 598 
 599     if (run_verification && VerifyGCLevel <= 0 && VerifyBeforeGC) {
 600       prepare_for_verify();
 601       prepared_for_verification = true;
 602     }
 603 
 604     gc_prologue(complete);
 605     increment_total_collections(complete);
 606 


 608                        full,
 609                        size,
 610                        is_tlab,
 611                        run_verification && VerifyGCLevel <= 0,
 612                        do_clear_all_soft_refs,
 613                        false);
 614 
 615     if (size > 0 && (!is_tlab || _young_gen->supports_tlab_allocation()) &&
 616         size * HeapWordSize <= _young_gen->unsafe_max_alloc_nogc()) {
 617       // Allocation request was met by young GC.
 618       size = 0;
 619     }
 620 
 621     // Ask if young collection is enough. If so, do the final steps for young collection,
 622     // and fallthrough to the end.
 623     do_full_collection = should_do_full_collection(size, full, is_tlab, max_generation);
 624     if (!do_full_collection) {
 625       // Adjust generation sizes.
 626       _young_gen->compute_new_size();
 627 
 628       print_heap_change(young_prev_used, old_prev_used);
 629       MetaspaceUtils::print_metaspace_change(prev_meta_sizes);
 630 
 631       // Track memory usage and detect low memory after GC finishes
 632       MemoryService::track_memory_usage();
 633 
 634       gc_epilogue(complete);
 635     }
 636 
 637     print_heap_after_gc();
 638 
 639   } else {
 640     // No young collection, ask if we need to perform Full collection.
 641     do_full_collection = should_do_full_collection(size, full, is_tlab, max_generation);
 642   }
 643 
 644   if (do_full_collection) {
 645     GCIdMark gc_id_mark;
 646     GCTraceCPUTime tcpu;
 647     GCTraceTime(Info, gc) t("Pause Full", NULL, gc_cause(), true);
 648 
 649     print_heap_before_gc();


 667 
 668     collect_generation(_old_gen,
 669                        full,
 670                        size,
 671                        is_tlab,
 672                        run_verification && VerifyGCLevel <= 1,
 673                        do_clear_all_soft_refs,
 674                        true);
 675 
 676     // Adjust generation sizes.
 677     _old_gen->compute_new_size();
 678     _young_gen->compute_new_size();
 679 
 680     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 681     ClassLoaderDataGraph::purge();
 682     MetaspaceUtils::verify_metrics();
 683     // Resize the metaspace capacity after full collections
 684     MetaspaceGC::compute_new_size();
 685     update_full_collections_completed();
 686 
 687     print_heap_change(young_prev_used, old_prev_used);
 688     MetaspaceUtils::print_metaspace_change(prev_meta_sizes);
 689 
 690     // Track memory usage and detect low memory after GC finishes
 691     MemoryService::track_memory_usage();
 692 
 693     // Need to tell the epilogue code we are done with Full GC, regardless what was
 694     // the initial value for "complete" flag.
 695     gc_epilogue(true);
 696 
 697     BiasedLocking::restore_marks();
 698 
 699     print_heap_after_gc();
 700   }
 701 
 702 #ifdef TRACESPINNING
 703   ParallelTaskTerminator::print_termination_counts();
 704 #endif
 705 }
 706 
 707 bool GenCollectedHeap::should_do_full_collection(size_t size, bool full, bool is_tlab,
 708                                                  GenCollectedHeap::GenerationType max_gen) const {


// Print the young generation, the old generation, and metaspace, in that
// order, to the given stream.
1251 void GenCollectedHeap::print_on(outputStream* st) const {
1252   _young_gen->print_on(st);
1253   _old_gen->print_on(st);
1254   MetaspaceUtils::print_on(st);
1255 }
1256 
// Intentionally empty: this heap has no dedicated GC worker threads to
// iterate over (presumably the serial collector -- verify against callers).
1257 void GenCollectedHeap::gc_threads_do(ThreadClosure* tc) const {
1258 }
1259 
// Intentionally empty for the same reason: nothing to print.
1260 void GenCollectedHeap::print_gc_threads_on(outputStream* st) const {
1261 }
1262 
// Emit per-generation summary statistics at gc+heap+exit=debug level.
// The log_is_enabled guard avoids building the stream when disabled.
1263 void GenCollectedHeap::print_tracing_info() const {
1264   if (log_is_enabled(Debug, gc, heap, exit)) {
1265     LogStreamHandle(Debug, gc, heap, exit) lsh;
1266     _young_gen->print_summary_info_on(&lsh);
1267     _old_gen->print_summary_info_on(&lsh);
1268   }
1269 }
1270 
1271 void GenCollectedHeap::print_heap_change(size_t young_prev_used, size_t old_prev_used) const {
1272   log_info(gc, heap)("%s: " SIZE_FORMAT "K->" SIZE_FORMAT "K("  SIZE_FORMAT "K)",
1273                      _young_gen->short_name(), young_prev_used / K, _young_gen->used() /K, _young_gen->capacity() /K);
1274   log_info(gc, heap)("%s: " SIZE_FORMAT "K->" SIZE_FORMAT "K("  SIZE_FORMAT "K)",
1275                      _old_gen->short_name(), old_prev_used / K, _old_gen->used() /K, _old_gen->capacity() /K);























1276 }
1277 
1278 class GenGCPrologueClosure: public GenCollectedHeap::GenClosure {
1279  private:
1280   bool _full;
1281  public:
1282   void do_generation(Generation* gen) {
1283     gen->gc_prologue(_full);
1284   }
1285   GenGCPrologueClosure(bool full) : _full(full) {};
1286 };
1287 
1288 void GenCollectedHeap::gc_prologue(bool full) {
1289   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
1290 
1291   // Fill TLAB's and such
1292   ensure_parsability(true);   // retire TLABs
1293 
1294   // Walk generations
1295   GenGCPrologueClosure blk(full);




 // Finish heap setup that must run after both generations exist:
 // reference processing, size-policy seeding, mark-sweep and
 // scavengable-nmethod bookkeeping.
 189 void GenCollectedHeap::post_initialize() {
 190   CollectedHeap::post_initialize();
 191   ref_processing_init();
 192 
 // Unchecked downcast: assumes the young generation is a DefNewGeneration
 // (or subclass) -- TODO confirm no other young-gen type reaches here.
 193   DefNewGeneration* def_new_gen = (DefNewGeneration*)_young_gen;
 194 
 // Seed the adaptive size policy from eden/from capacities of the young
 // gen plus the old gen's capacity.
 195   initialize_size_policy(def_new_gen->eden()->capacity(),
 196                          _old_gen->capacity(),
 197                          def_new_gen->from()->capacity());
 198 
 199   MarkSweep::initialize();
 200 
 // Register the predicate used to decide which nmethods are scavengable.
 201   ScavengableNMethods::initialize(&_is_scavengable);
 202 }
 203 
 // Initialize the per-generation reference processors, young first then old.
 204 void GenCollectedHeap::ref_processing_init() {
 205   _young_gen->ref_processor_init();
 206   _old_gen->ref_processor_init();
 207 }
 208 
 // Snapshot, before a GC, the used/capacity figures that print_heap_change
 // will later report: young gen total, its eden and from subspaces, and the
 // old gen. (The PreGenGCValues object presumably also snapshots metaspace
 // sizes internally -- metaspace_sizes() is read back at log time; confirm
 // against the PreGenGCValues definition.)
 209 PreGenGCValues GenCollectedHeap::get_pre_gc_values() const {
 // Unchecked downcast: assumes the young generation is a DefNewGeneration
 // -- TODO confirm no other young-gen type reaches here.
 210   const DefNewGeneration* const def_new_gen = (DefNewGeneration*) young_gen();
 211 
 212   return PreGenGCValues(def_new_gen->used(),
 213                         def_new_gen->capacity(),
 214                         def_new_gen->eden()->used(),
 215                         def_new_gen->eden()->capacity(),
 216                         def_new_gen->from()->used(),
 217                         def_new_gen->from()->capacity(),
 218                         old_gen()->used(),
 219                         old_gen()->capacity());
 220 }
 221 
 // Accessors for the creation-time specifications of the two generations.
 222 GenerationSpec* GenCollectedHeap::young_gen_spec() const {
 223   return _young_gen_spec;
 224 }
 225 
 226 GenerationSpec* GenCollectedHeap::old_gen_spec() const {
 227   return _old_gen_spec;
 228 }
 229 
 230 size_t GenCollectedHeap::capacity() const {
 231   return _young_gen->capacity() + _old_gen->capacity();
 232 }
 233 
 234 size_t GenCollectedHeap::used() const {
 235   return _young_gen->used() + _old_gen->used();
 236 }
 237 
 // Record the currently-used region of each generation.
 // NOTE(review): old is saved before young -- confirm whether any caller
 // depends on this order before changing it.
 238 void GenCollectedHeap::save_used_regions() {
 239   _old_gen->save_used_region();
 240   _young_gen->save_used_region();
 241 }


 577          "incorrect thread type capability");
 578   assert(Heap_lock->is_locked(),
 579          "the requesting thread should have the Heap_lock");
 580   guarantee(!is_gc_active(), "collection is not reentrant");
 581 
 582   if (GCLocker::check_active_before_gc()) {
 583     return; // GC is disabled (e.g. JNI GetXXXCritical operation)
 584   }
 585 
 586   const bool do_clear_all_soft_refs = clear_all_soft_refs ||
 587                           soft_ref_policy()->should_clear_all_soft_refs();
 588 
 589   ClearedAllSoftRefs casr(do_clear_all_soft_refs, soft_ref_policy());
 590 
 591   FlagSetting fl(_is_gc_active, true);
 592 
 593   bool complete = full && (max_generation == OldGen);
 594   bool old_collects_young = complete && !ScavengeBeforeFullGC;
 595   bool do_young_collection = !old_collects_young && _young_gen->should_collect(full, size, is_tlab);
 596 
 597   const PreGenGCValues pre_gc_values = get_pre_gc_values();


 598 
 599   bool run_verification = total_collections() >= VerifyGCStartAt;
 600   bool prepared_for_verification = false;
 601   bool do_full_collection = false;
 602 
 603   if (do_young_collection) {
 604     GCIdMark gc_id_mark;
 605     GCTraceCPUTime tcpu;
 606     GCTraceTime(Info, gc) t("Pause Young", NULL, gc_cause(), true);
 607 
 608     print_heap_before_gc();
 609 
 610     if (run_verification && VerifyGCLevel <= 0 && VerifyBeforeGC) {
 611       prepare_for_verify();
 612       prepared_for_verification = true;
 613     }
 614 
 615     gc_prologue(complete);
 616     increment_total_collections(complete);
 617 


 619                        full,
 620                        size,
 621                        is_tlab,
 622                        run_verification && VerifyGCLevel <= 0,
 623                        do_clear_all_soft_refs,
 624                        false);
 625 
 626     if (size > 0 && (!is_tlab || _young_gen->supports_tlab_allocation()) &&
 627         size * HeapWordSize <= _young_gen->unsafe_max_alloc_nogc()) {
 628       // Allocation request was met by young GC.
 629       size = 0;
 630     }
 631 
 632     // Ask if young collection is enough. If so, do the final steps for young collection,
 633     // and fallthrough to the end.
 634     do_full_collection = should_do_full_collection(size, full, is_tlab, max_generation);
 635     if (!do_full_collection) {
 636       // Adjust generation sizes.
 637       _young_gen->compute_new_size();
 638 
 639       print_heap_change(pre_gc_values);

 640 
 641       // Track memory usage and detect low memory after GC finishes
 642       MemoryService::track_memory_usage();
 643 
 644       gc_epilogue(complete);
 645     }
 646 
 647     print_heap_after_gc();
 648 
 649   } else {
 650     // No young collection, ask if we need to perform Full collection.
 651     do_full_collection = should_do_full_collection(size, full, is_tlab, max_generation);
 652   }
 653 
 654   if (do_full_collection) {
 655     GCIdMark gc_id_mark;
 656     GCTraceCPUTime tcpu;
 657     GCTraceTime(Info, gc) t("Pause Full", NULL, gc_cause(), true);
 658 
 659     print_heap_before_gc();


 677 
 678     collect_generation(_old_gen,
 679                        full,
 680                        size,
 681                        is_tlab,
 682                        run_verification && VerifyGCLevel <= 1,
 683                        do_clear_all_soft_refs,
 684                        true);
 685 
 686     // Adjust generation sizes.
 687     _old_gen->compute_new_size();
 688     _young_gen->compute_new_size();
 689 
 690     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 691     ClassLoaderDataGraph::purge();
 692     MetaspaceUtils::verify_metrics();
 693     // Resize the metaspace capacity after full collections
 694     MetaspaceGC::compute_new_size();
 695     update_full_collections_completed();
 696 
 697     print_heap_change(pre_gc_values);

 698 
 699     // Track memory usage and detect low memory after GC finishes
 700     MemoryService::track_memory_usage();
 701 
 702     // Need to tell the epilogue code we are done with Full GC, regardless what was
 703     // the initial value for "complete" flag.
 704     gc_epilogue(true);
 705 
 706     BiasedLocking::restore_marks();
 707 
 708     print_heap_after_gc();
 709   }
 710 
 711 #ifdef TRACESPINNING
 712   ParallelTaskTerminator::print_termination_counts();
 713 #endif
 714 }
 715 
 716 bool GenCollectedHeap::should_do_full_collection(size_t size, bool full, bool is_tlab,
 717                                                  GenCollectedHeap::GenerationType max_gen) const {


// Print the young generation, the old generation, and metaspace, in that
// order, to the given stream.
1260 void GenCollectedHeap::print_on(outputStream* st) const {
1261   _young_gen->print_on(st);
1262   _old_gen->print_on(st);
1263   MetaspaceUtils::print_on(st);
1264 }
1265 
// Intentionally empty: this heap has no dedicated GC worker threads to
// iterate over (presumably the serial collector -- verify against callers).
1266 void GenCollectedHeap::gc_threads_do(ThreadClosure* tc) const {
1267 }
1268 
// Intentionally empty for the same reason: nothing to print.
1269 void GenCollectedHeap::print_gc_threads_on(outputStream* st) const {
1270 }
1271 
// Emit per-generation summary statistics at gc+heap+exit=debug level.
// The log_is_enabled guard avoids building the stream when disabled.
1272 void GenCollectedHeap::print_tracing_info() const {
1273   if (log_is_enabled(Debug, gc, heap, exit)) {
1274     LogStreamHandle(Debug, gc, heap, exit) lsh;
1275     _young_gen->print_summary_info_on(&lsh);
1276     _old_gen->print_summary_info_on(&lsh);
1277   }
1278 }
1279 
// Log the GC heap transition at gc+heap=info level. The first line covers
// the young generation and its Eden and From subspaces (the new detail added
// by this change); the second covers the old generation; finally the
// metaspace change is printed. "Before" values come from the pre_gc_values
// snapshot taken at GC start; "after" values are read from the spaces now.
// NOTE(review): the To space is deliberately not printed -- presumably
// because it is expected to be empty after a scavenge; confirm.
1280 void GenCollectedHeap::print_heap_change(const PreGenGCValues& pre_gc_values) const {
// Unchecked downcast: assumes the young generation is a DefNewGeneration
// -- TODO confirm no other young-gen type reaches here.
1281   const DefNewGeneration* const def_new_gen = (DefNewGeneration*) young_gen();
1282 
1283   log_info(gc, heap)(HEAP_CHANGE_FORMAT" "
1284                      HEAP_CHANGE_FORMAT" "
1285                      HEAP_CHANGE_FORMAT,
1286                      HEAP_CHANGE_FORMAT_ARGS(def_new_gen->short_name(),
1287                                              pre_gc_values.young_gen_used(),
1288                                              pre_gc_values.young_gen_capacity(),
1289                                              def_new_gen->used(),
1290                                              def_new_gen->capacity()),
1291                      HEAP_CHANGE_FORMAT_ARGS("Eden",
1292                                              pre_gc_values.eden_used(),
1293                                              pre_gc_values.eden_capacity(),
1294                                              def_new_gen->eden()->used(),
1295                                              def_new_gen->eden()->capacity()),
1296                      HEAP_CHANGE_FORMAT_ARGS("From",
1297                                              pre_gc_values.from_used(),
1298                                              pre_gc_values.from_capacity(),
1299                                              def_new_gen->from()->used(),
1300                                              def_new_gen->from()->capacity()));
1301   log_info(gc, heap)(HEAP_CHANGE_FORMAT,
1302                      HEAP_CHANGE_FORMAT_ARGS(old_gen()->short_name(),
1303                                              pre_gc_values.old_gen_used(),
1304                                              pre_gc_values.old_gen_capacity(),
1305                                              old_gen()->used(),
1306                                              old_gen()->capacity()));
1307   MetaspaceUtils::print_metaspace_change(pre_gc_values.metaspace_sizes());
1308 }
1309 
1310 class GenGCPrologueClosure: public GenCollectedHeap::GenClosure {
1311  private:
1312   bool _full;
1313  public:
1314   void do_generation(Generation* gen) {
1315     gen->gc_prologue(_full);
1316   }
1317   GenGCPrologueClosure(bool full) : _full(full) {};
1318 };
1319 
1320 void GenCollectedHeap::gc_prologue(bool full) {
1321   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
1322 
1323   // Fill TLAB's and such
1324   ensure_parsability(true);   // retire TLABs
1325 
1326   // Walk generations
1327   GenGCPrologueClosure blk(full);


< prev index next >