src/share/vm/gc/shared/genCollectedHeap.cpp

 297 bool GenCollectedHeap::must_clear_all_soft_refs() {
 298   return _gc_cause == GCCause::_last_ditch_collection;
 299 }
 300 
 301 bool GenCollectedHeap::should_do_concurrent_full_gc(GCCause::Cause cause) {
 302   if (!UseConcMarkSweepGC) {
 303     return false;
 304   }
 305 
 306   switch (cause) {
 307     case GCCause::_gc_locker:           return GCLockerInvokesConcurrent;
 308     case GCCause::_java_lang_system_gc:
 309     case GCCause::_dcmd_gc_run:         return ExplicitGCInvokesConcurrent;
 310     default:                            return false;
 311   }
 312 }
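
The switch above maps a GC cause onto the -XX flag that decides whether CMS should handle the request with a concurrent full collection. A minimal standalone model of that decision table, assuming nothing beyond what the diff shows (the enum and flag names mirror the code; the rest is invented for illustration):

#include <cstdio>

// Stand-ins for GCCause::Cause and the -XX flags consulted above.
enum class Cause { GCLocker, JavaLangSystemGC, DcmdGCRun, AllocationFailure };
static bool UseConcMarkSweepGC          = true;
static bool GCLockerInvokesConcurrent   = false;
static bool ExplicitGCInvokesConcurrent = true;

static bool should_do_concurrent_full_gc(Cause cause) {
  if (!UseConcMarkSweepGC) {
    return false;   // only CMS supports a concurrent full collection here
  }
  switch (cause) {
    case Cause::GCLocker:         return GCLockerInvokesConcurrent;
    case Cause::JavaLangSystemGC: // fall through: both are explicit requests
    case Cause::DcmdGCRun:        return ExplicitGCInvokesConcurrent;
    default:                      return false;
  }
}

int main() {
  // System.gc() honors ExplicitGCInvokesConcurrent; an allocation failure never does.
  std::printf("System.gc(): %d\n", should_do_concurrent_full_gc(Cause::JavaLangSystemGC));
  std::printf("alloc fail:  %d\n", should_do_concurrent_full_gc(Cause::AllocationFailure));
}
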
 313 
 314 void GenCollectedHeap::collect_generation(Generation* gen, bool full, size_t size,
 315                                           bool is_tlab, bool run_verification, bool clear_soft_refs,
 316                                           bool restore_marks_for_biased_locking) {
 317   // Timer for individual generations. Last argument is false: no CR
 318   // FIXME: We should try to start the timing earlier to cover more of the GC pause
 319   GCTraceTime t1(gen->short_name(), PrintGCDetails, false, NULL);
 320   TraceCollectorStats tcs(gen->counters());
 321   TraceMemoryManagerStats tmms(gen->kind(), gc_cause());
 322 
 323   size_t prev_used = gen->used();
 324   gen->stat_record()->invocations++;
 325   gen->stat_record()->accumulated_time.start();
 326 
 327   // Must be done anew before each collection because
 328   // a previous collection will do mangling and will
 329   // change top of some spaces.
 330   record_gen_tops_before_GC();
 331 
 332   if (PrintGC && Verbose) {
 333     // I didn't want to change the logging when removing the level concept,
 334     // but I guess this logging could say young/old or something instead of 0/1.
 335     uint level;
 336     if (heap()->is_young_gen(gen)) {
 337       level = 0;
 338     } else {
 339       level = 1;
 340     }
 341     gclog_or_tty->print("level=%u invoke=%d size=" SIZE_FORMAT,
 342                         level,
 343                         gen->stat_record()->invocations,
 344                         size * HeapWordSize);
 345   }
 346 
 347   if (run_verification && VerifyBeforeGC) {
 348     HandleMark hm;  // Discard invalid handles created during verification
 349     Universe::verify(" VerifyBeforeGC:");
 350   }
 351   COMPILER2_PRESENT(DerivedPointerTable::clear());
 352 
 353   if (restore_marks_for_biased_locking) {
 354     // We perform this mark word preservation work lazily
 355     // because it's only at this point that we know whether we
 356     // absolutely have to do it; we want to avoid doing it for
 357     // scavenge-only collections where it's unnecessary
 358     BiasedLocking::preserve_marks();
 359   }
 360 
 361   // Do collection work
 362   {
 363     // Note on ref discovery: For what appear to be historical reasons,
 364     // GCH enables and disables (by enqueueing) ref discovery.
 365     // In the future this should be moved into the generation's
 366     // collect method so that ref discovery and enqueueing concerns
 367     // are local to a generation. The collect method could return
 368     // an appropriate indication in the case that notification on
 369     // the ref lock was needed. This will make the treatment of


 387     } else {
 388       // collect() below will enable discovery as appropriate
 389     }
 390     gen->collect(full, clear_soft_refs, size, is_tlab);
 391     if (!rp->enqueuing_is_done()) {
 392       rp->enqueue_discovered_references();
 393     } else {
 394       rp->set_enqueuing_is_done(false);
 395     }
 396     rp->verify_no_references_recorded();
 397   }
 398 
 399   COMPILER2_PRESENT(DerivedPointerTable::update_pointers());
 400 
 401   gen->stat_record()->accumulated_time.stop();
 402 
 403   update_gc_stats(gen, full);
 404 
 405   if (run_verification && VerifyAfterGC) {
 406     HandleMark hm;  // Discard invalid handles created during verification
 407     Universe::verify(" VerifyAfterGC:");
 408   }
 409 
 410   if (PrintGCDetails) {
 411     gclog_or_tty->print(":");
 412     gen->print_heap_change(prev_used);
 413   }
 414 }
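
The ref-discovery note in collect_generation() describes a handshake: discovery is enabled around the collection, and afterwards the discovered references are either enqueued by the heap or the generation signals (via enqueuing_is_done) that it already did so. A toy model of that handshake; only the method names come from the diff, the implementation is invented:

#include <cassert>
#include <vector>

class ToyReferenceProcessor {
  bool _enqueuing_is_done = false;
  std::vector<void*> _discovered;
 public:
  bool enqueuing_is_done() const { return _enqueuing_is_done; }
  void set_enqueuing_is_done(bool v) { _enqueuing_is_done = v; }
  void discover(void* ref) { _discovered.push_back(ref); }
  void enqueue_discovered_references() {
    _discovered.clear();             // hand the refs off to the pending list
    _enqueuing_is_done = true;
  }
  void verify_no_references_recorded() { assert(_discovered.empty()); }
};

static int referent;

int main() {
  ToyReferenceProcessor rp;
  rp.discover(&referent);            // discovery happens inside gen->collect()
  if (!rp.enqueuing_is_done()) {     // mirrors the post-collect logic above
    rp.enqueue_discovered_references();
  } else {
    rp.set_enqueuing_is_done(false); // reset the flag for the next cycle
  }
  rp.verify_no_references_recorded();
}
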
 415 
 416 void GenCollectedHeap::do_collection(bool           full,
 417                                      bool           clear_all_soft_refs,
 418                                      size_t         size,
 419                                      bool           is_tlab,
 420                                      GenerationType max_generation) {
 421   ResourceMark rm;
 422   DEBUG_ONLY(Thread* my_thread = Thread::current();)
 423 
 424   assert(SafepointSynchronize::is_at_safepoint(), "should be at safepoint");
 425   assert(my_thread->is_VM_thread() ||
 426          my_thread->is_ConcurrentGC_thread(),
 427          "incorrect thread type capability");
 428   assert(Heap_lock->is_locked(),
 429          "the requesting thread should have the Heap_lock");
 430   guarantee(!is_gc_active(), "collection is not reentrant");
 431 
 432   if (GC_locker::check_active_before_gc()) {
 433     return; // GC is disabled (e.g. JNI GetXXXCritical operation)
 434   }
 435 
 436   GCIdMarkAndRestore gc_id_mark;
 437 
 438   const bool do_clear_all_soft_refs = clear_all_soft_refs ||
 439                           collector_policy()->should_clear_all_soft_refs();
 440 
 441   ClearedAllSoftRefs casr(do_clear_all_soft_refs, collector_policy());
 442 
 443   const size_t metadata_prev_used = MetaspaceAux::used_bytes();
 444 
 445   print_heap_before_gc();
 446 
 447   {
 448     FlagSetting fl(_is_gc_active, true);
 449 
 450     bool complete = full && (max_generation == OldGen);
 451     const char* gc_cause_prefix = complete ? "Full GC" : "GC";
 452     TraceCPUTime tcpu(PrintGCDetails, true, gclog_or_tty);
 453     GCTraceTime t(GCCauseString(gc_cause_prefix, gc_cause()), PrintGCDetails, false, NULL);









 454 
 455     gc_prologue(complete);
 456     increment_total_collections(complete);
 457 
 458     size_t gch_prev_used = used();


 459     bool run_verification = total_collections() >= VerifyGCStartAt;
 460 
 461     bool prepared_for_verification = false;
 462     bool collected_old = false;
 463     bool old_collects_young = complete && !ScavengeBeforeFullGC;
 464 
 465     if (!old_collects_young && _young_gen->should_collect(full, size, is_tlab)) {
 466       if (run_verification && VerifyGCLevel <= 0 && VerifyBeforeGC) {
 467         prepare_for_verify();
 468         prepared_for_verification = true;
 469       }
 470 
 471   assert(!_young_gen->performs_in_place_marking(), "No young generation should do in place marking");
 472       collect_generation(_young_gen,
 473                          full,
 474                          size,
 475                          is_tlab,
 476                          run_verification && VerifyGCLevel <= 0,
 477                          do_clear_all_soft_refs,
 478                          false);
 479 
 480       if (size > 0 && (!is_tlab || _young_gen->supports_tlab_allocation()) &&
 481           size * HeapWordSize <= _young_gen->unsafe_max_alloc_nogc()) {
 482         // Allocation request was met by young GC.
 483         size = 0;
 484       }
 485     }
 486 
 487     bool must_restore_marks_for_biased_locking = false;
 488 
 489     if (max_generation == OldGen && _old_gen->should_collect(full, size, is_tlab)) {
 490       GCIdMarkAndRestore gc_id_mark;
 491       if (!complete) {
 492         // The full_collections increment was missed above.
 493         increment_total_full_collections();
 494       }
 495 
 496       pre_full_gc_dump(NULL);    // do any pre full gc dumps
 497 
 498       if (!prepared_for_verification && run_verification &&
 499           VerifyGCLevel <= 1 && VerifyBeforeGC) {
 500         prepare_for_verify();
 501       }
 502 
 503       assert(_old_gen->performs_in_place_marking(), "All old generations do in place marking");
 504       collect_generation(_old_gen,
 505                          full,
 506                          size,
 507                          is_tlab,
 508                          run_verification && VerifyGCLevel <= 1,
 509                          do_clear_all_soft_refs,
 510                          true);


 511 
 512       must_restore_marks_for_biased_locking = true;
 513       collected_old = true;
 514     }
 515 
 516     // Update "complete" boolean wrt what actually transpired --
 517     // for instance, a promotion failure could have led to
 518     // a whole heap collection.
 519     complete = complete || collected_old;
 520 
 521     if (complete) { // We did a full collection
 522       // FIXME: See comment at pre_full_gc_dump call
 523       post_full_gc_dump(NULL);   // do any post full gc dumps
 524     }
 525 
 526     if (PrintGCDetails) {
 527       print_heap_change(gch_prev_used);
 528 
 529       // Print metaspace info for full GC with PrintGCDetails flag.
 530       if (complete) {
 531         MetaspaceAux::print_metaspace_change(metadata_prev_used);
 532       }
 533     }
 534 
 535     // Adjust generation sizes.
 536     if (collected_old) {
 537       _old_gen->compute_new_size();
 538     }
 539     _young_gen->compute_new_size();
 540 
 541     if (complete) {
 542       // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 543       ClassLoaderDataGraph::purge();
 544       MetaspaceAux::verify_metrics();
 545       // Resize the metaspace capacity after full collections
 546       MetaspaceGC::compute_new_size();
 547       update_full_collections_completed();
 548     }
 549 
 550     // Track memory usage and detect low memory after GC finishes
 551     MemoryService::track_memory_usage();
 552 
 553     gc_epilogue(complete);


 857 void GenCollectedHeap::do_full_collection(bool clear_all_soft_refs,
 858                                           GenerationType last_generation) {
 859   GenerationType local_last_generation;
 860   if (!incremental_collection_will_fail(false /* don't consult_young */) &&
 861       gc_cause() == GCCause::_gc_locker) {
 862     local_last_generation = YoungGen;
 863   } else {
 864     local_last_generation = last_generation;
 865   }
 866 
 867   do_collection(true,                   // full
 868                 clear_all_soft_refs,    // clear_all_soft_refs
 869                 0,                      // size
 870                 false,                  // is_tlab
 871                 local_last_generation); // last_generation
 872   // Hack XXX FIX ME !!!
 873   // A scavenge may not have been attempted, or may have
 874   // been attempted and failed, because the old gen was too full
 875   if (local_last_generation == YoungGen && gc_cause() == GCCause::_gc_locker &&
 876       incremental_collection_will_fail(false /* don't consult_young */)) {
 877     if (PrintGCDetails) {
 878       gclog_or_tty->print_cr("GC locker: Trying a full collection "
 879                              "because scavenge failed");
 880     }
 881     // This time allow the old gen to be collected as well
 882     do_collection(true,                // full
 883                   clear_all_soft_refs, // clear_all_soft_refs
 884                   0,                   // size
 885                   false,               // is_tlab
 886                   OldGen);             // last_generation
 887   }
 888 }
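
do_full_collection() first downgrades a GC-locker-induced request to a young-only collection, and only retries with the old generation when the scavenge could not succeed. A standalone sketch of that two-step fallback; every type and outcome here is invented for illustration:

#include <cstdio>

enum GenerationType { YoungGen, OldGen };

static bool incremental_collection_will_fail = false;

static void do_collection(GenerationType last_generation) {
  if (last_generation == YoungGen) {
    // Pretend the scavenge failed because the old gen is too full --
    // exactly the case the retry below guards against.
    incremental_collection_will_fail = true;
    std::printf("young-only collection attempted\n");
  } else {
    incremental_collection_will_fail = false;
    std::printf("full collection including old gen\n");
  }
}

int main() {
  do_collection(YoungGen);               // GC locker asked; scavenge preferred
  if (incremental_collection_will_fail) {
    do_collection(OldGen);               // "allow the old gen to be collected as well"
  }
}
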
 889 
 890 bool GenCollectedHeap::is_in_young(oop p) {
 891   bool result = ((HeapWord*)p) < _old_gen->reserved().start();
 892   assert(result == _young_gen->is_in_reserved(p),
 893          "incorrect test - result=%d, p=" INTPTR_FORMAT, result, p2i((void*)p));
 894   return result;
 895 }
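
is_in_young() can use a single pointer compare because the young generation's reserved range sits entirely below the old generation's. A toy address layout demonstrating the invariant that the assert cross-checks:

#include <cassert>
#include <cstdint>

int main() {
  // Pretend reserved heap: [young: 0x1000..0x2000) [old: 0x2000..0x4000)
  const uintptr_t young_start = 0x1000, old_start = 0x2000;

  uintptr_t p = 0x1800;                    // an address inside the young gen
  bool fast = p < old_start;               // the one-compare test from the diff
  bool slow = p >= young_start && p < old_start;
  assert(fast == slow);                    // mirrors the is_in_reserved() check
  return (fast && slow) ? 0 : 1;
}
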
 896 
 897 // Returns "TRUE" iff "p" points into the committed areas of the heap.
 898 bool GenCollectedHeap::is_in(const void* p) const {
 899   return _young_gen->is_in(p) || _old_gen->is_in(p);
 900 }


1089 
1090 void GenCollectedHeap::save_marks() {
1091   _young_gen->save_marks();
1092   _old_gen->save_marks();
1093 }
1094 
1095 GenCollectedHeap* GenCollectedHeap::heap() {
1096   CollectedHeap* heap = Universe::heap();
1097   assert(heap != NULL, "Uninitialized access to GenCollectedHeap::heap()");
1098   assert(heap->kind() == CollectedHeap::GenCollectedHeap, "Not a GenCollectedHeap");
1099   return (GenCollectedHeap*)heap;
1100 }
1101 
1102 void GenCollectedHeap::prepare_for_compaction() {
1103   // Start by compacting into same gen.
1104   CompactPoint cp(_old_gen);
1105   _old_gen->prepare_for_compaction(&cp);
1106   _young_gen->prepare_for_compaction(&cp);
1107 }
1108 
1109 void GenCollectedHeap::verify(bool silent, VerifyOption option /* ignored */) {
1110   if (!silent) {
1111     gclog_or_tty->print("%s", _old_gen->name());
1112     gclog_or_tty->print(" ");
1113   }
1114   _old_gen->verify();
1115 
1116   if (!silent) {
1117     gclog_or_tty->print("%s", _young_gen->name());
1118     gclog_or_tty->print(" ");
1119   }
1120   _young_gen->verify();
1121 
1122   if (!silent) {
1123     gclog_or_tty->print("remset ");
1124   }
1125   rem_set()->verify();
1126 }
1127 
1128 void GenCollectedHeap::print_on(outputStream* st) const {
1129   _young_gen->print_on(st);
1130   _old_gen->print_on(st);
1131   MetaspaceAux::print_on(st);
1132 }
1133 
1134 void GenCollectedHeap::gc_threads_do(ThreadClosure* tc) const {
1135   if (workers() != NULL) {
1136     workers()->threads_do(tc);
1137   }
1138 #if INCLUDE_ALL_GCS
1139   if (UseConcMarkSweepGC) {
1140     ConcurrentMarkSweepThread::threads_do(tc);
1141   }
1142 #endif // INCLUDE_ALL_GCS
1143 }
1144 


1154 void GenCollectedHeap::print_on_error(outputStream* st) const {
1155   this->CollectedHeap::print_on_error(st);
1156 
1157 #if INCLUDE_ALL_GCS
1158   if (UseConcMarkSweepGC) {
1159     st->cr();
1160     CMSCollector::print_on_error(st);
1161   }
1162 #endif // INCLUDE_ALL_GCS
1163 }
1164 
1165 void GenCollectedHeap::print_tracing_info() const {
1166   if (TraceYoungGenTime) {
1167     _young_gen->print_summary_info();
1168   }
1169   if (TraceOldGenTime) {
1170     _old_gen->print_summary_info();
1171   }
1172 }
1173 
1174 void GenCollectedHeap::print_heap_change(size_t prev_used) const {
1175   if (PrintGCDetails && Verbose) {
1176     gclog_or_tty->print(" "  SIZE_FORMAT
1177                         "->" SIZE_FORMAT
1178                         "("  SIZE_FORMAT ")",
1179                         prev_used, used(), capacity());
1180   } else {
1181     gclog_or_tty->print(" "  SIZE_FORMAT "K"
1182                         "->" SIZE_FORMAT "K"
1183                         "("  SIZE_FORMAT "K)",
1184                         prev_used / K, used() / K, capacity() / K);
1185   }
1186 }
1187 
1188 class GenGCPrologueClosure: public GenCollectedHeap::GenClosure {
1189  private:
1190   bool _full;
1191  public:
1192   void do_generation(Generation* gen) {
1193     gen->gc_prologue(_full);
1194   }
1195   GenGCPrologueClosure(bool full) : _full(full) {};
1196 };
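
GenGCPrologueClosure is an instance of the "iterate the generations with a closure" pattern this file uses for prologue/epilogue work. A self-contained imitation, assuming a two-generation heap; all names here are invented for the sketch:

#include <cstdio>

struct Generation {
  const char* name;
  void gc_prologue(bool full) { std::printf("%s prologue, full=%d\n", name, full); }
};

struct GenClosure {                       // abstract per-generation callback
  virtual void do_generation(Generation* gen) = 0;
  virtual ~GenClosure() = default;
};

struct PrologueClosure : GenClosure {     // plays the role of GenGCPrologueClosure
  bool _full;
  explicit PrologueClosure(bool full) : _full(full) {}
  void do_generation(Generation* gen) override { gen->gc_prologue(_full); }
};

// Plays the role of the heap's generation_iterate().
static void generation_iterate(GenClosure* cl, Generation* young, Generation* old_gen) {
  cl->do_generation(young);
  cl->do_generation(old_gen);
}

int main() {
  Generation young{"young"}, old_gen{"old"};
  PrologueClosure cl(/*full=*/true);
  generation_iterate(&cl, &young, &old_gen);
}
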
1197 
1198 void GenCollectedHeap::gc_prologue(bool full) {
1199   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
1200 
1201   always_do_update_barrier = false;
1202   // Fill TLAB's and such
1203   CollectedHeap::accumulate_statistics_all_tlabs();
1204   ensure_parsability(true);   // retire TLABs
1205 




 297 bool GenCollectedHeap::must_clear_all_soft_refs() {
 298   return _gc_cause == GCCause::_last_ditch_collection;
 299 }
 300 
 301 bool GenCollectedHeap::should_do_concurrent_full_gc(GCCause::Cause cause) {
 302   if (!UseConcMarkSweepGC) {
 303     return false;
 304   }
 305 
 306   switch (cause) {
 307     case GCCause::_gc_locker:           return GCLockerInvokesConcurrent;
 308     case GCCause::_java_lang_system_gc:
 309     case GCCause::_dcmd_gc_run:         return ExplicitGCInvokesConcurrent;
 310     default:                            return false;
 311   }
 312 }
 313 
 314 void GenCollectedHeap::collect_generation(Generation* gen, bool full, size_t size,
 315                                           bool is_tlab, bool run_verification, bool clear_soft_refs,
 316                                           bool restore_marks_for_biased_locking) {
 317   FormatBuffer<> title("Collect gen: %s", gen->short_name());
 318   GCTraceTime(Debug, gc) t1(title);

 319   TraceCollectorStats tcs(gen->counters());
 320   TraceMemoryManagerStats tmms(gen->kind(), gc_cause());
 321 

 322   gen->stat_record()->invocations++;
 323   gen->stat_record()->accumulated_time.start();
 324 
 325   // Must be done anew before each collection because
 326   // a previous collection will do mangling and will
 327   // change top of some spaces.
 328   record_gen_tops_before_GC();
 329 
 330   log_trace(gc)("%s invoke=%d size=" SIZE_FORMAT, heap()->is_young_gen(gen) ? "Young" : "Old", gen->stat_record()->invocations, size * HeapWordSize);













 331 
 332   if (run_verification && VerifyBeforeGC) {
 333     HandleMark hm;  // Discard invalid handles created during verification
 334     Universe::verify("Before GC");
 335   }
 336   COMPILER2_PRESENT(DerivedPointerTable::clear());
 337 
 338   if (restore_marks_for_biased_locking) {
 339     // We perform this mark word preservation work lazily
 340     // because it's only at this point that we know whether we
 341     // absolutely have to do it; we want to avoid doing it for
 342     // scavenge-only collections where it's unnecessary
 343     BiasedLocking::preserve_marks();
 344   }
 345 
 346   // Do collection work
 347   {
 348     // Note on ref discovery: For what appear to be historical reasons,
 349     // GCH enables and disables (by enqueueing) ref discovery.
 350     // In the future this should be moved into the generation's
 351     // collect method so that ref discovery and enqueueing concerns
 352     // are local to a generation. The collect method could return
 353     // an appropriate indication in the case that notification on
 354     // the ref lock was needed. This will make the treatment of


 372     } else {
 373       // collect() below will enable discovery as appropriate
 374     }
 375     gen->collect(full, clear_soft_refs, size, is_tlab);
 376     if (!rp->enqueuing_is_done()) {
 377       rp->enqueue_discovered_references();
 378     } else {
 379       rp->set_enqueuing_is_done(false);
 380     }
 381     rp->verify_no_references_recorded();
 382   }
 383 
 384   COMPILER2_PRESENT(DerivedPointerTable::update_pointers());
 385 
 386   gen->stat_record()->accumulated_time.stop();
 387 
 388   update_gc_stats(gen, full);
 389 
 390   if (run_verification && VerifyAfterGC) {
 391     HandleMark hm;  // Discard invalid handles created during verification
 392     Universe::verify("After GC");





 393   }
 394 }
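
In the new version the PrintGC*/Verbose-guarded prints become Unified Logging calls such as log_trace(gc)(...) and log_debug(gc, verify)(...), selected at runtime by tag set and level (e.g. -Xlog:gc+verify=debug). A rough standalone model of that gating; the real macros additionally avoid evaluating arguments when the level is disabled:

#include <cstdarg>
#include <cstdio>

enum Level { Error, Warning, Info, Debug, Trace };
static Level gc_tag_threshold = Info;      // as if set by -Xlog:gc=info

static void log_gc(Level lvl, const char* fmt, ...) {
  if (lvl > gc_tag_threshold) return;      // gate on the configured level
  va_list ap;
  va_start(ap, fmt);
  std::vfprintf(stdout, fmt, ap);
  va_end(ap);
  std::fputc('\n', stdout);
}

int main() {
  log_gc(Info,  "Pause Young");                              // printed
  log_gc(Trace, "Old invoke=%d size=%zu", 3, (size_t)4096);  // suppressed at Info
}
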
 395 
 396 void GenCollectedHeap::do_collection(bool           full,
 397                                      bool           clear_all_soft_refs,
 398                                      size_t         size,
 399                                      bool           is_tlab,
 400                                      GenerationType max_generation) {
 401   ResourceMark rm;
 402   DEBUG_ONLY(Thread* my_thread = Thread::current();)
 403 
 404   assert(SafepointSynchronize::is_at_safepoint(), "should be at safepoint");
 405   assert(my_thread->is_VM_thread() ||
 406          my_thread->is_ConcurrentGC_thread(),
 407          "incorrect thread type capability");
 408   assert(Heap_lock->is_locked(),
 409          "the requesting thread should have the Heap_lock");
 410   guarantee(!is_gc_active(), "collection is not reentrant");
 411 
 412   if (GC_locker::check_active_before_gc()) {
 413     return; // GC is disabled (e.g. JNI GetXXXCritical operation)
 414   }
 415 
 416   GCIdMarkAndRestore gc_id_mark;
 417 
 418   const bool do_clear_all_soft_refs = clear_all_soft_refs ||
 419                           collector_policy()->should_clear_all_soft_refs();
 420 
 421   ClearedAllSoftRefs casr(do_clear_all_soft_refs, collector_policy());
 422 
 423   const size_t metadata_prev_used = MetaspaceAux::used_bytes();
 424 
 425   print_heap_before_gc();
 426 
 427   {
 428     FlagSetting fl(_is_gc_active, true);
 429 
 430     bool complete = full && (max_generation == OldGen);
 431     bool old_collects_young = complete && !ScavengeBeforeFullGC;
 432     bool do_young_collection = !old_collects_young && _young_gen->should_collect(full, size, is_tlab);
 433 
 434     FormatBuffer<> gc_string("%s", "Pause ");
 435     if (do_young_collection) {
 436       gc_string.append("Young");
 437     } else {
 438       gc_string.append("Full");
 439     }
 440 
 441     GCTraceCPUTime tcpu;
 442     GCTraceTime(Info, gc) t(gc_string, NULL, gc_cause(), true);
 443 
 444     gc_prologue(complete);
 445     increment_total_collections(complete);
 446 
 447     size_t young_prev_used = _young_gen->used();
 448     size_t old_prev_used = _old_gen->used();
 449 
 450     bool run_verification = total_collections() >= VerifyGCStartAt;
 451 
 452     bool prepared_for_verification = false;
 453     bool collected_old = false;

 454 
 455     if (do_young_collection) {
 456       if (run_verification && VerifyGCLevel <= 0 && VerifyBeforeGC) {
 457         prepare_for_verify();
 458         prepared_for_verification = true;
 459       }
 460 
 461   assert(!_young_gen->performs_in_place_marking(), "No young generation should do in place marking");
 462       collect_generation(_young_gen,
 463                          full,
 464                          size,
 465                          is_tlab,
 466                          run_verification && VerifyGCLevel <= 0,
 467                          do_clear_all_soft_refs,
 468                          false);
 469 
 470       if (size > 0 && (!is_tlab || _young_gen->supports_tlab_allocation()) &&
 471           size * HeapWordSize <= _young_gen->unsafe_max_alloc_nogc()) {
 472         // Allocation request was met by young GC.
 473         size = 0;
 474       }
 475     }
 476 
 477     bool must_restore_marks_for_biased_locking = false;
 478 
 479     if (max_generation == OldGen && _old_gen->should_collect(full, size, is_tlab)) {

 480       if (!complete) {
 481         // The full_collections increment was missed above.
 482         increment_total_full_collections();
 483       }
 484 
 485       pre_full_gc_dump(NULL);    // do any pre full gc dumps
 486 
 487       if (!prepared_for_verification && run_verification &&
 488           VerifyGCLevel <= 1 && VerifyBeforeGC) {
 489         prepare_for_verify();
 490       }
 491 
 492       assert(_old_gen->performs_in_place_marking(), "All old generations do in place marking");
 493 
 494       if (do_young_collection) {
 495         // We did a young GC. Need a new GC id for the old GC.
 496         GCIdMarkAndRestore gc_id_mark;
 497         collect_generation(_old_gen, full, size, is_tlab, run_verification && VerifyGCLevel <= 1, do_clear_all_soft_refs, true);
 498       } else {
 499         // No young GC done. Use the same GC id as was set up earlier in this method.
 500         collect_generation(_old_gen, full, size, is_tlab, run_verification && VerifyGCLevel <= 1, do_clear_all_soft_refs, true);
 501       }
 502 
 503       must_restore_marks_for_biased_locking = true;
 504       collected_old = true;
 505     }
 506 
 507     // Update "complete" boolean wrt what actually transpired --
 508     // for instance, a promotion failure could have led to
 509     // a whole heap collection.
 510     complete = complete || collected_old;
 511 
 512     if (complete) { // We did a full collection
 513       // FIXME: See comment at pre_full_gc_dump call
 514       post_full_gc_dump(NULL);   // do any post full gc dumps
 515     }
 516 
 517     print_heap_change(young_prev_used, old_prev_used);




 518     MetaspaceAux::print_metaspace_change(metadata_prev_used);


 519 
 520     // Adjust generation sizes.
 521     if (collected_old) {
 522       _old_gen->compute_new_size();
 523     }
 524     _young_gen->compute_new_size();
 525 
 526     if (complete) {
 527       // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 528       ClassLoaderDataGraph::purge();
 529       MetaspaceAux::verify_metrics();
 530       // Resize the metaspace capacity after full collections
 531       MetaspaceGC::compute_new_size();
 532       update_full_collections_completed();
 533     }
 534 
 535     // Track memory usage and detect low memory after GC finishes
 536     MemoryService::track_memory_usage();
 537 
 538     gc_epilogue(complete);


 842 void GenCollectedHeap::do_full_collection(bool clear_all_soft_refs,
 843                                           GenerationType last_generation) {
 844   GenerationType local_last_generation;
 845   if (!incremental_collection_will_fail(false /* don't consult_young */) &&
 846       gc_cause() == GCCause::_gc_locker) {
 847     local_last_generation = YoungGen;
 848   } else {
 849     local_last_generation = last_generation;
 850   }
 851 
 852   do_collection(true,                   // full
 853                 clear_all_soft_refs,    // clear_all_soft_refs
 854                 0,                      // size
 855                 false,                  // is_tlab
 856                 local_last_generation); // last_generation
 857   // Hack XXX FIX ME !!!
 858   // A scavenge may not have been attempted, or may have
 859   // been attempted and failed, because the old gen was too full
 860   if (local_last_generation == YoungGen && gc_cause() == GCCause::_gc_locker &&
 861       incremental_collection_will_fail(false /* don't consult_young */)) {
 862     log_debug(gc, jni)("GC locker: Trying a full collection because scavenge failed");



 863     // This time allow the old gen to be collected as well
 864     do_collection(true,                // full
 865                   clear_all_soft_refs, // clear_all_soft_refs
 866                   0,                   // size
 867                   false,               // is_tlab
 868                   OldGen);             // last_generation
 869   }
 870 }
 871 
 872 bool GenCollectedHeap::is_in_young(oop p) {
 873   bool result = ((HeapWord*)p) < _old_gen->reserved().start();
 874   assert(result == _young_gen->is_in_reserved(p),
 875          "incorrect test - result=%d, p=" INTPTR_FORMAT, result, p2i((void*)p));
 876   return result;
 877 }
 878 
 879 // Returns "TRUE" iff "p" points into the committed areas of the heap.
 880 bool GenCollectedHeap::is_in(const void* p) const {
 881   return _young_gen->is_in(p) || _old_gen->is_in(p);
 882 }


1071 
1072 void GenCollectedHeap::save_marks() {
1073   _young_gen->save_marks();
1074   _old_gen->save_marks();
1075 }
1076 
1077 GenCollectedHeap* GenCollectedHeap::heap() {
1078   CollectedHeap* heap = Universe::heap();
1079   assert(heap != NULL, "Uninitialized access to GenCollectedHeap::heap()");
1080   assert(heap->kind() == CollectedHeap::GenCollectedHeap, "Not a GenCollectedHeap");
1081   return (GenCollectedHeap*)heap;
1082 }
1083 
1084 void GenCollectedHeap::prepare_for_compaction() {
1085   // Start by compacting into same gen.
1086   CompactPoint cp(_old_gen);
1087   _old_gen->prepare_for_compaction(&cp);
1088   _young_gen->prepare_for_compaction(&cp);
1089 }
1090 
1091 void GenCollectedHeap::verify(VerifyOption option /* ignored */) {
1092   log_debug(gc, verify)("%s", _old_gen->name());



1093   _old_gen->verify();
1094 
1095   log_debug(gc, verify)("%s", _young_gen->name());



1096   _young_gen->verify();
1097 
1098   log_debug(gc, verify)("RemSet");


1099   rem_set()->verify();
1100 }
1101 
1102 void GenCollectedHeap::print_on(outputStream* st) const {
1103   _young_gen->print_on(st);
1104   _old_gen->print_on(st);
1105   MetaspaceAux::print_on(st);
1106 }
1107 
1108 void GenCollectedHeap::gc_threads_do(ThreadClosure* tc) const {
1109   if (workers() != NULL) {
1110     workers()->threads_do(tc);
1111   }
1112 #if INCLUDE_ALL_GCS
1113   if (UseConcMarkSweepGC) {
1114     ConcurrentMarkSweepThread::threads_do(tc);
1115   }
1116 #endif // INCLUDE_ALL_GCS
1117 }
1118 


1128 void GenCollectedHeap::print_on_error(outputStream* st) const {
1129   this->CollectedHeap::print_on_error(st);
1130 
1131 #if INCLUDE_ALL_GCS
1132   if (UseConcMarkSweepGC) {
1133     st->cr();
1134     CMSCollector::print_on_error(st);
1135   }
1136 #endif // INCLUDE_ALL_GCS
1137 }
1138 
1139 void GenCollectedHeap::print_tracing_info() const {
1140   if (TraceYoungGenTime) {
1141     _young_gen->print_summary_info();
1142   }
1143   if (TraceOldGenTime) {
1144     _old_gen->print_summary_info();
1145   }
1146 }
1147 
1148 void GenCollectedHeap::print_heap_change(size_t young_prev_used, size_t old_prev_used) const {
1149   log_info(gc, heap)("%s: " SIZE_FORMAT "K->" SIZE_FORMAT "K("  SIZE_FORMAT "K)",
1150       _young_gen->short_name(), young_prev_used / K, _young_gen->used() / K, _young_gen->capacity() / K);
1151   log_info(gc, heap)("%s: " SIZE_FORMAT "K->" SIZE_FORMAT "K("  SIZE_FORMAT "K)",
1152       _old_gen->short_name(), old_prev_used / K, _old_gen->used() / K, _old_gen->capacity() / K);







1153 }
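
The reworked print_heap_change() emits one "used-before->used-after(capacity)" line per generation, scaled to KiB. A toy rendering of the same format, with made-up generation names and sizes:

#include <cstddef>
#include <cstdio>

static const size_t K = 1024;

static void print_change(const char* name, size_t prev_used, size_t used, size_t capacity) {
  std::printf("%s: %zuK->%zuK(%zuK)\n", name, prev_used / K, used / K, capacity / K);
}

int main() {
  print_change("DefNew",   8 * 1024 * K,  1 * 1024 * K,  9 * 1024 * K);
  print_change("Tenured", 20 * 1024 * K, 15 * 1024 * K, 40 * 1024 * K);
}
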
1154 
1155 class GenGCPrologueClosure: public GenCollectedHeap::GenClosure {
1156  private:
1157   bool _full;
1158  public:
1159   void do_generation(Generation* gen) {
1160     gen->gc_prologue(_full);
1161   }
1162   GenGCPrologueClosure(bool full) : _full(full) {};
1163 };
1164 
1165 void GenCollectedHeap::gc_prologue(bool full) {
1166   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
1167 
1168   always_do_update_barrier = false;
1169   // Fill TLAB's and such
1170   CollectedHeap::accumulate_statistics_all_tlabs();
1171   ensure_parsability(true);   // retire TLABs
1172 

