src/share/vm/gc/shared/genCollectedHeap.cpp

  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/symbolTable.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "classfile/vmSymbols.hpp"
  29 #include "code/codeCache.hpp"
  30 #include "code/icBuffer.hpp"
  31 #include "gc/shared/collectedHeap.inline.hpp"
  32 #include "gc/shared/collectorCounters.hpp"
  33 #include "gc/shared/gcId.hpp"
  34 #include "gc/shared/gcLocker.inline.hpp"
  35 #include "gc/shared/gcTrace.hpp"
  36 #include "gc/shared/gcTraceTime.hpp"
  37 #include "gc/shared/genCollectedHeap.hpp"
  38 #include "gc/shared/genOopClosures.inline.hpp"
  39 #include "gc/shared/generationSpec.hpp"
  40 #include "gc/shared/space.hpp"
  41 #include "gc/shared/strongRootsScope.hpp"
  42 #include "gc/shared/vmGCOperations.hpp"
  43 #include "gc/shared/workgroup.hpp"
  44 #include "memory/filemap.hpp"
  45 #include "memory/resourceArea.hpp"
  46 #include "oops/oop.inline.hpp"
  47 #include "runtime/biasedLocking.hpp"
  48 #include "runtime/fprofiler.hpp"
  49 #include "runtime/handles.hpp"
  50 #include "runtime/handles.inline.hpp"
  51 #include "runtime/java.hpp"
  52 #include "runtime/vmThread.hpp"
  53 #include "services/management.hpp"
  54 #include "services/memoryService.hpp"
  55 #include "utilities/macros.hpp"
  56 #include "utilities/stack.inline.hpp"


 297 bool GenCollectedHeap::must_clear_all_soft_refs() {
 298   return _gc_cause == GCCause::_last_ditch_collection;
 299 }
 300 
 301 bool GenCollectedHeap::should_do_concurrent_full_gc(GCCause::Cause cause) {
 302   if (!UseConcMarkSweepGC) {
 303     return false;
 304   }
 305 
 306   switch (cause) {
 307     case GCCause::_gc_locker:           return GCLockerInvokesConcurrent;
 308     case GCCause::_java_lang_system_gc:
 309     case GCCause::_dcmd_gc_run:         return ExplicitGCInvokesConcurrent;
 310     default:                            return false;
 311   }
 312 }
 313 
 314 void GenCollectedHeap::collect_generation(Generation* gen, bool full, size_t size,
 315                                           bool is_tlab, bool run_verification, bool clear_soft_refs,
 316                                           bool restore_marks_for_biased_locking) {
 317   // Timer for individual generations. Last argument is false: no CR
 318   // FIXME: We should try to start the timing earlier to cover more of the GC pause
 319   GCTraceTime t1(gen->short_name(), PrintGCDetails, false, NULL);
 320   TraceCollectorStats tcs(gen->counters());
  321   TraceMemoryManagerStats tmms(gen->kind(), gc_cause());
 322 
 323   size_t prev_used = gen->used();
 324   gen->stat_record()->invocations++;
 325   gen->stat_record()->accumulated_time.start();
 326 
 327   // Must be done anew before each collection because
 328   // a previous collection will do mangling and will
 329   // change top of some spaces.
 330   record_gen_tops_before_GC();
 331 
 332   if (PrintGC && Verbose) {
 333     // I didn't want to change the logging when removing the level concept,
 334     // but I guess this logging could say young/old or something instead of 0/1.
 335     uint level;
 336     if (heap()->is_young_gen(gen)) {
 337       level = 0;
 338     } else {
 339       level = 1;
 340     }
 341     gclog_or_tty->print("level=%u invoke=%d size=" SIZE_FORMAT,
 342                         level,
 343                         gen->stat_record()->invocations,
 344                         size * HeapWordSize);
 345   }
 346 
 347   if (run_verification && VerifyBeforeGC) {
 348     HandleMark hm;  // Discard invalid handles created during verification
 349     Universe::verify(" VerifyBeforeGC:");
 350   }
 351   COMPILER2_PRESENT(DerivedPointerTable::clear());
 352 
 353   if (restore_marks_for_biased_locking) {
 354     // We perform this mark word preservation work lazily
 355     // because it's only at this point that we know whether we
 356     // absolutely have to do it; we want to avoid doing it for
 357     // scavenge-only collections where it's unnecessary
 358     BiasedLocking::preserve_marks();
 359   }
 360 
 361   // Do collection work
 362   {
 363     // Note on ref discovery: For what appear to be historical reasons,
  364     // GCH enables and disables (by enqueueing) refs discovery.
 365     // In the future this should be moved into the generation's
 366     // collect method so that ref discovery and enqueueing concerns
 367     // are local to a generation. The collect method could return
 368     // an appropriate indication in the case that notification on
 369     // the ref lock was needed. This will make the treatment of


 387     } else {
 388       // collect() below will enable discovery as appropriate
 389     }
 390     gen->collect(full, clear_soft_refs, size, is_tlab);
 391     if (!rp->enqueuing_is_done()) {
 392       rp->enqueue_discovered_references();
 393     } else {
 394       rp->set_enqueuing_is_done(false);
 395     }
 396     rp->verify_no_references_recorded();
 397   }
 398 
 399   COMPILER2_PRESENT(DerivedPointerTable::update_pointers());
 400 
 401   gen->stat_record()->accumulated_time.stop();
 402 
 403   update_gc_stats(gen, full);
 404 
 405   if (run_verification && VerifyAfterGC) {
 406     HandleMark hm;  // Discard invalid handles created during verification
 407     Universe::verify(" VerifyAfterGC:");
 408   }
 409 
 410   if (PrintGCDetails) {
 411     gclog_or_tty->print(":");
 412     gen->print_heap_change(prev_used);
 413   }
 414 }
 415 
 416 void GenCollectedHeap::do_collection(bool           full,
 417                                      bool           clear_all_soft_refs,
 418                                      size_t         size,
 419                                      bool           is_tlab,
 420                                      GenerationType max_generation) {
 421   ResourceMark rm;
 422   DEBUG_ONLY(Thread* my_thread = Thread::current();)
 423 
 424   assert(SafepointSynchronize::is_at_safepoint(), "should be at safepoint");
 425   assert(my_thread->is_VM_thread() ||
 426          my_thread->is_ConcurrentGC_thread(),
 427          "incorrect thread type capability");
 428   assert(Heap_lock->is_locked(),
 429          "the requesting thread should have the Heap_lock");
 430   guarantee(!is_gc_active(), "collection is not reentrant");
 431 
 432   if (GC_locker::check_active_before_gc()) {
 433     return; // GC is disabled (e.g. JNI GetXXXCritical operation)
 434   }
 435 
 436   GCIdMarkAndRestore gc_id_mark;
 437 
 438   const bool do_clear_all_soft_refs = clear_all_soft_refs ||
 439                           collector_policy()->should_clear_all_soft_refs();
 440 
 441   ClearedAllSoftRefs casr(do_clear_all_soft_refs, collector_policy());
 442 
 443   const size_t metadata_prev_used = MetaspaceAux::used_bytes();
 444 
 445   print_heap_before_gc();
 446 
 447   {
 448     FlagSetting fl(_is_gc_active, true);
 449 
 450     bool complete = full && (max_generation == OldGen);
 451     const char* gc_cause_prefix = complete ? "Full GC" : "GC";
 452     TraceCPUTime tcpu(PrintGCDetails, true, gclog_or_tty);
 453     GCTraceTime t(GCCauseString(gc_cause_prefix, gc_cause()), PrintGCDetails, false, NULL);
 454 
 455     gc_prologue(complete);
 456     increment_total_collections(complete);
 457 
 458     size_t gch_prev_used = used();
 459     bool run_verification = total_collections() >= VerifyGCStartAt;
 460 
 461     bool prepared_for_verification = false;
 462     bool collected_old = false;
 463     bool old_collects_young = complete && !ScavengeBeforeFullGC;
 464 
 465     if (!old_collects_young && _young_gen->should_collect(full, size, is_tlab)) {
 466       if (run_verification && VerifyGCLevel <= 0 && VerifyBeforeGC) {
 467         prepare_for_verify();
 468         prepared_for_verification = true;
 469       }
 470 
  471       assert(!_young_gen->performs_in_place_marking(), "No young generation does in-place marking");
 472       collect_generation(_young_gen,
 473                          full,
 474                          size,
 475                          is_tlab,
 476                          run_verification && VerifyGCLevel <= 0,
 477                          do_clear_all_soft_refs,
 478                          false);
 479 
 480       if (size > 0 && (!is_tlab || _young_gen->supports_tlab_allocation()) &&
 481           size * HeapWordSize <= _young_gen->unsafe_max_alloc_nogc()) {
 482         // Allocation request was met by young GC.
 483         size = 0;
 484       }
 485     }
 486 
 487     bool must_restore_marks_for_biased_locking = false;
 488 
 489     if (max_generation == OldGen && _old_gen->should_collect(full, size, is_tlab)) {
 490       GCIdMarkAndRestore gc_id_mark;
 491       if (!complete) {
 492         // The full_collections increment was missed above.
 493         increment_total_full_collections();
 494       }
 495 
 496       pre_full_gc_dump(NULL);    // do any pre full gc dumps
 497 
 498       if (!prepared_for_verification && run_verification &&
 499           VerifyGCLevel <= 1 && VerifyBeforeGC) {
 500         prepare_for_verify();
 501       }
 502 
  503       assert(_old_gen->performs_in_place_marking(), "All old generations do in-place marking");
 504       collect_generation(_old_gen,
 505                          full,
 506                          size,
 507                          is_tlab,
 508                          run_verification && VerifyGCLevel <= 1,
 509                          do_clear_all_soft_refs,
 510                          true);
 511 
 512       must_restore_marks_for_biased_locking = true;
 513       collected_old = true;
 514     }
 515 
 516     // Update "complete" boolean wrt what actually transpired --
 517     // for instance, a promotion failure could have led to
 518     // a whole heap collection.
 519     complete = complete || collected_old;
 520 
 521     if (complete) { // We did a full collection
 522       // FIXME: See comment at pre_full_gc_dump call
 523       post_full_gc_dump(NULL);   // do any post full gc dumps
 524     }
 525 
 526     if (PrintGCDetails) {
 527       print_heap_change(gch_prev_used);
 528 
 529       // Print metaspace info for full GC with PrintGCDetails flag.
 530       if (complete) {
 531         MetaspaceAux::print_metaspace_change(metadata_prev_used);
 532       }
 533     }
 534 
 535     // Adjust generation sizes.
 536     if (collected_old) {
 537       _old_gen->compute_new_size();
 538     }
 539     _young_gen->compute_new_size();
 540 
 541     if (complete) {
 542       // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 543       ClassLoaderDataGraph::purge();
 544       MetaspaceAux::verify_metrics();
 545       // Resize the metaspace capacity after full collections
 546       MetaspaceGC::compute_new_size();
 547       update_full_collections_completed();
 548     }
 549 
 550     // Track memory usage and detect low memory after GC finishes
 551     MemoryService::track_memory_usage();
 552 
 553     gc_epilogue(complete);


 857 void GenCollectedHeap::do_full_collection(bool clear_all_soft_refs,
 858                                           GenerationType last_generation) {
 859   GenerationType local_last_generation;
 860   if (!incremental_collection_will_fail(false /* don't consult_young */) &&
 861       gc_cause() == GCCause::_gc_locker) {
 862     local_last_generation = YoungGen;
 863   } else {
 864     local_last_generation = last_generation;
 865   }
 866 
 867   do_collection(true,                   // full
 868                 clear_all_soft_refs,    // clear_all_soft_refs
 869                 0,                      // size
 870                 false,                  // is_tlab
 871                 local_last_generation); // last_generation
 872   // Hack XXX FIX ME !!!
 873   // A scavenge may not have been attempted, or may have
 874   // been attempted and failed, because the old gen was too full
 875   if (local_last_generation == YoungGen && gc_cause() == GCCause::_gc_locker &&
 876       incremental_collection_will_fail(false /* don't consult_young */)) {
 877     if (PrintGCDetails) {
 878       gclog_or_tty->print_cr("GC locker: Trying a full collection "
 879                              "because scavenge failed");
 880     }
 881     // This time allow the old gen to be collected as well
 882     do_collection(true,                // full
 883                   clear_all_soft_refs, // clear_all_soft_refs
 884                   0,                   // size
 885                   false,               // is_tlab
 886                   OldGen);             // last_generation
 887   }
 888 }
 889 
 890 bool GenCollectedHeap::is_in_young(oop p) {
 891   bool result = ((HeapWord*)p) < _old_gen->reserved().start();
 892   assert(result == _young_gen->is_in_reserved(p),
 893          "incorrect test - result=%d, p=" INTPTR_FORMAT, result, p2i((void*)p));
 894   return result;
 895 }
 896 
 897 // Returns "TRUE" iff "p" points into the committed areas of the heap.
 898 bool GenCollectedHeap::is_in(const void* p) const {
 899   return _young_gen->is_in(p) || _old_gen->is_in(p);
 900 }


1089 
1090 void GenCollectedHeap::save_marks() {
1091   _young_gen->save_marks();
1092   _old_gen->save_marks();
1093 }
1094 
1095 GenCollectedHeap* GenCollectedHeap::heap() {
1096   CollectedHeap* heap = Universe::heap();
1097   assert(heap != NULL, "Uninitialized access to GenCollectedHeap::heap()");
1098   assert(heap->kind() == CollectedHeap::GenCollectedHeap, "Not a GenCollectedHeap");
1099   return (GenCollectedHeap*)heap;
1100 }
1101 
1102 void GenCollectedHeap::prepare_for_compaction() {
1103   // Start by compacting into same gen.
1104   CompactPoint cp(_old_gen);
1105   _old_gen->prepare_for_compaction(&cp);
1106   _young_gen->prepare_for_compaction(&cp);
1107 }
1108 
1109 void GenCollectedHeap::verify(bool silent, VerifyOption option /* ignored */) {
1110   if (!silent) {
1111     gclog_or_tty->print("%s", _old_gen->name());
1112     gclog_or_tty->print(" ");
1113   }
1114   _old_gen->verify();
1115 
1116   if (!silent) {
1117     gclog_or_tty->print("%s", _young_gen->name());
1118     gclog_or_tty->print(" ");
1119   }
1120   _young_gen->verify();
1121 
1122   if (!silent) {
1123     gclog_or_tty->print("remset ");
1124   }
1125   rem_set()->verify();
1126 }
1127 
1128 void GenCollectedHeap::print_on(outputStream* st) const {
1129   _young_gen->print_on(st);
1130   _old_gen->print_on(st);
1131   MetaspaceAux::print_on(st);
1132 }
1133 
1134 void GenCollectedHeap::gc_threads_do(ThreadClosure* tc) const {
1135   if (workers() != NULL) {
1136     workers()->threads_do(tc);
1137   }
1138 #if INCLUDE_ALL_GCS
1139   if (UseConcMarkSweepGC) {
1140     ConcurrentMarkSweepThread::threads_do(tc);
1141   }
1142 #endif // INCLUDE_ALL_GCS
1143 }
1144 


1154 void GenCollectedHeap::print_on_error(outputStream* st) const {
1155   this->CollectedHeap::print_on_error(st);
1156 
1157 #if INCLUDE_ALL_GCS
1158   if (UseConcMarkSweepGC) {
1159     st->cr();
1160     CMSCollector::print_on_error(st);
1161   }
1162 #endif // INCLUDE_ALL_GCS
1163 }
1164 
1165 void GenCollectedHeap::print_tracing_info() const {
1166   if (TraceYoungGenTime) {
1167     _young_gen->print_summary_info();
1168   }
1169   if (TraceOldGenTime) {
1170     _old_gen->print_summary_info();
1171   }
1172 }
1173 
1174 void GenCollectedHeap::print_heap_change(size_t prev_used) const {
1175   if (PrintGCDetails && Verbose) {
1176     gclog_or_tty->print(" "  SIZE_FORMAT
1177                         "->" SIZE_FORMAT
1178                         "("  SIZE_FORMAT ")",
1179                         prev_used, used(), capacity());
1180   } else {
1181     gclog_or_tty->print(" "  SIZE_FORMAT "K"
1182                         "->" SIZE_FORMAT "K"
1183                         "("  SIZE_FORMAT "K)",
1184                         prev_used / K, used() / K, capacity() / K);
1185   }
1186 }
1187 
1188 class GenGCPrologueClosure: public GenCollectedHeap::GenClosure {
1189  private:
1190   bool _full;
1191  public:
1192   void do_generation(Generation* gen) {
1193     gen->gc_prologue(_full);
1194   }
 1195   GenGCPrologueClosure(bool full) : _full(full) {}
1196 };
1197 
1198 void GenCollectedHeap::gc_prologue(bool full) {
1199   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
1200 
1201   always_do_update_barrier = false;
1202   // Fill TLAB's and such
1203   CollectedHeap::accumulate_statistics_all_tlabs();
1204   ensure_parsability(true);   // retire TLABs
1205 




  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/symbolTable.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "classfile/vmSymbols.hpp"
  29 #include "code/codeCache.hpp"
  30 #include "code/icBuffer.hpp"
  31 #include "gc/shared/collectedHeap.inline.hpp"
  32 #include "gc/shared/collectorCounters.hpp"
  33 #include "gc/shared/gcId.hpp"
  34 #include "gc/shared/gcLocker.inline.hpp"
  35 #include "gc/shared/gcTrace.hpp"
  36 #include "gc/shared/gcTraceTime.inline.hpp"
  37 #include "gc/shared/genCollectedHeap.hpp"
  38 #include "gc/shared/genOopClosures.inline.hpp"
  39 #include "gc/shared/generationSpec.hpp"
  40 #include "gc/shared/space.hpp"
  41 #include "gc/shared/strongRootsScope.hpp"
  42 #include "gc/shared/vmGCOperations.hpp"
  43 #include "gc/shared/workgroup.hpp"
  44 #include "memory/filemap.hpp"
  45 #include "memory/resourceArea.hpp"
  46 #include "oops/oop.inline.hpp"
  47 #include "runtime/biasedLocking.hpp"
  48 #include "runtime/fprofiler.hpp"
  49 #include "runtime/handles.hpp"
  50 #include "runtime/handles.inline.hpp"
  51 #include "runtime/java.hpp"
  52 #include "runtime/vmThread.hpp"
  53 #include "services/management.hpp"
  54 #include "services/memoryService.hpp"
  55 #include "utilities/macros.hpp"
  56 #include "utilities/stack.inline.hpp"


 297 bool GenCollectedHeap::must_clear_all_soft_refs() {
 298   return _gc_cause == GCCause::_last_ditch_collection;
 299 }
 300 
 301 bool GenCollectedHeap::should_do_concurrent_full_gc(GCCause::Cause cause) {
 302   if (!UseConcMarkSweepGC) {
 303     return false;
 304   }
 305 
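        // Only GC-locker-induced and explicit (System.gc() or diagnostic
        // command) collections may be turned into a concurrent CMS cycle,
        // and only when the corresponding flag asks for it.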
 306   switch (cause) {
 307     case GCCause::_gc_locker:           return GCLockerInvokesConcurrent;
 308     case GCCause::_java_lang_system_gc:
 309     case GCCause::_dcmd_gc_run:         return ExplicitGCInvokesConcurrent;
 310     default:                            return false;
 311   }
 312 }
 313 
 314 void GenCollectedHeap::collect_generation(Generation* gen, bool full, size_t size,
 315                                           bool is_tlab, bool run_verification, bool clear_soft_refs,
 316                                           bool restore_marks_for_biased_locking) {
 317   FormatBuffer<> title("Collect gen: %s", gen->short_name());
 318   GCTraceTime(Debug, gc) t1(title);
 319   TraceCollectorStats tcs(gen->counters());
  320   TraceMemoryManagerStats tmms(gen->kind(), gc_cause());
 321 
 322   gen->stat_record()->invocations++;
 323   gen->stat_record()->accumulated_time.start();
 324 
 325   // Must be done anew before each collection because
 326   // a previous collection will do mangling and will
 327   // change top of some spaces.
 328   record_gen_tops_before_GC();
 329 
 330   log_trace(gc)("%s invoke=%d size=" SIZE_FORMAT, heap()->is_young_gen(gen) ? "Young" : "Old", gen->stat_record()->invocations, size * HeapWordSize);
 331 
 332   if (run_verification && VerifyBeforeGC) {
 333     HandleMark hm;  // Discard invalid handles created during verification
 334     Universe::verify("Before GC");
 335   }
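        // With C2, compiled frames can hold derived pointers (interior
        // pointers computed from a base oop). The table is cleared here,
        // repopulated as frames are walked during the collection, and used
        // by update_pointers() below to fix them up after objects move.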
 336   COMPILER2_PRESENT(DerivedPointerTable::clear());
 337 
 338   if (restore_marks_for_biased_locking) {
 339     // We perform this mark word preservation work lazily
 340     // because it's only at this point that we know whether we
 341     // absolutely have to do it; we want to avoid doing it for
 342     // scavenge-only collections where it's unnecessary
 343     BiasedLocking::preserve_marks();
 344   }
 345 
 346   // Do collection work
 347   {
 348     // Note on ref discovery: For what appear to be historical reasons,
  349     // GCH enables and disables (by enqueueing) refs discovery.
 350     // In the future this should be moved into the generation's
 351     // collect method so that ref discovery and enqueueing concerns
 352     // are local to a generation. The collect method could return
 353     // an appropriate indication in the case that notification on
 354     // the ref lock was needed. This will make the treatment of


 372     } else {
 373       // collect() below will enable discovery as appropriate
 374     }
 375     gen->collect(full, clear_soft_refs, size, is_tlab);
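          // If collect() has not already transferred the discovered
          // references to the pending list, enqueue them now; otherwise
          // just reset the flag for the next collection.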
 376     if (!rp->enqueuing_is_done()) {
 377       rp->enqueue_discovered_references();
 378     } else {
 379       rp->set_enqueuing_is_done(false);
 380     }
 381     rp->verify_no_references_recorded();
 382   }
 383 
 384   COMPILER2_PRESENT(DerivedPointerTable::update_pointers());
 385 
 386   gen->stat_record()->accumulated_time.stop();
 387 
 388   update_gc_stats(gen, full);
 389 
 390   if (run_verification && VerifyAfterGC) {
 391     HandleMark hm;  // Discard invalid handles created during verification
 392     Universe::verify("After GC");
 393   }
 394 }
 395 
 396 void GenCollectedHeap::do_collection(bool           full,
 397                                      bool           clear_all_soft_refs,
 398                                      size_t         size,
 399                                      bool           is_tlab,
 400                                      GenerationType max_generation) {
 401   ResourceMark rm;
 402   DEBUG_ONLY(Thread* my_thread = Thread::current();)
 403 
 404   assert(SafepointSynchronize::is_at_safepoint(), "should be at safepoint");
 405   assert(my_thread->is_VM_thread() ||
 406          my_thread->is_ConcurrentGC_thread(),
 407          "incorrect thread type capability");
 408   assert(Heap_lock->is_locked(),
 409          "the requesting thread should have the Heap_lock");
 410   guarantee(!is_gc_active(), "collection is not reentrant");
 411 
 412   if (GC_locker::check_active_before_gc()) {
 413     return; // GC is disabled (e.g. JNI GetXXXCritical operation)
 414   }
 415 
 416   GCIdMarkAndRestore gc_id_mark;
 417 
 418   const bool do_clear_all_soft_refs = clear_all_soft_refs ||
 419                           collector_policy()->should_clear_all_soft_refs();
 420 
 421   ClearedAllSoftRefs casr(do_clear_all_soft_refs, collector_policy());
 422 
 423   const size_t metadata_prev_used = MetaspaceAux::used_bytes();
 424 
 425   print_heap_before_gc();
 426 
 427   {
 428     FlagSetting fl(_is_gc_active, true);
 429 
 430     bool complete = full && (max_generation == OldGen);
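          // For a complete collection without a preceding scavenge, the old
          // generation collection covers the young generation as well, so
          // the separate young collection below is skipped.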
 431     bool old_collects_young = complete && !ScavengeBeforeFullGC;
 432     bool do_young_collection = !old_collects_young && _young_gen->should_collect(full, size, is_tlab);
 433 
 434     FormatBuffer<> gc_string("%s", "Pause ");
 435     if (do_young_collection) {
 436       gc_string.append("Young");
 437     } else {
 438       gc_string.append("Full");
 439     }
 440 
 441     GCTraceCPUTime tcpu;
 442     GCTraceTime(Info, gc) t(gc_string, NULL, gc_cause(), true);
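          // If a young collection is followed by a full collection below,
          // the old phase gets its own "Pause Full" timer and a fresh GC id.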
 443 
 444     gc_prologue(complete);
 445     increment_total_collections(complete);
 446 
 447     size_t young_prev_used = _young_gen->used();
 448     size_t old_prev_used = _old_gen->used();
 449 
 450     bool run_verification = total_collections() >= VerifyGCStartAt;
 451 
 452     bool prepared_for_verification = false;
 453     bool collected_old = false;
 454 
 455     if (do_young_collection) {
 456       if (run_verification && VerifyGCLevel <= 0 && VerifyBeforeGC) {
 457         prepare_for_verify();
 458         prepared_for_verification = true;
 459       }
 460 
  461       assert(!_young_gen->performs_in_place_marking(), "No young generation does in-place marking");
 462       collect_generation(_young_gen,
 463                          full,
 464                          size,
 465                          is_tlab,
 466                          run_verification && VerifyGCLevel <= 0,
 467                          do_clear_all_soft_refs,
 468                          false);
 469 
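            // If the young collection freed enough space for the pending
            // allocation, clear the request so the old generation is not
            // collected below merely to satisfy it.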
 470       if (size > 0 && (!is_tlab || _young_gen->supports_tlab_allocation()) &&
 471           size * HeapWordSize <= _young_gen->unsafe_max_alloc_nogc()) {
 472         // Allocation request was met by young GC.
 473         size = 0;
 474       }
 475     }
 476 
 477     bool must_restore_marks_for_biased_locking = false;
 478 
 479     if (max_generation == OldGen && _old_gen->should_collect(full, size, is_tlab)) {
 480       if (!complete) {
 481         // The full_collections increment was missed above.
 482         increment_total_full_collections();
 483       }
 484 
 485       pre_full_gc_dump(NULL);    // do any pre full gc dumps
 486 
 487       if (!prepared_for_verification && run_verification &&
 488           VerifyGCLevel <= 1 && VerifyBeforeGC) {
 489         prepare_for_verify();
 490       }
 491 
  492       assert(_old_gen->performs_in_place_marking(), "All old generations do in-place marking");
 493 
 494       if (do_young_collection) {
 495         // We did a young GC. Need a new GC id for the old GC.
 496         GCIdMarkAndRestore gc_id_mark;
 497         GCTraceTime(Info, gc) t("Pause Full", NULL, gc_cause(), true);
 498         collect_generation(_old_gen, full, size, is_tlab, run_verification && VerifyGCLevel <= 1, do_clear_all_soft_refs, true);
 499       } else {
 500         // No young GC done. Use the same GC id as was set up earlier in this method.
 501         collect_generation(_old_gen, full, size, is_tlab, run_verification && VerifyGCLevel <= 1, do_clear_all_soft_refs, true);
 502       }
 503 
 504       must_restore_marks_for_biased_locking = true;
 505       collected_old = true;
 506     }
 507 
 508     // Update "complete" boolean wrt what actually transpired --
 509     // for instance, a promotion failure could have led to
 510     // a whole heap collection.
 511     complete = complete || collected_old;
 512 
 513     if (complete) { // We did a full collection
 514       // FIXME: See comment at pre_full_gc_dump call
 515       post_full_gc_dump(NULL);   // do any post full gc dumps
 516     }
 517 
 518     print_heap_change(young_prev_used, old_prev_used);
 519     MetaspaceAux::print_metaspace_change(metadata_prev_used);
 520 
 521     // Adjust generation sizes.
 522     if (collected_old) {
 523       _old_gen->compute_new_size();
 524     }
 525     _young_gen->compute_new_size();
 526 
 527     if (complete) {
 528       // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 529       ClassLoaderDataGraph::purge();
 530       MetaspaceAux::verify_metrics();
 531       // Resize the metaspace capacity after full collections
 532       MetaspaceGC::compute_new_size();
 533       update_full_collections_completed();
 534     }
 535 
 536     // Track memory usage and detect low memory after GC finishes
 537     MemoryService::track_memory_usage();
 538 
 539     gc_epilogue(complete);


 843 void GenCollectedHeap::do_full_collection(bool clear_all_soft_refs,
 844                                           GenerationType last_generation) {
 845   GenerationType local_last_generation;
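        // A GC-locker-induced collection is first limited to the young
        // generation while a scavenge is still expected to succeed; the
        // fallback below retries with the old generation included.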
 846   if (!incremental_collection_will_fail(false /* don't consult_young */) &&
 847       gc_cause() == GCCause::_gc_locker) {
 848     local_last_generation = YoungGen;
 849   } else {
 850     local_last_generation = last_generation;
 851   }
 852 
 853   do_collection(true,                   // full
 854                 clear_all_soft_refs,    // clear_all_soft_refs
 855                 0,                      // size
 856                 false,                  // is_tlab
 857                 local_last_generation); // last_generation
 858   // Hack XXX FIX ME !!!
 859   // A scavenge may not have been attempted, or may have
 860   // been attempted and failed, because the old gen was too full
 861   if (local_last_generation == YoungGen && gc_cause() == GCCause::_gc_locker &&
 862       incremental_collection_will_fail(false /* don't consult_young */)) {
 863     log_debug(gc, jni)("GC locker: Trying a full collection because scavenge failed");
 864     // This time allow the old gen to be collected as well
 865     do_collection(true,                // full
 866                   clear_all_soft_refs, // clear_all_soft_refs
 867                   0,                   // size
 868                   false,               // is_tlab
 869                   OldGen);             // last_generation
 870   }
 871 }
 872 
 873 bool GenCollectedHeap::is_in_young(oop p) {
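        // The young generation lies below the old generation in the
        // reserved heap, so a single boundary comparison suffices; the
        // assert cross-checks against the young generation's reserved span.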
 874   bool result = ((HeapWord*)p) < _old_gen->reserved().start();
 875   assert(result == _young_gen->is_in_reserved(p),
 876          "incorrect test - result=%d, p=" INTPTR_FORMAT, result, p2i((void*)p));
 877   return result;
 878 }
 879 
 880 // Returns "TRUE" iff "p" points into the committed areas of the heap.
 881 bool GenCollectedHeap::is_in(const void* p) const {
 882   return _young_gen->is_in(p) || _old_gen->is_in(p);
 883 }


1072 
1073 void GenCollectedHeap::save_marks() {
1074   _young_gen->save_marks();
1075   _old_gen->save_marks();
1076 }
1077 
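      // Accessor for the single GenCollectedHeap instance, used as e.g.
      //   GenCollectedHeap* gch = GenCollectedHeap::heap();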
1078 GenCollectedHeap* GenCollectedHeap::heap() {
1079   CollectedHeap* heap = Universe::heap();
1080   assert(heap != NULL, "Uninitialized access to GenCollectedHeap::heap()");
1081   assert(heap->kind() == CollectedHeap::GenCollectedHeap, "Not a GenCollectedHeap");
1082   return (GenCollectedHeap*)heap;
1083 }
1084 
1085 void GenCollectedHeap::prepare_for_compaction() {
1086   // Start by compacting into same gen.
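        // The CompactPoint starts at the old generation, so old objects are
        // forwarded within the old gen first and live young objects are
        // then forwarded into the space remaining after them.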
1087   CompactPoint cp(_old_gen);
1088   _old_gen->prepare_for_compaction(&cp);
1089   _young_gen->prepare_for_compaction(&cp);
1090 }
1091 
1092 void GenCollectedHeap::verify(VerifyOption option /* ignored */) {
1093   log_debug(gc, verify)("%s", _old_gen->name());
1094   _old_gen->verify();
1095 
 1096   log_debug(gc, verify)("%s", _young_gen->name());
1097   _young_gen->verify();
1098 
1099   log_debug(gc, verify)("RemSet");
1100   rem_set()->verify();
1101 }
1102 
1103 void GenCollectedHeap::print_on(outputStream* st) const {
1104   _young_gen->print_on(st);
1105   _old_gen->print_on(st);
1106   MetaspaceAux::print_on(st);
1107 }
1108 
1109 void GenCollectedHeap::gc_threads_do(ThreadClosure* tc) const {
1110   if (workers() != NULL) {
1111     workers()->threads_do(tc);
1112   }
1113 #if INCLUDE_ALL_GCS
1114   if (UseConcMarkSweepGC) {
1115     ConcurrentMarkSweepThread::threads_do(tc);
1116   }
1117 #endif // INCLUDE_ALL_GCS
1118 }
1119 


1129 void GenCollectedHeap::print_on_error(outputStream* st) const {
1130   this->CollectedHeap::print_on_error(st);
1131 
1132 #if INCLUDE_ALL_GCS
1133   if (UseConcMarkSweepGC) {
1134     st->cr();
1135     CMSCollector::print_on_error(st);
1136   }
1137 #endif // INCLUDE_ALL_GCS
1138 }
1139 
1140 void GenCollectedHeap::print_tracing_info() const {
1141   if (TraceYoungGenTime) {
1142     _young_gen->print_summary_info();
1143   }
1144   if (TraceOldGenTime) {
1145     _old_gen->print_summary_info();
1146   }
1147 }
1148 
1149 void GenCollectedHeap::print_heap_change(size_t young_prev_used, size_t old_prev_used) const {
 1150   log_info(gc, heap)("%s: " SIZE_FORMAT "K->" SIZE_FORMAT "K("  SIZE_FORMAT "K)",
 1151                      _young_gen->short_name(), young_prev_used / K, _young_gen->used() / K, _young_gen->capacity() / K);
 1152   log_info(gc, heap)("%s: " SIZE_FORMAT "K->" SIZE_FORMAT "K("  SIZE_FORMAT "K)",
 1153                      _old_gen->short_name(), old_prev_used / K, _old_gen->used() / K, _old_gen->capacity() / K);
1154 }
1155 
1156 class GenGCPrologueClosure: public GenCollectedHeap::GenClosure {
1157  private:
1158   bool _full;
1159  public:
1160   void do_generation(Generation* gen) {
1161     gen->gc_prologue(_full);
1162   }
 1163   GenGCPrologueClosure(bool full) : _full(full) {}
1164 };
1165 
1166 void GenCollectedHeap::gc_prologue(bool full) {
1167   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
1168 
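        // The conservative update barrier is unnecessary while the world is
        // stopped; the matching gc_epilogue() re-enables it if the collector
        // in use needs it.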
1169   always_do_update_barrier = false;
1170   // Fill TLAB's and such
1171   CollectedHeap::accumulate_statistics_all_tlabs();
1172   ensure_parsability(true);   // retire TLABs
1173 