
src/hotspot/share/gc/shared/genCollectedHeap.cpp

rev 47972 : [mq]: open.patch


The patch gives GenCollectedHeap::collect_generation() an explicit GCMemoryManager* parameter, which is handed to TraceMemoryManagerStats in place of gen->kind(); the call sites now pass _young_mgr for the young collection and _old_mgr for the old one.

Old version (before the patch):

 247 
 248   if (_old_gen->should_allocate(size, is_tlab)) {
 249     res = _old_gen->allocate(size, is_tlab);
 250   }
 251 
 252   return res;
 253 }
 254 
 255 HeapWord* GenCollectedHeap::mem_allocate(size_t size,
 256                                          bool* gc_overhead_limit_was_exceeded) {
 257   return gen_policy()->mem_allocate_work(size,
 258                                          false /* is_tlab */,
 259                                          gc_overhead_limit_was_exceeded);
 260 }
 261 
 262 bool GenCollectedHeap::must_clear_all_soft_refs() {
 263   return _gc_cause == GCCause::_metadata_GC_clear_soft_refs ||
 264          _gc_cause == GCCause::_wb_full_gc;
 265 }
 266 
 267 void GenCollectedHeap::collect_generation(Generation* gen, bool full, size_t size,
 268                                           bool is_tlab, bool run_verification, bool clear_soft_refs,
 269                                           bool restore_marks_for_biased_locking) {
 270   FormatBuffer<> title("Collect gen: %s", gen->short_name());
 271   GCTraceTime(Trace, gc, phases) t1(title);
 272   TraceCollectorStats tcs(gen->counters());
 273   TraceMemoryManagerStats tmms(gen->kind(),gc_cause());
 274 
 275   gen->stat_record()->invocations++;
 276   gen->stat_record()->accumulated_time.start();
 277 
 278   // Must be done anew before each collection because
 279   // a previous collection will do mangling and will
 280   // change top of some spaces.
 281   record_gen_tops_before_GC();
 282 
 283   log_trace(gc)("%s invoke=%d size=" SIZE_FORMAT, heap()->is_young_gen(gen) ? "Young" : "Old", gen->stat_record()->invocations, size * HeapWordSize);
 284 
 285   if (run_verification && VerifyBeforeGC) {
 286     HandleMark hm;  // Discard invalid handles created during verification
 287     Universe::verify("Before GC");
 288   }
 289   COMPILER2_PRESENT(DerivedPointerTable::clear());
 290 
 291   if (restore_marks_for_biased_locking) {
 292     // We perform this mark word preservation work lazily
 293     // because it's only at this point that we know whether we


 397     GCTraceTime(Info, gc) t(gc_string, NULL, gc_cause(), true);
 398 
 399     gc_prologue(complete);
 400     increment_total_collections(complete);
 401 
 402     size_t young_prev_used = _young_gen->used();
 403     size_t old_prev_used = _old_gen->used();
 404 
 405     bool run_verification = total_collections() >= VerifyGCStartAt;
 406 
 407     bool prepared_for_verification = false;
 408     bool collected_old = false;
 409 
 410     if (do_young_collection) {
 411       if (run_verification && VerifyGCLevel <= 0 && VerifyBeforeGC) {
 412         prepare_for_verify();
 413         prepared_for_verification = true;
 414       }
 415 
 416       collect_generation(_young_gen,
 417                          full,
 418                          size,
 419                          is_tlab,
 420                          run_verification && VerifyGCLevel <= 0,
 421                          do_clear_all_soft_refs,
 422                          false);
 423 
 424       if (size > 0 && (!is_tlab || _young_gen->supports_tlab_allocation()) &&
 425           size * HeapWordSize <= _young_gen->unsafe_max_alloc_nogc()) {
 426         // Allocation request was met by young GC.
 427         size = 0;
 428       }
 429     }
 430 
 431     bool must_restore_marks_for_biased_locking = false;
 432 
 433     if (max_generation == OldGen && _old_gen->should_collect(full, size, is_tlab)) {
 434       if (!complete) {
 435         // The full_collections increment was missed above.
 436         increment_total_full_collections();
 437       }
 438 
 439       if (!prepared_for_verification && run_verification &&
 440           VerifyGCLevel <= 1 && VerifyBeforeGC) {
 441         prepare_for_verify();
 442       }
 443 
 444       if (do_young_collection) {
 445         // We did a young GC. Need a new GC id for the old GC.
 446         GCIdMarkAndRestore gc_id_mark;
 447         GCTraceTime(Info, gc) t("Pause Full", NULL, gc_cause(), true);
 448         collect_generation(_old_gen, full, size, is_tlab, run_verification && VerifyGCLevel <= 1, do_clear_all_soft_refs, true);
 449       } else {
 450         // No young GC done. Use the same GC id as was set up earlier in this method.
 451         collect_generation(_old_gen, full, size, is_tlab, run_verification && VerifyGCLevel <= 1, do_clear_all_soft_refs, true);
 452       }
 453 
 454       must_restore_marks_for_biased_locking = true;
 455       collected_old = true;
 456     }
 457 
 458     // Update "complete" boolean wrt what actually transpired --
 459     // for instance, a promotion failure could have led to
 460     // a whole heap collection.
 461     complete = complete || collected_old;
 462 
 463     print_heap_change(young_prev_used, old_prev_used);
 464     MetaspaceAux::print_metaspace_change(metadata_prev_used);
 465 
 466     // Adjust generation sizes.
 467     if (collected_old) {
 468       _old_gen->compute_new_size();
 469     }
 470     _young_gen->compute_new_size();
 471 




New version (with the patch applied):

 247 
 248   if (_old_gen->should_allocate(size, is_tlab)) {
 249     res = _old_gen->allocate(size, is_tlab);
 250   }
 251 
 252   return res;
 253 }
 254 
 255 HeapWord* GenCollectedHeap::mem_allocate(size_t size,
 256                                          bool* gc_overhead_limit_was_exceeded) {
 257   return gen_policy()->mem_allocate_work(size,
 258                                          false /* is_tlab */,
 259                                          gc_overhead_limit_was_exceeded);
 260 }
 261 
 262 bool GenCollectedHeap::must_clear_all_soft_refs() {
 263   return _gc_cause == GCCause::_metadata_GC_clear_soft_refs ||
 264          _gc_cause == GCCause::_wb_full_gc;
 265 }
 266 
 267 void GenCollectedHeap::collect_generation(Generation* gen, GCMemoryManager* mem_mgr, bool full, size_t size,
 268                                           bool is_tlab, bool run_verification, bool clear_soft_refs,
 269                                           bool restore_marks_for_biased_locking) {
 270   FormatBuffer<> title("Collect gen: %s", gen->short_name());
 271   GCTraceTime(Trace, gc, phases) t1(title);
 272   TraceCollectorStats tcs(gen->counters());
 273   TraceMemoryManagerStats tmms(mem_mgr, gc_cause());
 274 
 275   gen->stat_record()->invocations++;
 276   gen->stat_record()->accumulated_time.start();
 277 
 278   // Must be done anew before each collection because
 279   // a previous collection will do mangling and will
 280   // change top of some spaces.
 281   record_gen_tops_before_GC();
 282 
 283   log_trace(gc)("%s invoke=%d size=" SIZE_FORMAT, heap()->is_young_gen(gen) ? "Young" : "Old", gen->stat_record()->invocations, size * HeapWordSize);
 284 
 285   if (run_verification && VerifyBeforeGC) {
 286     HandleMark hm;  // Discard invalid handles created during verification
 287     Universe::verify("Before GC");
 288   }
 289   COMPILER2_PRESENT(DerivedPointerTable::clear());
 290 
 291   if (restore_marks_for_biased_locking) {
 292     // We perform this mark word preservation work lazily
 293     // because it's only at this point that we know whether we


 397     GCTraceTime(Info, gc) t(gc_string, NULL, gc_cause(), true);
 398 
 399     gc_prologue(complete);
 400     increment_total_collections(complete);
 401 
 402     size_t young_prev_used = _young_gen->used();
 403     size_t old_prev_used = _old_gen->used();
 404 
 405     bool run_verification = total_collections() >= VerifyGCStartAt;
 406 
 407     bool prepared_for_verification = false;
 408     bool collected_old = false;
 409 
 410     if (do_young_collection) {
 411       if (run_verification && VerifyGCLevel <= 0 && VerifyBeforeGC) {
 412         prepare_for_verify();
 413         prepared_for_verification = true;
 414       }
 415 
 416       collect_generation(_young_gen,
 417                          _young_mgr,
 418                          full,
 419                          size,
 420                          is_tlab,
 421                          run_verification && VerifyGCLevel <= 0,
 422                          do_clear_all_soft_refs,
 423                          false);
 424 
 425       if (size > 0 && (!is_tlab || _young_gen->supports_tlab_allocation()) &&
 426           size * HeapWordSize <= _young_gen->unsafe_max_alloc_nogc()) {
 427         // Allocation request was met by young GC.
 428         size = 0;
 429       }
 430     }
 431 
 432     bool must_restore_marks_for_biased_locking = false;
 433 
 434     if (max_generation == OldGen && _old_gen->should_collect(full, size, is_tlab)) {
 435       if (!complete) {
 436         // The full_collections increment was missed above.
 437         increment_total_full_collections();
 438       }
 439 
 440       if (!prepared_for_verification && run_verification &&
 441           VerifyGCLevel <= 1 && VerifyBeforeGC) {
 442         prepare_for_verify();
 443       }
 444 
 445       if (do_young_collection) {
 446         // We did a young GC. Need a new GC id for the old GC.
 447         GCIdMarkAndRestore gc_id_mark;
 448         GCTraceTime(Info, gc) t("Pause Full", NULL, gc_cause(), true);
 449         collect_generation(_old_gen, _old_mgr, full, size, is_tlab, run_verification && VerifyGCLevel <= 1, do_clear_all_soft_refs, true);
 450       } else {
 451         // No young GC done. Use the same GC id as was set up earlier in this method.
 452         collect_generation(_old_gen, _old_mgr, full, size, is_tlab, run_verification && VerifyGCLevel <= 1, do_clear_all_soft_refs, true);
 453       }
 454 
 455       must_restore_marks_for_biased_locking = true;
 456       collected_old = true;
 457     }
 458 
 459     // Update "complete" boolean wrt what actually transpired --
 460     // for instance, a promotion failure could have led to
 461     // a whole heap collection.
 462     complete = complete || collected_old;
 463 
 464     print_heap_change(young_prev_used, old_prev_used);
 465     MetaspaceAux::print_metaspace_change(metadata_prev_used);
 466 
 467     // Adjust generation sizes.
 468     if (collected_old) {
 469       _old_gen->compute_new_size();
 470     }
 471     _young_gen->compute_new_size();
 472 
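The hunks above only show the mechanical change. As a rough, self-contained sketch of the pattern the patch applies (passing the memory manager into collect_generation() explicitly rather than deriving it inside the callee from the generation's kind), the following hypothetical C++ illustration uses stand-in types and names, not the real HotSpot GCMemoryManager/Generation classes:

#include <cstdio>

// Hypothetical stand-ins for the real HotSpot types; the names below are
// illustrative only.
struct GCMemoryManager {
  const char* _name;
  void notify_collection(const char* cause) const {
    std::printf("manager %s: collection, cause=%s\n", _name, cause);
  }
};

struct Generation {
  const char* _short_name;
  const char* short_name() const { return _short_name; }
};

// Shape of the patched interface: the caller hands over the memory manager
// explicitly, so the callee no longer has to map the generation back to one.
static void collect_generation(Generation* gen, GCMemoryManager* mem_mgr,
                               const char* gc_cause) {
  mem_mgr->notify_collection(gc_cause);   // previously derived from gen->kind()
  std::printf("Collect gen: %s\n", gen->short_name());
}

int main() {
  Generation young{"DefNew"};
  Generation old_gen{"Tenured"};
  GCMemoryManager young_mgr{"young manager"};
  GCMemoryManager old_mgr{"old manager"};

  // Mirrors the two call sites in the patch, which pass _young_mgr and
  // _old_mgr alongside the generation being collected.
  collect_generation(&young, &young_mgr, "Allocation Failure");
  collect_generation(&old_gen, &old_mgr, "Full GC");
  return 0;
}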

