< prev index next >

src/hotspot/share/gc/shared/genCollectedHeap.cpp

Print this page
rev 48000 : [mq]: open.patch


 253 }
 254 
 // Allocates `size` HeapWords from the heap on the non-TLAB slow path.
 // Delegates entirely to the collector policy's mem_allocate_work with
 // is_tlab hard-wired to false. `gc_overhead_limit_was_exceeded` is an
 // out-parameter — presumably set by the policy when the GC overhead
 // limit is exceeded during the allocation attempt; confirm in
 // CollectorPolicy::mem_allocate_work.
 255 HeapWord* GenCollectedHeap::mem_allocate(size_t size,
 256                                          bool* gc_overhead_limit_was_exceeded) {
 257   return gen_policy()->mem_allocate_work(size,
 258                                          false /* is_tlab */,
 259                                          gc_overhead_limit_was_exceeded);
 260 }
 261 
 // Returns true when the recorded GC cause mandates clearing all soft
 // references: a metadata-triggered GC that explicitly requested soft-ref
 // clearing, or a WhiteBox-forced full GC.
 262 bool GenCollectedHeap::must_clear_all_soft_refs() {
 263   return _gc_cause == GCCause::_metadata_GC_clear_soft_refs ||
 264          _gc_cause == GCCause::_wb_full_gc;
 265 }
 266 
 267 void GenCollectedHeap::collect_generation(Generation* gen, bool full, size_t size,
 268                                           bool is_tlab, bool run_verification, bool clear_soft_refs,
 269                                           bool restore_marks_for_biased_locking) {
 270   FormatBuffer<> title("Collect gen: %s", gen->short_name());
 271   GCTraceTime(Trace, gc, phases) t1(title);
 272   TraceCollectorStats tcs(gen->counters());
 273   TraceMemoryManagerStats tmms(gen->kind(),gc_cause());
 274 
 275   gen->stat_record()->invocations++;
 276   gen->stat_record()->accumulated_time.start();
 277 
 278   // Must be done anew before each collection because
 279   // a previous collection will do mangling and will
 280   // change top of some spaces.
 281   record_gen_tops_before_GC();
 282 
 283   log_trace(gc)("%s invoke=%d size=" SIZE_FORMAT, heap()->is_young_gen(gen) ? "Young" : "Old", gen->stat_record()->invocations, size * HeapWordSize);
 284 
 285   if (run_verification && VerifyBeforeGC) {
 286     HandleMark hm;  // Discard invalid handles created during verification
 287     Universe::verify("Before GC");
 288   }
 289   COMPILER2_PRESENT(DerivedPointerTable::clear());
 290 
 291   if (restore_marks_for_biased_locking) {
 292     // We perform this mark word preservation work lazily
 293     // because it's only at this point that we know whether we




 253 }
 254 
 // Allocates `size` HeapWords from the heap on the non-TLAB slow path.
 // Delegates entirely to the collector policy's mem_allocate_work with
 // is_tlab hard-wired to false. `gc_overhead_limit_was_exceeded` is an
 // out-parameter — presumably set by the policy when the GC overhead
 // limit is exceeded during the allocation attempt; confirm in
 // CollectorPolicy::mem_allocate_work.
 255 HeapWord* GenCollectedHeap::mem_allocate(size_t size,
 256                                          bool* gc_overhead_limit_was_exceeded) {
 257   return gen_policy()->mem_allocate_work(size,
 258                                          false /* is_tlab */,
 259                                          gc_overhead_limit_was_exceeded);
 260 }
 261 
 // Returns true when the recorded GC cause mandates clearing all soft
 // references: a metadata-triggered GC that explicitly requested soft-ref
 // clearing, or a WhiteBox-forced full GC.
 262 bool GenCollectedHeap::must_clear_all_soft_refs() {
 263   return _gc_cause == GCCause::_metadata_GC_clear_soft_refs ||
 264          _gc_cause == GCCause::_wb_full_gc;
 265 }
 266 
 267 void GenCollectedHeap::collect_generation(Generation* gen, bool full, size_t size,
 268                                           bool is_tlab, bool run_verification, bool clear_soft_refs,
 269                                           bool restore_marks_for_biased_locking) {
 270   FormatBuffer<> title("Collect gen: %s", gen->short_name());
 271   GCTraceTime(Trace, gc, phases) t1(title);
 272   TraceCollectorStats tcs(gen->counters());
 273   TraceMemoryManagerStats tmms(gen->gc_manager(), gc_cause());
 274 
 275   gen->stat_record()->invocations++;
 276   gen->stat_record()->accumulated_time.start();
 277 
 278   // Must be done anew before each collection because
 279   // a previous collection will do mangling and will
 280   // change top of some spaces.
 281   record_gen_tops_before_GC();
 282 
 283   log_trace(gc)("%s invoke=%d size=" SIZE_FORMAT, heap()->is_young_gen(gen) ? "Young" : "Old", gen->stat_record()->invocations, size * HeapWordSize);
 284 
 285   if (run_verification && VerifyBeforeGC) {
 286     HandleMark hm;  // Discard invalid handles created during verification
 287     Universe::verify("Before GC");
 288   }
 289   COMPILER2_PRESENT(DerivedPointerTable::clear());
 290 
 291   if (restore_marks_for_biased_locking) {
 292     // We perform this mark word preservation work lazily
 293     // because it's only at this point that we know whether we


< prev index next >