
src/hotspot/share/gc/shared/genCollectedHeap.cpp

rev 48000 : [mq]: open.patch

Old version (before the patch):

 126     vm_exit_during_initialization("The size of the object heap + VM data exceeds "
 127                                   "the maximum representable size");
 128   }
 129   assert(total_reserved % alignment == 0,
 130          "Gen size; total_reserved=" SIZE_FORMAT ", alignment="
 131          SIZE_FORMAT, total_reserved, alignment);
 132 
 133   *heap_rs = Universe::reserve_heap(total_reserved, alignment);
 134 
 135   os::trace_page_sizes("Heap",
 136                        collector_policy()->min_heap_byte_size(),
 137                        total_reserved,
 138                        alignment,
 139                        heap_rs->base(),
 140                        heap_rs->size());
 141 
 142   return heap_rs->base();
 143 }
 144 
 145 void GenCollectedHeap::post_initialize() {

 146   ref_processing_init();
 147   check_gen_kinds();
 148   DefNewGeneration* def_new_gen = (DefNewGeneration*)_young_gen;
 149 
 150   _gen_policy->initialize_size_policy(def_new_gen->eden()->capacity(),
 151                                       _old_gen->capacity(),
 152                                       def_new_gen->from()->capacity());
 153   _gen_policy->initialize_gc_policy_counters();
 154 }
 155 
 156 void GenCollectedHeap::ref_processing_init() {
 157   _young_gen->ref_processor_init();
 158   _old_gen->ref_processor_init();
 159 }
 160 
 161 size_t GenCollectedHeap::capacity() const {
 162   return _young_gen->capacity() + _old_gen->capacity();
 163 }
 164 
 165 size_t GenCollectedHeap::used() const {


 253 }
 254 
 255 HeapWord* GenCollectedHeap::mem_allocate(size_t size,
 256                                          bool* gc_overhead_limit_was_exceeded) {
 257   return gen_policy()->mem_allocate_work(size,
 258                                          false /* is_tlab */,
 259                                          gc_overhead_limit_was_exceeded);
 260 }
 261 
 262 bool GenCollectedHeap::must_clear_all_soft_refs() {
 263   return _gc_cause == GCCause::_metadata_GC_clear_soft_refs ||
 264          _gc_cause == GCCause::_wb_full_gc;
 265 }
 266 
 267 void GenCollectedHeap::collect_generation(Generation* gen, bool full, size_t size,
 268                                           bool is_tlab, bool run_verification, bool clear_soft_refs,
 269                                           bool restore_marks_for_biased_locking) {
 270   FormatBuffer<> title("Collect gen: %s", gen->short_name());
 271   GCTraceTime(Trace, gc, phases) t1(title);
 272   TraceCollectorStats tcs(gen->counters());
 273   TraceMemoryManagerStats tmms(gen->kind(),gc_cause());
 274 
 275   gen->stat_record()->invocations++;
 276   gen->stat_record()->accumulated_time.start();
 277 
 278   // Must be done anew before each collection because
 279   // a previous collection will do mangling and will
 280   // change top of some spaces.
 281   record_gen_tops_before_GC();
 282 
 283   log_trace(gc)("%s invoke=%d size=" SIZE_FORMAT, heap()->is_young_gen(gen) ? "Young" : "Old", gen->stat_record()->invocations, size * HeapWordSize);
 284 
 285   if (run_verification && VerifyBeforeGC) {
 286     HandleMark hm;  // Discard invalid handles created during verification
 287     Universe::verify("Before GC");
 288   }
 289   COMPILER2_PRESENT(DerivedPointerTable::clear());
 290 
 291   if (restore_marks_for_biased_locking) {
 292     // We perform this mark word preservation work lazily
 293     // because it's only at this point that we know whether we


New version (after the patch):

 126     vm_exit_during_initialization("The size of the object heap + VM data exceeds "
 127                                   "the maximum representable size");
 128   }
 129   assert(total_reserved % alignment == 0,
 130          "Gen size; total_reserved=" SIZE_FORMAT ", alignment="
 131          SIZE_FORMAT, total_reserved, alignment);
 132 
 133   *heap_rs = Universe::reserve_heap(total_reserved, alignment);
 134 
 135   os::trace_page_sizes("Heap",
 136                        collector_policy()->min_heap_byte_size(),
 137                        total_reserved,
 138                        alignment,
 139                        heap_rs->base(),
 140                        heap_rs->size());
 141 
 142   return heap_rs->base();
 143 }
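
The "Gen size" assert at line 129 holds because total_reserved is assembled from sizes that were each rounded up to the heap alignment, so their sum stays aligned. A minimal standalone sketch of that invariant (not HotSpot code; align_up here is a local stand-in and the sizes are invented):

    #include <cassert>
    #include <cstddef>

    // Round n up to a multiple of alignment (a power of two).
    static size_t align_up(size_t n, size_t alignment) {
      return (n + alignment - 1) & ~(alignment - 1);
    }

    int main() {
      const size_t alignment = 2 * 1024 * 1024;   // e.g. one 2M large page
      // Each generation size is aligned individually...
      size_t young_size = align_up(30u * 1024 * 1024 + 1, alignment);
      size_t old_size   = align_up(70u * 1024 * 1024 + 1, alignment);
      // ...so the total is aligned by construction, which is exactly
      // what the assert in the code above checks.
      size_t total_reserved = young_size + old_size;
      assert(total_reserved % alignment == 0);
      return 0;
    }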
 144 
 145 void GenCollectedHeap::post_initialize() {
 146   CollectedHeap::post_initialize();
 147   ref_processing_init();
 148   check_gen_kinds();
 149   DefNewGeneration* def_new_gen = (DefNewGeneration*)_young_gen;
 150 
 151   _gen_policy->initialize_size_policy(def_new_gen->eden()->capacity(),
 152                                       _old_gen->capacity(),
 153                                       def_new_gen->from()->capacity());
 154   _gen_policy->initialize_gc_policy_counters();
 155 }
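
The one-line addition at line 146 makes the override run the base-class setup before its own generation-specific work. A minimal sketch of the pattern (the class names below are illustrative, not the actual HotSpot hierarchy):

    // Base class does setup common to every heap implementation.
    struct BaseHeap {
      virtual void post_initialize() { /* shared setup */ }
      virtual ~BaseHeap() {}
    };

    // The override delegates to the base first, mirroring the
    // CollectedHeap::post_initialize() call added in this patch.
    struct GenHeap : BaseHeap {
      virtual void post_initialize() {
        BaseHeap::post_initialize();
        // ... ref processing, size policy, counters, as above ...
      }
    };

    int main() {
      GenHeap h;
      h.post_initialize();
      return 0;
    }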
 156 
 157 void GenCollectedHeap::ref_processing_init() {
 158   _young_gen->ref_processor_init();
 159   _old_gen->ref_processor_init();
 160 }
 161 
 162 size_t GenCollectedHeap::capacity() const {
 163   return _young_gen->capacity() + _old_gen->capacity();
 164 }
 165 
 166 size_t GenCollectedHeap::used() const {


 254 }
 255 
 256 HeapWord* GenCollectedHeap::mem_allocate(size_t size,
 257                                          bool* gc_overhead_limit_was_exceeded) {
 258   return gen_policy()->mem_allocate_work(size,
 259                                          false /* is_tlab */,
 260                                          gc_overhead_limit_was_exceeded);
 261 }
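
mem_allocate simply forwards to the policy's mem_allocate_work with is_tlab pinned to false; the gc_overhead_limit_was_exceeded flag is an out-parameter the callee fills in. A standalone sketch of that calling shape (allocate_words is a made-up stand-in, not a HotSpot function):

    #include <cstdlib>

    // Callee returns the allocation and reports, through the flag,
    // whether a GC-overhead limit was hit along the way.
    static void* allocate_words(size_t words, bool* limit_was_exceeded) {
      *limit_was_exceeded = false;   // callee always sets the flag
      return malloc(words * sizeof(void*));
    }

    int main() {
      bool limit_was_exceeded = false;
      void* p = allocate_words(16, &limit_was_exceeded);
      if (p == NULL || limit_was_exceeded) {
        return 1;   // the VM would surface this as an OutOfMemoryError
      }
      free(p);
      return 0;
    }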
 262 
 263 bool GenCollectedHeap::must_clear_all_soft_refs() {
 264   return _gc_cause == GCCause::_metadata_GC_clear_soft_refs ||
 265          _gc_cause == GCCause::_wb_full_gc;
 266 }
 267 
 268 void GenCollectedHeap::collect_generation(Generation* gen, bool full, size_t size,
 269                                           bool is_tlab, bool run_verification, bool clear_soft_refs,
 270                                           bool restore_marks_for_biased_locking) {
 271   FormatBuffer<> title("Collect gen: %s", gen->short_name());
 272   GCTraceTime(Trace, gc, phases) t1(title);
 273   TraceCollectorStats tcs(gen->counters());
 274   TraceMemoryManagerStats tmms(gen->gc_manager(), gc_cause());
 275 
 276   gen->stat_record()->invocations++;
 277   gen->stat_record()->accumulated_time.start();
 278 
 279   // Must be done anew before each collection because
 280   // a previous collection will do mangling and will
 281   // change top of some spaces.
 282   record_gen_tops_before_GC();
 283 
 284   log_trace(gc)("%s invoke=%d size=" SIZE_FORMAT, heap()->is_young_gen(gen) ? "Young" : "Old", gen->stat_record()->invocations, size * HeapWordSize);
 285 
 286   if (run_verification && VerifyBeforeGC) {
 287     HandleMark hm;  // Discard invalid handles created during verification
 288     Universe::verify("Before GC");
 289   }
 290   COMPILER2_PRESENT(DerivedPointerTable::clear());
 291 
 292   if (restore_marks_for_biased_locking) {
 293     // We perform this mark word preservation work lazily
 294     // because it's only at this point that we know whether we

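The substantive change in collect_generation is at line 274: TraceMemoryManagerStats is now constructed from the generation's memory manager (gen->gc_manager()) instead of the Generation kind enum, so the statistics are attributed to the right manager directly. Like the other trace helpers here (t1, tcs), it is a stack-scoped object whose constructor and destructor bracket the collection. A standalone sketch of that RAII shape (ScopedPhaseTimer is illustrative only, not the HotSpot class):

    #include <cstdio>
    #include <ctime>

    // Constructor records the begin state, destructor the end state,
    // so declaring one on the stack covers the whole scope.
    class ScopedPhaseTimer {
      const char* _name;
      clock_t     _start;
     public:
      explicit ScopedPhaseTimer(const char* name)
        : _name(name), _start(clock()) {}
      ~ScopedPhaseTimer() {
        double ms = 1000.0 * (clock() - _start) / CLOCKS_PER_SEC;
        printf("%s: %.2f ms\n", _name, ms);
      }
    };

    int main() {
      ScopedPhaseTimer t("collect gen");   // like tmms/tcs/t1 above
      // ... collection work runs here ...
      return 0;
    }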
