120 }
121 }
122
123 void CollectedHeap::print_heap_after_gc() {
124 Universe::print_heap_after_gc();
125 if (_gc_heap_log != NULL) {
126 _gc_heap_log->log_heap_after(this);
127 }
128 }
129
// Print a heap summary onto 'st' for error reporting (e.g. hs_err files).
// Emits a "Heap:" header, the extended per-space view, and barrier-set state.
void CollectedHeap::print_on_error(outputStream* st) const {
  st->print_cr("Heap:");
  // Extended view (per generation/region); provided by the concrete heap.
  print_extended_on(st);
  st->cr();

  // Include the barrier set's own diagnostic output in the report.
  _barrier_set->print_on(st);
}
137
138 void CollectedHeap::register_nmethod(nmethod* nm) {
139 assert_locked_or_safepoint(CodeCache_lock);
140 }
141
// Unhook 'nm' from GC bookkeeping. The base implementation only checks
// locking; any list removal is presumably handled elsewhere or by
// subclass overrides — TODO(review): confirm against concrete heaps.
void CollectedHeap::unregister_nmethod(nmethod* nm) {
  // Must hold CodeCache_lock or be at a safepoint, mirroring register_nmethod.
  assert_locked_or_safepoint(CodeCache_lock);
}
145
146 void CollectedHeap::trace_heap(GCWhen::Type when, const GCTracer* gc_tracer) {
147 const GCHeapSummary& heap_summary = create_heap_summary();
148 gc_tracer->report_gc_heap_summary(when, heap_summary);
149
150 const MetaspaceSummary& metaspace_summary = create_metaspace_summary();
151 gc_tracer->report_metaspace_summary(when, metaspace_summary);
152 }
153
// Emit the "before GC" heap/metaspace summary events to the tracer.
void CollectedHeap::trace_heap_before_gc(const GCTracer* gc_tracer) {
  trace_heap(GCWhen::BeforeGC, gc_tracer);
}
157
158 void CollectedHeap::trace_heap_after_gc(const GCTracer* gc_tracer) {
159 trace_heap(GCWhen::AfterGC, gc_tracer);
600 ResourceMark rm;
601 VM_GC_HeapInspection inspector(log.trace_stream(), false /* ! full gc */);
602 inspector.doit();
603 }
604 }
605
// Hook invoked before a full GC; 'true' selects the before-GC dump variant.
void CollectedHeap::pre_full_gc_dump(GCTimer* timer) {
  full_gc_dump(timer, true);
}
609
// Hook invoked after a full GC; 'false' selects the after-GC dump variant.
void CollectedHeap::post_full_gc_dump(GCTimer* timer) {
  full_gc_dump(timer, false);
}
613
// Record the heap's reserved [start, end) address range in _reserved.
// NOTE: the statement order below is deliberate (see comment); do not reorder.
void CollectedHeap::initialize_reserved_region(HeapWord *start, HeapWord *end) {
  // It is important to do this in a way such that concurrent readers can't
  // temporarily think something is in the heap. (Seen this happen in asserts.)
  _reserved.set_word_size(0);
  _reserved.set_start(start);
  _reserved.set_end(end);
}
|
120 }
121 }
122
123 void CollectedHeap::print_heap_after_gc() {
124 Universe::print_heap_after_gc();
125 if (_gc_heap_log != NULL) {
126 _gc_heap_log->log_heap_after(this);
127 }
128 }
129
// Print a heap summary onto 'st' for error reporting (e.g. hs_err files).
// Emits a "Heap:" header, the extended per-space view, and barrier-set state.
void CollectedHeap::print_on_error(outputStream* st) const {
  st->print_cr("Heap:");
  // Extended view (per generation/region); provided by the concrete heap.
  print_extended_on(st);
  st->cr();

  // Include the barrier set's own diagnostic output in the report.
  _barrier_set->print_on(st);
}
137
138 void CollectedHeap::register_nmethod(nmethod* nm) {
139 assert_locked_or_safepoint(CodeCache_lock);
140 if (!nm->on_scavenge_root_list() && nm->detect_scavenge_root_oops()) {
141 CodeCache::add_scavenge_root_nmethod(nm);
142 }
143 }
144
// Unhook 'nm' from GC bookkeeping. The base implementation only checks
// locking; removal from the scavenge-root list is presumably handled
// elsewhere or by subclass overrides — TODO(review): confirm.
void CollectedHeap::unregister_nmethod(nmethod* nm) {
  // Must hold CodeCache_lock or be at a safepoint, mirroring register_nmethod.
  assert_locked_or_safepoint(CodeCache_lock);
}
148
149 void CollectedHeap::trace_heap(GCWhen::Type when, const GCTracer* gc_tracer) {
150 const GCHeapSummary& heap_summary = create_heap_summary();
151 gc_tracer->report_gc_heap_summary(when, heap_summary);
152
153 const MetaspaceSummary& metaspace_summary = create_metaspace_summary();
154 gc_tracer->report_metaspace_summary(when, metaspace_summary);
155 }
156
// Emit the "before GC" heap/metaspace summary events to the tracer.
void CollectedHeap::trace_heap_before_gc(const GCTracer* gc_tracer) {
  trace_heap(GCWhen::BeforeGC, gc_tracer);
}
160
161 void CollectedHeap::trace_heap_after_gc(const GCTracer* gc_tracer) {
162 trace_heap(GCWhen::AfterGC, gc_tracer);
603 ResourceMark rm;
604 VM_GC_HeapInspection inspector(log.trace_stream(), false /* ! full gc */);
605 inspector.doit();
606 }
607 }
608
// Hook invoked before a full GC; 'true' selects the before-GC dump variant.
void CollectedHeap::pre_full_gc_dump(GCTimer* timer) {
  full_gc_dump(timer, true);
}
612
// Hook invoked after a full GC; 'false' selects the after-GC dump variant.
void CollectedHeap::post_full_gc_dump(GCTimer* timer) {
  full_gc_dump(timer, false);
}
616
// Record the heap's reserved [start, end) address range in _reserved.
// NOTE: the statement order below is deliberate (see comment); do not reorder.
void CollectedHeap::initialize_reserved_region(HeapWord *start, HeapWord *end) {
  // It is important to do this in a way such that concurrent readers can't
  // temporarily think something is in the heap. (Seen this happen in asserts.)
  _reserved.set_word_size(0);
  _reserved.set_start(start);
  _reserved.set_end(end);
}
624
625 void CollectedHeap::verify_nmethod_roots(nmethod* nmethod) {
626 nmethod->verify_scavenge_root_oops();
627 }
|