29 #include "gc/shared/adaptiveSizePolicy.hpp"
30 #include "gc/shared/ageTable.inline.hpp"
31 #include "gc/shared/cardTableRS.hpp"
32 #include "gc/shared/collectorCounters.hpp"
33 #include "gc/shared/gcHeapSummary.hpp"
34 #include "gc/shared/gcLocker.hpp"
35 #include "gc/shared/gcPolicyCounters.hpp"
36 #include "gc/shared/gcTimer.hpp"
37 #include "gc/shared/gcTrace.hpp"
38 #include "gc/shared/gcTraceTime.inline.hpp"
39 #include "gc/shared/genOopClosures.inline.hpp"
40 #include "gc/shared/generationSpec.hpp"
41 #include "gc/shared/preservedMarks.inline.hpp"
42 #include "gc/shared/referencePolicy.hpp"
43 #include "gc/shared/referenceProcessorPhaseTimes.hpp"
44 #include "gc/shared/space.inline.hpp"
45 #include "gc/shared/spaceDecorator.hpp"
46 #include "gc/shared/strongRootsScope.hpp"
47 #include "gc/shared/weakProcessor.hpp"
48 #include "logging/log.hpp"
49 #include "memory/iterator.hpp"
50 #include "memory/resourceArea.hpp"
51 #include "oops/instanceRefKlass.hpp"
52 #include "oops/oop.inline.hpp"
53 #include "runtime/atomic.hpp"
54 #include "runtime/java.hpp"
55 #include "runtime/prefetch.inline.hpp"
56 #include "runtime/thread.inline.hpp"
57 #include "utilities/align.hpp"
58 #include "utilities/copy.hpp"
59 #include "utilities/globalDefinitions.hpp"
60 #include "utilities/stack.inline.hpp"
61
62 //
63 // DefNewGeneration functions.
64
65 // Methods of protected closure types.
66
67 DefNewGeneration::IsAliveClosure::IsAliveClosure(Generation* young_gen) : _young_gen(young_gen) {
68 assert(_young_gen->kind() == Generation::ParNew ||
69 _young_gen->kind() == Generation::DefNew, "Expected the young generation here");
95 FastEvacuateFollowersClosure(SerialHeap* heap,
96 FastScanClosure* cur,
97 FastScanClosure* older) :
98 _heap(heap), _scan_cur_or_nonheap(cur), _scan_older(older)
99 {
100 }
101
// Drain all newly-evacuated objects: scanning objects copied since the last
// save-marks can itself evacuate more objects (allocating in to-space or the
// old gen), so repeat until an iteration causes no further allocations.
void DefNewGeneration::FastEvacuateFollowersClosure::do_void() {
  do {
    _heap->oop_since_save_marks_iterate(_scan_cur_or_nonheap, _scan_older);
  } while (!_heap->no_allocs_since_save_marks());
  // By the time the transitive closure is complete, any promotion-failure
  // scanning must also have finished.
  guarantee(_heap->young_gen()->promo_failure_scan_is_complete(), "Failed to finish scan");
}
108
// Scans oops during a young collection; the boundary is cached as the end of
// the young generation's reserved region. gc_barrier selects whether the
// closure applies the generational write barrier while scanning.
ScanClosure::ScanClosure(DefNewGeneration* g, bool gc_barrier) :
    OopsInClassLoaderDataOrGenClosure(g), _g(g), _gc_barrier(gc_barrier)
{
  // _boundary lives in a base class, so it is set here rather than in the
  // initializer list.
  _boundary = _g->reserved().end();
}
114
// Both oop widths forward to the shared do_oop_work implementation.
void ScanClosure::do_oop(oop* p)       { ScanClosure::do_oop_work(p); }
void ScanClosure::do_oop(narrowOop* p) { ScanClosure::do_oop_work(p); }
117
// Fast variant of ScanClosure; same construction: cache the end of the young
// generation's reserved region as the scan boundary.
FastScanClosure::FastScanClosure(DefNewGeneration* g, bool gc_barrier) :
    OopsInClassLoaderDataOrGenClosure(g), _g(g), _gc_barrier(gc_barrier)
{
  // _boundary is a base-class member, so it cannot go in the initializer list.
  _boundary = _g->reserved().end();
}
123
// Both oop widths forward to the shared do_oop_work implementation.
void FastScanClosure::do_oop(oop* p)       { FastScanClosure::do_oop_work(p); }
void FastScanClosure::do_oop(narrowOop* p) { FastScanClosure::do_oop_work(p); }
126
127 void CLDScanClosure::do_cld(ClassLoaderData* cld) {
128 NOT_PRODUCT(ResourceMark rm);
129 log_develop_trace(gc, scavenge)("CLDScanClosure::do_cld " PTR_FORMAT ", %s, dirty: %s",
130 p2i(cld),
131 cld->loader_name_and_id(),
132 cld->has_modified_oops() ? "true" : "false");
133
134 // If the cld has not been dirtied we know that there's
135 // no references into the young gen and we can skip it.
136 if (cld->has_modified_oops()) {
137 if (_accumulate_modified_oops) {
138 cld->accumulate_modified_oops();
139 }
140
141 // Tell the closure which CLD is being scanned so that it can be dirtied
142 // if oops are left pointing into the young gen.
143 _scavenge_closure->set_scanned_cld(cld);
144
145 // Clean the cld since we're going to scavenge all the metadata.
146 cld->oops_do(_scavenge_closure, false, /*clear_modified_oops*/true);
147
148 _scavenge_closure->set_scanned_cld(NULL);
149 }
150 }
151
// Weak-reference scanning closure; caches the end of the young generation's
// reserved region as the scan boundary.
ScanWeakRefClosure::ScanWeakRefClosure(DefNewGeneration* g) :
    _g(g)
{
  _boundary = _g->reserved().end();
}
157
// Both oop widths forward to the shared do_oop_work implementation.
void ScanWeakRefClosure::do_oop(oop* p)       { ScanWeakRefClosure::do_oop_work(p); }
void ScanWeakRefClosure::do_oop(narrowOop* p) { ScanWeakRefClosure::do_oop_work(p); }
160
161 DefNewGeneration::DefNewGeneration(ReservedSpace rs,
162 size_t initial_size,
163 const char* policy)
164 : Generation(rs, initial_size),
165 _preserved_marks_set(false /* in_c_heap */),
166 _promo_failure_drain_in_progress(false),
167 _should_allocate_from_space(false)
168 {
169 MemRegion cmr((HeapWord*)_virtual_space.low(),
170 (HeapWord*)_virtual_space.high());
171 GenCollectedHeap* gch = GenCollectedHeap::heap();
172
173 gch->rem_set()->resize_covered_region(cmr);
174
175 _eden_space = new ContiguousSpace();
176 _from_space = new ContiguousSpace();
177 _to_space = new ContiguousSpace();
178
179 if (_eden_space == NULL || _from_space == NULL || _to_space == NULL) {
|
29 #include "gc/shared/adaptiveSizePolicy.hpp"
30 #include "gc/shared/ageTable.inline.hpp"
31 #include "gc/shared/cardTableRS.hpp"
32 #include "gc/shared/collectorCounters.hpp"
33 #include "gc/shared/gcHeapSummary.hpp"
34 #include "gc/shared/gcLocker.hpp"
35 #include "gc/shared/gcPolicyCounters.hpp"
36 #include "gc/shared/gcTimer.hpp"
37 #include "gc/shared/gcTrace.hpp"
38 #include "gc/shared/gcTraceTime.inline.hpp"
39 #include "gc/shared/genOopClosures.inline.hpp"
40 #include "gc/shared/generationSpec.hpp"
41 #include "gc/shared/preservedMarks.inline.hpp"
42 #include "gc/shared/referencePolicy.hpp"
43 #include "gc/shared/referenceProcessorPhaseTimes.hpp"
44 #include "gc/shared/space.inline.hpp"
45 #include "gc/shared/spaceDecorator.hpp"
46 #include "gc/shared/strongRootsScope.hpp"
47 #include "gc/shared/weakProcessor.hpp"
48 #include "logging/log.hpp"
49 #include "memory/iterator.inline.hpp"
50 #include "memory/resourceArea.hpp"
51 #include "oops/instanceRefKlass.hpp"
52 #include "oops/oop.inline.hpp"
53 #include "runtime/atomic.hpp"
54 #include "runtime/java.hpp"
55 #include "runtime/prefetch.inline.hpp"
56 #include "runtime/thread.inline.hpp"
57 #include "utilities/align.hpp"
58 #include "utilities/copy.hpp"
59 #include "utilities/globalDefinitions.hpp"
60 #include "utilities/stack.inline.hpp"
61
62 //
63 // DefNewGeneration functions.
64
65 // Methods of protected closure types.
66
67 DefNewGeneration::IsAliveClosure::IsAliveClosure(Generation* young_gen) : _young_gen(young_gen) {
68 assert(_young_gen->kind() == Generation::ParNew ||
69 _young_gen->kind() == Generation::DefNew, "Expected the young generation here");
95 FastEvacuateFollowersClosure(SerialHeap* heap,
96 FastScanClosure* cur,
97 FastScanClosure* older) :
98 _heap(heap), _scan_cur_or_nonheap(cur), _scan_older(older)
99 {
100 }
101
// Drain all newly-evacuated objects: scanning objects copied since the last
// save-marks can itself evacuate more objects (allocating in to-space or the
// old gen), so repeat until an iteration causes no further allocations.
void DefNewGeneration::FastEvacuateFollowersClosure::do_void() {
  do {
    _heap->oop_since_save_marks_iterate(_scan_cur_or_nonheap, _scan_older);
  } while (!_heap->no_allocs_since_save_marks());
  // By the time the transitive closure is complete, any promotion-failure
  // scanning must also have finished.
  guarantee(_heap->young_gen()->promo_failure_scan_is_complete(), "Failed to finish scan");
}
108
// Scans oops during a young collection; the boundary is cached as the end of
// the young generation's reserved region. gc_barrier selects whether the
// closure applies the generational write barrier while scanning.
ScanClosure::ScanClosure(DefNewGeneration* g, bool gc_barrier) :
    OopsInClassLoaderDataOrGenClosure(g), _g(g), _gc_barrier(gc_barrier)
{
  // _boundary lives in a base class, so it is set here rather than in the
  // initializer list.
  _boundary = _g->reserved().end();
}
114
// Fast variant of ScanClosure; same construction: cache the end of the young
// generation's reserved region as the scan boundary.
FastScanClosure::FastScanClosure(DefNewGeneration* g, bool gc_barrier) :
    OopsInClassLoaderDataOrGenClosure(g), _g(g), _gc_barrier(gc_barrier)
{
  // _boundary is a base-class member, so it cannot go in the initializer list.
  _boundary = _g->reserved().end();
}
120
121 void CLDScanClosure::do_cld(ClassLoaderData* cld) {
122 NOT_PRODUCT(ResourceMark rm);
123 log_develop_trace(gc, scavenge)("CLDScanClosure::do_cld " PTR_FORMAT ", %s, dirty: %s",
124 p2i(cld),
125 cld->loader_name_and_id(),
126 cld->has_modified_oops() ? "true" : "false");
127
128 // If the cld has not been dirtied we know that there's
129 // no references into the young gen and we can skip it.
130 if (cld->has_modified_oops()) {
131 if (_accumulate_modified_oops) {
132 cld->accumulate_modified_oops();
133 }
134
135 // Tell the closure which CLD is being scanned so that it can be dirtied
136 // if oops are left pointing into the young gen.
137 _scavenge_closure->set_scanned_cld(cld);
138
139 // Clean the cld since we're going to scavenge all the metadata.
140 cld->oops_do(_scavenge_closure, false, /*clear_modified_oops*/true);
141
142 _scavenge_closure->set_scanned_cld(NULL);
143 }
144 }
145
// Weak-reference scanning closure; caches the end of the young generation's
// reserved region as the scan boundary.
ScanWeakRefClosure::ScanWeakRefClosure(DefNewGeneration* g) :
    _g(g)
{
  _boundary = _g->reserved().end();
}
151
152 DefNewGeneration::DefNewGeneration(ReservedSpace rs,
153 size_t initial_size,
154 const char* policy)
155 : Generation(rs, initial_size),
156 _preserved_marks_set(false /* in_c_heap */),
157 _promo_failure_drain_in_progress(false),
158 _should_allocate_from_space(false)
159 {
160 MemRegion cmr((HeapWord*)_virtual_space.low(),
161 (HeapWord*)_virtual_space.high());
162 GenCollectedHeap* gch = GenCollectedHeap::heap();
163
164 gch->rem_set()->resize_covered_region(cmr);
165
166 _eden_space = new ContiguousSpace();
167 _from_space = new ContiguousSpace();
168 _to_space = new ContiguousSpace();
169
170 if (_eden_space == NULL || _from_space == NULL || _to_space == NULL) {
|