161 cld->oops_do(_scavenge_closure, false, /*clear_modified_oops*/true);
162
163 _scavenge_closure->set_scanned_cld(NULL);
164 }
165 }
166
// ScanWeakRefClosure: closure over oop locations in the young generation.
// Records the end of the young generation's reserved region as _boundary;
// oops at or beyond it live in an older generation.
// NOTE(review): the name suggests this is applied to weak referents during
// reference processing — confirm against the call sites.
167 ScanWeakRefClosure::ScanWeakRefClosure(DefNewGeneration* g) :
168 _g(g)
169 {
170 _boundary = _g->reserved().end();
171 }
172
// Virtual entry points for both uncompressed (oop*) and compressed
// (narrowOop*) oop locations; both forward to the shared do_oop_work.
173 void ScanWeakRefClosure::do_oop(oop* p) { ScanWeakRefClosure::do_oop_work(p); }
174 void ScanWeakRefClosure::do_oop(narrowOop* p) { ScanWeakRefClosure::do_oop_work(p); }
175
// Virtual entry points for both uncompressed (oop*) and compressed
// (narrowOop*) oop locations; both forward to the shared do_oop_work.
176 void FilteringClosure::do_oop(oop* p) { FilteringClosure::do_oop_work(p); }
177 void FilteringClosure::do_oop(narrowOop* p) { FilteringClosure::do_oop_work(p); }
178
179 DefNewGeneration::DefNewGeneration(ReservedSpace rs,
180 size_t initial_size,
181 const char* policy)
182 : Generation(rs, initial_size),
183 _preserved_marks_set(false /* in_c_heap */),
184 _promo_failure_drain_in_progress(false),
185 _should_allocate_from_space(false)
186 {
187 MemRegion cmr((HeapWord*)_virtual_space.low(),
188 (HeapWord*)_virtual_space.high());
189 GenCollectedHeap* gch = GenCollectedHeap::heap();
190
191 gch->barrier_set()->resize_covered_region(cmr);
192
193 _eden_space = new ContiguousSpace();
194 _from_space = new ContiguousSpace();
195 _to_space = new ContiguousSpace();
196
197 if (_eden_space == NULL || _from_space == NULL || _to_space == NULL) {
198 vm_exit_during_initialization("Could not allocate a new gen space");
199 }
200
201 // Compute the maximum eden and survivor space sizes. These sizes
202 // are computed assuming the entire reserved space is committed.
|
161 cld->oops_do(_scavenge_closure, false, /*clear_modified_oops*/true);
162
163 _scavenge_closure->set_scanned_cld(NULL);
164 }
165 }
166
// ScanWeakRefClosure: closure over oop locations in the young generation.
// Records the end of the young generation's reserved region as _boundary;
// oops at or beyond it live in an older generation.
// NOTE(review): the name suggests this is applied to weak referents during
// reference processing — confirm against the call sites.
167 ScanWeakRefClosure::ScanWeakRefClosure(DefNewGeneration* g) :
168 _g(g)
169 {
170 _boundary = _g->reserved().end();
171 }
172
// Virtual entry points for both uncompressed (oop*) and compressed
// (narrowOop*) oop locations; both forward to the shared do_oop_work.
173 void ScanWeakRefClosure::do_oop(oop* p) { ScanWeakRefClosure::do_oop_work(p); }
174 void ScanWeakRefClosure::do_oop(narrowOop* p) { ScanWeakRefClosure::do_oop_work(p); }
175
// Virtual entry points for both uncompressed (oop*) and compressed
// (narrowOop*) oop locations; both forward to the shared do_oop_work.
176 void FilteringClosure::do_oop(oop* p) { FilteringClosure::do_oop_work(p); }
177 void FilteringClosure::do_oop(narrowOop* p) { FilteringClosure::do_oop_work(p); }
178
179 DefNewGeneration::DefNewGeneration(ReservedSpace rs,
180 size_t initial_size,
181 GCMemoryManager* mem_mgr,
182 const char* policy)
183 : Generation(rs, initial_size, mem_mgr),
184 _preserved_marks_set(false /* in_c_heap */),
185 _promo_failure_drain_in_progress(false),
186 _should_allocate_from_space(false)
187 {
188 MemRegion cmr((HeapWord*)_virtual_space.low(),
189 (HeapWord*)_virtual_space.high());
190 GenCollectedHeap* gch = GenCollectedHeap::heap();
191
192 gch->barrier_set()->resize_covered_region(cmr);
193
194 _eden_space = new ContiguousSpace();
195 _from_space = new ContiguousSpace();
196 _to_space = new ContiguousSpace();
197
198 if (_eden_space == NULL || _from_space == NULL || _to_space == NULL) {
199 vm_exit_during_initialization("Could not allocate a new gen space");
200 }
201
202 // Compute the maximum eden and survivor space sizes. These sizes
203 // are computed assuming the entire reserved space is committed.
|