213 _oops_into_optional_regions[index].push_root(p);
214 }
215
216 template <typename T>
217 inline void G1ParScanThreadState::remember_reference_into_optional_region(T* p) {
218   // Resolve the (known non-null) referent and locate the optional
219   // collection-set region it currently lives in.
220   oop obj = RawAccess<IS_NOT_NULL>::oop_load(p);
221   uint opt_index = _g1h->heap_region_containing(obj)->index_in_opt_cset();
222   assert(opt_index < _num_optional_regions,
223          "Trying to access optional region idx %u beyond " SIZE_FORMAT, opt_index, _num_optional_regions);
224   // Defer this reference: it is queued per optional region and processed
225   // only if/when that region is actually evacuated.
226   _oops_into_optional_regions[opt_index].push_oop(p);
227   DEBUG_ONLY(verify_ref(p);)
228 }
225
226 G1OopStarChunkedList* G1ParScanThreadState::oops_into_optional_region(const HeapRegion* hr) {
227   // Return the deferred-oops list for the given optional region.
228   // Hoist the index so it is computed once for both the assert and the lookup.
229   const uint opt_index = hr->index_in_opt_cset();
230   assert(opt_index < _num_optional_regions,
231          "Trying to access optional region idx %u beyond " SIZE_FORMAT " " HR_FORMAT,
232          opt_index, _num_optional_regions, HR_FORMAT_PARAMS(hr));
233   return &_oops_into_optional_regions[opt_index];
234 }
232
233 #endif // SHARE_GC_G1_G1PARSCANTHREADSTATE_INLINE_HPP
|
213 _oops_into_optional_regions[index].push_root(p);
214 }
215
216 template <typename T>
217 inline void G1ParScanThreadState::remember_reference_into_optional_region(T* p) {
218   // Resolve the (known non-null) referent and locate the optional
219   // collection-set region it currently lives in.
220   oop obj = RawAccess<IS_NOT_NULL>::oop_load(p);
221   uint opt_index = _g1h->heap_region_containing(obj)->index_in_opt_cset();
222   assert(opt_index < _num_optional_regions,
223          "Trying to access optional region idx %u beyond " SIZE_FORMAT, opt_index, _num_optional_regions);
224   // Defer this reference: it is queued per optional region and processed
225   // only if/when that region is actually evacuated.
226   _oops_into_optional_regions[opt_index].push_oop(p);
227   DEBUG_ONLY(verify_ref(p);)
228 }
225
226 G1OopStarChunkedList* G1ParScanThreadState::oops_into_optional_region(const HeapRegion* hr) {
227   // Return the deferred-oops list for the given optional region.
228   // Hoist the index so it is computed once for both the assert and the lookup.
229   const uint opt_index = hr->index_in_opt_cset();
230   assert(opt_index < _num_optional_regions,
231          "Trying to access optional region idx %u beyond " SIZE_FORMAT " " HR_FORMAT,
232          opt_index, _num_optional_regions, HR_FORMAT_PARAMS(hr));
233   return &_oops_into_optional_regions[opt_index];
234 }
232
233 // Lazily allocate the per-NUMA-node object allocation counters.
234 // The array is allocated only when NUMA is enabled AND the
235 // (gc, heap, numa) Info log target is active — the counters are only
236 // ever reported through that log, so there is no point collecting them
237 // otherwise. flush_numa_stats()/update_numa_stats() test _obj_alloc_stat
238 // for NULL to detect whether collection is active.
239 void G1ParScanThreadState::initialize_numa_stats() {
240   if (_numa->is_enabled()) {
241     LogTarget(Info, gc, heap, numa) lt;
242
243     if (lt.is_enabled()) {
244       uint num_nodes = _numa->num_active_nodes();
245       // NOTE(review): a previous comment here said "Record only if there are
246       // multiple active nodes", but no such check exists — the array is
247       // allocated for any number of active nodes. Confirm whether a
248       // num_nodes > 1 guard was intended.
249       _obj_alloc_stat = NEW_C_HEAP_ARRAY(size_t, num_nodes, mtGC);
250       memset(_obj_alloc_stat, 0, sizeof(size_t) * num_nodes);
251     }
252   }
253 }
245
246 // Publish this thread's per-node allocation counters into the global
247 // NUMA statistics, attributed to the node the current thread runs on.
248 void G1ParScanThreadState::flush_numa_stats() {
249   // Counters are only allocated when stat collection is active; bail out early.
250   if (_obj_alloc_stat == NULL) {
251     return;
252   }
253   uint current_node = _numa->index_of_current_thread();
254   _numa->copy_statistics(G1NUMAStats::LocalObjProcessAtCopyToSurv, current_node, _obj_alloc_stat);
255 }
252
253 // Count one object processed for the given NUMA node, if stat
254 // collection is active (counters allocated in initialize_numa_stats()).
255 void G1ParScanThreadState::update_numa_stats(uint node_index) {
256   if (_obj_alloc_stat == NULL) {
257     return;
258   }
259   _obj_alloc_stat[node_index]++;
260 }
258
259 #endif // SHARE_GC_G1_G1PARSCANTHREADSTATE_INLINE_HPP
|