55 assert((obj == RawAccess<>::oop_load(p)) ||
56 (obj->is_forwarded() &&
57 obj->forwardee() == RawAccess<>::oop_load(p)),
58 "p should still be pointing to obj or to its forwardee");
59
60 _par_scan_state->push_on_queue(p);
61 }
62
63 template <class T>
64 inline void G1ScanClosureBase::handle_non_cset_obj_common(InCSetState const state, T* p, oop const obj) {
65 if (state.is_humongous()) {
66 _g1h->set_humongous_is_live(obj);
67 }
68 }
69
// Drain part of the per-thread work queue by delegating to the
// G1ParScanThreadState, bounding queue growth between scans.
70 inline void G1ScanClosureBase::trim_queue_partially() {
71 _par_scan_state->trim_queue_partially();
72 }
73
74 template <class T>
75 inline void G1ScanEvacuatedObjClosure::do_oop_nv(T* p) {
76 T heap_oop = RawAccess<>::oop_load(p);
77
78 if (CompressedOops::is_null(heap_oop)) {
79 return;
80 }
81 oop obj = CompressedOops::decode_not_null(heap_oop);
82 const InCSetState state = _g1h->in_cset_state(obj);
83 if (state.is_in_cset()) {
84 prefetch_and_push(p, obj);
85 } else {
86 if (HeapRegion::is_in_same_region(p, obj)) {
87 return;
88 }
89 handle_non_cset_obj_common(state, p, obj);
90 _par_scan_state->update_rs(_from, p, obj);
91 }
92 }
93
// Concurrent-marking closure: hand the reference slot to the marking
// task, which performs the filtering and marking work.
94 template <class T>
95 inline void G1CMOopClosure::do_oop_nv(T* p) {
96 _task->deal_with_reference(p);
97 }
98
99 template <class T>
100 inline void G1RootRegionScanClosure::do_oop_nv(T* p) {
101 T heap_oop = RawAccess<MO_VOLATILE>::oop_load(p);
102 if (CompressedOops::is_null(heap_oop)) {
103 return;
104 }
105 oop obj = CompressedOops::decode_not_null(heap_oop);
106 _cm->mark_in_next_bitmap(_worker_id, obj);
107 }
108
// Debug-only sanity checks on a reference slot p and the object obj it
// points to, performed while refining remembered-set entries. Compiles to
// nothing in product builds (entire body under #ifdef ASSERT).
109 template <class T>
110 inline static void check_obj_during_refinement(T* p, oop const obj) {
111 #ifdef ASSERT
112 G1CollectedHeap* g1h = G1CollectedHeap::heap();
113 // can't do because of races
114 // assert(oopDesc::is_oop_or_null(obj), "expected an oop");
115 assert(check_obj_alignment(obj), "not oop aligned");
116 assert(g1h->is_in_reserved(obj), "must be in heap");
117 
118 HeapRegion* from = g1h->heap_region_containing(p);
119 
120 assert(from != NULL, "from region must be non-NULL");
// p must lie within its containing region, or be part of a humongous
// object with a consistent start region. NOTE(review): the second
// disjunct re-queries heap_region_containing(p), which is the region
// already held in 'from', so the humongous clauses compare 'from' with
// itself — confirm whether one query was meant to use obj instead.
121 assert(from->is_in_reserved(p) ||
122 (from->is_humongous() &&
123 g1h->heap_region_containing(p)->is_humongous() &&
124 from->humongous_start_region() == g1h->heap_region_containing(p)->humongous_start_region()),
125 "p " PTR_FORMAT " is not in the same region %u or part of the correct humongous object starting at region %u.",
126 p2i(p), from->hrm_index(), from->humongous_start_region()->hrm_index());
127 #endif // ASSERT
128 }
129
// Concurrent refinement: for a cross-region reference found while
// processing a dirty card, record it in the referenced region's
// remembered set (if that remset is currently tracked).
130 template <class T>
131 inline void G1ConcurrentRefineOopClosure::do_oop_nv(T* p) {
// Volatile load: Java threads may be storing to *p concurrently.
132 T o = RawAccess<MO_VOLATILE>::oop_load(p);
133 if (CompressedOops::is_null(o)) {
134 return;
135 }
136 oop obj = CompressedOops::decode_not_null(o);
137 
138 check_obj_during_refinement(p, obj);
139 
140 if (HeapRegion::is_in_same_region(p, obj)) {
141 // Normally this closure should only be called with cross-region references.
142 // But since Java threads are manipulating the references concurrently and we
143 // reload the values things may have changed.
144 // Also this check lets slip through references from a humongous continues region
145 // to its humongous start region, as they are in different regions, and adds a
146 // remembered set entry. This is benign (apart from memory usage), as we never
147 // try to either evacuate or eager reclaim humonguous arrays of j.l.O.
148 return;
149 }
150 
151 HeapRegionRemSet* to_rem_set = _g1h->heap_region_containing(obj)->rem_set();
152 
153 assert(to_rem_set != NULL, "Need per-region 'into' remsets.");
// Only add entries for regions whose remsets are being tracked.
154 if (to_rem_set->is_tracked()) {
155 to_rem_set->add_reference(p, _worker_i);
156 }
157 }
158
// Remembered-set update during evacuation: the source slot p is always
// outside the collection set (asserted below), so any in-cset target is
// necessarily a cross-region reference and gets queued for copying.
159 template <class T>
160 inline void G1ScanObjsDuringUpdateRSClosure::do_oop_nv(T* p) {
161 T o = RawAccess<>::oop_load(p);
162 if (CompressedOops::is_null(o)) {
163 return;
164 }
165 oop obj = CompressedOops::decode_not_null(o);
166 
167 check_obj_during_refinement(p, obj);
168 
169 assert(!_g1h->is_in_cset((HeapWord*)p), "Oop originates from " PTR_FORMAT " (region: %u) which is in the collection set.", p2i(p), _g1h->addr_to_region((HeapWord*)p));
170 const InCSetState state = _g1h->in_cset_state(obj);
171 if (state.is_in_cset()) {
172 // Since the source is always from outside the collection set, here we implicitly know
173 // that this is a cross-region reference too.
174 prefetch_and_push(p, obj);
175 } else {
176 HeapRegion* to = _g1h->heap_region_containing(obj);
// Same-region references need no remembered-set entry.
177 if (_from == to) {
178 return;
179 }
180 handle_non_cset_obj_common(state, p, obj);
181 to->rem_set()->add_reference(p, _worker_i);
182 }
183 }
184
185 template <class T>
186 inline void G1ScanObjsDuringScanRSClosure::do_oop_nv(T* p) {
187 T heap_oop = RawAccess<>::oop_load(p);
188 if (CompressedOops::is_null(heap_oop)) {
189 return;
190 }
191 oop obj = CompressedOops::decode_not_null(heap_oop);
192
193 const InCSetState state = _g1h->in_cset_state(obj);
194 if (state.is_in_cset()) {
195 prefetch_and_push(p, obj);
196 } else {
197 if (HeapRegion::is_in_same_region(p, obj)) {
198 return;
199 }
200 handle_non_cset_obj_common(state, p, obj);
201 }
202 }
203
204 void G1ParCopyHelper::do_cld_barrier(oop new_obj) {
205 if (_g1h->heap_region_containing(new_obj)->is_young()) {
206 _scanned_cld->record_modified_oops();
263 mark_forwarded_object(obj, forwardee);
264 }
265
266 if (barrier == G1BarrierCLD) {
267 do_cld_barrier(forwardee);
268 }
269 } else {
270 if (state.is_humongous()) {
271 _g1h->set_humongous_is_live(obj);
272 }
273
274 // The object is not in collection set. If we're a root scanning
275 // closure during an initial mark pause then attempt to mark the object.
276 if (do_mark_object == G1MarkFromRoot) {
277 mark_object(obj);
278 }
279 }
280 trim_queue_partially();
281 }
282
283 template <class T> void G1RebuildRemSetClosure::do_oop_nv(T* p) {
284 oop const obj = RawAccess<MO_VOLATILE>::oop_load(p);
285 if (obj == NULL) {
286 return;
287 }
288
289 if (HeapRegion::is_in_same_region(p, obj)) {
290 return;
291 }
292
293 HeapRegion* to = _g1h->heap_region_containing(obj);
294 HeapRegionRemSet* rem_set = to->rem_set();
295 rem_set->add_reference(p, _worker_id);
296 }
297
298 #endif // SHARE_VM_GC_G1_G1OOPCLOSURES_INLINE_HPP
|
55 assert((obj == RawAccess<>::oop_load(p)) ||
56 (obj->is_forwarded() &&
57 obj->forwardee() == RawAccess<>::oop_load(p)),
58 "p should still be pointing to obj or to its forwardee");
59
60 _par_scan_state->push_on_queue(p);
61 }
62
63 template <class T>
64 inline void G1ScanClosureBase::handle_non_cset_obj_common(InCSetState const state, T* p, oop const obj) {
65 if (state.is_humongous()) {
66 _g1h->set_humongous_is_live(obj);
67 }
68 }
69
// Drain part of the per-thread work queue by delegating to the
// G1ParScanThreadState, bounding queue growth between scans.
70 inline void G1ScanClosureBase::trim_queue_partially() {
71 _par_scan_state->trim_queue_partially();
72 }
73
74 template <class T>
75 inline void G1ScanEvacuatedObjClosure::do_oop_work(T* p) {
76 T heap_oop = RawAccess<>::oop_load(p);
77
78 if (CompressedOops::is_null(heap_oop)) {
79 return;
80 }
81 oop obj = CompressedOops::decode_not_null(heap_oop);
82 const InCSetState state = _g1h->in_cset_state(obj);
83 if (state.is_in_cset()) {
84 prefetch_and_push(p, obj);
85 } else {
86 if (HeapRegion::is_in_same_region(p, obj)) {
87 return;
88 }
89 handle_non_cset_obj_common(state, p, obj);
90 _par_scan_state->update_rs(_from, p, obj);
91 }
92 }
93
// Concurrent-marking closure: hand the reference slot to the marking
// task, which performs the filtering and marking work.
94 template <class T>
95 inline void G1CMOopClosure::do_oop_work(T* p) {
96 _task->deal_with_reference(p);
97 }
98
99 template <class T>
100 inline void G1RootRegionScanClosure::do_oop_work(T* p) {
101 T heap_oop = RawAccess<MO_VOLATILE>::oop_load(p);
102 if (CompressedOops::is_null(heap_oop)) {
103 return;
104 }
105 oop obj = CompressedOops::decode_not_null(heap_oop);
106 _cm->mark_in_next_bitmap(_worker_id, obj);
107 }
108
// Debug-only sanity checks on a reference slot p and the object obj it
// points to, performed while refining remembered-set entries. Compiles to
// nothing in product builds (entire body under #ifdef ASSERT).
109 template <class T>
110 inline static void check_obj_during_refinement(T* p, oop const obj) {
111 #ifdef ASSERT
112 G1CollectedHeap* g1h = G1CollectedHeap::heap();
113 // can't do because of races
114 // assert(oopDesc::is_oop_or_null(obj), "expected an oop");
115 assert(check_obj_alignment(obj), "not oop aligned");
116 assert(g1h->is_in_reserved(obj), "must be in heap");
117 
118 HeapRegion* from = g1h->heap_region_containing(p);
119 
120 assert(from != NULL, "from region must be non-NULL");
// p must lie within its containing region, or be part of a humongous
// object with a consistent start region. NOTE(review): the second
// disjunct re-queries heap_region_containing(p), which is the region
// already held in 'from', so the humongous clauses compare 'from' with
// itself — confirm whether one query was meant to use obj instead.
121 assert(from->is_in_reserved(p) ||
122 (from->is_humongous() &&
123 g1h->heap_region_containing(p)->is_humongous() &&
124 from->humongous_start_region() == g1h->heap_region_containing(p)->humongous_start_region()),
125 "p " PTR_FORMAT " is not in the same region %u or part of the correct humongous object starting at region %u.",
126 p2i(p), from->hrm_index(), from->humongous_start_region()->hrm_index());
127 #endif // ASSERT
128 }
129
// Concurrent refinement: for a cross-region reference found while
// processing a dirty card, record it in the referenced region's
// remembered set (if that remset is currently tracked).
130 template <class T>
131 inline void G1ConcurrentRefineOopClosure::do_oop_work(T* p) {
// Volatile load: Java threads may be storing to *p concurrently.
132 T o = RawAccess<MO_VOLATILE>::oop_load(p);
133 if (CompressedOops::is_null(o)) {
134 return;
135 }
136 oop obj = CompressedOops::decode_not_null(o);
137 
138 check_obj_during_refinement(p, obj);
139 
140 if (HeapRegion::is_in_same_region(p, obj)) {
141 // Normally this closure should only be called with cross-region references.
142 // But since Java threads are manipulating the references concurrently and we
143 // reload the values things may have changed.
144 // Also this check lets slip through references from a humongous continues region
145 // to its humongous start region, as they are in different regions, and adds a
146 // remembered set entry. This is benign (apart from memory usage), as we never
147 // try to either evacuate or eager reclaim humonguous arrays of j.l.O.
148 return;
149 }
150 
151 HeapRegionRemSet* to_rem_set = _g1h->heap_region_containing(obj)->rem_set();
152 
153 assert(to_rem_set != NULL, "Need per-region 'into' remsets.");
// Only add entries for regions whose remsets are being tracked.
154 if (to_rem_set->is_tracked()) {
155 to_rem_set->add_reference(p, _worker_i);
156 }
157 }
158
// Remembered-set update during evacuation: the source slot p is always
// outside the collection set (asserted below), so any in-cset target is
// necessarily a cross-region reference and gets queued for copying.
159 template <class T>
160 inline void G1ScanObjsDuringUpdateRSClosure::do_oop_work(T* p) {
161 T o = RawAccess<>::oop_load(p);
162 if (CompressedOops::is_null(o)) {
163 return;
164 }
165 oop obj = CompressedOops::decode_not_null(o);
166 
167 check_obj_during_refinement(p, obj);
168 
169 assert(!_g1h->is_in_cset((HeapWord*)p), "Oop originates from " PTR_FORMAT " (region: %u) which is in the collection set.", p2i(p), _g1h->addr_to_region((HeapWord*)p));
170 const InCSetState state = _g1h->in_cset_state(obj);
171 if (state.is_in_cset()) {
172 // Since the source is always from outside the collection set, here we implicitly know
173 // that this is a cross-region reference too.
174 prefetch_and_push(p, obj);
175 } else {
176 HeapRegion* to = _g1h->heap_region_containing(obj);
// Same-region references need no remembered-set entry.
177 if (_from == to) {
178 return;
179 }
180 handle_non_cset_obj_common(state, p, obj);
181 to->rem_set()->add_reference(p, _worker_i);
182 }
183 }
184
185 template <class T>
186 inline void G1ScanObjsDuringScanRSClosure::do_oop_work(T* p) {
187 T heap_oop = RawAccess<>::oop_load(p);
188 if (CompressedOops::is_null(heap_oop)) {
189 return;
190 }
191 oop obj = CompressedOops::decode_not_null(heap_oop);
192
193 const InCSetState state = _g1h->in_cset_state(obj);
194 if (state.is_in_cset()) {
195 prefetch_and_push(p, obj);
196 } else {
197 if (HeapRegion::is_in_same_region(p, obj)) {
198 return;
199 }
200 handle_non_cset_obj_common(state, p, obj);
201 }
202 }
203
204 void G1ParCopyHelper::do_cld_barrier(oop new_obj) {
205 if (_g1h->heap_region_containing(new_obj)->is_young()) {
206 _scanned_cld->record_modified_oops();
263 mark_forwarded_object(obj, forwardee);
264 }
265
266 if (barrier == G1BarrierCLD) {
267 do_cld_barrier(forwardee);
268 }
269 } else {
270 if (state.is_humongous()) {
271 _g1h->set_humongous_is_live(obj);
272 }
273
274 // The object is not in collection set. If we're a root scanning
275 // closure during an initial mark pause then attempt to mark the object.
276 if (do_mark_object == G1MarkFromRoot) {
277 mark_object(obj);
278 }
279 }
280 trim_queue_partially();
281 }
282
283 template <class T> void G1RebuildRemSetClosure::do_oop_work(T* p) {
284 oop const obj = RawAccess<MO_VOLATILE>::oop_load(p);
285 if (obj == NULL) {
286 return;
287 }
288
289 if (HeapRegion::is_in_same_region(p, obj)) {
290 return;
291 }
292
293 HeapRegion* to = _g1h->heap_region_containing(obj);
294 HeapRegionRemSet* rem_set = to->rem_set();
295 rem_set->add_reference(p, _worker_id);
296 }
297
298 #endif // SHARE_VM_GC_G1_G1OOPCLOSURES_INLINE_HPP
|