69 if (HasDecorator<decorators, IS_DEST_UNINITIALIZED>::value ||
70 HasDecorator<decorators, AS_NO_KEEPALIVE>::value) {
71 return;
72 }
73 if (ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
74 T heap_oop = RawAccess<>::oop_load(field);
75 if (!CompressedOops::is_null(heap_oop)) {
76 enqueue(CompressedOops::decode(heap_oop));
77 }
78 }
79 }
80
81 inline void ShenandoahBarrierSet::satb_enqueue(oop value) {
82 assert(value != NULL, "checked before");
83 if (ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
84 enqueue(value);
85 }
86 }
87
88 inline void ShenandoahBarrierSet::storeval_barrier(oop obj) {
89 if (obj != NULL && ShenandoahStoreValEnqueueBarrier) {
90 enqueue(obj);
91 }
92 }
93
94 inline void ShenandoahBarrierSet::keep_alive_if_weak(DecoratorSet decorators, oop value) {
95 assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
96 const bool on_strong_oop_ref = (decorators & ON_STRONG_OOP_REF) != 0;
97 const bool peek = (decorators & AS_NO_KEEPALIVE) != 0;
98 if (!peek && !on_strong_oop_ref) {
99 satb_enqueue(value);
100 }
101 }
102
103 template <DecoratorSet decorators>
104 inline void ShenandoahBarrierSet::keep_alive_if_weak(oop value) {
105 assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
106 if (!HasDecorator<decorators, ON_STRONG_OOP_REF>::value &&
107 !HasDecorator<decorators, AS_NO_KEEPALIVE>::value) {
108 satb_enqueue(value);
109 }
template <DecoratorSet decorators, typename BarrierSetT>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap_at(oop base, ptrdiff_t offset, oop new_value) {
  // Offset-based atomic-xchg entry point: resolve the field address from
  // base + offset and delegate to the address-based variant so both
  // flavors share a single barrier path.
  return oop_atomic_xchg_in_heap(AccessInternal::oop_field_addr<decorators>(base, offset), new_value);
}
236
237 // Clone barrier support
238 template <DecoratorSet decorators, typename BarrierSetT>
239 void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::clone_in_heap(oop src, oop dst, size_t size) {
240 if (ShenandoahCloneBarrier) {
241 ShenandoahBarrierSet::barrier_set()->clone_barrier_runtime(src);
242 }
243 Raw::clone(src, dst, size);
244 }
245
246 template <DecoratorSet decorators, typename BarrierSetT>
247 template <typename T>
248 bool ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
249 arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
250 size_t length) {
251 ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
252 bs->arraycopy_pre(arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw),
253 arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw),
254 length);
255 return Raw::oop_arraycopy_in_heap(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
256 }
257
258 template <class T, bool HAS_FWD, bool EVAC, bool ENQUEUE>
259 void ShenandoahBarrierSet::arraycopy_work(T* src, size_t count) {
260 assert(HAS_FWD == _heap->has_forwarded_objects(), "Forwarded object status is sane");
261
262 Thread* thread = Thread::current();
263 SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
264 ShenandoahMarkingContext* ctx = _heap->marking_context();
265 const ShenandoahCollectionSet* const cset = _heap->collection_set();
266 T* end = src + count;
267 for (T* elem_ptr = src; elem_ptr < end; elem_ptr++) {
268 T o = RawAccess<>::oop_load(elem_ptr);
269 if (!CompressedOops::is_null(o)) {
270 oop obj = CompressedOops::decode_not_null(o);
271 if (HAS_FWD && cset->is_in(obj)) {
272 oop fwd = resolve_forwarded_not_null(obj);
273 if (EVAC && obj == fwd) {
274 fwd = _heap->evacuate_object(obj, thread);
275 }
276 assert(obj != fwd || _heap->cancelled_gc(), "must be forwarded");
277 oop witness = ShenandoahHeap::cas_oop(fwd, elem_ptr, o);
278 obj = fwd;
279 }
280 if (ENQUEUE && !ctx->is_marked(obj)) {
281 queue.enqueue_known_active(obj);
282 }
283 }
284 }
285 }
286
template <class T>
void ShenandoahBarrierSet::arraycopy_pre_work(T* src, T* dst, size_t count) {
  // Pre-copy barrier for an oop arraycopy from src to dst.
  // While concurrent marking is running, SATB-enqueue the destination's
  // previous contents. Skipped when the destination array was allocated
  // after mark start (NOTE(review): presumably its old contents are not
  // part of the marking snapshot -- confirm against the marking code).
  if (_heap->is_concurrent_mark_in_progress() &&
      !_heap->marking_context()->allocated_after_mark_start(reinterpret_cast<HeapWord*>(dst))) {
    arraycopy_work<T, false, false, true>(dst, count);
  }

  // When forwarded objects may exist, fix up the source slots before the
  // copy is taken.
  if (_heap->has_forwarded_objects()) {
    arraycopy_update_impl(src, count);
  }
}
298
// Pre-copy barrier, uncompressed-oop flavor; delegates to the shared template.
void ShenandoahBarrierSet::arraycopy_pre(oop* src, oop* dst, size_t count) {
  arraycopy_pre_work(src, dst, count);
}
302
// Pre-copy barrier, compressed-oop flavor; delegates to the shared template.
void ShenandoahBarrierSet::arraycopy_pre(narrowOop* src, narrowOop* dst, size_t count) {
  arraycopy_pre_work(src, dst, count);
}
306
307 inline bool ShenandoahBarrierSet::skip_bulk_update(HeapWord* dst) {
308 return dst >= _heap->heap_region_containing(dst)->get_update_watermark();
309 }
310
template <class T>
void ShenandoahBarrierSet::arraycopy_update_impl(T* src, size_t count) {
  // Fix up references in [src, src + count) that point at forwarded objects.
  // Ranges above the region's update watermark are already clean.
  if (skip_bulk_update(reinterpret_cast<HeapWord*>(src))) return;
  if (_heap->is_evacuation_in_progress()) {
    // During evacuation the walk may itself evacuate objects (EVAC=true),
    // so it must run inside the evac-OOM scope.
    ShenandoahEvacOOMScope oom_evac;
    arraycopy_work<T, true, true, false>(src, count);
  } else if (_heap->has_forwarded_objects()) {
    // No evacuation running: only resolve already-forwarded objects.
    arraycopy_work<T, true, false, false>(src, count);
  }
}
321
// Bulk reference update, uncompressed-oop flavor; delegates to the template.
void ShenandoahBarrierSet::arraycopy_update(oop* src, size_t count) {
  arraycopy_update_impl(src, count);
}
325
// Bulk reference update, compressed-oop flavor; delegates to the template.
void ShenandoahBarrierSet::arraycopy_update(narrowOop* src, size_t count) {
  arraycopy_update_impl(src, count);
}
329
330 #endif // SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
|
69 if (HasDecorator<decorators, IS_DEST_UNINITIALIZED>::value ||
70 HasDecorator<decorators, AS_NO_KEEPALIVE>::value) {
71 return;
72 }
73 if (ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
74 T heap_oop = RawAccess<>::oop_load(field);
75 if (!CompressedOops::is_null(heap_oop)) {
76 enqueue(CompressedOops::decode(heap_oop));
77 }
78 }
79 }
80
81 inline void ShenandoahBarrierSet::satb_enqueue(oop value) {
82 assert(value != NULL, "checked before");
83 if (ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
84 enqueue(value);
85 }
86 }
87
88 inline void ShenandoahBarrierSet::storeval_barrier(oop obj) {
89 if (ShenandoahStoreValEnqueueBarrier && obj != NULL && _heap->is_concurrent_mark_in_progress()) {
90 enqueue(obj);
91 }
92 }
93
94 inline void ShenandoahBarrierSet::keep_alive_if_weak(DecoratorSet decorators, oop value) {
95 assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
96 const bool on_strong_oop_ref = (decorators & ON_STRONG_OOP_REF) != 0;
97 const bool peek = (decorators & AS_NO_KEEPALIVE) != 0;
98 if (!peek && !on_strong_oop_ref) {
99 satb_enqueue(value);
100 }
101 }
102
103 template <DecoratorSet decorators>
104 inline void ShenandoahBarrierSet::keep_alive_if_weak(oop value) {
105 assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
106 if (!HasDecorator<decorators, ON_STRONG_OOP_REF>::value &&
107 !HasDecorator<decorators, AS_NO_KEEPALIVE>::value) {
108 satb_enqueue(value);
109 }
template <DecoratorSet decorators, typename BarrierSetT>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap_at(oop base, ptrdiff_t offset, oop new_value) {
  // Translate (base, offset) into a field address and reuse the
  // address-based atomic-xchg barrier so both entry points share one path.
  return oop_atomic_xchg_in_heap(AccessInternal::oop_field_addr<decorators>(base, offset), new_value);
}
236
// Clone barrier support
template <DecoratorSet decorators, typename BarrierSetT>
void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::clone_in_heap(oop src, oop dst, size_t size) {
  // Run the runtime clone barrier on the source object before the raw
  // bitwise copy (NOTE(review): presumably fixes up from-space references
  // in src -- confirm against clone_barrier_runtime).
  if (ShenandoahCloneBarrier) {
    ShenandoahBarrierSet::barrier_set()->clone_barrier_runtime(src);
  }
  Raw::clone(src, dst, size);
}
245
246 template <DecoratorSet decorators, typename BarrierSetT>
247 template <typename T>
248 bool ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
249 arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
250 size_t length) {
251 ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
252 bs->arraycopy_barrier(arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw),
253 arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw),
254 length);
255 return Raw::oop_arraycopy_in_heap(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
256 }
257
258 template <class T, bool HAS_FWD, bool EVAC, bool ENQUEUE>
259 void ShenandoahBarrierSet::arraycopy_work(T* src, size_t count) {
260 assert(HAS_FWD == _heap->has_forwarded_objects(), "Forwarded object status is sane");
261
262 Thread* thread = Thread::current();
263 SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
264 ShenandoahMarkingContext* ctx = _heap->marking_context();
265 const ShenandoahCollectionSet* const cset = _heap->collection_set();
266 T* end = src + count;
267 for (T* elem_ptr = src; elem_ptr < end; elem_ptr++) {
268 T o = RawAccess<>::oop_load(elem_ptr);
269 if (!CompressedOops::is_null(o)) {
270 oop obj = CompressedOops::decode_not_null(o);
271 if (HAS_FWD && cset->is_in(obj)) {
272 oop fwd = resolve_forwarded_not_null(obj);
273 if (EVAC && obj == fwd) {
274 fwd = _heap->evacuate_object(obj, thread);
275 }
276 assert(obj != fwd || _heap->cancelled_gc(), "must be forwarded");
277 oop witness = ShenandoahHeap::cas_oop(fwd, elem_ptr, o);
278 obj = fwd;
279 }
280 if (ENQUEUE && !ctx->is_marked(obj)) {
281 queue.enqueue_known_active(obj);
282 }
283 }
284 }
285 }
286
template <class T>
void ShenandoahBarrierSet::arraycopy_barrier(T* src, T* dst, size_t count) {
  // Phase-dependent arraycopy barrier dispatch. The branch order encodes
  // phase priority: marking work takes precedence over evacuation, which
  // takes precedence over update-refs.
  if (count == 0) {
    // Nothing to copy, nothing to do.
    return;
  }
  int gc_state = _heap->gc_state();
  if ((gc_state & ShenandoahHeap::MARKING) != 0) {
    arraycopy_marking(src, dst, count);
  } else if ((gc_state & ShenandoahHeap::EVACUATION) != 0) {
    arraycopy_evacuation(src, count);
  } else if ((gc_state & ShenandoahHeap::UPDATEREFS) != 0) {
    arraycopy_update(src, count);
  }
}
301
302 template <class T>
303 void ShenandoahBarrierSet::arraycopy_marking(T* src, T* dst, size_t count) {
304 assert(_heap->is_concurrent_mark_in_progress(), "only during marking");
305 T* array = ShenandoahSATBBarrier ? dst : src;
306 if (!_heap->marking_context()->allocated_after_mark_start(reinterpret_cast<HeapWord*>(array))) {
307 arraycopy_work<T, false, false, true>(array, count);
308 }
309 }
310
311 inline bool ShenandoahBarrierSet::need_bulk_update(HeapWord* ary) {
312 return ary < _heap->heap_region_containing(ary)->get_update_watermark();
313 }
314
template <class T>
void ShenandoahBarrierSet::arraycopy_evacuation(T* src, size_t count) {
  assert(_heap->is_evacuation_in_progress(), "only during evacuation");
  // Ranges above the update watermark are already clean and are skipped.
  if (need_bulk_update(reinterpret_cast<HeapWord*>(src))) {
    // The walk may evacuate objects (EVAC=true), so it must run inside
    // the evac-OOM scope.
    ShenandoahEvacOOMScope oom_evac;
    arraycopy_work<T, true, true, false>(src, count);
  }
}
323
324 template <class T>
325 void ShenandoahBarrierSet::arraycopy_update(T* src, size_t count) {
326 assert(_heap->is_update_refs_in_progress(), "only during update-refs");
327 if (need_bulk_update(reinterpret_cast<HeapWord*>(src))) {
328 arraycopy_work<T, true, false, false>(src, count);
329 }
330 }
331
332 #endif // SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
|