25 #ifndef SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
26 #define SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
27
28 #include "gc/shared/barrierSet.hpp"
29 #include "gc/shenandoah/shenandoahAsserts.hpp"
30 #include "gc/shenandoah/shenandoahBarrierSet.hpp"
31 #include "gc/shenandoah/shenandoahCollectionSet.inline.hpp"
32 #include "gc/shenandoah/shenandoahForwarding.inline.hpp"
33 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
34 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
35 #include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
36 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
37 #include "memory/iterator.inline.hpp"
38 #include "oops/oop.inline.hpp"
39
// Returns the forwardee of p by delegating to ShenandoahForwarding.
// Precondition: p must be non-null; use resolve_forwarded() when the
// argument may be NULL.
inline oop ShenandoahBarrierSet::resolve_forwarded_not_null(oop p) {
  return ShenandoahForwarding::get_forwardee(p);
}
43
44 inline oop ShenandoahBarrierSet::resolve_forwarded(oop p) {
45 if (((HeapWord*) p) != NULL) {
46 return resolve_forwarded_not_null(p);
47 } else {
48 return p;
49 }
50 }
51
52 inline void ShenandoahBarrierSet::enqueue(oop obj) {
53 shenandoah_assert_not_forwarded_if(NULL, obj, _heap->is_concurrent_traversal_in_progress());
54 assert(_satb_mark_queue_set.is_active(), "only get here when SATB active");
55
56 // Filter marked objects before hitting the SATB queues. The same predicate would
57 // be used by SATBMQ::filter to eliminate already marked objects downstream, but
58 // filtering here helps to avoid wasteful SATB queueing work to begin with.
59 if (!_heap->requires_marking<false>(obj)) return;
60
61 ShenandoahThreadLocalData::satb_mark_queue(Thread::current()).enqueue_known_active(obj);
62 }
63
64 template <DecoratorSet decorators, typename T>
65 inline void ShenandoahBarrierSet::satb_barrier(T *field) {
251 arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
252 size_t length) {
253 ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
254 bs->arraycopy_pre(arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw),
255 arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw),
256 length);
257 return Raw::oop_arraycopy_in_heap(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
258 }
259
260 template <class T, bool HAS_FWD, bool EVAC, bool ENQUEUE>
261 void ShenandoahBarrierSet::arraycopy_work(T* src, size_t count) {
262 Thread* thread = Thread::current();
263 SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
264 ShenandoahMarkingContext* ctx = _heap->marking_context();
265 const ShenandoahCollectionSet* const cset = _heap->collection_set();
266 T* end = src + count;
267 for (T* elem_ptr = src; elem_ptr < end; elem_ptr++) {
268 T o = RawAccess<>::oop_load(elem_ptr);
269 if (!CompressedOops::is_null(o)) {
270 oop obj = CompressedOops::decode_not_null(o);
271 if (HAS_FWD && cset->is_in((HeapWord *) obj)) {
272 assert(_heap->has_forwarded_objects(), "only get here with forwarded objects");
273 oop fwd = resolve_forwarded_not_null(obj);
274 if (EVAC && obj == fwd) {
275 fwd = _heap->evacuate_object(obj, thread);
276 }
277 assert(obj != fwd || _heap->cancelled_gc(), "must be forwarded");
278 oop witness = ShenandoahHeap::cas_oop(fwd, elem_ptr, o);
279 obj = fwd;
280 }
281 if (ENQUEUE && !ctx->is_marked(obj)) {
282 queue.enqueue_known_active(obj);
283 }
284 }
285 }
286 }
287
288 template <class T>
289 void ShenandoahBarrierSet::arraycopy_pre_work(T* src, T* dst, size_t count) {
290 if (_heap->is_concurrent_mark_in_progress()) {
291 if (_heap->has_forwarded_objects()) {
|
25 #ifndef SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
26 #define SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
27
28 #include "gc/shared/barrierSet.hpp"
29 #include "gc/shenandoah/shenandoahAsserts.hpp"
30 #include "gc/shenandoah/shenandoahBarrierSet.hpp"
31 #include "gc/shenandoah/shenandoahCollectionSet.inline.hpp"
32 #include "gc/shenandoah/shenandoahForwarding.inline.hpp"
33 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
34 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
35 #include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
36 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
37 #include "memory/iterator.inline.hpp"
38 #include "oops/oop.inline.hpp"
39
// Returns the forwardee of p by delegating to ShenandoahForwarding.
// Precondition: p must be non-null; use resolve_forwarded() when the
// argument may be NULL.
inline oop ShenandoahBarrierSet::resolve_forwarded_not_null(oop p) {
  return ShenandoahForwarding::get_forwardee(p);
}
43
44 inline oop ShenandoahBarrierSet::resolve_forwarded(oop p) {
45 if (cast_from_oop<HeapWord*>(p) != NULL) {
46 return resolve_forwarded_not_null(p);
47 } else {
48 return p;
49 }
50 }
51
52 inline void ShenandoahBarrierSet::enqueue(oop obj) {
53 shenandoah_assert_not_forwarded_if(NULL, obj, _heap->is_concurrent_traversal_in_progress());
54 assert(_satb_mark_queue_set.is_active(), "only get here when SATB active");
55
56 // Filter marked objects before hitting the SATB queues. The same predicate would
57 // be used by SATBMQ::filter to eliminate already marked objects downstream, but
58 // filtering here helps to avoid wasteful SATB queueing work to begin with.
59 if (!_heap->requires_marking<false>(obj)) return;
60
61 ShenandoahThreadLocalData::satb_mark_queue(Thread::current()).enqueue_known_active(obj);
62 }
63
64 template <DecoratorSet decorators, typename T>
65 inline void ShenandoahBarrierSet::satb_barrier(T *field) {
251 arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
252 size_t length) {
253 ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
254 bs->arraycopy_pre(arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw),
255 arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw),
256 length);
257 return Raw::oop_arraycopy_in_heap(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
258 }
259
260 template <class T, bool HAS_FWD, bool EVAC, bool ENQUEUE>
261 void ShenandoahBarrierSet::arraycopy_work(T* src, size_t count) {
262 Thread* thread = Thread::current();
263 SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
264 ShenandoahMarkingContext* ctx = _heap->marking_context();
265 const ShenandoahCollectionSet* const cset = _heap->collection_set();
266 T* end = src + count;
267 for (T* elem_ptr = src; elem_ptr < end; elem_ptr++) {
268 T o = RawAccess<>::oop_load(elem_ptr);
269 if (!CompressedOops::is_null(o)) {
270 oop obj = CompressedOops::decode_not_null(o);
271 if (HAS_FWD && cset->is_in(cast_from_oop<HeapWord *>(obj))) {
272 assert(_heap->has_forwarded_objects(), "only get here with forwarded objects");
273 oop fwd = resolve_forwarded_not_null(obj);
274 if (EVAC && obj == fwd) {
275 fwd = _heap->evacuate_object(obj, thread);
276 }
277 assert(obj != fwd || _heap->cancelled_gc(), "must be forwarded");
278 oop witness = ShenandoahHeap::cas_oop(fwd, elem_ptr, o);
279 obj = fwd;
280 }
281 if (ENQUEUE && !ctx->is_marked(obj)) {
282 queue.enqueue_known_active(obj);
283 }
284 }
285 }
286 }
287
288 template <class T>
289 void ShenandoahBarrierSet::arraycopy_pre_work(T* src, T* dst, size_t count) {
290 if (_heap->is_concurrent_mark_in_progress()) {
291 if (_heap->has_forwarded_objects()) {
|