/*
 * Copyright (c) 2015, 2020, Red Hat, Inc. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
#define SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP

#include "gc/shared/barrierSet.hpp"
#include "gc/shenandoah/shenandoahAsserts.hpp"
#include "gc/shenandoah/shenandoahBarrierSet.hpp"
#include "gc/shenandoah/shenandoahCollectionSet.inline.hpp"
#include "gc/shenandoah/shenandoahForwarding.inline.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahHeapRegion.hpp"
#include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
#include "gc/shenandoah/shenandoahThreadLocalData.hpp"
#include "memory/iterator.inline.hpp"
#include "oops/oop.inline.hpp"

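// Resolve the forwardee of an object that is known to be non-NULL. If the object
// has not been forwarded, the object itself is returned.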
inline oop ShenandoahBarrierSet::resolve_forwarded_not_null(oop p) {
  return ShenandoahForwarding::get_forwardee(p);
}

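// NULL-tolerant variant of the above: NULL is passed through unchanged.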
inline oop ShenandoahBarrierSet::resolve_forwarded(oop p) {
  if (p != NULL) {
    return resolve_forwarded_not_null(p);
  } else {
    return p;
  }
}

inline oop ShenandoahBarrierSet::resolve_forwarded_not_null_mutator(oop p) {
  return ShenandoahForwarding::get_forwardee_mutator(p);
}

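// Record an object in the current thread's SATB mark queue so that concurrent
// marking preserves the snapshot-at-the-beginning invariant. Objects that are
// already marked are filtered out before they hit the queue.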
inline void ShenandoahBarrierSet::enqueue(oop obj) {
  shenandoah_assert_not_forwarded_if(NULL, obj, _heap->is_concurrent_traversal_in_progress());
  assert(_satb_mark_queue_set.is_active(), "only get here when SATB active");

  // Filter marked objects before hitting the SATB queues. The same predicate would
  // be used by SATBMQ::filter to eliminate already marked objects downstream, but
  // filtering here helps to avoid wasteful SATB queueing work to begin with.
  if (!_heap->requires_marking<false>(obj)) return;

  ShenandoahThreadLocalData::satb_mark_queue(Thread::current()).enqueue_known_active(obj);
}

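// SATB pre-write barrier: while concurrent marking is in progress, capture the
// previous value of the field before it is overwritten. Uninitialized destinations
// and no-keepalive accesses have no previous value worth preserving and are skipped.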
template <DecoratorSet decorators, typename T>
inline void ShenandoahBarrierSet::satb_barrier(T *field) {
  if (HasDecorator<decorators, IS_DEST_UNINITIALIZED>::value ||
      HasDecorator<decorators, AS_NO_KEEPALIVE>::value) {
    return;
  }
  if (ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
    T heap_oop = RawAccess<>::oop_load(field);
    if (!CompressedOops::is_null(heap_oop)) {
      enqueue(CompressedOops::decode(heap_oop));
    }
  }
}

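// Enqueue a known non-NULL value for SATB marking, but only while the SATB barrier
// is enabled and concurrent marking is in progress.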
inline void ShenandoahBarrierSet::satb_enqueue(oop value) {
  assert(value != NULL, "checked before");
  if (ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
    enqueue(value);
  }
}

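// Storeval barrier used by the traversal GC: newly stored references are enqueued
// so that the concurrent traversal visits them as well.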
inline void ShenandoahBarrierSet::storeval_barrier(oop obj) {
  if (obj != NULL && ShenandoahStoreValEnqueueBarrier && _heap->is_concurrent_traversal_in_progress()) {
    enqueue(obj);
  }
}

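// Keep-alive barrier for reference loads: unless the access is a strong load or an
// explicit no-keepalive peek, the loaded value is SATB-enqueued so that the
// concurrent marker treats it as live.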
inline void ShenandoahBarrierSet::keep_alive_if_weak(DecoratorSet decorators, oop value) {
  assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
  const bool on_strong_oop_ref = (decorators & ON_STRONG_OOP_REF) != 0;
  const bool peek              = (decorators & AS_NO_KEEPALIVE) != 0;
  if (!peek && !on_strong_oop_ref) {
    satb_enqueue(value);
  }
}

template <DecoratorSet decorators>
inline void ShenandoahBarrierSet::keep_alive_if_weak(oop value) {
  assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
  if (!HasDecorator<decorators, ON_STRONG_OOP_REF>::value &&
      !HasDecorator<decorators, AS_NO_KEEPALIVE>::value) {
    satb_enqueue(value);
  }
}

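// Access API load barriers. A non-NULL loaded reference is passed through the
// load-reference barrier, so callers observe the up-to-date (to-space) copy, and
// non-strong loads additionally apply the keep-alive barrier above.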
template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_not_in_heap(T* addr) {
  oop value = Raw::oop_load_not_in_heap(addr);
  if (value != NULL) {
    ShenandoahBarrierSet *const bs = ShenandoahBarrierSet::barrier_set();
    value = bs->load_reference_barrier_native(value, addr);
    if (value != NULL) {
      bs->keep_alive_if_weak<decorators>(value);
    }
  }
  return value;
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap(T* addr) {
  oop value = Raw::oop_load_in_heap(addr);
  if (value != NULL) {
    ShenandoahBarrierSet *const bs = ShenandoahBarrierSet::barrier_set();
    value = bs->load_reference_barrier_not_null(value);
    bs->keep_alive_if_weak<decorators>(value);
  }
  return value;
}

template <DecoratorSet decorators, typename BarrierSetT>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap_at(oop base, ptrdiff_t offset) {
  oop value = Raw::oop_load_in_heap_at(base, offset);
  if (value != NULL) {
    ShenandoahBarrierSet *const bs = ShenandoahBarrierSet::barrier_set();
    value = bs->load_reference_barrier_not_null(value);
    bs->keep_alive_if_weak(AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset),
                           value);
  }
  return value;
}

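// Access API store barriers: apply the storeval barrier to the new value and the
// SATB pre-write barrier to the destination field, then perform the raw store.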
template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_not_in_heap(T* addr, oop value) {
  shenandoah_assert_marked_if(NULL, value, !CompressedOops::is_null(value) && ShenandoahHeap::heap()->is_evacuation_in_progress());
  ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
  bs->storeval_barrier(value);
  bs->satb_barrier<decorators>(addr);
  Raw::oop_store(addr, value);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap(T* addr, oop value) {
  shenandoah_assert_not_in_cset_loc_except(addr, ShenandoahHeap::heap()->cancelled_gc());
  shenandoah_assert_not_forwarded_except  (addr, value, value == NULL || ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahHeap::heap()->is_concurrent_mark_in_progress());
  shenandoah_assert_not_in_cset_except    (addr, value, value == NULL || ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahHeap::heap()->is_concurrent_mark_in_progress());

  oop_store_not_in_heap(addr, value);
}

template <DecoratorSet decorators, typename BarrierSetT>
inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap_at(oop base, ptrdiff_t offset, oop value) {
  oop_store_in_heap(AccessInternal::oop_field_addr<decorators>(base, offset), value);
}

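// Atomic compare-and-swap. The raw CAS is retried as long as it fails only because
// the expected and the observed value are different copies of the same object,
// i.e. both resolve to the same forwardee.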
template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_not_in_heap(T* addr, oop compare_value, oop new_value) {
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  bs->storeval_barrier(new_value);

  oop res;
  oop expected = compare_value;
  do {
    compare_value = expected;
    res = Raw::oop_atomic_cmpxchg(addr, compare_value, new_value);
    expected = res;
  } while ((compare_value != expected) && (resolve_forwarded(compare_value) == resolve_forwarded(expected)));

  // Note: We don't need a keep-alive-barrier here. We already enqueue any loaded reference for SATB anyway,
  // because it must be the previous value.
  if (res != NULL) {
    res = ShenandoahBarrierSet::barrier_set()->load_reference_barrier_not_null(res);
    bs->satb_enqueue(res);
  }
  return res;
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap(T* addr, oop compare_value, oop new_value) {
  return oop_atomic_cmpxchg_not_in_heap(addr, compare_value, new_value);
}

template <DecoratorSet decorators, typename BarrierSetT>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap_at(oop base, ptrdiff_t offset, oop compare_value, oop new_value) {
  return oop_atomic_cmpxchg_in_heap(AccessInternal::oop_field_addr<decorators>(base, offset), compare_value, new_value);
}

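// Atomic exchange: the previous value is passed through the load-reference barrier
// and SATB-enqueued, mirroring the CAS path above.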
template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_not_in_heap(T* addr, oop new_value) {
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  bs->storeval_barrier(new_value);

  oop previous = Raw::oop_atomic_xchg(addr, new_value);

  // Note: We don't need a keep-alive-barrier here. We already enqueue any loaded reference for SATB anyway,
  // because it must be the previous value.
  if (previous != NULL) {
    previous = ShenandoahBarrierSet::barrier_set()->load_reference_barrier_not_null(previous);
    bs->satb_enqueue(previous);
  }
  return previous;
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap(T* addr, oop new_value) {
  return oop_atomic_xchg_not_in_heap(addr, new_value);
}

template <DecoratorSet decorators, typename BarrierSetT>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap_at(oop base, ptrdiff_t offset, oop new_value) {
  return oop_atomic_xchg_in_heap(AccessInternal::oop_field_addr<decorators>(base, offset), new_value);
}

// Clone barrier support
template <DecoratorSet decorators, typename BarrierSetT>
void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::clone_in_heap(oop src, oop dst, size_t size) {
  if (ShenandoahCloneBarrier) {
    ShenandoahBarrierSet::barrier_set()->clone_barrier_runtime(src);
  }
  Raw::clone(src, dst, size);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
bool ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                                                                         arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                                                                         size_t length) {
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  bs->arraycopy_pre(arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw),
                    arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw),
                    length);
  return Raw::oop_arraycopy_in_heap(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
}

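// Workhorse for bulk updates of oop arrays. The template flags select the per-element
// work: HAS_FWD - forwarded objects may exist; EVAC - evacuate collection-set objects
// that have not been copied yet; ENQUEUE - SATB-enqueue objects that are not yet marked.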
template <class T, bool HAS_FWD, bool EVAC, bool ENQUEUE>
void ShenandoahBarrierSet::arraycopy_work(T* src, size_t count) {
  assert(HAS_FWD == _heap->has_forwarded_objects(), "Forwarded object status is sane");

  Thread* thread = Thread::current();
  SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
  ShenandoahMarkingContext* ctx = _heap->marking_context();
  const ShenandoahCollectionSet* const cset = _heap->collection_set();
  T* end = src + count;
  for (T* elem_ptr = src; elem_ptr < end; elem_ptr++) {
    T o = RawAccess<>::oop_load(elem_ptr);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      if (HAS_FWD && cset->is_in(obj)) {
        oop fwd = resolve_forwarded_not_null(obj);
        if (EVAC && obj == fwd) {
          fwd = _heap->evacuate_object(obj, thread);
        }
        assert(obj != fwd || _heap->cancelled_gc(), "must be forwarded");
        ShenandoahHeap::cas_oop(fwd, elem_ptr, o);
        obj = fwd;
      }
      if (ENQUEUE && !ctx->is_marked(obj)) {
        queue.enqueue_known_active(obj);
      }
    }
  }
}

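// Pre-copy barrier for oop arraycopy: while concurrent marking is in progress, the
// destination's previous contents are SATB-enqueued (unless the destination was
// allocated after marking started), and if forwarded objects exist, the source range
// is updated so that only to-space references get copied.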
template <class T>
void ShenandoahBarrierSet::arraycopy_pre_work(T* src, T* dst, size_t count) {
  if (_heap->is_concurrent_mark_in_progress() &&
      !_heap->marking_context()->allocated_after_mark_start(reinterpret_cast<HeapWord*>(dst))) {
    arraycopy_work<T, false, false, true>(dst, count);
  }

  if (_heap->has_forwarded_objects()) {
    arraycopy_update_impl(src, count);
  }
}

inline void ShenandoahBarrierSet::arraycopy_pre(oop* src, oop* dst, size_t count) {
  arraycopy_pre_work(src, dst, count);
}

inline void ShenandoahBarrierSet::arraycopy_pre(narrowOop* src, narrowOop* dst, size_t count) {
  arraycopy_pre_work(src, dst, count);
}

inline bool ShenandoahBarrierSet::skip_bulk_update(HeapWord* dst) {
  return dst >= _heap->heap_region_containing(dst)->get_update_watermark();
}

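// Bulk-update a source range: resolve (and, during evacuation or traversal, copy)
// references to collection-set objects. Ranges at or above their region's update
// watermark cannot contain stale references and are skipped.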
template <class T>
void ShenandoahBarrierSet::arraycopy_update_impl(T* src, size_t count) {
  if (skip_bulk_update(reinterpret_cast<HeapWord*>(src))) return;
  if (_heap->is_evacuation_in_progress()) {
    ShenandoahEvacOOMScope oom_evac;
    arraycopy_work<T, true, true, false>(src, count);
  } else if (_heap->is_concurrent_traversal_in_progress()) {
    ShenandoahEvacOOMScope oom_evac;
    arraycopy_work<T, true, true, true>(src, count);
  } else if (_heap->has_forwarded_objects()) {
    arraycopy_work<T, true, false, false>(src, count);
  }
}

inline void ShenandoahBarrierSet::arraycopy_update(oop* src, size_t count) {
  arraycopy_update_impl(src, count);
}

inline void ShenandoahBarrierSet::arraycopy_update(narrowOop* src, size_t count) {
  arraycopy_update_impl(src, count);
}

#endif // SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP