< prev index next >

src/hotspot/share/gc/shenandoah/shenandoahConcurrentMark.inline.hpp

Print this page
rev 51547 : Refactor to group marking bitmap and TAMS structure in one class ShenandoahMarkingContext
rev 51548 : Avoid indirection to next-mark-context


 192 
 193 #ifdef ASSERT
 194   int len = array->length();
 195   assert (0 <= from && from < len, "from is sane: %d/%d", from, len);
 196   assert (0 < to && to <= len, "to is sane: %d/%d", to, len);
 197 #endif
 198 
 199   array->oop_iterate_range(cl, from, to);
 200 }
 201 
 // Attempts to pop the next mark task from the queue, trying the internal
 // buffer first, then the local deque, then the overflow stack; the ||
 // chain short-circuits, so later sources are consulted only when earlier
 // ones are empty. Returns true iff a task was stored into 'task'.
 202 inline bool ShenandoahConcurrentMark::try_queue(ShenandoahObjToScanQueue* q, ShenandoahMarkTask &task) {
 203   return (q->pop_buffer(task) ||
 204           q->pop_local(task) ||
 205           q->pop_overflow(task));
 206 }
 207 
 // Closure applied to drained SATB (snapshot-at-the-beginning) buffers:
 // every reference recorded in the buffer is marked via
 // ShenandoahConcurrentMark::mark_through_ref, which enqueues newly
 // marked objects on _queue for later scanning.
 208 class ShenandoahSATBBufferClosure : public SATBBufferClosure {
 209 private:
 210   ShenandoahObjToScanQueue* _queue;
 211   ShenandoahHeap* _heap;

 212 public:
 213   ShenandoahSATBBufferClosure(ShenandoahObjToScanQueue* q) :
 214     _queue(q), _heap(ShenandoahHeap::heap())

 215   {
 216   }
 217 
 // Dispatches on whether the heap may hold forwarded objects: if so,
 // each reference is resolved through its forwarding pointer (RESOLVE)
 // before marking; otherwise no update is needed (NONE).
 218   void do_buffer(void **buffer, size_t size) {
 219     if (_heap->has_forwarded_objects()) {
 220       do_buffer_impl<RESOLVE>(buffer, size);
 221     } else {
 222       do_buffer_impl<NONE>(buffer, size);
 223     }
 224   }
 225 
 226   template<UpdateRefsMode UPDATE_REFS>
 // Marks every entry of the buffer, treating each slot as an oop*.
 227   void do_buffer_impl(void **buffer, size_t size) {
 228     for (size_t i = 0; i < size; ++i) {
 229       oop *p = (oop *) &buffer[i];
 230       ShenandoahConcurrentMark::mark_through_ref<oop, UPDATE_REFS>(p, _heap, _queue);
 231     }
 232   }
 233 };
 234 
 // Convenience overload: forwards to the three-template-parameter version
 // with string deduplication disabled (STRING_DEDUP == false).
 235 template<class T, UpdateRefsMode UPDATE_REFS>
 236 inline void ShenandoahConcurrentMark::mark_through_ref(T *p, ShenandoahHeap* heap, ShenandoahObjToScanQueue* q) {
 237   ShenandoahConcurrentMark::mark_through_ref<T, UPDATE_REFS, false /* string dedup */>(p, heap, q);
 238 }
 239 
 // Core marking step for a single reference slot p:
 //  1. load the (possibly compressed) reference; do nothing if null;
 //  2. per UPDATE_REFS: leave it untouched (NONE), follow the forwarding
 //     pointer without storing back (RESOLVE), or update the slot with
 //     the forwarded value (SIMPLE unconditionally, CONCURRENT via a
 //     racy compare-and-update that may observe a competing mutator write);
 //  3. mark the object in the heap's next marking context and, if this
 //     thread won the race to mark it, push it on q for later scanning;
 //  4. with STRING_DEDUP, also enqueue eligible objects as string
 //     deduplication candidates.
 240 template<class T, UpdateRefsMode UPDATE_REFS, bool STRING_DEDUP>
 241 inline void ShenandoahConcurrentMark::mark_through_ref(T *p, ShenandoahHeap* heap, ShenandoahObjToScanQueue* q) {
 242   T o = RawAccess<>::oop_load(p);
 243   if (!CompressedOops::is_null(o)) {
 244     oop obj = CompressedOops::decode_not_null(o);
 245     switch (UPDATE_REFS) {
 246     case NONE:
 247       break;
 248     case RESOLVE:
 249       obj = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
 250       break;
 251     case SIMPLE:
 252       // We piggy-back reference updating to the marking tasks.
 253       obj = heap->update_with_forwarded_not_null(p, obj);
 254       break;
 255     case CONCURRENT:
 256       obj = heap->maybe_update_with_forwarded_not_null(p, obj);
 257       break;
 258     default:
 259       ShouldNotReachHere();
 260     }
 261 
 262     // Note: Only when concurrently updating references can obj become NULL here.
 263     // It happens when a mutator thread beats us by writing another value. In that
 264     // case we don't need to do anything else.
 265     if (UPDATE_REFS != CONCURRENT || !CompressedOops::is_null(obj)) {
 266       shenandoah_assert_not_forwarded(p, obj);
 267       shenandoah_assert_not_in_cset_except(p, obj, heap->cancelled_gc());
 268 
 // mark() returning true means this thread set the mark bit first and
 // is responsible for queueing the object.
 269       if (heap->next_marking_context()->mark(obj)) {
 270         bool pushed = q->push(ShenandoahMarkTask(obj));
 271         assert(pushed, "overflow queue should always succeed pushing");
 272 
 273         if (STRING_DEDUP && ShenandoahStringDedup::is_candidate(obj)) {
 274           assert(ShenandoahStringDedup::is_enabled(), "Must be enabled");
 275           ShenandoahStringDedup::enqueue_candidate(obj);
 276         }
 277       }
 278 
 279       shenandoah_assert_marked_next(p, obj);
 280     }
 281   }
 282 }
 283 
 284 #endif // SHARE_VM_GC_SHENANDOAH_SHENANDOAHCONCURRENTMARK_INLINE_HPP


 192 
 193 #ifdef ASSERT
 194   int len = array->length();
 195   assert (0 <= from && from < len, "from is sane: %d/%d", from, len);
 196   assert (0 < to && to <= len, "to is sane: %d/%d", to, len);
 197 #endif
 198 
 199   array->oop_iterate_range(cl, from, to);
 200 }
 201 
 // Attempts to pop the next mark task from the queue, trying the internal
 // buffer first, then the local deque, then the overflow stack; the ||
 // chain short-circuits, so later sources are consulted only when earlier
 // ones are empty. Returns true iff a task was stored into 'task'.
 202 inline bool ShenandoahConcurrentMark::try_queue(ShenandoahObjToScanQueue* q, ShenandoahMarkTask &task) {
 203   return (q->pop_buffer(task) ||
 204           q->pop_local(task) ||
 205           q->pop_overflow(task));
 206 }
 207 
 // Closure applied to drained SATB (snapshot-at-the-beginning) buffers:
 // every reference recorded in the buffer is marked via
 // ShenandoahConcurrentMark::mark_through_ref, which enqueues newly
 // marked objects on _queue for later scanning.
 208 class ShenandoahSATBBufferClosure : public SATBBufferClosure {
 209 private:
 210   ShenandoahObjToScanQueue* _queue;
 211   ShenandoahHeap* _heap;
 // Marking context cached once at construction so the per-reference
 // hot path need not re-fetch it from the heap each time.
 212   ShenandoahMarkingContext* const _mark_context;
 213 public:
 214   ShenandoahSATBBufferClosure(ShenandoahObjToScanQueue* q) :
 215     _queue(q), _heap(ShenandoahHeap::heap()),
 216     _mark_context(_heap->next_marking_context())
 217   {
 218   }
 219 
 // Dispatches on whether the heap may hold forwarded objects: if so,
 // each reference is resolved through its forwarding pointer (RESOLVE)
 // before marking; otherwise no update is needed (NONE).
 220   void do_buffer(void **buffer, size_t size) {
 221     if (_heap->has_forwarded_objects()) {
 222       do_buffer_impl<RESOLVE>(buffer, size);
 223     } else {
 224       do_buffer_impl<NONE>(buffer, size);
 225     }
 226   }
 227 
 228   template<UpdateRefsMode UPDATE_REFS>
 // Marks every entry of the buffer, treating each slot as an oop*.
 229   void do_buffer_impl(void **buffer, size_t size) {
 230     for (size_t i = 0; i < size; ++i) {
 231       oop *p = (oop *) &buffer[i];
 232       ShenandoahConcurrentMark::mark_through_ref<oop, UPDATE_REFS>(p, _heap, _queue, _mark_context);
 233     }
 234   }
 235 };
 236 
 // Convenience overload: forwards to the three-template-parameter version
 // with string deduplication disabled (STRING_DEDUP == false).
 237 template<class T, UpdateRefsMode UPDATE_REFS>
 238 inline void ShenandoahConcurrentMark::mark_through_ref(T *p, ShenandoahHeap* heap, ShenandoahObjToScanQueue* q, ShenandoahMarkingContext* const mark_context) {
 239   ShenandoahConcurrentMark::mark_through_ref<T, UPDATE_REFS, false /* string dedup */>(p, heap, q, mark_context);
 240 }
 241 
 // Core marking step for a single reference slot p:
 //  1. load the (possibly compressed) reference; do nothing if null;
 //  2. per UPDATE_REFS: leave it untouched (NONE), follow the forwarding
 //     pointer without storing back (RESOLVE), or update the slot with
 //     the forwarded value (SIMPLE unconditionally, CONCURRENT via a
 //     racy compare-and-update that may observe a competing mutator write);
 //  3. mark the object in the caller-supplied mark_context and, if this
 //     thread won the race to mark it, push it on q for later scanning;
 //  4. with STRING_DEDUP, also enqueue eligible objects as string
 //     deduplication candidates.
 // mark_context is passed in by the caller so hot loops resolve the
 // heap's next-marking-context indirection only once.
 242 template<class T, UpdateRefsMode UPDATE_REFS, bool STRING_DEDUP>
 243 inline void ShenandoahConcurrentMark::mark_through_ref(T *p, ShenandoahHeap* heap, ShenandoahObjToScanQueue* q, ShenandoahMarkingContext* const mark_context) {
 244   T o = RawAccess<>::oop_load(p);
 245   if (!CompressedOops::is_null(o)) {
 246     oop obj = CompressedOops::decode_not_null(o);
 247     switch (UPDATE_REFS) {
 248     case NONE:
 249       break;
 250     case RESOLVE:
 251       obj = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
 252       break;
 253     case SIMPLE:
 254       // We piggy-back reference updating to the marking tasks.
 255       obj = heap->update_with_forwarded_not_null(p, obj);
 256       break;
 257     case CONCURRENT:
 258       obj = heap->maybe_update_with_forwarded_not_null(p, obj);
 259       break;
 260     default:
 261       ShouldNotReachHere();
 262     }
 263 
 264     // Note: Only when concurrently updating references can obj become NULL here.
 265     // It happens when a mutator thread beats us by writing another value. In that
 266     // case we don't need to do anything else.
 267     if (UPDATE_REFS != CONCURRENT || !CompressedOops::is_null(obj)) {
 268       shenandoah_assert_not_forwarded(p, obj);
 269       shenandoah_assert_not_in_cset_except(p, obj, heap->cancelled_gc());
 270 
 // mark() returning true means this thread set the mark bit first and
 // is responsible for queueing the object.
 271       if (mark_context->mark(obj)) {
 272         bool pushed = q->push(ShenandoahMarkTask(obj));
 273         assert(pushed, "overflow queue should always succeed pushing");
 274 
 275         if (STRING_DEDUP && ShenandoahStringDedup::is_candidate(obj)) {
 276           assert(ShenandoahStringDedup::is_enabled(), "Must be enabled");
 277           ShenandoahStringDedup::enqueue_candidate(obj);
 278         }
 279       }
 280 
 281       shenandoah_assert_marked_next(p, obj);
 282     }
 283   }
 284 }
 285 
 286 #endif // SHARE_VM_GC_SHENANDOAH_SHENANDOAHCONCURRENTMARK_INLINE_HPP
< prev index next >