< prev index next >

src/hotspot/share/gc/shenandoah/shenandoahHeap.inline.hpp

Print this page




 266       copy = allocate_from_gclab(thread, size);
 267     }
 268     if (copy == NULL) {
 269       ShenandoahAllocRequest req = ShenandoahAllocRequest::for_shared_gc(size);
 270       copy = allocate_memory(req);
 271       alloc_from_gclab = false;
 272     }
 273 #ifdef ASSERT
 274   }
 275 #endif
 276 
 277   if (copy == NULL) {
 278     control_thread()->handle_alloc_failure_evac(size);
 279 
 280     _oom_evac_handler.handle_out_of_memory_during_evacuation();
 281 
 282     return ShenandoahBarrierSet::resolve_forwarded(p);
 283   }
 284 
 285   // Copy the object:
 286   Copy::aligned_disjoint_words(cast_from_oop<HeapWord*>(p), copy, size);
 287 
 288   // Try to install the new forwarding pointer.
 289   oop copy_val = oop(copy);
 290   oop result = ShenandoahForwarding::try_update_forwardee(p, copy_val);
 291   if (result == copy_val) {
 292     // Successfully evacuated. Our copy is now the public one!
 293     shenandoah_assert_correct(NULL, copy_val);
 294     return copy_val;
 295   }  else {
 296     // Failed to evacuate. We need to deal with the object that is left behind. Since this
 297     // new allocation is certainly after TAMS, it will be considered live in the next cycle.
 298     // But if it happens to contain references to evacuated regions, those references would
 299     // not get updated for this stale copy during this cycle, and we will crash while scanning
 300     // it the next cycle.
 301     //
 302     // For GCLAB allocations, it is enough to rollback the allocation ptr. Either the next
 303     // object will overwrite this stale copy, or the filler object on LAB retirement will
 304     // do this. For non-GCLAB allocations, we have no way to retract the allocation, and
 305     // have to explicitly overwrite the copy with the filler object. With that overwrite,
 306     // we have to keep the fwdptr initialized and pointing to our (stale) copy.


 309     } else {
 310       fill_with_object(copy, size);
 311       shenandoah_assert_correct(NULL, copy_val);
 312     }
 313     shenandoah_assert_correct(NULL, result);
 314     return result;
 315   }
 316 }
 317 
// Returns true iff the object still needs to be marked, i.e. it is not yet
// marked in the current marking context. When RESOLVE is true, the entry is
// first resolved through the forwarding pointer (not-null variant), so the
// mark state of the forwardee, not a stale from-space copy, is consulted.
 318 template<bool RESOLVE>
 319 inline bool ShenandoahHeap::requires_marking(const void* entry) const {
 320   oop obj = oop(entry);
 321   if (RESOLVE) {
 322     obj = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
 323   }
 324   return !_marking_context->is_marked(obj);
 325 }
 326 
// Returns true iff the given oop lies inside the current collection set.
// Asserts (debug builds) that a collection set exists and that the address
// is within the heap before delegating the range check to the collection set.
 327 template <class T>
 328 inline bool ShenandoahHeap::in_collection_set(T p) const {
 329   HeapWord* obj = cast_from_oop<HeapWord*>(p);
 330   assert(collection_set() != NULL, "Sanity");
 331   assert(is_in(obj), "should be in heap");
 332 
 333   return collection_set()->is_in(obj);
 334 }
 335 
// Stable means no GC state flags are set at all: no mark, evac, update-refs,
// or traversal phase is active.
 336 inline bool ShenandoahHeap::is_stable() const {
 337   return _gc_state.is_clear();
 338 }
 339 
// Idle means none of the active-phase bits (marking, evacuation, update-refs,
// traversal) is set. Note this is weaker than is_stable(), which requires the
// entire gc-state bitmask to be clear.
 340 inline bool ShenandoahHeap::is_idle() const {
 341   return _gc_state.is_unset(MARKING | EVACUATION | UPDATEREFS | TRAVERSAL);
 342 }
 343 
// True while the MARKING bit of the gc-state is set.
 344 inline bool ShenandoahHeap::is_concurrent_mark_in_progress() const {
 345   return _gc_state.is_set(MARKING);
 346 }
 347 
 348 inline bool ShenandoahHeap::is_concurrent_traversal_in_progress() const {
 349   return _gc_state.is_set(TRAVERSAL);




 266       copy = allocate_from_gclab(thread, size);
 267     }
 268     if (copy == NULL) {
 269       ShenandoahAllocRequest req = ShenandoahAllocRequest::for_shared_gc(size);
 270       copy = allocate_memory(req);
 271       alloc_from_gclab = false;
 272     }
 273 #ifdef ASSERT
 274   }
 275 #endif
 276 
 277   if (copy == NULL) {
 278     control_thread()->handle_alloc_failure_evac(size);
 279 
 280     _oom_evac_handler.handle_out_of_memory_during_evacuation();
 281 
 282     return ShenandoahBarrierSet::resolve_forwarded(p);
 283   }
 284 
 285   // Copy the object:
 286   Copy::aligned_disjoint_words((HeapWord*) p, copy, size);
 287 
 288   // Try to install the new forwarding pointer.
 289   oop copy_val = oop(copy);
 290   oop result = ShenandoahForwarding::try_update_forwardee(p, copy_val);
 291   if (result == copy_val) {
 292     // Successfully evacuated. Our copy is now the public one!
 293     shenandoah_assert_correct(NULL, copy_val);
 294     return copy_val;
 295   }  else {
 296     // Failed to evacuate. We need to deal with the object that is left behind. Since this
 297     // new allocation is certainly after TAMS, it will be considered live in the next cycle.
 298     // But if it happens to contain references to evacuated regions, those references would
 299     // not get updated for this stale copy during this cycle, and we will crash while scanning
 300     // it the next cycle.
 301     //
 302     // For GCLAB allocations, it is enough to rollback the allocation ptr. Either the next
 303     // object will overwrite this stale copy, or the filler object on LAB retirement will
 304     // do this. For non-GCLAB allocations, we have no way to retract the allocation, and
 305     // have to explicitly overwrite the copy with the filler object. With that overwrite,
 306     // we have to keep the fwdptr initialized and pointing to our (stale) copy.


 309     } else {
 310       fill_with_object(copy, size);
 311       shenandoah_assert_correct(NULL, copy_val);
 312     }
 313     shenandoah_assert_correct(NULL, result);
 314     return result;
 315   }
 316 }
 317 
// Returns true iff the object still needs to be marked, i.e. it is not yet
// marked in the current marking context. When RESOLVE is true, the entry is
// first resolved through the forwarding pointer (not-null variant), so the
// mark state of the forwardee, not a stale from-space copy, is consulted.
 318 template<bool RESOLVE>
 319 inline bool ShenandoahHeap::requires_marking(const void* entry) const {
 320   oop obj = oop(entry);
 321   if (RESOLVE) {
 322     obj = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
 323   }
 324   return !_marking_context->is_marked(obj);
 325 }
 326 
// Returns true iff the given oop lies inside the current collection set.
// Asserts (debug builds) that a collection set exists and that the address
// is within the heap before delegating the range check to the collection set.
// NOTE(review): this old revision uses a C-style cast `(HeapWord*) p`; the
// new revision replaces it with the checked `cast_from_oop<HeapWord*>(p)`.
 327 template <class T>
 328 inline bool ShenandoahHeap::in_collection_set(T p) const {
 329   HeapWord* obj = (HeapWord*) p;
 330   assert(collection_set() != NULL, "Sanity");
 331   assert(is_in(obj), "should be in heap");
 332 
 333   return collection_set()->is_in(obj);
 334 }
 335 
// Stable means no GC state flags are set at all: no mark, evac, update-refs,
// or traversal phase is active.
 336 inline bool ShenandoahHeap::is_stable() const {
 337   return _gc_state.is_clear();
 338 }
 339 
// Idle means none of the active-phase bits (marking, evacuation, update-refs,
// traversal) is set. Note this is weaker than is_stable(), which requires the
// entire gc-state bitmask to be clear.
 340 inline bool ShenandoahHeap::is_idle() const {
 341   return _gc_state.is_unset(MARKING | EVACUATION | UPDATEREFS | TRAVERSAL);
 342 }
 343 
// True while the MARKING bit of the gc-state is set.
 344 inline bool ShenandoahHeap::is_concurrent_mark_in_progress() const {
 345   return _gc_state.is_set(MARKING);
 346 }
 347 
 348 inline bool ShenandoahHeap::is_concurrent_traversal_in_progress() const {
 349   return _gc_state.is_set(TRAVERSAL);


< prev index next >