< prev index next >

src/hotspot/share/gc/shenandoah/shenandoahHeap.inline.hpp

Print this page
rev 50092 : [mq]: allocations-rt.patch


 335     assert(oopDesc::is_oop(copy_val), "expect oop");
 336     assert(p->klass() == copy_val->klass(), "Should have the same class p: "PTR_FORMAT", copy: "PTR_FORMAT,
 337                                               p2i(p), p2i(copy));
 338 #endif
 339     return copy_val;
 340   }  else {
 341     // Failed to evacuate. We need to deal with the object that is left behind. Since this
 342     // new allocation is certainly after TAMS, it will be considered live in the next cycle.
 343     // But if it happens to contain references to evacuated regions, those references would
 344     // not get updated for this stale copy during this cycle, and we will crash while scanning
 345     // it the next cycle.
 346     //
 347     // For GCLAB allocations, it is enough to rollback the allocation ptr. Either the next
 348     // object will overwrite this stale copy, or the filler object on LAB retirement will
 349     // do this. For non-GCLAB allocations, we have no way to retract the allocation, and
 350     // have to explicitly overwrite the copy with the filler object. With that overwrite,
 351     // we have to keep the fwdptr initialized and pointing to our (stale) copy.
 352     if (alloc_from_gclab) {
 353       thread->gclab().rollback(size_with_fwdptr);
 354     } else {
 355       fill_with_object(copy, size_no_fwdptr);
 356     }
 357     log_develop_trace(gc, compaction)("Copy object: " PTR_FORMAT " -> " PTR_FORMAT " failed, use other: " PTR_FORMAT,
 358                                       p2i(p), p2i(copy), p2i(result));
 359     return result;
 360   }
 361 }
 362 
 // Returns true iff the object at 'entry' is NOT marked in the "next" mark
 // bitmap, i.e. the SATB/marking machinery still needs to process it.
 // NOTE(review): 'entry' is reinterpreted as an oop without validation here;
 // presumably callers guarantee it is a valid heap address — confirm at call sites.
 363 inline bool ShenandoahHeap::requires_marking(const void* entry) const {
 364   return ! is_marked_next(oop(entry));
 365 }
 366 
 // Membership test by region index: asserts that a collection set object
 // exists, then delegates to its is_in() lookup.
 367 bool ShenandoahHeap::region_in_collection_set(size_t region_index) const {
 368   assert(collection_set() != NULL, "Sanity");
 369   return collection_set()->is_in(region_index);
 370 }
 371 
 // Convenience overload: maps the region to its numeric index and defers to
 // region_in_collection_set(). Assumes 'r' is non-NULL (no check performed).
 372 bool ShenandoahHeap::in_collection_set(ShenandoahHeapRegion* r) const {
 373   return region_in_collection_set(r->region_number());
 374 }
 375 




 335     assert(oopDesc::is_oop(copy_val), "expect oop");
 336     assert(p->klass() == copy_val->klass(), "Should have the same class p: "PTR_FORMAT", copy: "PTR_FORMAT,
 337                                               p2i(p), p2i(copy));
 338 #endif
 339     return copy_val;
 340   }  else {
 341     // Failed to evacuate. We need to deal with the object that is left behind. Since this
 342     // new allocation is certainly after TAMS, it will be considered live in the next cycle.
 343     // But if it happens to contain references to evacuated regions, those references would
 344     // not get updated for this stale copy during this cycle, and we will crash while scanning
 345     // it the next cycle.
 346     //
 347     // For GCLAB allocations, it is enough to rollback the allocation ptr. Either the next
 348     // object will overwrite this stale copy, or the filler object on LAB retirement will
 349     // do this. For non-GCLAB allocations, we have no way to retract the allocation, and
 350     // have to explicitly overwrite the copy with the filler object. With that overwrite,
 351     // we have to keep the fwdptr initialized and pointing to our (stale) copy.
 352     if (alloc_from_gclab) {
 353       thread->gclab().rollback(size_with_fwdptr);
 354     } else {
 355       fill_with_object(filler, size_with_fwdptr);
 356     }
 357     log_develop_trace(gc, compaction)("Copy object: " PTR_FORMAT " -> " PTR_FORMAT " failed, use other: " PTR_FORMAT,
 358                                       p2i(p), p2i(copy), p2i(result));
 359     return result;
 360   }
 361 }
 362 
 // Returns true iff the object at 'entry' is NOT marked in the "next" mark
 // bitmap, i.e. the SATB/marking machinery still needs to process it.
 // NOTE(review): 'entry' is reinterpreted as an oop without validation here;
 // presumably callers guarantee it is a valid heap address — confirm at call sites.
 363 inline bool ShenandoahHeap::requires_marking(const void* entry) const {
 364   return ! is_marked_next(oop(entry));
 365 }
 366 
 // Membership test by region index: asserts that a collection set object
 // exists, then delegates to its is_in() lookup.
 367 bool ShenandoahHeap::region_in_collection_set(size_t region_index) const {
 368   assert(collection_set() != NULL, "Sanity");
 369   return collection_set()->is_in(region_index);
 370 }
 371 
 // Convenience overload: maps the region to its numeric index and defers to
 // region_in_collection_set(). Assumes 'r' is non-NULL (no check performed).
 372 bool ShenandoahHeap::in_collection_set(ShenandoahHeapRegion* r) const {
 373   return region_in_collection_set(r->region_number());
 374 }
 375 


< prev index next >