< prev index next >
src/share/vm/gc/shenandoah/shenandoahHeap.inline.hpp
Print this page
rev 12551 : Refactor/consolidate/cleanup
@@ -40,11 +40,11 @@
/*
* Marks the object. Returns true if the object has not been marked before and has
* been marked by this thread. Returns false if the object has already been marked,
* or if a competing thread succeeded in marking this object.
*/
// Renamed mark_current -> mark_next: marking always writes the "next" bitmap,
// so the name now states which bitmap is affected.
-inline bool ShenandoahHeap::mark_current(oop obj) const {
+inline bool ShenandoahHeap::mark_next(oop obj) const {
#ifdef ASSERT
// Debug aid: if obj is not its own to-space copy (read barrier resolves to a
// different address), print the region holding obj — and, below, the region
// holding the forwarded copy — before the assert at the end fires.
if (! oopDesc::unsafe_equals(obj, oopDesc::bs()->read_barrier(obj))) {
tty->print_cr("heap region containing obj:");
ShenandoahHeapRegion* obj_region = heap_region_containing(obj);
obj_region->print();
@@ -53,36 +53,26 @@
forward_region->print();
}
#endif
// Contract: only the forwarded (to-space) copy of an object may be marked.
assert(oopDesc::unsafe_equals(obj, oopDesc::bs()->read_barrier(obj)), "only mark forwarded copy of objects");
- return mark_current_no_checks(obj);
+ return mark_next_no_checks(obj);
}
// Marks obj on the next-mark bitmap without the forwarding-pointer sanity checks.
// Objects allocated after next-mark-start (at/above TAMS) are implicitly live and
// get no bitmap bit; otherwise parMark() sets the bit atomically and returns true
// only for the thread that won the race.
-inline bool ShenandoahHeap::mark_current_no_checks(oop obj) const {
+inline bool ShenandoahHeap::mark_next_no_checks(oop obj) const {
HeapWord* addr = (HeapWord*) obj;
- return (! allocated_after_mark_start(addr)) && _next_mark_bit_map->parMark(addr);
+ return (! allocated_after_next_mark_start(addr)) && _next_mark_bit_map->parMark(addr);
}
// True iff obj is considered marked in the in-progress (next) marking:
// either allocated after next-mark-start (implicitly live, fast path that
// avoids touching the bitmap) or explicitly marked on the next-mark bitmap.
-inline bool ShenandoahHeap::is_marked_current(oop obj) const {
+inline bool ShenandoahHeap::is_marked_next(oop obj) const {
HeapWord* addr = (HeapWord*) obj;
- return allocated_after_mark_start(addr) || _next_mark_bit_map->isMarked(addr);
+ return allocated_after_next_mark_start(addr) || _next_mark_bit_map->isMarked(addr);
}
// The prev/current bitmap terminology is consolidated here: the region-taking
// is_marked_current overload and both is_marked_prev variants are replaced by a
// single is_marked_complete that queries the most recently *completed* marking
// (its bitmap and top-at-mark-start), mirroring the structure of is_marked_next.
-inline bool ShenandoahHeap::is_marked_current(oop obj, ShenandoahHeapRegion* r) const {
+inline bool ShenandoahHeap::is_marked_complete(oop obj) const {
HeapWord* addr = (HeapWord*) obj;
- return _next_mark_bit_map->isMarked(addr) || r->allocated_after_mark_start(addr);
-}
-
-inline bool ShenandoahHeap::is_marked_prev(oop obj) const {
- ShenandoahHeapRegion* r = heap_region_containing((void*) obj);
- return is_marked_prev(obj, r);
-}
-
-inline bool ShenandoahHeap::is_marked_prev(oop obj, const ShenandoahHeapRegion* r) const {
- HeapWord* addr = (HeapWord*) obj;
- return _prev_mark_bit_map->isMarked(addr) || r->allocated_after_prev_mark_start(addr);
+ return allocated_after_complete_mark_start(addr) || _complete_mark_bit_map->isMarked(addr);
}
// Accessor for the cached _need_update_refs flag; the flag is set/cleared
// outside this view (presumably around evacuation — confirm at the call sites).
inline bool ShenandoahHeap::need_update_refs() const {
return _need_update_refs;
}
@@ -111,11 +101,11 @@
return result;
}
template <class T>
inline oop ShenandoahHeap::update_oop_ref_not_null(T* p, oop obj) {
- if (in_cset_fast_test((HeapWord*) obj)) {
+ if (in_collection_set(obj)) {
oop forw = ShenandoahBarrierSet::resolve_oop_static_not_null(obj);
assert(! oopDesc::unsafe_equals(forw, obj) || is_full_gc_in_progress(), "expect forwarded object");
obj = forw;
oopDesc::encode_store_heap_oop(p, obj);
}
@@ -149,11 +139,11 @@
}
template <class T>
inline oop ShenandoahHeap::maybe_update_oop_ref_not_null(T* p, oop heap_oop) {
- assert((! is_in(p)) || (! heap_region_containing(p)->is_in_collection_set())
+ assert((! is_in(p)) || (! in_collection_set(p))
|| is_full_gc_in_progress(),
"never update refs in from-space, unless evacuation has been cancelled");
#ifdef ASSERT
if (! is_in(heap_oop)) {
@@ -161,11 +151,11 @@
tty->print_cr("object not in heap: "PTR_FORMAT", referenced by: "PTR_FORMAT, p2i((HeapWord*) heap_oop), p2i(p));
assert(is_in(heap_oop), "object must be in heap");
}
#endif
assert(is_in(heap_oop), "only ever call this on objects in the heap");
- if (in_cset_fast_test((HeapWord*) heap_oop)) {
+ if (in_collection_set(heap_oop)) {
oop forwarded_oop = ShenandoahBarrierSet::resolve_oop_static_not_null(heap_oop); // read brooks ptr
assert(! oopDesc::unsafe_equals(forwarded_oop, heap_oop) || is_full_gc_in_progress(), "expect forwarded object");
log_develop_trace(gc)("Updating old ref: "PTR_FORMAT" pointing to "PTR_FORMAT" to new ref: "PTR_FORMAT, p2i(p), p2i(heap_oop), p2i(forwarded_oop));
@@ -293,27 +283,31 @@
return return_val;
}
// True when entry still needs to be marked, i.e. it is not yet marked in the
// in-progress (next) marking.
inline bool ShenandoahHeap::requires_marking(const void* entry) const {
- return ! is_marked_current(oop(entry));
+ return ! is_marked_next(oop(entry));
+}
+
+// Collection-set membership by region index: reads the unbiased per-region
+// table (_in_cset_fast_test_base), indexed by region number.
+// NOTE(review): not declared inline — in a .inline.hpp this risks multiple
+// definitions at link time if included from several translation units; confirm.
+bool ShenandoahHeap::region_in_collection_set(size_t region_index) const {
+ return _in_cset_fast_test_base[region_index];
+}
+
+// Region overload: delegates to the index-based test via region_number().
+// NOTE(review): same missing-inline concern as region_in_collection_set above.
+bool ShenandoahHeap::in_collection_set(ShenandoahHeapRegion* r) const {
+ return region_in_collection_set(r->region_number());
}
// Replaces in_cset_fast_test(HeapWord*): templated so any pointer-like T
// (oop, HeapWord*, T*) can be tested after a cast to HeapWord*. The slow-path
// cross-check asserts of the old version are dropped as part of the cleanup.
-inline bool ShenandoahHeap::in_cset_fast_test(HeapWord* obj) {
+template <class T>
+inline bool ShenandoahHeap::in_collection_set(T p) const {
+ HeapWord* obj = (HeapWord*) p;
assert(_in_cset_fast_test != NULL, "sanity");
assert(is_in(obj), "should be in heap");
// no need to subtract the bottom of the heap from obj,
// _in_cset_fast_test is biased
uintx index = ((uintx) obj) >> ShenandoahHeapRegion::RegionSizeShift;
- bool ret = _in_cset_fast_test[index];
-
- // let's make sure the result is consistent with what the slower
- // test returns
- assert( ret || !is_in_collection_set(obj), "sanity");
- assert(!ret || is_in_collection_set(obj), "sanity");
- return ret;
+ return _in_cset_fast_test[index];
}
// Flag is stored as an integer (likely for atomic updates elsewhere — confirm),
// hence the explicit comparison against 0.
inline bool ShenandoahHeap::concurrent_mark_in_progress() {
return _concurrent_mark_in_progress != 0;
}
@@ -324,34 +318,31 @@
// Same int-backed flag pattern as concurrent_mark_in_progress: nonzero means
// evacuation is currently in progress.
inline bool ShenandoahHeap::is_evacuation_in_progress() {
return _evacuation_in_progress != 0;
}
// True iff addr lies at/above the next-mark-start (TAMS) recorded for its
// region; such objects were allocated after marking started and are treated as
// implicitly marked. The table is indexed by addr >> RegionSizeShift
// (presumably biased like _in_cset_fast_test — confirm at the declaration).
// The old per-region ASSERT cross-check is removed by this change, likely
// because the region-side copy of TAMS went away in the consolidation.
-inline bool ShenandoahHeap::allocated_after_mark_start(HeapWord* addr) const {
+inline bool ShenandoahHeap::allocated_after_next_mark_start(HeapWord* addr) const {
uintx index = ((uintx) addr) >> ShenandoahHeapRegion::RegionSizeShift;
- HeapWord* top_at_mark_start = _top_at_mark_starts[index];
+ HeapWord* top_at_mark_start = _next_top_at_mark_starts[index];
bool alloc_after_mark_start = addr >= top_at_mark_start;
-#ifdef ASSERT
- ShenandoahHeapRegion* r = heap_region_containing(addr);
- assert(alloc_after_mark_start == r->allocated_after_mark_start(addr), "sanity");
-#endif
return alloc_after_mark_start;
}
// The prev/next object-iteration wrappers are removed (marked_object_iterate
// below now iterates only the completed marking), and the "complete" twin of
// allocated_after_next_mark_start is added: true iff addr lies at/above the
// top-at-mark-start recorded for its region at the last *completed* marking.
-template<class T>
-inline void ShenandoahHeap::marked_prev_object_iterate(ShenandoahHeapRegion* region, T* cl) {
- marked_object_iterate(region, cl, _prev_mark_bit_map, region->top_at_prev_mark_start());
-}
-
-template<class T>
-inline void ShenandoahHeap::marked_next_object_iterate(ShenandoahHeapRegion* region, T* cl) {
- marked_object_iterate(region, cl, _next_mark_bit_map, region->top_at_mark_start());
+inline bool ShenandoahHeap::allocated_after_complete_mark_start(HeapWord* addr) const {
+ uintx index = ((uintx) addr) >> ShenandoahHeapRegion::RegionSizeShift;
+ HeapWord* top_at_mark_start = _complete_top_at_mark_starts[index];
+ bool alloc_after_mark_start = addr >= top_at_mark_start;
+ return alloc_after_mark_start;
}
template<class T>
-inline void ShenandoahHeap::marked_object_iterate(ShenandoahHeapRegion* region, T* cl, CMBitMap* mark_bit_map, HeapWord* top_at_mark_start) {
+inline void ShenandoahHeap::marked_object_iterate(ShenandoahHeapRegion* region, T* cl) {
assert(BrooksPointer::word_offset() < 0, "skip_delta calculation below assumes the forwarding ptr is before obj");
+
+ CMBitMap* mark_bit_map = _complete_mark_bit_map;
+ HeapWord* top_at_mark_start = complete_top_at_mark_start(region->bottom());
+
size_t skip_bitmap_delta = BrooksPointer::word_size() + 1;
size_t skip_objsize_delta = BrooksPointer::word_size() /* + actual obj.size() below */;
HeapWord* start = region->bottom() + BrooksPointer::word_size();
HeapWord* limit = region->top();
@@ -418,15 +409,12 @@
// Applies cl->do_object to a single marked object. With prev/next iteration
// consolidated, the debug block now insists the caller passes the completed
// mark bitmap and that obj is marked in that completed marking.
inline void ShenandoahHeap::do_marked_object(CMBitMap* bitmap, T* cl, oop obj) {
#ifdef ASSERT
assert(!oopDesc::is_null(obj), "sanity");
assert(obj->is_oop(), "sanity");
assert(is_in(obj), "sanity");
- if (bitmap == _prev_mark_bit_map) {
- assert(is_marked_prev(obj), "object expected to be marked");
- } else {
- assert(is_marked_current(obj), "object expected to be marked");
- }
+ assert(bitmap == _complete_mark_bit_map, "only iterate completed mark bitmap");
+ assert(is_marked_complete(obj), "object expected to be marked");
#endif
cl->do_object(obj);
}
#endif // SHARE_VM_GC_SHENANDOAH_SHENANDOAHHEAP_INLINE_HPP
< prev index next >