
src/share/vm/gc/g1/heapRegion.inline.hpp


*** 113,122 ****
--- 113,133 ----
  }
  
  inline bool HeapRegion::block_is_obj(const HeapWord* p) const {
    G1CollectedHeap* g1h = G1CollectedHeap::heap();
+ 
+   if (!this->is_in(p)) {
+     HeapRegion* hr = g1h->heap_region_containing(p);
+ #ifdef ASSERT
+     assert(hr->is_humongous(), "This case can only happen for humongous regions");
+     oop obj = oop(hr->humongous_start_region()->bottom());
+     assert((HeapWord*)obj <= p, "p must be in humongous object");
+     assert(p <= (HeapWord*)obj + obj->size(), "p must be in humongous object");
+ #endif
+     return hr->block_is_obj(p);
+   }
    if (ClassUnloadingWithConcurrentMark) {
      return !g1h->is_obj_dead(oop(p), this);
    }
    return p < top();
  }
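
As a rough standalone illustration of the control flow the new branch adds (not HotSpot code; Region, kRegionWords and region_containing() are made-up stand-ins for HeapRegion, the G1 region size and G1CollectedHeap::heap_region_containing()): a block query whose address falls outside the queried region is forwarded to the region that actually contains it, which the patch asserts can only happen for humongous objects.

    // Minimal standalone sketch of the forwarding idea; all types here are
    // hypothetical stand-ins, not HotSpot classes.
    #include <cassert>
    #include <cstddef>
    #include <vector>

    static const size_t kRegionWords = 1024;   // words per (model) region

    struct Region {
      const size_t* bottom;     // first word covered by this region
      const size_t* top;        // allocation top within the region
      bool          humongous;  // region backs (part of) a humongous object

      bool is_in(const size_t* p) const {
        return p >= bottom && p < bottom + kRegionWords;
      }
    };

    // Stand-in for heap_region_containing(): linear search over the model heap.
    static Region* region_containing(std::vector<Region>& regions, const size_t* p) {
      for (Region& r : regions) {
        if (r.is_in(p)) {
          return &r;
        }
      }
      return nullptr;
    }

    // Sketch of the patched control flow: queries outside this region are
    // forwarded to the containing region; otherwise the old "p < top()" test decides.
    static bool block_is_obj(std::vector<Region>& regions, const Region* hr, const size_t* p) {
      if (!hr->is_in(p)) {
        Region* other = region_containing(regions, p);
        assert(other != nullptr && other->humongous &&
               "forwarding only expected for humongous objects");
        return block_is_obj(regions, other, p);
      }
      return p < hr->top;
    }

    int main() {
      // Two adjacent regions backing one humongous object that spans both.
      std::vector<size_t> heap(2 * kRegionWords, 0);
      const size_t* base = heap.data();
      std::vector<Region> regions = {
        { base,                base + kRegionWords,     true },  // start region
        { base + kRegionWords, base + 2 * kRegionWords, true }   // continues region
      };

      // A pointer into the second region, queried through the first one,
      // is forwarded instead of being rejected outright.
      const size_t* p = base + kRegionWords + 8;
      assert(block_is_obj(regions, &regions[0], p));
      return 0;
    }

The real patch does the same forwarding via hr->block_is_obj(p), with the ASSERT-only block checking that p lies within the humongous object starting at humongous_start_region()->bottom().
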
*** 174,187 ****
  inline void HeapRegion::note_end_of_marking() {
    _prev_top_at_mark_start = _next_top_at_mark_start;
    _prev_marked_bytes = _next_marked_bytes;
    _next_marked_bytes = 0;
- 
-   assert(_prev_marked_bytes <=
-          (size_t) pointer_delta(prev_top_at_mark_start(), bottom()) *
-          HeapWordSize, "invariant");
  }
  
  inline void HeapRegion::note_start_of_copying(bool during_initial_mark) {
    if (is_survivor()) {
      // This is how we always allocate survivors.
--- 185,194 ----