src/share/vm/gc/g1/heapRegion.inline.hpp

--- old/src/share/vm/gc/g1/heapRegion.inline.hpp

 100                                                         size_t desired_word_size,
 101                                                         size_t* actual_size) {
 102   MutexLocker x(&_par_alloc_lock);
 103   return allocate(min_word_size, desired_word_size, actual_size);
 104 }
 105 
 106 inline HeapWord* G1OffsetTableContigSpace::block_start(const void* p) {
 107   return _offsets.block_start(p);
 108 }
 109 
 110 inline HeapWord*
 111 G1OffsetTableContigSpace::block_start_const(const void* p) const {
 112   return _offsets.block_start_const(p);
 113 }
 114 
 115 inline bool
 116 HeapRegion::block_is_obj(const HeapWord* p) const {
 117   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 118 
 119   if (!this->is_in(p)) {
 120     HeapRegion* hr = g1h->heap_region_containing(p);
 121 #ifdef ASSERT
 122     assert(hr->is_humongous(), "This case can only happen for humongous regions");
 123     oop obj = oop(hr->humongous_start_region()->bottom());
 124     assert((HeapWord*)obj <= p, "p must be in humongous object");
 125     assert(p <= (HeapWord*)obj + obj->size(), "p must be in humongous object");
 126 #endif
 127     return hr->block_is_obj(p);
 128   }
 129   if (ClassUnloadingWithConcurrentMark) {
 130     return !g1h->is_obj_dead(oop(p), this);
 131   }
 132   return p < top();
 133 }
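// [Editorial sketch, not part of the file under review] The ASSERT block
// above encodes the humongous layout invariant: a humongous object starts
// at bottom() of its start region and spans the following "continues
// humongous" regions contiguously, so any p inside one of those regions
// lies within the object's footprint. With hypothetical numbers (1 MB
// regions, 8-byte words):
//
//   const size_t region_words   = (1u << 20) / 8;  // 131072 words per region
//   const size_t obj_words      = 300000;          // object size in words
//   const size_t regions_needed =
//       (obj_words + region_words - 1) / region_words;  // == 3 regions
//   // Regions 1 and 2 are "continues humongous"; for any p inside them,
//   // obj_bottom <= p and p <= obj_bottom + obj_words, which is exactly
//   // what the two asserts check.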
 134 
 135 inline size_t
 136 HeapRegion::block_size(const HeapWord *addr) const {
 137   if (addr == top()) {
 138     return pointer_delta(end(), addr);
 139   }
 140 
 141   if (block_is_obj(addr)) {
 142     return oop(addr)->size();
 143   }
 144 
 145   assert(ClassUnloadingWithConcurrentMark,
 146          "All blocks should be objects if G1 Class Unloading isn't used. "
 147          "HR: [" PTR_FORMAT ", " PTR_FORMAT ", " PTR_FORMAT ") "

+++ new/src/share/vm/gc/g1/heapRegion.inline.hpp

 100                                                         size_t desired_word_size,
 101                                                         size_t* actual_size) {
 102   MutexLocker x(&_par_alloc_lock);
 103   return allocate(min_word_size, desired_word_size, actual_size);
 104 }
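// [Editorial sketch, not part of the file under review] par_allocate()
// above gets its thread safety purely from _par_alloc_lock: it takes the
// mutex and reuses the single-threaded allocate(). A minimal standalone
// model of that pattern, with hypothetical names and std::mutex standing
// in for HotSpot's Mutex, might look like:
//
//   #include <cstddef>
//   #include <mutex>
//
//   struct BumpSpace {
//     char* _top;
//     char* _end;
//     std::mutex _par_alloc_lock;
//
//     // Single-threaded path: grant desired_size if it fits, otherwise
//     // shrink to what is left as long as that still covers min_size.
//     char* allocate(size_t min_size, size_t desired_size, size_t* actual) {
//       size_t available = (size_t)(_end - _top);
//       size_t size = desired_size;
//       if (size > available) {
//         if (min_size > available) return nullptr;  // not even min fits
//         size = available;                          // partial fill is fine
//       }
//       char* result = _top;
//       _top += size;
//       *actual = size;
//       return result;
//     }
//
//     // Parallel path: identical semantics, serialized by the lock.
//     char* par_allocate(size_t min_size, size_t desired_size, size_t* actual) {
//       std::lock_guard<std::mutex> x(_par_alloc_lock);
//       return allocate(min_size, desired_size, actual);
//     }
//   };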
 105 
 106 inline HeapWord* G1OffsetTableContigSpace::block_start(const void* p) {
 107   return _offsets.block_start(p);
 108 }
 109 
 110 inline HeapWord*
 111 G1OffsetTableContigSpace::block_start_const(const void* p) const {
 112   return _offsets.block_start_const(p);
 113 }
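// [Editorial sketch, not part of the file under review] Both lookups above
// delegate to the region's block offset table (_offsets). The underlying
// technique: the space is divided into fixed-size "cards" (512 bytes in
// HotSpot), and for each card the table remembers how far back from the
// card boundary the block crossing onto that card begins. A deliberately
// simplified model (word-indexed addresses, exact offsets, none of the
// logarithmic back-skip encoding the real table uses for huge blocks):
//
//   #include <cstddef>
//   #include <vector>
//
//   struct SimpleOffsetTable {
//     static const size_t card_words = 64;   // 512 bytes of 8-byte words
//     std::vector<size_t> _offset;           // per card: words from card
//                                            // base back to block start
//     explicit SimpleOffsetTable(size_t space_words)
//       : _offset(space_words / card_words + 1, 0) {}
//
//     // Record a block [start, start + size) as it is allocated.
//     void alloc_block(size_t start, size_t size) {
//       for (size_t c = start / card_words + 1;
//            c <= (start + size - 1) / card_words; c++) {
//         _offset[c] = c * card_words - start;
//       }
//     }
//
//     // block_start(p): back up to the start of the block that crosses
//     // onto p's card. The real implementation then walks forward block
//     // by block (using block sizes) until it reaches the block that
//     // actually contains p; that forward walk is omitted here.
//     size_t block_start(size_t p) const {
//       size_t c = p / card_words;
//       return c * card_words - _offset[c];
//     }
//   };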
 114 
 115 inline bool
 116 HeapRegion::block_is_obj(const HeapWord* p) const {
 117   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 118 
 119   if (!this->is_in(p)) {
 120     assert(is_continues_humongous(), "This case can only happen for humongous regions");
 121     return (p == humongous_start_region()->bottom());
 122   }
 123   if (ClassUnloadingWithConcurrentMark) {
 124     return !g1h->is_obj_dead(oop(p), this);
 125   }
 126   return p < top();
 127 }
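// [Editorial sketch, not part of the file under review] The canonical
// consumer of block_is_obj()/block_size() is a walk over a region's
// blocks: every block is either a live object or (only when
// ClassUnloadingWithConcurrentMark leaves dead ranges behind) a stretch
// of dead words, and block_size() says how far to skip either way.
// A hypothetical free function, walk_region(), using the real HeapRegion
// API shown in this file:
//
//   void walk_region(HeapRegion* hr, ObjectClosure* cl) {
//     HeapWord* cur = hr->bottom();
//     while (cur < hr->top()) {
//       if (hr->block_is_obj(cur)) {
//         cl->do_object(oop(cur));     // live object: visit it
//       }
//       cur += hr->block_size(cur);    // always advance one whole block
//     }
//   }
//
// The simplified return statement above fits this walk: an address outside
// the region can only legitimately be queried on a "continues humongous"
// region, and the only such address that starts an object is the humongous
// start region's bottom().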
 128 
 129 inline size_t
 130 HeapRegion::block_size(const HeapWord *addr) const {
 131   if (addr == top()) {
 132     return pointer_delta(end(), addr);
 133   }
 134 
 135   if (block_is_obj(addr)) {
 136     return oop(addr)->size();
 137   }
 138 
 139   assert(ClassUnloadingWithConcurrentMark,
 140          "All blocks should be objects if G1 Class Unloading isn't used. "
 141          "HR: [" PTR_FORMAT ", " PTR_FORMAT ", " PTR_FORMAT ") "