src/share/vm/gc/g1/heapRegion.inline.hpp

Old:

 100 inline HeapWord* G1ContiguousSpace::par_allocate(size_t min_word_size,
 101                                                  size_t desired_word_size,
 102                                                  size_t* actual_size) {
 103   MutexLocker x(&_par_alloc_lock);
 104   return allocate(min_word_size, desired_word_size, actual_size);
 105 }
 106 
 107 inline HeapWord* G1ContiguousSpace::block_start(const void* p) {
 108   return _bot_part.block_start(p);
 109 }
 110 
 111 inline HeapWord*
 112 G1ContiguousSpace::block_start_const(const void* p) const {
 113   return _bot_part.block_start_const(p);
 114 }
 115 
 116 inline bool HeapRegion::is_obj_dead_with_size(const oop obj, G1CMBitMapRO* prev_bitmap, size_t* size) const {
 117   HeapWord* addr = (HeapWord*) obj;
 118 
 119   assert(addr < top(), "must be");
 120   assert(!is_archive(), "Archive regions should not have references into interesting regions.");
 121   assert(!is_humongous(), "Humongous objects not handled here");
 122   bool obj_is_dead = is_obj_dead(obj, prev_bitmap);
 123 
 124   if (ClassUnloadingWithConcurrentMark && obj_is_dead) {
 125     assert(!block_is_obj(addr), "must be");
 126     *size = block_size_using_bitmap(addr, prev_bitmap);
 127   } else {
 128     assert(block_is_obj(addr), "must be");
 129     *size = obj->size();
 130   }
 131   return obj_is_dead;
 132 }
 133 
 134 inline bool
 135 HeapRegion::block_is_obj(const HeapWord* p) const {
 136   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 137 
 138   if (!this->is_in(p)) {
 139     assert(is_continues_humongous(), "This case can only happen for humongous regions");
 140     return (p == humongous_start_region()->bottom());
 141   }
 142   if (ClassUnloadingWithConcurrentMark) {
 143     return !g1h->is_obj_dead(oop(p), this);
 144   }
 145   return p < top();

New:

 100 inline HeapWord* G1ContiguousSpace::par_allocate(size_t min_word_size,
 101                                                  size_t desired_word_size,
 102                                                  size_t* actual_size) {
 103   MutexLocker x(&_par_alloc_lock);
 104   return allocate(min_word_size, desired_word_size, actual_size);
 105 }
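
As a side note, the min/desired pair in the signature above lets a caller accept a smaller block than it asked for (anything of at least min_word_size words) instead of failing when the space's remaining room falls short, and par_allocate simply serializes concurrent callers on _par_alloc_lock before doing the same bump allocation. Below is a minimal standalone sketch of that contract; ToySpace, word_t and the field names are illustrative stand-ins, not HotSpot types.

#include <cstddef>
#include <mutex>

typedef size_t word_t;                 // one heap word, modeled as a size_t slot

struct ToySpace {
  word_t*    _top;                     // next free word
  word_t*    _end;                     // end of the space
  std::mutex _par_alloc_lock;

  // Serial allocation: grant desired_word_size words if they fit, otherwise
  // whatever is left as long as it is at least min_word_size words.
  word_t* allocate(size_t min_word_size, size_t desired_word_size,
                   size_t* actual_size) {
    size_t free_words = (size_t)(_end - _top);
    size_t want = desired_word_size <= free_words ? desired_word_size : free_words;
    if (want < min_word_size) {
      return nullptr;                  // not even the minimum fits
    }
    word_t* result = _top;
    _top += want;
    *actual_size = want;               // report what was actually granted
    return result;
  }

  // Parallel variant: same contract, but callers may race on _top, so take a
  // lock around the bump (the role MutexLocker plays in par_allocate above).
  word_t* par_allocate(size_t min_word_size, size_t desired_word_size,
                       size_t* actual_size) {
    std::lock_guard<std::mutex> x(_par_alloc_lock);
    return allocate(min_word_size, desired_word_size, actual_size);
  }
};
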
 106 
 107 inline HeapWord* G1ContiguousSpace::block_start(const void* p) {
 108   return _bot_part.block_start(p);
 109 }
 110 
 111 inline HeapWord*
 112 G1ContiguousSpace::block_start_const(const void* p) const {
 113   return _bot_part.block_start_const(p);
 114 }
 115 
 116 inline bool HeapRegion::is_obj_dead_with_size(const oop obj, G1CMBitMapRO* prev_bitmap, size_t* size) const {
 117   HeapWord* addr = (HeapWord*) obj;
 118 
 119   assert(addr < top(), "must be");
 120   assert(!is_closed_archive(), "Archive regions should not have references into interesting regions.");
 121   assert(!is_humongous(), "Humongous objects not handled here");
 122   bool obj_is_dead = is_obj_dead(obj, prev_bitmap);
 123 
 124   if (ClassUnloadingWithConcurrentMark && obj_is_dead) {
 125     assert(!block_is_obj(addr) || is_open_archive(), "must be");
 126     *size = block_size_using_bitmap(addr, prev_bitmap);
 127   } else {
 128     assert(block_is_obj(addr), "must be");
 129     *size = obj->size();
 130   }
 131   return obj_is_dead;
 132 }
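
The point of returning the size together with the liveness verdict is that a caller walking the region linearly can step over dead blocks whose headers may no longer be parsable (their classes can be unloaded when ClassUnloadingWithConcurrentMark is set), taking the size from the marking bitmap instead of from obj->size(). Below is a hedged, standalone model of that walk; ToyRegion and its fields are illustrative stand-ins, not HotSpot types, and the model always sizes dead blocks from the bitmap, i.e. it assumes the class-unloading case.

#include <cstddef>
#include <cstdio>
#include <vector>

// Each word of the toy region either starts an object (size > 0) or is
// interior filler (size == 0). 'marked' plays the role of the prev bitmap.
struct ToyRegion {
  std::vector<size_t> obj_size;  // size in words if an object starts here
  std::vector<bool>   marked;    // marking bitmap: live object starts

  size_t bottom() const { return 0; }
  size_t top()    const { return obj_size.size(); }

  // Size of a dead block: distance to the next marked object start,
  // i.e. what block_size_using_bitmap() derives from the bitmap.
  size_t block_size_using_bitmap(size_t addr) const {
    size_t next = addr + 1;
    while (next < top() && !marked[next]) next++;
    return next - addr;
  }

  bool is_obj_dead_with_size(size_t addr, size_t* size) const {
    bool dead = !marked[addr];
    if (dead) {
      *size = block_size_using_bitmap(addr);  // header may be unparsable
    } else {
      *size = obj_size[addr];                 // live: trust the header
    }
    return dead;
  }
};

int main() {
  //              obj A(2 words)  obj B(3 words, dead)  obj C(1 word)
  ToyRegion r;
  r.obj_size = {2, 0, 3, 0, 0, 1};
  r.marked   = {true, false, false, false, false, true};

  for (size_t cur = r.bottom(); cur < r.top(); ) {
    size_t size = 0;
    bool dead = r.is_obj_dead_with_size(cur, &size);
    std::printf("block at %zu: %s, %zu words\n",
                cur, dead ? "dead" : "live", size);
    cur += size;   // advance to the next block start
  }
  return 0;
}
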
 133 
 134 inline bool
 135 HeapRegion::block_is_obj(const HeapWord* p) const {
 136   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 137 
 138   if (!this->is_in(p)) {
 139     assert(is_continues_humongous(), "This case can only happen for humongous regions");
 140     return (p == humongous_start_region()->bottom());
 141   }
 142   if (ClassUnloadingWithConcurrentMark) {
 143     return !g1h->is_obj_dead(oop(p), this);
 144   }
 145   return p < top();
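
For reference, the decision tree of block_is_obj written out as a standalone function; the parameter names are illustrative stand-ins for this->is_in(p), humongous_start_region()->bottom(), ClassUnloadingWithConcurrentMark and the prev-bitmap liveness lookup, not HotSpot's API.

#include <cstddef>

bool toy_block_is_obj(const void* p,
                      bool in_region,
                      const void* humongous_start_bottom,
                      bool class_unloading_with_concurrent_mark,
                      bool marked_live,          // prev-bitmap verdict for p
                      bool below_top) {
  if (!in_region) {
    // Only reachable for "continues humongous" regions: the one real object
    // starts at the bottom of the humongous *start* region.
    return p == humongous_start_bottom;
  }
  if (class_unloading_with_concurrent_mark) {
    // Dead blocks may have had their classes unloaded, so only blocks the
    // marking bitmap says are live are treated as parsable objects.
    return marked_live;
  }
  // Without class unloading, every block below top() is a parsable object.
  return below_top;
}
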

