src/share/vm/gc_implementation/g1/heapRegion.inline.hpp

rev 7056 : [mq]: 8058298
rev 7057 : imported patch review


}

inline void HeapRegion::note_end_of_copying(bool during_initial_mark) {
  if (is_survivor()) {
    // This is how we always allocate survivors.
    assert(_next_top_at_mark_start == bottom(), "invariant");
  } else {
    if (during_initial_mark) {
      // See the comment for note_start_of_copying() for the details
      // on this.
      assert(_next_top_at_mark_start == end(), "pre-condition");
      _next_top_at_mark_start = top();
    } else {
      // See the comment for note_start_of_copying() for the details
      // on this.
      assert(top() >= _next_top_at_mark_start, "invariant");
    }
  }
}

inline void HeapRegionClaimer::initialize(uint n_workers) {
  assert(n_workers > 0, "Need at least one worker.");
  assert(_claims == NULL, "Must initialize only once.");
  _n_workers = n_workers;
  _n_regions = G1CollectedHeap::heap()->_hrm._allocated_heapregions_length;
  // Allocate one claim flag per committed region and mark them all Unclaimed.
  _claims = NEW_C_HEAP_ARRAY(uint, _n_regions, mtGC);
  memset(_claims, Unclaimed, sizeof(*_claims) * _n_regions);
}

inline uint HeapRegionClaimer::start_region_for_worker(uint worker_id) const {
  assert(_n_workers != 0, "Must initialize before use.");
  assert(worker_id < _n_workers, "Invalid worker_id.");
  // Spread the starting regions evenly across the heap so that workers
  // begin claiming in disjoint areas.
  return _n_regions * worker_id / _n_workers;
}

inline bool HeapRegionClaimer::is_region_claimed(uint region_index) const {
  assert(_claims != NULL, "Must initialize before use.");
  assert(region_index < _n_regions, "Invalid index.");
  return _claims[region_index] == Claimed;
}

inline bool HeapRegionClaimer::claim_region(uint region_index) {
  assert(_claims != NULL, "Must initialize before use.");
  assert(region_index < _n_regions, "Invalid index.");
  // Atomically transition the flag from Unclaimed to Claimed; exactly one
  // worker can win the race for any given region.
  return Atomic::cmpxchg(Claimed, &_claims[region_index], Unclaimed) == Unclaimed;
}
#endif // SHARE_VM_GC_IMPLEMENTATION_G1_HEAPREGION_INLINE_HPP
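
For context on how these claim primitives fit together, here is a minimal sketch of a worker loop driving HeapRegionClaimer. The n_regions() accessor and region_at() lookup are hypothetical stand-ins, not the actual G1 API; the loop shape mirrors G1's chunked parallel region iteration:

// Sketch only: start_region_for_worker() gives each worker a distinct starting
// offset, the loop wraps around so every region is visited, and the CAS inside
// claim_region() ensures each region is processed by at most one worker.
void example_par_iterate(HeapRegionClaimer* hrclaimer, uint worker_id,
                         HeapRegionClosure* cl) {
  const uint n_regions = hrclaimer->n_regions();            // hypothetical accessor
  const uint start     = hrclaimer->start_region_for_worker(worker_id);
  for (uint count = 0; count < n_regions; count++) {
    const uint index = (start + count) % n_regions;
    if (hrclaimer->claim_region(index)) {
      cl->doHeapRegion(region_at(index));                   // hypothetical lookup
    }
  }
}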

