< prev index next >

src/hotspot/share/gc/g1/g1ConcurrentMark.cpp

Print this page
rev 58059 : imported patch 8238999-memregion-custom-operator


 244   TaskQueueEntryChunk* cur = remove_chunk_from_chunk_list();
 245 
 246   if (cur == NULL) {
 247     return false;
 248   }
 249 
 250   Copy::conjoint_memory_atomic(cur->data, ptr_arr, EntriesPerChunk * sizeof(G1TaskQueueEntry));
 251 
 252   add_chunk_to_free_list(cur);
 253   return true;
 254 }
 255 
 256 void G1CMMarkStack::set_empty() {
 257   _chunks_in_chunk_list = 0;
 258   _hwm = 0;
 259   _chunk_list = NULL;
 260   _free_list = NULL;
 261 }
 262 
 263 G1CMRootMemRegions::G1CMRootMemRegions(uint const max_regions) :
 264     _root_regions(NULL),
 265     _max_regions(max_regions),
 266     _num_root_regions(0),
 267     _claimed_root_regions(0),
 268     _scan_in_progress(false),
 269     _should_abort(false) {
 270   _root_regions = new MemRegion[_max_regions];
 271   if (_root_regions == NULL) {
 272     vm_exit_during_initialization("Could not allocate root MemRegion set.");
 273   }
 274 }
 275 
 276 G1CMRootMemRegions::~G1CMRootMemRegions() {
 277   delete[] _root_regions;
 278 }
 279 
// Forget all root MemRegions recorded so far by clearing the element
// count. The backing array itself is left untouched; subsequent add()
// calls simply overwrite the slots.
 280 void G1CMRootMemRegions::reset() {
 281   _num_root_regions = 0;
 282 }
 283 
 284 void G1CMRootMemRegions::add(HeapWord* start, HeapWord* end) {
 285   assert_at_safepoint();
 286   size_t idx = Atomic::fetch_and_add(&_num_root_regions, 1u);
 287   assert(idx < _max_regions, "Trying to add more root MemRegions than there is space " SIZE_FORMAT, _max_regions);
 288   assert(start != NULL && end != NULL && start <= end, "Start (" PTR_FORMAT ") should be less or equal to "
 289          "end (" PTR_FORMAT ")", p2i(start), p2i(end));
 290   _root_regions[idx].set_start(start);
 291   _root_regions[idx].set_end(end);
 292 }
 293 
 294 void G1CMRootMemRegions::prepare_for_scan() {
 295   assert(!scan_in_progress(), "pre-condition");
 296 
 297   _scan_in_progress = _num_root_regions > 0;




 244   TaskQueueEntryChunk* cur = remove_chunk_from_chunk_list();
 245 
 246   if (cur == NULL) {
 247     return false;
 248   }
 249 
 250   Copy::conjoint_memory_atomic(cur->data, ptr_arr, EntriesPerChunk * sizeof(G1TaskQueueEntry));
 251 
 252   add_chunk_to_free_list(cur);
 253   return true;
 254 }
 255 
 256 void G1CMMarkStack::set_empty() {
 257   _chunks_in_chunk_list = 0;
 258   _hwm = 0;
 259   _chunk_list = NULL;
 260   _free_list = NULL;
 261 }
 262 
 263 G1CMRootMemRegions::G1CMRootMemRegions(uint const max_regions) :
 264     _root_regions(NEW_C_HEAP_ARRAY(MemRegion, max_regions, mtGC)),
 265     _max_regions(max_regions),
 266     _num_root_regions(0),
 267     _claimed_root_regions(0),
 268     _scan_in_progress(false),
 269     _should_abort(false) {
 270   for (uint i = 0; i < max_regions; i++) {
 271     ::new (&_root_regions[i]) MemRegion();

 272   }
 273 }
 274 
// Free the C-heap array allocated by the constructor via
// NEW_C_HEAP_ARRAY. No per-element destruction is performed first; this
// presumes MemRegion is trivially destructible -- NOTE(review): confirm
// against MemRegion's definition.
 275 G1CMRootMemRegions::~G1CMRootMemRegions() {
 276   FREE_C_HEAP_ARRAY(MemRegion, _root_regions);
 277 }
 278 
// Forget all root MemRegions recorded so far by clearing the element
// count. The backing array itself is left untouched; subsequent add()
// calls simply overwrite the slots.
 279 void G1CMRootMemRegions::reset() {
 280   _num_root_regions = 0;
 281 }
 282 
 283 void G1CMRootMemRegions::add(HeapWord* start, HeapWord* end) {
 284   assert_at_safepoint();
 285   size_t idx = Atomic::fetch_and_add(&_num_root_regions, 1u);
 286   assert(idx < _max_regions, "Trying to add more root MemRegions than there is space " SIZE_FORMAT, _max_regions);
 287   assert(start != NULL && end != NULL && start <= end, "Start (" PTR_FORMAT ") should be less or equal to "
 288          "end (" PTR_FORMAT ")", p2i(start), p2i(end));
 289   _root_regions[idx].set_start(start);
 290   _root_regions[idx].set_end(end);
 291 }
 292 
 293 void G1CMRootMemRegions::prepare_for_scan() {
 294   assert(!scan_in_progress(), "pre-condition");
 295 
 296   _scan_in_progress = _num_root_regions > 0;


< prev index next >