< prev index next >

src/share/vm/gc/g1/heapRegionRemSet.cpp

Print this page
rev 11047 : [mq]: 8155233-jon-review


 248 
 249   static void test_fl_mem_size();
 250 };
 251 
 252 PerRegionTable* PerRegionTable::_free_list = NULL;
     // Head of the shared free list of recycled PerRegionTable nodes
     // (populated via PerRegionTable::free / bulk_free).
 253 
     // The four sizing/eviction parameters below are shared by all
     // OtherRegionsTable instances; they are computed lazily by the first
     // constructor call (guarded by the _max_fine_entries == 0 check there).
 254 size_t OtherRegionsTable::_max_fine_entries = 0;
 255 size_t OtherRegionsTable::_mod_max_fine_entries_mask = 0;
 256 size_t OtherRegionsTable::_fine_eviction_stride = 0;
 257 size_t OtherRegionsTable::_fine_eviction_sample_size = 0;
 258 
 259 OtherRegionsTable::OtherRegionsTable(HeapRegion* hr, Mutex* m) :
 260   _g1h(G1CollectedHeap::heap()),
 261   _hr(hr), _m(m),
 262   _coarse_map(G1CollectedHeap::heap()->max_regions(),
 263               false /* in-resource-area */),
 264   _fine_grain_regions(NULL),
 265   _first_all_fine_prts(NULL), _last_all_fine_prts(NULL),
 266   _n_fine_entries(0), _n_coarse_entries(0),
 267   _fine_eviction_start(0),
 268   _sparse_table(hr),
 269   _coarse_dirty(false)
 270 {
 271   typedef PerRegionTable* PerRegionTablePtr;
 272 
 273   if (_max_fine_entries == 0) {
 274     assert(_mod_max_fine_entries_mask == 0, "Both or none.");
 275     size_t max_entries_log = (size_t)log2_long((jlong)G1RSetRegionEntries);
 276     _max_fine_entries = (size_t)1 << max_entries_log;
 277     _mod_max_fine_entries_mask = _max_fine_entries - 1;
 278 
 279     assert(_fine_eviction_sample_size == 0
 280            && _fine_eviction_stride == 0, "All init at same time.");
 281     _fine_eviction_sample_size = MAX2((size_t)4, max_entries_log);
 282     _fine_eviction_stride = _max_fine_entries / _fine_eviction_sample_size;
 283   }
 284 
 285   _fine_grain_regions = NEW_C_HEAP_ARRAY3(PerRegionTablePtr, _max_fine_entries,
 286                         mtGC, CURRENT_PC, AllocFailStrategy::RETURN_NULL);
 287 
 288   if (_fine_grain_regions == NULL) {
 289     vm_exit_out_of_memory(sizeof(void*)*_max_fine_entries, OOM_MALLOC_ERROR,


 488       prev = cur->collision_list_next_addr();
 489       cur = cur->collision_list_next();
 490     }
 491     i = i + _fine_eviction_stride;
 492     if (i >= _n_fine_entries) i = i - _n_fine_entries;
 493   }
 494 
 495   _fine_eviction_start++;
 496 
 497   if (_fine_eviction_start >= _n_fine_entries) {
 498     _fine_eviction_start -= _n_fine_entries;
 499   }
 500 
 501   guarantee(max != NULL, "Since _n_fine_entries > 0");
 502   guarantee(max_prev != NULL, "Since max != NULL.");
 503 
 504   // Set the corresponding coarse bit.
 505   size_t max_hrm_index = (size_t) max->hr()->hrm_index();
 506   if (!_coarse_map.at(max_hrm_index)) {
 507     _coarse_map.at_put(max_hrm_index, true);
 508     _coarse_dirty = true;
 509     _n_coarse_entries++;
 510   }
 511 
 512   // Unsplice.
 513   *max_prev = max->collision_list_next();
 514   Atomic::inc(&_n_coarsenings);
 515   _n_fine_entries--;
 516   return max;
 517 }
 518 
 519 void OtherRegionsTable::scrub(G1CardLiveData* live_data) {
 520   // First eliminate garbage regions from the coarse map.
 521   log_develop_trace(gc, remset, scrub)("Scrubbing region %u:", _hr->hrm_index());
 522 
 523   log_develop_trace(gc, remset, scrub)("   Coarse map: before = " SIZE_FORMAT "...", _n_coarse_entries);
 524   if (_coarse_dirty) {
 525     live_data->remove_nonlive_regions(&_coarse_map);
 526     _n_coarse_entries = _coarse_map.count_one_bits();
 527     _coarse_dirty = _n_coarse_entries != 0;
 528   }
 529   log_develop_trace(gc, remset, scrub)("   after = " SIZE_FORMAT ".", _n_coarse_entries);
 530 
 531   // Now do the fine-grained maps.
 532   for (size_t i = 0; i < _max_fine_entries; i++) {
 533     PerRegionTable* cur = _fine_grain_regions[i];
 534     PerRegionTable** prev = &_fine_grain_regions[i];
 535     while (cur != NULL) {
 536       PerRegionTable* nxt = cur->collision_list_next();
 537       // If the entire region is dead, eliminate.
 538       log_develop_trace(gc, remset, scrub)("     For other region %u:", cur->hr()->hrm_index());
 539       if (!live_data->is_region_live(cur->hr()->hrm_index())) {
 540         *prev = nxt;
 541         cur->set_collision_list_next(NULL);
 542         _n_fine_entries--;
 543         log_develop_trace(gc, remset, scrub)("          deleted via region map.");
 544         unlink_from_all(cur);
 545         PerRegionTable::free(cur);
 546       } else {
 547         // Do fine-grain elimination.


 634 size_t OtherRegionsTable::fl_mem_size() {
     // Size, in bytes, of the shared PerRegionTable free list; delegates to
     // the PerRegionTable accounting.
 635   return PerRegionTable::fl_mem_size();
 636 }
 637 
 638 void OtherRegionsTable::clear_fcc() {
     // Drop this region's entries from the G1FromCardCache, keyed by the
     // region's index.
 639   G1FromCardCache::clear(_hr->hrm_index());
 640 }
 641 
 642 void OtherRegionsTable::clear() {
     // Reset every component of this remembered set: the fine-grain PRT
     // table, the sparse table, the coarse bitmap, the entry counts, and this
     // region's from-card-cache entries.
 643   // if there are no entries, skip this step
 644   if (_first_all_fine_prts != NULL) {
 645     guarantee(_first_all_fine_prts != NULL && _last_all_fine_prts != NULL, "just checking");
 646     PerRegionTable::bulk_free(_first_all_fine_prts, _last_all_fine_prts);
 647     memset(_fine_grain_regions, 0, _max_fine_entries * sizeof(_fine_grain_regions[0]));
 648   } else {
 649     guarantee(_first_all_fine_prts == NULL && _last_all_fine_prts == NULL, "just checking");
 650   }
 651 
 652   _first_all_fine_prts = _last_all_fine_prts = NULL;
 653   _sparse_table.clear();
     // _coarse_dirty is set whenever a coarse bit is first set; checking it
     // here avoids the cost of clearing an all-zero bitmap.
 654   if (_coarse_dirty) {
 655     _coarse_map.clear();
 656     _coarse_dirty = false;
 657   }
 658   _n_fine_entries = 0;
 659   _n_coarse_entries = 0;
 660 
 661   clear_fcc();
 662 }
 663 
 664 bool OtherRegionsTable::contains_reference(OopOrNarrowOopStar from) const {
     // Serialize with concurrent remembered-set updates by taking _m (without
     // a safepoint check), then perform the actual lookup under the lock.
 665   // Cast away const in this case.
 666   MutexLockerEx x((Mutex*)_m, Mutex::_no_safepoint_check_flag);
 667   return contains_reference_locked(from);
 668 }
 669 
 670 bool OtherRegionsTable::contains_reference_locked(OopOrNarrowOopStar from) const {
 671   HeapRegion* hr = _g1h->heap_region_containing(from);
 672   RegionIdx_t hr_ind = (RegionIdx_t) hr->hrm_index();
 673   // Is this region in the coarse map?
 674   if (_coarse_map.at(hr_ind)) return true;
 675 
 676   PerRegionTable* prt = find_region_table(hr_ind & _mod_max_fine_entries_mask,




 248 
 249   static void test_fl_mem_size();
 250 };
 251 
 252 PerRegionTable* PerRegionTable::_free_list = NULL;
     // Head of the shared free list of recycled PerRegionTable nodes
     // (populated via PerRegionTable::free / bulk_free).
 253 
     // The four sizing/eviction parameters below are shared by all
     // OtherRegionsTable instances; they are computed lazily by the first
     // constructor call (guarded by the _max_fine_entries == 0 check there).
 254 size_t OtherRegionsTable::_max_fine_entries = 0;
 255 size_t OtherRegionsTable::_mod_max_fine_entries_mask = 0;
 256 size_t OtherRegionsTable::_fine_eviction_stride = 0;
 257 size_t OtherRegionsTable::_fine_eviction_sample_size = 0;
 258 
 259 OtherRegionsTable::OtherRegionsTable(HeapRegion* hr, Mutex* m) :
 260   _g1h(G1CollectedHeap::heap()),
 261   _hr(hr), _m(m),
 262   _coarse_map(G1CollectedHeap::heap()->max_regions(),
 263               false /* in-resource-area */),
 264   _fine_grain_regions(NULL),
 265   _first_all_fine_prts(NULL), _last_all_fine_prts(NULL),
 266   _n_fine_entries(0), _n_coarse_entries(0),
 267   _fine_eviction_start(0),
 268   _sparse_table(hr)

 269 {
 270   typedef PerRegionTable* PerRegionTablePtr;
 271 
 272   if (_max_fine_entries == 0) {
 273     assert(_mod_max_fine_entries_mask == 0, "Both or none.");
 274     size_t max_entries_log = (size_t)log2_long((jlong)G1RSetRegionEntries);
 275     _max_fine_entries = (size_t)1 << max_entries_log;
 276     _mod_max_fine_entries_mask = _max_fine_entries - 1;
 277 
 278     assert(_fine_eviction_sample_size == 0
 279            && _fine_eviction_stride == 0, "All init at same time.");
 280     _fine_eviction_sample_size = MAX2((size_t)4, max_entries_log);
 281     _fine_eviction_stride = _max_fine_entries / _fine_eviction_sample_size;
 282   }
 283 
 284   _fine_grain_regions = NEW_C_HEAP_ARRAY3(PerRegionTablePtr, _max_fine_entries,
 285                         mtGC, CURRENT_PC, AllocFailStrategy::RETURN_NULL);
 286 
 287   if (_fine_grain_regions == NULL) {
 288     vm_exit_out_of_memory(sizeof(void*)*_max_fine_entries, OOM_MALLOC_ERROR,


 487       prev = cur->collision_list_next_addr();
 488       cur = cur->collision_list_next();
 489     }
 490     i = i + _fine_eviction_stride;
 491     if (i >= _n_fine_entries) i = i - _n_fine_entries;
 492   }
 493 
 494   _fine_eviction_start++;
 495 
 496   if (_fine_eviction_start >= _n_fine_entries) {
 497     _fine_eviction_start -= _n_fine_entries;
 498   }
 499 
 500   guarantee(max != NULL, "Since _n_fine_entries > 0");
 501   guarantee(max_prev != NULL, "Since max != NULL.");
 502 
 503   // Set the corresponding coarse bit.
 504   size_t max_hrm_index = (size_t) max->hr()->hrm_index();
 505   if (!_coarse_map.at(max_hrm_index)) {
 506     _coarse_map.at_put(max_hrm_index, true);

 507     _n_coarse_entries++;
 508   }
 509 
 510   // Unsplice.
 511   *max_prev = max->collision_list_next();
 512   Atomic::inc(&_n_coarsenings);
 513   _n_fine_entries--;
 514   return max;
 515 }
 516 
 517 void OtherRegionsTable::scrub(G1CardLiveData* live_data) {
 518   // First eliminate garbage regions from the coarse map.
 519   log_develop_trace(gc, remset, scrub)("Scrubbing region %u:", _hr->hrm_index());
 520 
 521   log_develop_trace(gc, remset, scrub)("   Coarse map: before = " SIZE_FORMAT "...", _n_coarse_entries);
 522   if (_n_coarse_entries > 0) {
 523     live_data->remove_nonlive_regions(&_coarse_map);
 524     _n_coarse_entries = _coarse_map.count_one_bits();

 525   }
 526   log_develop_trace(gc, remset, scrub)("   after = " SIZE_FORMAT ".", _n_coarse_entries);
 527 
 528   // Now do the fine-grained maps.
 529   for (size_t i = 0; i < _max_fine_entries; i++) {
 530     PerRegionTable* cur = _fine_grain_regions[i];
 531     PerRegionTable** prev = &_fine_grain_regions[i];
 532     while (cur != NULL) {
 533       PerRegionTable* nxt = cur->collision_list_next();
 534       // If the entire region is dead, eliminate.
 535       log_develop_trace(gc, remset, scrub)("     For other region %u:", cur->hr()->hrm_index());
 536       if (!live_data->is_region_live(cur->hr()->hrm_index())) {
 537         *prev = nxt;
 538         cur->set_collision_list_next(NULL);
 539         _n_fine_entries--;
 540         log_develop_trace(gc, remset, scrub)("          deleted via region map.");
 541         unlink_from_all(cur);
 542         PerRegionTable::free(cur);
 543       } else {
 544         // Do fine-grain elimination.


 631 size_t OtherRegionsTable::fl_mem_size() {
     // Size, in bytes, of the shared PerRegionTable free list; delegates to
     // the PerRegionTable accounting.
 632   return PerRegionTable::fl_mem_size();
 633 }
 634 
 635 void OtherRegionsTable::clear_fcc() {
     // Drop this region's entries from the G1FromCardCache, keyed by the
     // region's index.
 636   G1FromCardCache::clear(_hr->hrm_index());
 637 }
 638 
 639 void OtherRegionsTable::clear() {
     // Reset every component of this remembered set: the fine-grain PRT
     // table, the sparse table, the coarse bitmap, the entry counts, and this
     // region's from-card-cache entries.
 640   // if there are no entries, skip this step
 641   if (_first_all_fine_prts != NULL) {
 642     guarantee(_first_all_fine_prts != NULL && _last_all_fine_prts != NULL, "just checking");
 643     PerRegionTable::bulk_free(_first_all_fine_prts, _last_all_fine_prts);
 644     memset(_fine_grain_regions, 0, _max_fine_entries * sizeof(_fine_grain_regions[0]));
 645   } else {
 646     guarantee(_first_all_fine_prts == NULL && _last_all_fine_prts == NULL, "just checking");
 647   }
 648 
 649   _first_all_fine_prts = _last_all_fine_prts = NULL;
 650   _sparse_table.clear();
     // _n_coarse_entries counts set coarse bits, so a zero count means the
     // bitmap is already all-clear and the clear() can be skipped (this check
     // replaces the former _coarse_dirty flag).
 651   if (_n_coarse_entries > 0) {
 652     _coarse_map.clear();

 653   }
 654   _n_fine_entries = 0;
 655   _n_coarse_entries = 0;
 656 
 657   clear_fcc();
 658 }
 659 
 660 bool OtherRegionsTable::contains_reference(OopOrNarrowOopStar from) const {
     // Serialize with concurrent remembered-set updates by taking _m (without
     // a safepoint check), then perform the actual lookup under the lock.
 661   // Cast away const in this case.
 662   MutexLockerEx x((Mutex*)_m, Mutex::_no_safepoint_check_flag);
 663   return contains_reference_locked(from);
 664 }
 665 
 666 bool OtherRegionsTable::contains_reference_locked(OopOrNarrowOopStar from) const {
 667   HeapRegion* hr = _g1h->heap_region_containing(from);
 668   RegionIdx_t hr_ind = (RegionIdx_t) hr->hrm_index();
 669   // Is this region in the coarse map?
 670   if (_coarse_map.at(hr_ind)) return true;
 671 
 672   PerRegionTable* prt = find_region_table(hr_ind & _mod_max_fine_entries_mask,


< prev index next >