
src/share/vm/gc/g1/heapRegionRemSet.cpp

rev 11045 : 8155233: Lazy coarse map clear
Summary: Only clear the coarse bitmaps of the remembered sets if they were dirtied.
Reviewed-by:
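
As context for the review, here is a minimal standalone sketch of the lazy-clear idea behind this change, with std::vector<bool> standing in for HotSpot's BitMap. The class and member names below (LazyCoarseMap, mark_coarse, reset) are illustrative only and do not appear in the patch.

#include <cstddef>
#include <vector>

// Stand-in for the coarse bitmap of a remembered set, plus the new dirty flag.
class LazyCoarseMap {
  std::vector<bool> _bits;   // one bit per heap region (stand-in for _coarse_map)
  bool _dirty;               // plays the role of the new _coarse_dirty field

 public:
  explicit LazyCoarseMap(size_t max_regions)
    : _bits(max_regions, false), _dirty(false) {}

  // Coarsening a region sets its bit and records that the map now needs clearing.
  void mark_coarse(size_t region_index) {
    if (!_bits[region_index]) {
      _bits[region_index] = true;
      _dirty = true;
    }
  }

  bool is_coarse(size_t region_index) const { return _bits[region_index]; }

  // Only pay for the O(max_regions) wipe if a bit was ever set, which is what
  // OtherRegionsTable::clear() now does by testing _coarse_dirty first.
  void reset() {
    if (_dirty) {
      _bits.assign(_bits.size(), false);
      _dirty = false;
    }
  }
};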


 248 
 249   static void test_fl_mem_size();
 250 };
 251 
 252 PerRegionTable* PerRegionTable::_free_list = NULL;
 253 
 254 size_t OtherRegionsTable::_max_fine_entries = 0;
 255 size_t OtherRegionsTable::_mod_max_fine_entries_mask = 0;
 256 size_t OtherRegionsTable::_fine_eviction_stride = 0;
 257 size_t OtherRegionsTable::_fine_eviction_sample_size = 0;
 258 
 259 OtherRegionsTable::OtherRegionsTable(HeapRegion* hr, Mutex* m) :
 260   _g1h(G1CollectedHeap::heap()),
 261   _hr(hr), _m(m),
 262   _coarse_map(G1CollectedHeap::heap()->max_regions(),
 263               false /* in-resource-area */),
 264   _fine_grain_regions(NULL),
 265   _first_all_fine_prts(NULL), _last_all_fine_prts(NULL),
 266   _n_fine_entries(0), _n_coarse_entries(0),
 267   _fine_eviction_start(0),
 268   _sparse_table(hr)

 269 {
 270   typedef PerRegionTable* PerRegionTablePtr;
 271 
 272   if (_max_fine_entries == 0) {
 273     assert(_mod_max_fine_entries_mask == 0, "Both or none.");
 274     size_t max_entries_log = (size_t)log2_long((jlong)G1RSetRegionEntries);
 275     _max_fine_entries = (size_t)1 << max_entries_log;
 276     _mod_max_fine_entries_mask = _max_fine_entries - 1;
 277 
 278     assert(_fine_eviction_sample_size == 0
 279            && _fine_eviction_stride == 0, "All init at same time.");
 280     _fine_eviction_sample_size = MAX2((size_t)4, max_entries_log);
 281     _fine_eviction_stride = _max_fine_entries / _fine_eviction_sample_size;
 282   }
 283 
 284   _fine_grain_regions = NEW_C_HEAP_ARRAY3(PerRegionTablePtr, _max_fine_entries,
 285                         mtGC, CURRENT_PC, AllocFailStrategy::RETURN_NULL);
 286 
 287   if (_fine_grain_regions == NULL) {
 288     vm_exit_out_of_memory(sizeof(void*)*_max_fine_entries, OOM_MALLOC_ERROR,


 487       prev = cur->collision_list_next_addr();
 488       cur = cur->collision_list_next();
 489     }
 490     i = i + _fine_eviction_stride;
 491     if (i >= _n_fine_entries) i = i - _n_fine_entries;
 492   }
 493 
 494   _fine_eviction_start++;
 495 
 496   if (_fine_eviction_start >= _n_fine_entries) {
 497     _fine_eviction_start -= _n_fine_entries;
 498   }
 499 
 500   guarantee(max != NULL, "Since _n_fine_entries > 0");
 501   guarantee(max_prev != NULL, "Since max != NULL.");
 502 
 503   // Set the corresponding coarse bit.
 504   size_t max_hrm_index = (size_t) max->hr()->hrm_index();
 505   if (!_coarse_map.at(max_hrm_index)) {
 506     _coarse_map.at_put(max_hrm_index, true);

 507     _n_coarse_entries++;
 508   }
 509 
 510   // Unsplice.
 511   *max_prev = max->collision_list_next();
 512   Atomic::inc(&_n_coarsenings);
 513   _n_fine_entries--;
 514   return max;
 515 }
 516 
 517 void OtherRegionsTable::scrub(G1CardLiveData* live_data) {
 518   // First eliminate garbage regions from the coarse map.
 519   log_develop_trace(gc, remset, scrub)("Scrubbing region %u:", _hr->hrm_index());
 520 
 521   log_develop_trace(gc, remset, scrub)("   Coarse map: before = " SIZE_FORMAT "...", _n_coarse_entries);
 522   live_data->remove_nonlive_regions(&_coarse_map);
 523   _n_coarse_entries = _coarse_map.count_one_bits();

 524   log_develop_trace(gc, remset, scrub)("   after = " SIZE_FORMAT ".", _n_coarse_entries);
 525 
 526   // Now do the fine-grained maps.
 527   for (size_t i = 0; i < _max_fine_entries; i++) {
 528     PerRegionTable* cur = _fine_grain_regions[i];
 529     PerRegionTable** prev = &_fine_grain_regions[i];
 530     while (cur != NULL) {
 531       PerRegionTable* nxt = cur->collision_list_next();
 532       // If the entire region is dead, eliminate.
 533       log_develop_trace(gc, remset, scrub)("     For other region %u:", cur->hr()->hrm_index());
 534       if (!live_data->is_region_live(cur->hr()->hrm_index())) {
 535         *prev = nxt;
 536         cur->set_collision_list_next(NULL);
 537         _n_fine_entries--;
 538         log_develop_trace(gc, remset, scrub)("          deleted via region map.");
 539         unlink_from_all(cur);
 540         PerRegionTable::free(cur);
 541       } else {
 542         // Do fine-grain elimination.
 543         log_develop_trace(gc, remset, scrub)("          occ: before = %4d.", cur->occupied());


 629 size_t OtherRegionsTable::fl_mem_size() {
 630   return PerRegionTable::fl_mem_size();
 631 }
 632 
 633 void OtherRegionsTable::clear_fcc() {
 634   G1FromCardCache::clear(_hr->hrm_index());
 635 }
 636 
 637 void OtherRegionsTable::clear() {
 638   // if there are no entries, skip this step
 639   if (_first_all_fine_prts != NULL) {
 640     guarantee(_first_all_fine_prts != NULL && _last_all_fine_prts != NULL, "just checking");
 641     PerRegionTable::bulk_free(_first_all_fine_prts, _last_all_fine_prts);
 642     memset(_fine_grain_regions, 0, _max_fine_entries * sizeof(_fine_grain_regions[0]));
 643   } else {
 644     guarantee(_first_all_fine_prts == NULL && _last_all_fine_prts == NULL, "just checking");
 645   }
 646 
 647   _first_all_fine_prts = _last_all_fine_prts = NULL;
 648   _sparse_table.clear();

 649   _coarse_map.clear();


 650   _n_fine_entries = 0;
 651   _n_coarse_entries = 0;
 652 
 653   clear_fcc();
 654 }
 655 
 656 bool OtherRegionsTable::contains_reference(OopOrNarrowOopStar from) const {
 657   // Cast away const in this case.
 658   MutexLockerEx x((Mutex*)_m, Mutex::_no_safepoint_check_flag);
 659   return contains_reference_locked(from);
 660 }
 661 
 662 bool OtherRegionsTable::contains_reference_locked(OopOrNarrowOopStar from) const {
 663   HeapRegion* hr = _g1h->heap_region_containing(from);
 664   RegionIdx_t hr_ind = (RegionIdx_t) hr->hrm_index();
 665   // Is this region in the coarse map?
 666   if (_coarse_map.at(hr_ind)) return true;
 667 
 668   PerRegionTable* prt = find_region_table(hr_ind & _mod_max_fine_entries_mask,
 669                                      hr);




 248 
 249   static void test_fl_mem_size();
 250 };
 251 
 252 PerRegionTable* PerRegionTable::_free_list = NULL;
 253 
 254 size_t OtherRegionsTable::_max_fine_entries = 0;
 255 size_t OtherRegionsTable::_mod_max_fine_entries_mask = 0;
 256 size_t OtherRegionsTable::_fine_eviction_stride = 0;
 257 size_t OtherRegionsTable::_fine_eviction_sample_size = 0;
 258 
 259 OtherRegionsTable::OtherRegionsTable(HeapRegion* hr, Mutex* m) :
 260   _g1h(G1CollectedHeap::heap()),
 261   _hr(hr), _m(m),
 262   _coarse_map(G1CollectedHeap::heap()->max_regions(),
 263               false /* in-resource-area */),
 264   _fine_grain_regions(NULL),
 265   _first_all_fine_prts(NULL), _last_all_fine_prts(NULL),
 266   _n_fine_entries(0), _n_coarse_entries(0),
 267   _fine_eviction_start(0),
 268   _sparse_table(hr),
 269   _coarse_dirty(false)
 270 {
 271   typedef PerRegionTable* PerRegionTablePtr;
 272 
 273   if (_max_fine_entries == 0) {
 274     assert(_mod_max_fine_entries_mask == 0, "Both or none.");
 275     size_t max_entries_log = (size_t)log2_long((jlong)G1RSetRegionEntries);
 276     _max_fine_entries = (size_t)1 << max_entries_log;
 277     _mod_max_fine_entries_mask = _max_fine_entries - 1;
 278 
 279     assert(_fine_eviction_sample_size == 0
 280            && _fine_eviction_stride == 0, "All init at same time.");
 281     _fine_eviction_sample_size = MAX2((size_t)4, max_entries_log);
 282     _fine_eviction_stride = _max_fine_entries / _fine_eviction_sample_size;
 283   }
 284 
 285   _fine_grain_regions = NEW_C_HEAP_ARRAY3(PerRegionTablePtr, _max_fine_entries,
 286                         mtGC, CURRENT_PC, AllocFailStrategy::RETURN_NULL);
 287 
 288   if (_fine_grain_regions == NULL) {
 289     vm_exit_out_of_memory(sizeof(void*)*_max_fine_entries, OOM_MALLOC_ERROR,


 488       prev = cur->collision_list_next_addr();
 489       cur = cur->collision_list_next();
 490     }
 491     i = i + _fine_eviction_stride;
 492     if (i >= _n_fine_entries) i = i - _n_fine_entries;
 493   }
 494 
 495   _fine_eviction_start++;
 496 
 497   if (_fine_eviction_start >= _n_fine_entries) {
 498     _fine_eviction_start -= _n_fine_entries;
 499   }
 500 
 501   guarantee(max != NULL, "Since _n_fine_entries > 0");
 502   guarantee(max_prev != NULL, "Since max != NULL.");
 503 
 504   // Set the corresponding coarse bit.
 505   size_t max_hrm_index = (size_t) max->hr()->hrm_index();
 506   if (!_coarse_map.at(max_hrm_index)) {
 507     _coarse_map.at_put(max_hrm_index, true);
 508     _coarse_dirty = true;
 509     _n_coarse_entries++;
 510   }
 511 
 512   // Unsplice.
 513   *max_prev = max->collision_list_next();
 514   Atomic::inc(&_n_coarsenings);
 515   _n_fine_entries--;
 516   return max;
 517 }
 518 
 519 void OtherRegionsTable::scrub(G1CardLiveData* live_data) {
 520   // First eliminate garbage regions from the coarse map.
 521   log_develop_trace(gc, remset, scrub)("Scrubbing region %u:", _hr->hrm_index());
 522 
 523   log_develop_trace(gc, remset, scrub)("   Coarse map: before = " SIZE_FORMAT "...", _n_coarse_entries);
 524   live_data->remove_nonlive_regions(&_coarse_map);
 525   _n_coarse_entries = _coarse_map.count_one_bits();
 526   _coarse_dirty = _n_coarse_entries != 0;
 527   log_develop_trace(gc, remset, scrub)("   after = " SIZE_FORMAT ".", _n_coarse_entries);
 528 
 529   // Now do the fine-grained maps.
 530   for (size_t i = 0; i < _max_fine_entries; i++) {
 531     PerRegionTable* cur = _fine_grain_regions[i];
 532     PerRegionTable** prev = &_fine_grain_regions[i];
 533     while (cur != NULL) {
 534       PerRegionTable* nxt = cur->collision_list_next();
 535       // If the entire region is dead, eliminate.
 536       log_develop_trace(gc, remset, scrub)("     For other region %u:", cur->hr()->hrm_index());
 537       if (!live_data->is_region_live(cur->hr()->hrm_index())) {
 538         *prev = nxt;
 539         cur->set_collision_list_next(NULL);
 540         _n_fine_entries--;
 541         log_develop_trace(gc, remset, scrub)("          deleted via region map.");
 542         unlink_from_all(cur);
 543         PerRegionTable::free(cur);
 544       } else {
 545         // Do fine-grain elimination.
 546         log_develop_trace(gc, remset, scrub)("          occ: before = %4d.", cur->occupied());


 632 size_t OtherRegionsTable::fl_mem_size() {
 633   return PerRegionTable::fl_mem_size();
 634 }
 635 
 636 void OtherRegionsTable::clear_fcc() {
 637   G1FromCardCache::clear(_hr->hrm_index());
 638 }
 639 
 640 void OtherRegionsTable::clear() {
 641   // if there are no entries, skip this step
 642   if (_first_all_fine_prts != NULL) {
 643     guarantee(_first_all_fine_prts != NULL && _last_all_fine_prts != NULL, "just checking");
 644     PerRegionTable::bulk_free(_first_all_fine_prts, _last_all_fine_prts);
 645     memset(_fine_grain_regions, 0, _max_fine_entries * sizeof(_fine_grain_regions[0]));
 646   } else {
 647     guarantee(_first_all_fine_prts == NULL && _last_all_fine_prts == NULL, "just checking");
 648   }
 649 
 650   _first_all_fine_prts = _last_all_fine_prts = NULL;
 651   _sparse_table.clear();
 652   if (_coarse_dirty) {
 653     _coarse_map.clear();
 654     _coarse_dirty = false;
 655   }
 656   _n_fine_entries = 0;
 657   _n_coarse_entries = 0;
 658 
 659   clear_fcc();
 660 }
 661 
 662 bool OtherRegionsTable::contains_reference(OopOrNarrowOopStar from) const {
 663   // Cast away const in this case.
 664   MutexLockerEx x((Mutex*)_m, Mutex::_no_safepoint_check_flag);
 665   return contains_reference_locked(from);
 666 }
 667 
 668 bool OtherRegionsTable::contains_reference_locked(OopOrNarrowOopStar from) const {
 669   HeapRegion* hr = _g1h->heap_region_containing(from);
 670   RegionIdx_t hr_ind = (RegionIdx_t) hr->hrm_index();
 671   // Is this region in the coarse map?
 672   if (_coarse_map.at(hr_ind)) return true;
 673 
 674   PerRegionTable* prt = find_region_table(hr_ind & _mod_max_fine_entries_mask,
 675                                      hr);

