< prev index next >

src/share/vm/gc/g1/heapRegionRemSet.cpp

Print this page




  72     _collision_list_next(NULL), _next(NULL), _prev(NULL)
  73   {}
  74 
  75   void add_card_work(CardIdx_t from_card, bool par) {
       // Sets the bit for "from_card" in this PRT's card bitmap and keeps
       // _occupied in sync: the counter is only bumped on a 0 -> 1 transition.
  76     if (!_bm.at(from_card)) {
  77       if (par) {
         // Concurrent path: presumably par_at_put() returns true only for
         // the one thread that actually flipped the bit, so _occupied is
         // incremented exactly once -- TODO confirm against BitMap API.
  78         if (_bm.par_at_put(from_card, 1)) {
  79           Atomic::inc(&_occupied);
  80         }
  81       } else {
         // Single-threaded path: plain store plus non-atomic counter update.
  82         _bm.at_put(from_card, 1);
  83         _occupied++;
  84       }
  85     }
  86   }
  87 
  88   void add_reference_work(OopOrNarrowOopStar from, bool par) {
  89     // Must make this robust in case "from" is not in "_hr", because of
  90     // concurrency.
  91 
  92     if (G1TraceHeapRegionRememberedSet) {
  93       gclog_or_tty->print_cr("    PRT::Add_reference_work(" PTR_FORMAT "->" PTR_FORMAT ").",
  94                              p2i(from),
  95                              UseCompressedOops
  96                              ? p2i(oopDesc::load_decode_heap_oop((narrowOop*)from))
  97                              : p2i(oopDesc::load_decode_heap_oop((oop*)from)));
  98     }
  99 
 100     HeapRegion* loc_hr = hr();
 101     // If the test below fails, then this table was reused concurrently
 102     // with this operation.  This is OK, since the old table was coarsened,
 103     // and adding a bit to the new table is never incorrect.
 104     // If the table used to belong to a continued humongous region and is
 105     // now reused for the corresponding start humongous region, we need to
 106     // make sure that we detect this. Thus, we call is_in_reserved_raw()
 107     // instead of just is_in_reserved() here.
 108     if (loc_hr->is_in_reserved_raw(from)) {
       // Card index of "from" relative to the region's bottom.
 109       size_t hw_offset = pointer_delta((HeapWord*)from, loc_hr->bottom());
       // NOTE(review): the cast binds tighter than ">>", so hw_offset is
       // narrowed to CardIdx_t *before* the shift.  Safe only while a
       // region's size in HeapWords fits in CardIdx_t -- confirm.
 110       CardIdx_t from_card = (CardIdx_t)
 111           hw_offset >> (CardTableModRefBS::card_shift - LogHeapWordSize);
 112 
 113       assert(0 <= from_card && (size_t)from_card < HeapRegion::CardsPerRegion,
 114              "Must be in range.");
 115       add_card_work(from_card, par);
 116     }
 117   }
 118 


 391 void FromCardCache::print(outputStream* out) {
     // Debug dump: one line per (worker, region) entry of the cache.
 392   for (uint i = 0; i < HeapRegionRemSet::num_par_rem_sets(); i++) {
 393     for (uint j = 0; j < _max_regions; j++) {
 394       out->print_cr("_from_card_cache[%u][%u] = %d.",
 395                     i, j, at(i, j));
 396     }
 397   }
 398 }
 399 #endif
 400 
 401 void FromCardCache::clear(uint region_idx) {
     // Invalidate region_idx's cached card in every per-worker cache slot.
 402   uint num_par_remsets = HeapRegionRemSet::num_par_rem_sets();
 403   for (uint i = 0; i < num_par_remsets; i++) {
 404     set(i, region_idx, InvalidCard);
 405   }
 406 }
 407 
 408 void OtherRegionsTable::add_reference(OopOrNarrowOopStar from, uint tid) {
 409   uint cur_hrm_ind = _hr->hrm_index();
 410 
 411   if (G1TraceHeapRegionRememberedSet) {
 412     gclog_or_tty->print_cr("ORT::add_reference_work(" PTR_FORMAT "->" PTR_FORMAT ").",
 413                                                     p2i(from),
 414                                                     UseCompressedOops
 415                                                     ? p2i(oopDesc::load_decode_heap_oop((narrowOop*)from))
 416                                                     : p2i(oopDesc::load_decode_heap_oop((oop*)from)));
 417   }
 418 
 419   int from_card = (int)(uintptr_t(from) >> CardTableModRefBS::card_shift);
 420 
 421   if (G1TraceHeapRegionRememberedSet) {
 422     gclog_or_tty->print_cr("Table for [" PTR_FORMAT "...): card %d (cache = %d)",
 423                   p2i(_hr->bottom()), from_card,
 424                   FromCardCache::at(tid, cur_hrm_ind));
 425   }
 426 
 427   if (FromCardCache::contains_or_replace(tid, cur_hrm_ind, from_card)) {
 428     if (G1TraceHeapRegionRememberedSet) {
 429       gclog_or_tty->print_cr("  from-card cache hit.");
 430     }
 431     assert(contains_reference(from), "We just added it!");
 432     return;
 433   }
 434 
 435   // Note that this may be a continued H region.
 436   HeapRegion* from_hr = _g1h->heap_region_containing_raw(from);
 437   RegionIdx_t from_hrm_ind = (RegionIdx_t) from_hr->hrm_index();
 438 
 439   // If the region is already coarsened, return.
 440   if (_coarse_map.at(from_hrm_ind)) {
 441     if (G1TraceHeapRegionRememberedSet) {
 442       gclog_or_tty->print_cr("  coarse map hit.");
 443     }
 444     assert(contains_reference(from), "We just added it!");
 445     return;
 446   }
 447 
 448   // Otherwise find a per-region table to add it to.
 449   size_t ind = from_hrm_ind & _mod_max_fine_entries_mask;
 450   PerRegionTable* prt = find_region_table(ind, from_hr);
 451   if (prt == NULL) {
 452     MutexLockerEx x(_m, Mutex::_no_safepoint_check_flag);
 453     // Confirm that it's really not there...
 454     prt = find_region_table(ind, from_hr);
 455     if (prt == NULL) {
 456 
 457       uintptr_t from_hr_bot_card_index =
 458         uintptr_t(from_hr->bottom())
 459           >> CardTableModRefBS::card_shift;
 460       CardIdx_t card_index = from_card - from_hr_bot_card_index;
 461       assert(0 <= card_index && (size_t)card_index < HeapRegion::CardsPerRegion,
 462              "Must be in range.");
 463       if (G1HRRSUseSparseTable &&
 464           _sparse_table.add_card(from_hrm_ind, card_index)) {
 465         if (G1TraceHeapRegionRememberedSet) {
 466           gclog_or_tty->print_cr("   added card to sparse table.");
 467         }
 468         assert(contains_reference_locked(from), "We just added it!");
 469         return;
 470       } else {
 471         if (G1TraceHeapRegionRememberedSet) {
 472           gclog_or_tty->print_cr("   [tid %u] sparse table entry "
 473                         "overflow(f: %d, t: %u)",
 474                         tid, from_hrm_ind, cur_hrm_ind);
 475         }
 476       }
 477 
 478       if (_n_fine_entries == _max_fine_entries) {
 479         prt = delete_region_table();
 480         // There is no need to clear the links to the 'all' list here:
 481         // prt will be reused immediately, i.e. remain in the 'all' list.
 482         prt->init(from_hr, false /* clear_links_to_all_list */);
 483       } else {
 484         prt = PerRegionTable::alloc(from_hr);
 485         link_to_all(prt);
 486       }
 487 
 488       PerRegionTable* first_prt = _fine_grain_regions[ind];
 489       prt->set_collision_list_next(first_prt);
 490       // The assignment into _fine_grain_regions allows the prt to
 491       // start being used concurrently. In addition to
 492       // collision_list_next which must be visible (else concurrent
 493       // parsing of the list, if any, may fail to see other entries),
 494       // the content of the prt must be visible (else for instance
 495       // some mark bits may not yet seem cleared or a 'later' update
 496       // performed by a concurrent thread could be undone when the


 568       cur = cur->collision_list_next();
 569     }
 570     i = i + _fine_eviction_stride;
 571     if (i >= _n_fine_entries) i = i - _n_fine_entries;
 572   }
 573 
 574   _fine_eviction_start++;
 575 
 576   if (_fine_eviction_start >= _n_fine_entries) {
 577     _fine_eviction_start -= _n_fine_entries;
 578   }
 579 
 580   guarantee(max != NULL, "Since _n_fine_entries > 0");
 581   guarantee(max_prev != NULL, "Since max != NULL.");
 582 
 583   // Set the corresponding coarse bit.
 584   size_t max_hrm_index = (size_t) max->hr()->hrm_index();
 585   if (!_coarse_map.at(max_hrm_index)) {
 586     _coarse_map.at_put(max_hrm_index, true);
 587     _n_coarse_entries++;
 588     if (G1TraceHeapRegionRememberedSet) {
 589       gclog_or_tty->print("Coarsened entry in region [" PTR_FORMAT "...] "
 590                  "for region [" PTR_FORMAT "...] (" SIZE_FORMAT " coarse entries).\n",
 591                  p2i(_hr->bottom()),
 592                  p2i(max->hr()->bottom()),
 593                  _n_coarse_entries);
 594     }
 595   }
 596 
 597   // Unsplice.
 598   *max_prev = max->collision_list_next();
 599   Atomic::inc(&_n_coarsenings);
 600   _n_fine_entries--;
 601   return max;
 602 }
 603 
 604 void OtherRegionsTable::scrub(CardTableModRefBS* ctbs,
 605                               BitMap* region_bm, BitMap* card_bm) {
 606   // First eliminate garbage regions from the coarse map.
 607   if (G1RSScrubVerbose) {
 608     gclog_or_tty->print_cr("Scrubbing region %u:", _hr->hrm_index());
 609   }
 610 
 611   assert(_coarse_map.size() == region_bm->size(), "Precondition");
 612   if (G1RSScrubVerbose) {
 613     gclog_or_tty->print("   Coarse map: before = " SIZE_FORMAT "...",
 614                         _n_coarse_entries);
 615   }
     // Keep only coarse entries whose region survives (bit set in
     // region_bm), then recount the survivors.
 616   _coarse_map.set_intersection(*region_bm);
 617   _n_coarse_entries = _coarse_map.count_one_bits();
 618   if (G1RSScrubVerbose) {
 619     gclog_or_tty->print_cr("   after = " SIZE_FORMAT ".", _n_coarse_entries);
 620   }
 621 
 622   // Now do the fine-grained maps.
 623   for (size_t i = 0; i < _max_fine_entries; i++) {
 624     PerRegionTable* cur = _fine_grain_regions[i];
 625     PerRegionTable** prev = &_fine_grain_regions[i];
     // Walk the collision chain; "prev" always points at the link that
     // references "cur", so unlinking is a single store through *prev.
 626     while (cur != NULL) {
 627       PerRegionTable* nxt = cur->collision_list_next();
 628       // If the entire region is dead, eliminate.
 629       if (G1RSScrubVerbose) {
 630         gclog_or_tty->print_cr("     For other region %u:",
 631                                cur->hr()->hrm_index());
 632       }
 633       if (!region_bm->at((size_t) cur->hr()->hrm_index())) {
 634         *prev = nxt;
 635         cur->set_collision_list_next(NULL);
 636         _n_fine_entries--;
 637         if (G1RSScrubVerbose) {
 638           gclog_or_tty->print_cr("          deleted via region map.");
 639         }
 640         unlink_from_all(cur);
 641         PerRegionTable::free(cur);
 642       } else {
 643         // Do fine-grain elimination.
 644         if (G1RSScrubVerbose) {
           // NOTE(review): "%4d" assumes occupied() is int-sized -- confirm.
 645           gclog_or_tty->print("          occ: before = %4d.", cur->occupied());
 646         }
 647         cur->scrub(ctbs, card_bm);
 648         if (G1RSScrubVerbose) {
 649           gclog_or_tty->print_cr("          after = %4d.", cur->occupied());
 650         }
 651         // Did that empty the table completely?
 652         if (cur->occupied() == 0) {
 653           *prev = nxt;
 654           cur->set_collision_list_next(NULL);
 655           _n_fine_entries--;
 656           unlink_from_all(cur);
 657           PerRegionTable::free(cur);
 658         } else {
           // Table kept: advance "prev" to this table's next-link field.
 659           prev = cur->collision_list_next_addr();
 660         }
 661       }
 662       cur = nxt;
 663     }
 664   }
 665   // Since we may have deleted a from_card_cache entry from the RS, clear
 666   // the FCC.
 667   clear_fcc();
 668 }
 669 
 670 bool OtherRegionsTable::occupancy_less_or_equal_than(size_t limit) const {


 826   if (_iter_state != Unclaimed) return false;
 827   jint res = Atomic::cmpxchg(Claimed, (jint*)(&_iter_state), Unclaimed);
 828   return (res == Unclaimed);
 829 }
 830 
 831 void HeapRegionRemSet::set_iter_complete() {
     // Marks the (previously claimed) iteration over this remset as done.
 832   _iter_state = Complete;
 833 }
 834 
 835 bool HeapRegionRemSet::iter_is_complete() {
     // True once _iter_state has been advanced to Complete.
 836   return _iter_state == Complete;
 837 }
 838 
 839 #ifndef PRODUCT
 840 void HeapRegionRemSet::print() {
     // Debug-only (!PRODUCT) dump of every card in this remset; afterwards
     // cross-checks that the iterator yielded exactly occupied() cards.
 841   HeapRegionRemSetIterator iter(this);
 842   size_t card_index;
 843   while (iter.has_next(card_index)) {
 844     HeapWord* card_start =
 845       G1CollectedHeap::heap()->bot_shared()->address_for_index(card_index);
 846     gclog_or_tty->print_cr("  Card " PTR_FORMAT, p2i(card_start));
 847   }
     // Print the discrepancy breakdown (coarse vs fine) before failing below.
 848   if (iter.n_yielded() != occupied()) {
 849     gclog_or_tty->print_cr("Yielded disagrees with occupied:");
 850     gclog_or_tty->print_cr("  " SIZE_FORMAT_W(6) " yielded (" SIZE_FORMAT_W(6)
 851                   " coarse, " SIZE_FORMAT_W(6) " fine).",
 852                   iter.n_yielded(),
 853                   iter.n_yielded_coarse(), iter.n_yielded_fine());
 854     gclog_or_tty->print_cr("  " SIZE_FORMAT_W(6) " occ     (" SIZE_FORMAT_W(6)
 855                            " coarse, " SIZE_FORMAT_W(6) " fine).",
 856                   occupied(), occ_coarse(), occ_fine());
 857   }
 858   guarantee(iter.n_yielded() == occupied(),
 859             "We should have yielded all the represented cards.");
 860 }
 861 #endif
 862 
 863 void HeapRegionRemSet::cleanup() {
     // Static cleanup hook: delegates to SparsePRT::cleanup_all().
 864   SparsePRT::cleanup_all();
 865 }
 866 
 867 void HeapRegionRemSet::clear() {
     // Take _m (no safepoint check), then do the real work in clear_locked().
 868   MutexLockerEx x(&_m, Mutex::_no_safepoint_check_flag);
 869   clear_locked();
 870 }
 871 
 872 void HeapRegionRemSet::clear_locked() {
 873   _code_roots.clear();
 874   _other_regions.clear();


1116 
1117   hrrs->add_reference((OopOrNarrowOopStar)hr2_start);
1118   hrrs->add_reference((OopOrNarrowOopStar)hr2_mid);
1119   hrrs->add_reference((OopOrNarrowOopStar)hr2_last);
1120 
1121   hrrs->add_reference((OopOrNarrowOopStar)hr3_start);
1122   hrrs->add_reference((OopOrNarrowOopStar)hr3_mid);
1123   hrrs->add_reference((OopOrNarrowOopStar)hr3_last);
1124 
1125   // Now cause a coarsening.
1126   hrrs->add_reference((OopOrNarrowOopStar)hr4->bottom());
1127   hrrs->add_reference((OopOrNarrowOopStar)hr5->bottom());
1128 
1129   // Now, does iteration yield these three?
1130   HeapRegionRemSetIterator iter(hrrs);
1131   size_t sum = 0;
1132   size_t card_index;
1133   while (iter.has_next(card_index)) {
1134     HeapWord* card_start =
1135       G1CollectedHeap::heap()->bot_shared()->address_for_index(card_index);
1136     gclog_or_tty->print_cr("  Card " PTR_FORMAT ".", p2i(card_start));
1137     sum++;
1138   }
1139   guarantee(sum == 11 - 3 + 2048, "Failure");
1140   guarantee(sum == hrrs->occupied(), "Failure");
1141 }
1142 #endif


  72     _collision_list_next(NULL), _next(NULL), _prev(NULL)
  73   {}
  74 
  75   void add_card_work(CardIdx_t from_card, bool par) {
       // Sets the bit for "from_card" in the card bitmap; _occupied is only
       // bumped on a 0 -> 1 transition (atomically on the "par" path, where
       // presumably only the thread whose par_at_put() flipped the bit
       // increments it -- TODO confirm against BitMap API).
  76     if (!_bm.at(from_card)) {
  77       if (par) {
  78         if (_bm.par_at_put(from_card, 1)) {
  79           Atomic::inc(&_occupied);
  80         }
  81       } else {
  82         _bm.at_put(from_card, 1);
  83         _occupied++;
  84       }
  85     }
  86   }
  87 
  88   void add_reference_work(OopOrNarrowOopStar from, bool par) {
  89     // Must make this robust in case "from" is not in "_hr", because of
  90     // concurrency.
  91 
  92     log_develop(gc, remset)("    PRT::Add_reference_work(" PTR_FORMAT "->" PTR_FORMAT ").",

  93                             p2i(from),
  94                             UseCompressedOops
  95                             ? p2i(oopDesc::load_decode_heap_oop((narrowOop*)from))
  96                             : p2i(oopDesc::load_decode_heap_oop((oop*)from)));

  97 
  98     HeapRegion* loc_hr = hr();
  99     // If the test below fails, then this table was reused concurrently
 100     // with this operation.  This is OK, since the old table was coarsened,
 101     // and adding a bit to the new table is never incorrect.
 102     // If the table used to belong to a continued humongous region and is
 103     // now reused for the corresponding start humongous region, we need to
 104     // make sure that we detect this. Thus, we call is_in_reserved_raw()
 105     // instead of just is_in_reserved() here.
 106     if (loc_hr->is_in_reserved_raw(from)) {
       // Card index of "from" relative to the region's bottom.
 107       size_t hw_offset = pointer_delta((HeapWord*)from, loc_hr->bottom());
       // NOTE(review): the cast binds tighter than ">>", so hw_offset is
       // narrowed to CardIdx_t *before* the shift -- confirm this is safe
       // for the maximum region size in HeapWords.
 108       CardIdx_t from_card = (CardIdx_t)
 109           hw_offset >> (CardTableModRefBS::card_shift - LogHeapWordSize);
 110 
 111       assert(0 <= from_card && (size_t)from_card < HeapRegion::CardsPerRegion,
 112              "Must be in range.");
 113       add_card_work(from_card, par);
 114     }
 115   }
 116 


 389 void FromCardCache::print(outputStream* out) {
     // Debug dump: one line per (worker, region) entry of the cache.
 390   for (uint i = 0; i < HeapRegionRemSet::num_par_rem_sets(); i++) {
 391     for (uint j = 0; j < _max_regions; j++) {
 392       out->print_cr("_from_card_cache[%u][%u] = %d.",
 393                     i, j, at(i, j));
 394     }
 395   }
 396 }
 397 #endif
 398 
 399 void FromCardCache::clear(uint region_idx) {
     // Invalidate region_idx's cached card in every per-worker cache slot.
 400   uint num_par_remsets = HeapRegionRemSet::num_par_rem_sets();
 401   for (uint i = 0; i < num_par_remsets; i++) {
 402     set(i, region_idx, InvalidCard);
 403   }
 404 }
 405 
 406 void OtherRegionsTable::add_reference(OopOrNarrowOopStar from, uint tid) {
 407   uint cur_hrm_ind = _hr->hrm_index();
 408 
 409   log_develop(gc, remset)("ORT::add_reference_work(" PTR_FORMAT "->" PTR_FORMAT ").",

 410                                                    p2i(from),
 411                                                    UseCompressedOops
 412                                                    ? p2i(oopDesc::load_decode_heap_oop((narrowOop*)from))
 413                                                    : p2i(oopDesc::load_decode_heap_oop((oop*)from)));

 414 
 415   int from_card = (int)(uintptr_t(from) >> CardTableModRefBS::card_shift);
 416 
 417   log_develop(gc, remset)("Table for [" PTR_FORMAT "...): card %d (cache = %d)",

 418                           p2i(_hr->bottom()), from_card,
 419                           FromCardCache::at(tid, cur_hrm_ind));

 420 
 421   if (FromCardCache::contains_or_replace(tid, cur_hrm_ind, from_card)) {
 422     log_develop(gc, remset)("  from-card cache hit.");


 423     assert(contains_reference(from), "We just added it!");
 424     return;
 425   }
 426 
 427   // Note that this may be a continued H region.
 428   HeapRegion* from_hr = _g1h->heap_region_containing_raw(from);
 429   RegionIdx_t from_hrm_ind = (RegionIdx_t) from_hr->hrm_index();
 430 
 431   // If the region is already coarsened, return.
 432   if (_coarse_map.at(from_hrm_ind)) {
 433     log_develop(gc, remset)("  coarse map hit.");


 434     assert(contains_reference(from), "We just added it!");
 435     return;
 436   }
 437 
 438   // Otherwise find a per-region table to add it to.
 439   size_t ind = from_hrm_ind & _mod_max_fine_entries_mask;
 440   PerRegionTable* prt = find_region_table(ind, from_hr);
 441   if (prt == NULL) {
 442     MutexLockerEx x(_m, Mutex::_no_safepoint_check_flag);
 443     // Confirm that it's really not there...
 444     prt = find_region_table(ind, from_hr);
 445     if (prt == NULL) {
 446 
 447       uintptr_t from_hr_bot_card_index =
 448         uintptr_t(from_hr->bottom())
 449           >> CardTableModRefBS::card_shift;
 450       CardIdx_t card_index = from_card - from_hr_bot_card_index;
 451       assert(0 <= card_index && (size_t)card_index < HeapRegion::CardsPerRegion,
 452              "Must be in range.");
 453       if (G1HRRSUseSparseTable &&
 454           _sparse_table.add_card(from_hrm_ind, card_index)) {
 455         log_develop(gc, remset)("   added card to sparse table.");


 456         assert(contains_reference_locked(from), "We just added it!");
 457         return;
 458       } else {
 459         log_develop(gc, remset)("   [tid %u] sparse table entry overflow(f: %d, t: %u)",


 460                                 tid, from_hrm_ind, cur_hrm_ind);
 461       }

 462 
 463       if (_n_fine_entries == _max_fine_entries) {
 464         prt = delete_region_table();
 465         // There is no need to clear the links to the 'all' list here:
 466         // prt will be reused immediately, i.e. remain in the 'all' list.
 467         prt->init(from_hr, false /* clear_links_to_all_list */);
 468       } else {
 469         prt = PerRegionTable::alloc(from_hr);
 470         link_to_all(prt);
 471       }
 472 
 473       PerRegionTable* first_prt = _fine_grain_regions[ind];
 474       prt->set_collision_list_next(first_prt);
 475       // The assignment into _fine_grain_regions allows the prt to
 476       // start being used concurrently. In addition to
 477       // collision_list_next which must be visible (else concurrent
 478       // parsing of the list, if any, may fail to see other entries),
 479       // the content of the prt must be visible (else for instance
 480       // some mark bits may not yet seem cleared or a 'later' update
 481       // performed by a concurrent thread could be undone when the


 553       cur = cur->collision_list_next();
 554     }
 555     i = i + _fine_eviction_stride;
 556     if (i >= _n_fine_entries) i = i - _n_fine_entries;
 557   }
 558 
 559   _fine_eviction_start++;
 560 
 561   if (_fine_eviction_start >= _n_fine_entries) {
 562     _fine_eviction_start -= _n_fine_entries;
 563   }
 564 
 565   guarantee(max != NULL, "Since _n_fine_entries > 0");
 566   guarantee(max_prev != NULL, "Since max != NULL.");
 567 
 568   // Set the corresponding coarse bit.
 569   size_t max_hrm_index = (size_t) max->hr()->hrm_index();
 570   if (!_coarse_map.at(max_hrm_index)) {
 571     _coarse_map.at_put(max_hrm_index, true);
 572     _n_coarse_entries++;
 573       log_develop(gc, remset)("Coarsened entry in region [" PTR_FORMAT "...] for region [" PTR_FORMAT "...] (" SIZE_FORMAT " coarse entries)",
 574                               p2i(_hr->bottom()), p2i(max->hr()->bottom()), _n_coarse_entries);





 575   }
 576 
 577   // Unsplice.
 578   *max_prev = max->collision_list_next();
 579   Atomic::inc(&_n_coarsenings);
 580   _n_fine_entries--;
 581   return max;
 582 }
 583 
 584 void OtherRegionsTable::scrub(CardTableModRefBS* ctbs,
 585                               BitMap* region_bm, BitMap* card_bm) {
 586   // First eliminate garbage regions from the coarse map.
 587   log_develop(gc, remset, scrub)("Scrubbing region %u:", _hr->hrm_index());


 588 
 589   assert(_coarse_map.size() == region_bm->size(), "Precondition");
 590   log_develop(gc, remset, scrub)("   Coarse map: before = " SIZE_FORMAT "...", _n_coarse_entries);



     // Keep only coarse entries whose region survives (bit set in
     // region_bm), then recount the survivors.
 591   _coarse_map.set_intersection(*region_bm);
 592   _n_coarse_entries = _coarse_map.count_one_bits();
 593   log_develop(gc, remset, scrub)("   after = " SIZE_FORMAT ".", _n_coarse_entries);


 594 
 595   // Now do the fine-grained maps.
 596   for (size_t i = 0; i < _max_fine_entries; i++) {
 597     PerRegionTable* cur = _fine_grain_regions[i];
 598     PerRegionTable** prev = &_fine_grain_regions[i];
     // Walk the collision chain; "prev" always points at the link that
     // references "cur", so unlinking is a single store through *prev.
 599     while (cur != NULL) {
 600       PerRegionTable* nxt = cur->collision_list_next();
 601       // If the entire region is dead, eliminate.
 602       log_develop(gc, remset, scrub)("     For other region %u:", cur->hr()->hrm_index());



 603       if (!region_bm->at((size_t) cur->hr()->hrm_index())) {
 604         *prev = nxt;
 605         cur->set_collision_list_next(NULL);
 606         _n_fine_entries--;
 607         log_develop(gc, remset, scrub)("          deleted via region map.");


 608         unlink_from_all(cur);
 609         PerRegionTable::free(cur);
 610       } else {
 611         // Do fine-grain elimination.
         // NOTE(review): "%4d" assumes occupied() is int-sized -- confirm.
 612         log_develop(gc, remset, scrub)("          occ: before = %4d.", cur->occupied());


 613         cur->scrub(ctbs, card_bm);
 614         log_develop(gc, remset, scrub)("          after = %4d.", cur->occupied());


 615         // Did that empty the table completely?
 616         if (cur->occupied() == 0) {
 617           *prev = nxt;
 618           cur->set_collision_list_next(NULL);
 619           _n_fine_entries--;
 620           unlink_from_all(cur);
 621           PerRegionTable::free(cur);
 622         } else {
           // Table kept: advance "prev" to this table's next-link field.
 623           prev = cur->collision_list_next_addr();
 624         }
 625       }
 626       cur = nxt;
 627     }
 628   }
 629   // Since we may have deleted a from_card_cache entry from the RS, clear
 630   // the FCC.
 631   clear_fcc();
 632 }
 633 
 634 bool OtherRegionsTable::occupancy_less_or_equal_than(size_t limit) const {


 790   if (_iter_state != Unclaimed) return false;
 791   jint res = Atomic::cmpxchg(Claimed, (jint*)(&_iter_state), Unclaimed);
 792   return (res == Unclaimed);
 793 }
 794 
 795 void HeapRegionRemSet::set_iter_complete() {
     // Marks the (previously claimed) iteration over this remset as done.
 796   _iter_state = Complete;
 797 }
 798 
 799 bool HeapRegionRemSet::iter_is_complete() {
     // True once _iter_state has been advanced to Complete.
 800   return _iter_state == Complete;
 801 }
 802 
 803 #ifndef PRODUCT
 804 void HeapRegionRemSet::print() {
     // Debug-only (!PRODUCT) dump of every card in this remset; afterwards
     // cross-checks that the iterator yielded exactly occupied() cards.
 805   HeapRegionRemSetIterator iter(this);
 806   size_t card_index;
 807   while (iter.has_next(card_index)) {
 808     HeapWord* card_start =
 809       G1CollectedHeap::heap()->bot_shared()->address_for_index(card_index);
 810     tty->print_cr("  Card " PTR_FORMAT, p2i(card_start));
 811   }
     // Print the discrepancy breakdown (coarse vs fine) before failing below.
 812   if (iter.n_yielded() != occupied()) {
 813     tty->print_cr("Yielded disagrees with occupied:");
 814     tty->print_cr("  " SIZE_FORMAT_W(6) " yielded (" SIZE_FORMAT_W(6)
 815                   " coarse, " SIZE_FORMAT_W(6) " fine).",
 816                   iter.n_yielded(),
 817                   iter.n_yielded_coarse(), iter.n_yielded_fine());
 818     tty->print_cr("  " SIZE_FORMAT_W(6) " occ     (" SIZE_FORMAT_W(6)
 819                            " coarse, " SIZE_FORMAT_W(6) " fine).",
 820                   occupied(), occ_coarse(), occ_fine());
 821   }
 822   guarantee(iter.n_yielded() == occupied(),
 823             "We should have yielded all the represented cards.");
 824 }
 825 #endif
 826 
 827 void HeapRegionRemSet::cleanup() {
     // Static cleanup hook: delegates to SparsePRT::cleanup_all().
 828   SparsePRT::cleanup_all();
 829 }
 830 
 831 void HeapRegionRemSet::clear() {
     // Take _m (no safepoint check), then do the real work in clear_locked().
 832   MutexLockerEx x(&_m, Mutex::_no_safepoint_check_flag);
 833   clear_locked();
 834 }
 835 
 836 void HeapRegionRemSet::clear_locked() {
 837   _code_roots.clear();
 838   _other_regions.clear();


1080 
1081   hrrs->add_reference((OopOrNarrowOopStar)hr2_start);
1082   hrrs->add_reference((OopOrNarrowOopStar)hr2_mid);
1083   hrrs->add_reference((OopOrNarrowOopStar)hr2_last);
1084 
1085   hrrs->add_reference((OopOrNarrowOopStar)hr3_start);
1086   hrrs->add_reference((OopOrNarrowOopStar)hr3_mid);
1087   hrrs->add_reference((OopOrNarrowOopStar)hr3_last);
1088 
1089   // Now cause a coarsening.
1090   hrrs->add_reference((OopOrNarrowOopStar)hr4->bottom());
1091   hrrs->add_reference((OopOrNarrowOopStar)hr5->bottom());
1092 
1093   // Now, does iteration yield these three?
1094   HeapRegionRemSetIterator iter(hrrs);
1095   size_t sum = 0;
1096   size_t card_index;
1097   while (iter.has_next(card_index)) {
1098     HeapWord* card_start =
1099       G1CollectedHeap::heap()->bot_shared()->address_for_index(card_index);
1100     tty->print_cr("  Card " PTR_FORMAT ".", p2i(card_start));
1101     sum++;
1102   }
1103   guarantee(sum == 11 - 3 + 2048, "Failure");
1104   guarantee(sum == hrrs->occupied(), "Failure");
1105 }
1106 #endif
< prev index next >