88 void add_reference_work(OopOrNarrowOopStar from, bool par) {
89 // Must make this robust in case "from" is not in "_hr", because of
90 // concurrency.
91
92 if (G1TraceHeapRegionRememberedSet) {
93 gclog_or_tty->print_cr(" PRT::Add_reference_work(" PTR_FORMAT "->" PTR_FORMAT ").",
94 p2i(from),
95 UseCompressedOops
96 ? p2i(oopDesc::load_decode_heap_oop((narrowOop*)from))
97 : p2i(oopDesc::load_decode_heap_oop((oop*)from)));
98 }
99
100 HeapRegion* loc_hr = hr();
101 // If the test below fails, then this table was reused concurrently
102 // with this operation. This is OK, since the old table was coarsened,
103 // and adding a bit to the new table is never incorrect.
104 // If the table used to belong to a continues humongous region and is
105 // now reused for the corresponding start humongous region, we need to
106 // make sure that we detect this. Thus, we call is_in_reserved_raw()
107 // instead of just is_in_reserved() here.
108 if (loc_hr->is_in_reserved_raw(from)) {
109 size_t hw_offset = pointer_delta((HeapWord*)from, loc_hr->bottom());
110 CardIdx_t from_card = (CardIdx_t)
111 hw_offset >> (CardTableModRefBS::card_shift - LogHeapWordSize);
112
113 assert(0 <= from_card && (size_t)from_card < HeapRegion::CardsPerRegion,
114 "Must be in range.");
115 add_card_work(from_card, par);
116 }
117 }
118
119 public:
120
121 HeapRegion* hr() const { return _hr; }
122
123 jint occupied() const {
124 // Overkill, but if we ever need it...
125 // guarantee(_occupied == _bm.count_one_bits(), "Check");
126 return _occupied;
127 }
128
416 : p2i(oopDesc::load_decode_heap_oop((oop*)from)));
417 }
418
419 int from_card = (int)(uintptr_t(from) >> CardTableModRefBS::card_shift);
420
421 if (G1TraceHeapRegionRememberedSet) {
422 gclog_or_tty->print_cr("Table for [" PTR_FORMAT "...): card %d (cache = %d)",
423 p2i(_hr->bottom()), from_card,
424 FromCardCache::at(tid, cur_hrm_ind));
425 }
426
427 if (FromCardCache::contains_or_replace(tid, cur_hrm_ind, from_card)) {
428 if (G1TraceHeapRegionRememberedSet) {
429 gclog_or_tty->print_cr(" from-card cache hit.");
430 }
431 assert(contains_reference(from), "We just added it!");
432 return;
433 }
434
435 // Note that this may be a continued H region.
436 HeapRegion* from_hr = _g1h->heap_region_containing_raw(from);
437 RegionIdx_t from_hrm_ind = (RegionIdx_t) from_hr->hrm_index();
438
439 // If the region is already coarsened, return.
440 if (_coarse_map.at(from_hrm_ind)) {
441 if (G1TraceHeapRegionRememberedSet) {
442 gclog_or_tty->print_cr(" coarse map hit.");
443 }
444 assert(contains_reference(from), "We just added it!");
445 return;
446 }
447
448 // Otherwise find a per-region table to add it to.
449 size_t ind = from_hrm_ind & _mod_max_fine_entries_mask;
450 PerRegionTable* prt = find_region_table(ind, from_hr);
451 if (prt == NULL) {
452 MutexLockerEx x(_m, Mutex::_no_safepoint_check_flag);
453 // Confirm that it's really not there...
454 prt = find_region_table(ind, from_hr);
455 if (prt == NULL) {
456
769 } else {
770 guarantee(_first_all_fine_prts == NULL && _last_all_fine_prts == NULL, "just checking");
771 }
772
773 _first_all_fine_prts = _last_all_fine_prts = NULL;
774 _sparse_table.clear();
775 _coarse_map.clear();
776 _n_fine_entries = 0;
777 _n_coarse_entries = 0;
778
779 clear_fcc();
780 }
781
782 bool OtherRegionsTable::contains_reference(OopOrNarrowOopStar from) const {
783 // Cast away const in this case.
784 MutexLockerEx x((Mutex*)_m, Mutex::_no_safepoint_check_flag);
785 return contains_reference_locked(from);
786 }
787
788 bool OtherRegionsTable::contains_reference_locked(OopOrNarrowOopStar from) const {
789 HeapRegion* hr = _g1h->heap_region_containing_raw(from);
790 RegionIdx_t hr_ind = (RegionIdx_t) hr->hrm_index();
791 // Is this region in the coarse map?
792 if (_coarse_map.at(hr_ind)) return true;
793
794 PerRegionTable* prt = find_region_table(hr_ind & _mod_max_fine_entries_mask,
795 hr);
796 if (prt != NULL) {
797 return prt->contains_reference(from);
798
799 } else {
800 uintptr_t from_card =
801 (uintptr_t(from) >> CardTableModRefBS::card_shift);
802 uintptr_t hr_bot_card_index =
803 uintptr_t(hr->bottom()) >> CardTableModRefBS::card_shift;
804 assert(from_card >= hr_bot_card_index, "Inv");
805 CardIdx_t card_index = from_card - hr_bot_card_index;
806 assert(0 <= card_index && (size_t)card_index < HeapRegion::CardsPerRegion,
807 "Must be in range.");
808 return _sparse_table.contains_card(hr_ind, card_index);
809 }
|
void add_reference_work(OopOrNarrowOopStar from, bool par) {
  // Record the card spanned by "from" in this table's bitmap.
  // Must make this robust in case "from" is not in "_hr", because of
  // concurrency.

  if (G1TraceHeapRegionRememberedSet) {
    gclog_or_tty->print_cr(" PRT::Add_reference_work(" PTR_FORMAT "->" PTR_FORMAT ").",
                           p2i(from),
                           UseCompressedOops
                           ? p2i(oopDesc::load_decode_heap_oop((narrowOop*)from))
                           : p2i(oopDesc::load_decode_heap_oop((oop*)from)));
  }

  HeapRegion* loc_hr = hr();
  // If the test below fails, then this table was reused concurrently
  // with this operation. This is OK, since the old table was coarsened,
  // and adding a bit to the new table is never incorrect.
  // NOTE(review): this comment used to say is_in_reserved_raw() is called
  // here (to detect a continued humongous region's table being reused for
  // the corresponding start humongous region), but the code now calls
  // plain is_in_reserved() — confirm that case is still handled elsewhere.
  if (loc_hr->is_in_reserved(from)) {
    size_t hw_offset = pointer_delta((HeapWord*)from, loc_hr->bottom());
    // Card index within the region; the cast applies to hw_offset before
    // the shift (a cast binds tighter than >>).
    CardIdx_t from_card = (CardIdx_t)
      hw_offset >> (CardTableModRefBS::card_shift - LogHeapWordSize);

    assert(0 <= from_card && (size_t)from_card < HeapRegion::CardsPerRegion,
           "Must be in range.");
    add_card_work(from_card, par);
  }
}
118
119 public:
120
121 HeapRegion* hr() const { return _hr; }
122
123 jint occupied() const {
124 // Overkill, but if we ever need it...
125 // guarantee(_occupied == _bm.count_one_bits(), "Check");
126 return _occupied;
127 }
128
416 : p2i(oopDesc::load_decode_heap_oop((oop*)from)));
417 }
418
419 int from_card = (int)(uintptr_t(from) >> CardTableModRefBS::card_shift);
420
421 if (G1TraceHeapRegionRememberedSet) {
422 gclog_or_tty->print_cr("Table for [" PTR_FORMAT "...): card %d (cache = %d)",
423 p2i(_hr->bottom()), from_card,
424 FromCardCache::at(tid, cur_hrm_ind));
425 }
426
427 if (FromCardCache::contains_or_replace(tid, cur_hrm_ind, from_card)) {
428 if (G1TraceHeapRegionRememberedSet) {
429 gclog_or_tty->print_cr(" from-card cache hit.");
430 }
431 assert(contains_reference(from), "We just added it!");
432 return;
433 }
434
435 // Note that this may be a continued H region.
436 HeapRegion* from_hr = _g1h->heap_region_containing(from);
437 RegionIdx_t from_hrm_ind = (RegionIdx_t) from_hr->hrm_index();
438
439 // If the region is already coarsened, return.
440 if (_coarse_map.at(from_hrm_ind)) {
441 if (G1TraceHeapRegionRememberedSet) {
442 gclog_or_tty->print_cr(" coarse map hit.");
443 }
444 assert(contains_reference(from), "We just added it!");
445 return;
446 }
447
448 // Otherwise find a per-region table to add it to.
449 size_t ind = from_hrm_ind & _mod_max_fine_entries_mask;
450 PerRegionTable* prt = find_region_table(ind, from_hr);
451 if (prt == NULL) {
452 MutexLockerEx x(_m, Mutex::_no_safepoint_check_flag);
453 // Confirm that it's really not there...
454 prt = find_region_table(ind, from_hr);
455 if (prt == NULL) {
456
769 } else {
770 guarantee(_first_all_fine_prts == NULL && _last_all_fine_prts == NULL, "just checking");
771 }
772
773 _first_all_fine_prts = _last_all_fine_prts = NULL;
774 _sparse_table.clear();
775 _coarse_map.clear();
776 _n_fine_entries = 0;
777 _n_coarse_entries = 0;
778
779 clear_fcc();
780 }
781
bool OtherRegionsTable::contains_reference(OopOrNarrowOopStar from) const {
  // Thread-safe query: acquire _m, then delegate to the locked variant.
  // Cast away const in this case — locking is needed for the lookup but
  // does not logically mutate the table.
  MutexLockerEx x((Mutex*)_m, Mutex::_no_safepoint_check_flag);
  return contains_reference_locked(from);
}
787
788 bool OtherRegionsTable::contains_reference_locked(OopOrNarrowOopStar from) const {
789 HeapRegion* hr = _g1h->heap_region_containing(from);
790 RegionIdx_t hr_ind = (RegionIdx_t) hr->hrm_index();
791 // Is this region in the coarse map?
792 if (_coarse_map.at(hr_ind)) return true;
793
794 PerRegionTable* prt = find_region_table(hr_ind & _mod_max_fine_entries_mask,
795 hr);
796 if (prt != NULL) {
797 return prt->contains_reference(from);
798
799 } else {
800 uintptr_t from_card =
801 (uintptr_t(from) >> CardTableModRefBS::card_shift);
802 uintptr_t hr_bot_card_index =
803 uintptr_t(hr->bottom()) >> CardTableModRefBS::card_shift;
804 assert(from_card >= hr_bot_card_index, "Inv");
805 CardIdx_t card_index = from_card - hr_bot_card_index;
806 assert(0 <= card_index && (size_t)card_index < HeapRegion::CardsPerRegion,
807 "Must be in range.");
808 return _sparse_table.contains_card(hr_ind, card_index);
809 }
|