1031 // Now push any code roots we need to retain
1032 assert(to_be_retained.is_empty() || hr()->evacuation_failed(),
1033 "Retained nmethod list must be empty or "
1034 "evacuation of this region failed");
1035
1036 while (to_be_retained.is_nonempty()) {
1037 nmethod* nm = to_be_retained.pop();
1038 assert(nm != NULL, "sanity");
1039 add_strong_code_root(nm);
1040 }
1041 }
1042
// Applies the given closure to every nmethod recorded in this region's
// strong code roots list.
void HeapRegionRemSet::strong_code_roots_do(CodeBlobClosure* blk) const {
  _code_roots.nmethods_do(blk);
}
1046
// Returns the memory footprint, in bytes, of the strong code roots list.
size_t HeapRegionRemSet::strong_code_roots_mem_size() {
  return _code_roots.mem_size();
}
1050
1051 //-------------------- Iteration --------------------
1052
// Constructs an iterator over all cards of the given remembered set.
// Iteration drains the sparse table first, then the fine-grain PRTs,
// then the coarse bitmap (see has_next()).
HeapRegionRemSetIterator:: HeapRegionRemSetIterator(HeapRegionRemSet* hrrs) :
  _hrrs(hrrs),
  _g1h(G1CollectedHeap::heap()),
  _coarse_map(&hrrs->_other_regions._coarse_map),
  _fine_grain_regions(hrrs->_other_regions._fine_grain_regions),
  _bosa(hrrs->bosa()),
  _is(Sparse),
  // Set these values so that we increment to the first region.
  _coarse_cur_region_index(-1),
  _coarse_cur_region_cur_card(HeapRegion::CardsPerRegion-1),
  _cur_region_cur_card(0),
  _fine_array_index(-1),
  _fine_cur_prt(NULL),
  _n_yielded_coarse(0),
  _n_yielded_fine(0),
  _n_yielded_sparse(0),
  _sparse_iter(&hrrs->_other_regions._sparse_table) {}
1070
// Yields the next card index from the coarse bitmap, advancing to the
// next coarsened region when the current one is exhausted.
// Returns false when no further coarse cards remain.
bool HeapRegionRemSetIterator::coarse_has_next(size_t& card_index) {
  if (_hrrs->_other_regions._n_coarse_entries == 0) return false;
  // Go to the next card.
  _coarse_cur_region_cur_card++;
  // Was that the last card in the current region?
  if (_coarse_cur_region_cur_card == HeapRegion::CardsPerRegion) {
    // Yes: find the next region. This may leave _coarse_cur_region_index
    // set to the last index, in which case there are no more coarse
    // regions.
    _coarse_cur_region_index =
      (int) _coarse_map->get_next_one_offset(_coarse_cur_region_index + 1);
    if ((size_t)_coarse_cur_region_index < _coarse_map->size()) {
      _coarse_cur_region_cur_card = 0;
      HeapWord* r_bot =
        _g1h->region_at((uint) _coarse_cur_region_index)->bottom();
      // Translate the region's bottom address into a global card index base.
      _cur_region_card_offset = _bosa->index_for(r_bot);
    } else {
      return false;
    }
  }
  // If we didn't return false above, then we can yield a card.
  card_index = _cur_region_card_offset + _coarse_cur_region_cur_card;
  return true;
}
1095
1096 void HeapRegionRemSetIterator::fine_find_next_non_null_prt() {
1097 // Otherwise, find the next bucket list in the array.
1098 _fine_array_index++;
1099 while (_fine_array_index < (int) OtherRegionsTable::_max_fine_entries) {
1100 _fine_cur_prt = _fine_grain_regions[_fine_array_index];
1101 if (_fine_cur_prt != NULL) return;
1102 else _fine_array_index++;
1103 }
1104 assert(_fine_cur_prt == NULL, "Loop post");
1105 }
1106
// Yields the next card index from the fine-grain PRTs. Advances within
// the current PRT's bitmap first, then along the PRT collision list,
// then to the next non-empty bucket of the fine-grain table.
// Returns false when all fine-grain cards have been yielded.
bool HeapRegionRemSetIterator::fine_has_next(size_t& card_index) {
  if (fine_has_next()) {
    _cur_region_cur_card =
      _fine_cur_prt->_bm.get_next_one_offset(_cur_region_cur_card + 1);
  }
  while (!fine_has_next()) {
    if (_cur_region_cur_card == (size_t) HeapRegion::CardsPerRegion) {
      // Current PRT's bitmap is exhausted: move along its collision list.
      _cur_region_cur_card = 0;
      _fine_cur_prt = _fine_cur_prt->collision_list_next();
    }
    if (_fine_cur_prt == NULL) {
      // Collision list exhausted: find the next non-empty table bucket.
      fine_find_next_non_null_prt();
      if (_fine_cur_prt == NULL) return false;
    }
    assert(_fine_cur_prt != NULL && _cur_region_cur_card == 0,
           "inv.");
    // Recompute the card offset base for the new PRT's region, and find
    // the first set bit of its bitmap.
    HeapWord* r_bot =
      _fine_cur_prt->hr()->bottom();
    _cur_region_card_offset = _bosa->index_for(r_bot);
    _cur_region_cur_card = _fine_cur_prt->_bm.get_next_one_offset(0);
  }
  assert(fine_has_next(), "Or else we exited the loop via the return.");
  card_index = _cur_region_card_offset + _cur_region_cur_card;
  return true;
}
1132
1133 bool HeapRegionRemSetIterator::fine_has_next() {
1134 return
1135 _fine_cur_prt != NULL &&
1136 _cur_region_cur_card < HeapRegion::CardsPerRegion;
1137 }
1138
// Yields the next card in the remembered set, draining the sparse
// table, then the fine-grain PRTs, then the coarse bitmap. Each case
// deliberately falls through to the next once its source is exhausted.
// Returns false when every card has been yielded.
bool HeapRegionRemSetIterator::has_next(size_t& card_index) {
  switch (_is) {
  case Sparse:
    if (_sparse_iter.has_next(card_index)) {
      _n_yielded_sparse++;
      return true;
    }
    // Otherwise, deliberate fall-through
    _is = Fine;
  case Fine:
    if (fine_has_next(card_index)) {
      _n_yielded_fine++;
      return true;
    }
    // Otherwise, deliberate fall-through
    _is = Coarse;
  case Coarse:
    if (coarse_has_next(card_index)) {
      _n_yielded_coarse++;
      return true;
    }
    // Otherwise...
    break;
  }
  // In the single-threaded case every card must be yielded exactly once;
  // with parallel GC threads this iterator may only see a subset.
  assert(ParallelGCThreads > 1 ||
         n_yielded() == _hrrs->occupied(),
         "Should have yielded all the cards in the rem set "
         "(in the non-par case).");
  return false;
|
1031 // Now push any code roots we need to retain
1032 assert(to_be_retained.is_empty() || hr()->evacuation_failed(),
1033 "Retained nmethod list must be empty or "
1034 "evacuation of this region failed");
1035
1036 while (to_be_retained.is_nonempty()) {
1037 nmethod* nm = to_be_retained.pop();
1038 assert(nm != NULL, "sanity");
1039 add_strong_code_root(nm);
1040 }
1041 }
1042
// Applies the given closure to every nmethod recorded in this region's
// strong code roots list.
void HeapRegionRemSet::strong_code_roots_do(CodeBlobClosure* blk) const {
  _code_roots.nmethods_do(blk);
}
1046
// Returns the memory footprint, in bytes, of the strong code roots list.
size_t HeapRegionRemSet::strong_code_roots_mem_size() {
  return _code_roots.mem_size();
}
1050
// Constructs an iterator over all cards of the given remembered set.
// Iteration drains the sparse table first, then the fine-grain PRTs,
// then the coarse bitmap (see has_next()).
HeapRegionRemSetIterator:: HeapRegionRemSetIterator(HeapRegionRemSet* hrrs) :
  _hrrs(hrrs),
  _g1h(G1CollectedHeap::heap()),
  _coarse_map(&hrrs->_other_regions._coarse_map),
  _bosa(hrrs->bosa()),
  _is(Sparse),
  // Set these values so that we increment to the first region.
  _coarse_cur_region_index(-1),
  _coarse_cur_region_cur_card(HeapRegion::CardsPerRegion-1),
  // CardsPerRegion is the "exhausted" sentinel for the fine-grain scan;
  // has_next() positions the scan on the first PRT via switch_to_prt().
  _cur_card_in_prt(HeapRegion::CardsPerRegion),
  _fine_cur_prt(NULL),
  _n_yielded_coarse(0),
  _n_yielded_fine(0),
  _n_yielded_sparse(0),
  _sparse_iter(&hrrs->_other_regions._sparse_table) {}
1066
// Yields the next card index from the coarse bitmap, advancing to the
// next coarsened region when the current one is exhausted.
// Returns false when no further coarse cards remain.
bool HeapRegionRemSetIterator::coarse_has_next(size_t& card_index) {
  if (_hrrs->_other_regions._n_coarse_entries == 0) return false;
  // Go to the next card.
  _coarse_cur_region_cur_card++;
  // Was that the last card in the current region?
  if (_coarse_cur_region_cur_card == HeapRegion::CardsPerRegion) {
    // Yes: find the next region. This may leave _coarse_cur_region_index
    // set to the last index, in which case there are no more coarse
    // regions.
    _coarse_cur_region_index =
      (int) _coarse_map->get_next_one_offset(_coarse_cur_region_index + 1);
    if ((size_t)_coarse_cur_region_index < _coarse_map->size()) {
      _coarse_cur_region_cur_card = 0;
      HeapWord* r_bot =
        _g1h->region_at((uint) _coarse_cur_region_index)->bottom();
      // Translate the region's bottom address into a global card index base.
      _cur_region_card_offset = _bosa->index_for(r_bot);
    } else {
      return false;
    }
  }
  // If we didn't return false above, then we can yield a card.
  card_index = _cur_region_card_offset + _coarse_cur_region_cur_card;
  return true;
}
1091
// Yields the next card index from the fine-grain PRTs. Advances within
// the current PRT's bitmap, moving to the next PRT in the all-fine list
// when the current one is exhausted.
// Returns false when all fine-grain cards have been yielded.
bool HeapRegionRemSetIterator::fine_has_next(size_t& card_index) {
  if (fine_has_next()) {
    _cur_card_in_prt =
      _fine_cur_prt->_bm.get_next_one_offset(_cur_card_in_prt + 1);
  }
  if (_cur_card_in_prt == HeapRegion::CardsPerRegion) {
    // _fine_cur_prt may still be NULL when there are no PRTs at all in
    // the remembered set.
    if (_fine_cur_prt == NULL || _fine_cur_prt->next() == NULL) {
      return false;
    }
    PerRegionTable* next_prt = _fine_cur_prt->next();
    switch_to_prt(next_prt);
    // switch_to_prt() resets _cur_card_in_prt to (size_t)-1, so this
    // scan starts at bit 0 of the new PRT's bitmap.
    _cur_card_in_prt = _fine_cur_prt->_bm.get_next_one_offset(_cur_card_in_prt + 1);
  }

  card_index = _cur_region_card_offset + _cur_card_in_prt;
  guarantee(_cur_card_in_prt < HeapRegion::CardsPerRegion,
            err_msg("Card index "SIZE_FORMAT" must be within the region", _cur_card_in_prt));
  return true;
}
1113
1114 bool HeapRegionRemSetIterator::fine_has_next() {
1115 return _cur_card_in_prt != HeapRegion::CardsPerRegion;
1116 }
1117
// Makes the given PRT the current one for the fine-grain scan:
// recomputes the global card offset base from the PRT's region and
// resets the in-PRT card cursor.
void HeapRegionRemSetIterator::switch_to_prt(PerRegionTable* prt) {
  assert(prt != NULL, "Cannot switch to NULL prt");
  _fine_cur_prt = prt;

  HeapWord* r_bot = _fine_cur_prt->hr()->bottom();
  _cur_region_card_offset = _bosa->index_for(r_bot);

  // The bitmap scan for the PRT always scans from _cur_card_in_prt + 1.
  // To avoid special-casing this start case, and not miss the first bitmap
  // entry, initialize _cur_card_in_prt with -1 instead of 0.
  _cur_card_in_prt = (size_t)-1;
}
1130
// Yields the next card in the remembered set, draining the sparse
// table, then the fine-grain PRTs, then the coarse bitmap. Each case
// deliberately falls through to the next once its source is exhausted.
// Returns false when every card has been yielded.
bool HeapRegionRemSetIterator::has_next(size_t& card_index) {
  switch (_is) {
  case Sparse: {
    if (_sparse_iter.has_next(card_index)) {
      _n_yielded_sparse++;
      return true;
    }
    // Otherwise, deliberate fall-through
    _is = Fine;
    // Position the fine-grain scan on the first PRT, if any exists.
    PerRegionTable* initial_fine_prt = _hrrs->_other_regions._first_all_fine_prts;
    if (initial_fine_prt != NULL) {
      switch_to_prt(_hrrs->_other_regions._first_all_fine_prts);
    }
  }
  case Fine:
    if (fine_has_next(card_index)) {
      _n_yielded_fine++;
      return true;
    }
    // Otherwise, deliberate fall-through
    _is = Coarse;
  case Coarse:
    if (coarse_has_next(card_index)) {
      _n_yielded_coarse++;
      return true;
    }
    // Otherwise...
    break;
  }
  // In the single-threaded case every card must be yielded exactly once;
  // with parallel GC threads this iterator may only see a subset.
  assert(ParallelGCThreads > 1 ||
         n_yielded() == _hrrs->occupied(),
         "Should have yielded all the cards in the rem set "
         "(in the non-par case).");
  return false;
|