123 uninstall_surv_rate_group();
124 set_free();
125 reset_pre_dummy_top();
126
127 if (!keep_remset) {
128 if (locked) {
129 rem_set()->clear_locked();
130 } else {
131 rem_set()->clear();
132 }
133 }
134
135 zero_marked_bytes();
136
137 init_top_at_mark_start();
138 if (clear_space) clear(SpaceDecorator::Mangle);
139
140 _evacuation_failed = false;
141 _gc_efficiency = 0.0;
142 _recorded_rs_length = 0;
143 _predicted_elapsed_time_ms = 0.0;
144 }
145
146 void HeapRegion::clear_cardtable() {
147 G1CardTable* ct = G1CollectedHeap::heap()->card_table();
148 ct->clear(MemRegion(bottom(), end()));
149 }
150
151 void HeapRegion::calc_gc_efficiency() {
152 // GC efficiency is the ratio of how much space would be
153 // reclaimed over how long we predict it would take to reclaim it.
154 G1CollectedHeap* g1h = G1CollectedHeap::heap();
155 G1Policy* policy = g1h->policy();
156
157 // Retrieve a prediction of the elapsed time for this region for
158 // a mixed gc because the region will only be evacuated during a
159 // mixed gc.
160 double region_elapsed_time_ms =
161 policy->predict_region_elapsed_time_ms(this, false /* for_young_gc */);
162 _gc_efficiency = (double) reclaimable_bytes() / region_elapsed_time_ms;
163 }
164
// Transition this region to the Free type.
void HeapRegion::set_free() {
  // Report the transition before mutating the tag, so the report observes
  // the region's previous state (presumably feeds JFR/tracing — the trace
  // type enum suggests so).
  report_region_type_change(G1HeapRegionTraceType::Free);
  _type.set_free();
}
169
// Transition this region to the Eden type.
void HeapRegion::set_eden() {
  // Report first, then update the type tag (same order as the other setters).
  report_region_type_change(G1HeapRegionTraceType::Eden);
  _type.set_eden();
}
174
// Transition this region to Eden during the pre-GC phase. Uses the same
// Eden trace type as set_eden(); only the _type-level transition differs.
void HeapRegion::set_eden_pre_gc() {
  report_region_type_change(G1HeapRegionTraceType::Eden);
  _type.set_eden_pre_gc();
}
179
180 void HeapRegion::set_survivor() {
181 report_region_type_change(G1HeapRegionTraceType::Survivor);
242 _end(mr.end()),
243 _top(NULL),
244 _compaction_top(NULL),
245 _bot_part(bot, this),
246 _par_alloc_lock(Mutex::leaf, "HeapRegion par alloc lock", true),
247 _pre_dummy_top(NULL),
248 _rem_set(NULL),
249 _hrm_index(hrm_index),
250 _type(),
251 _humongous_start_region(NULL),
252 _evacuation_failed(false),
253 _index_in_opt_cset(InvalidCSetIndex),
254 _next(NULL), _prev(NULL),
255 #ifdef ASSERT
256 _containing_set(NULL),
257 #endif
258 _prev_top_at_mark_start(NULL), _next_top_at_mark_start(NULL),
259 _prev_marked_bytes(0), _next_marked_bytes(0),
260 _young_index_in_cset(-1),
261 _surv_rate_group(NULL), _age_index(SurvRateGroup::InvalidAgeIndex), _gc_efficiency(0.0),
262 _recorded_rs_length(0), _predicted_elapsed_time_ms(0),
263 _node_index(G1NUMA::UnknownNodeIndex)
264 {
265 assert(Universe::on_page_boundary(mr.start()) && Universe::on_page_boundary(mr.end()),
266 "invalid space boundaries");
267
268 _rem_set = new HeapRegionRemSet(bot, this);
269 initialize();
270 }
271
272 void HeapRegion::initialize(bool clear_space, bool mangle_space) {
273 assert(_rem_set->is_empty(), "Remembered set must be empty");
274
275 if (clear_space) {
276 clear(mangle_space);
277 }
278
279 set_top(bottom());
280 set_compaction_top(bottom());
281 reset_bot();
282
|
123 uninstall_surv_rate_group();
124 set_free();
125 reset_pre_dummy_top();
126
127 if (!keep_remset) {
128 if (locked) {
129 rem_set()->clear_locked();
130 } else {
131 rem_set()->clear();
132 }
133 }
134
135 zero_marked_bytes();
136
137 init_top_at_mark_start();
138 if (clear_space) clear(SpaceDecorator::Mangle);
139
140 _evacuation_failed = false;
141 _gc_efficiency = 0.0;
142 _recorded_rs_length = 0;
143 _predicted_non_copy_time_ms = 0.0;
144 }
145
146 void HeapRegion::clear_cardtable() {
147 G1CardTable* ct = G1CollectedHeap::heap()->card_table();
148 ct->clear(MemRegion(bottom(), end()));
149 }
150
151 void HeapRegion::calc_gc_efficiency() {
152 // GC efficiency is the ratio of how much space would be
153 // reclaimed over how long we predict it would take to reclaim it.
154 G1Policy* policy = G1CollectedHeap::heap()->policy();
155
156 // Retrieve a prediction of the elapsed time for this region for
157 // a mixed gc because the region will only be evacuated during a
158 // mixed gc.
159 double region_elapsed_time_ms = policy->predict_region_total_time_ms(this, false /* for_young_gc */);
160 _gc_efficiency = (double) reclaimable_bytes() / region_elapsed_time_ms;
161 }
162
// Transition this region to the Free type.
void HeapRegion::set_free() {
  // Report the transition before mutating the tag, so the report observes
  // the region's previous state (presumably feeds JFR/tracing — the trace
  // type enum suggests so).
  report_region_type_change(G1HeapRegionTraceType::Free);
  _type.set_free();
}
167
// Transition this region to the Eden type.
void HeapRegion::set_eden() {
  // Report first, then update the type tag (same order as the other setters).
  report_region_type_change(G1HeapRegionTraceType::Eden);
  _type.set_eden();
}
172
// Transition this region to Eden during the pre-GC phase. Uses the same
// Eden trace type as set_eden(); only the _type-level transition differs.
void HeapRegion::set_eden_pre_gc() {
  report_region_type_change(G1HeapRegionTraceType::Eden);
  _type.set_eden_pre_gc();
}
177
178 void HeapRegion::set_survivor() {
179 report_region_type_change(G1HeapRegionTraceType::Survivor);
240 _end(mr.end()),
241 _top(NULL),
242 _compaction_top(NULL),
243 _bot_part(bot, this),
244 _par_alloc_lock(Mutex::leaf, "HeapRegion par alloc lock", true),
245 _pre_dummy_top(NULL),
246 _rem_set(NULL),
247 _hrm_index(hrm_index),
248 _type(),
249 _humongous_start_region(NULL),
250 _evacuation_failed(false),
251 _index_in_opt_cset(InvalidCSetIndex),
252 _next(NULL), _prev(NULL),
253 #ifdef ASSERT
254 _containing_set(NULL),
255 #endif
256 _prev_top_at_mark_start(NULL), _next_top_at_mark_start(NULL),
257 _prev_marked_bytes(0), _next_marked_bytes(0),
258 _young_index_in_cset(-1),
259 _surv_rate_group(NULL), _age_index(SurvRateGroup::InvalidAgeIndex), _gc_efficiency(0.0),
260 _recorded_rs_length(0), _predicted_non_copy_time_ms(0),
261 _node_index(G1NUMA::UnknownNodeIndex)
262 {
263 assert(Universe::on_page_boundary(mr.start()) && Universe::on_page_boundary(mr.end()),
264 "invalid space boundaries");
265
266 _rem_set = new HeapRegionRemSet(bot, this);
267 initialize();
268 }
269
270 void HeapRegion::initialize(bool clear_space, bool mangle_space) {
271 assert(_rem_set->is_empty(), "Remembered set must be empty");
272
273 if (clear_space) {
274 clear(mangle_space);
275 }
276
277 set_top(bottom());
278 set_compaction_top(bottom());
279 reset_bot();
280
|