
src/hotspot/share/gc/g1/heapRegion.cpp

rev 60059 : imported patch 8210462-fix-remaining-mentions-of-im


 268 
 269   if (clear_space) {
 270     clear(mangle_space);
 271   }
 272 
 273   set_top(bottom());
 274   set_compaction_top(bottom());
 275   reset_bot();
 276 
 277   hr_clear(false /*clear_space*/);
 278 }
 279 
 280 void HeapRegion::report_region_type_change(G1HeapRegionTraceType::Type to) {
 281   HeapRegionTracer::send_region_type_change(_hrm_index,
 282                                             get_trace_type(),
 283                                             to,
 284                                             (uintptr_t)bottom(),
 285                                             used());
 286 }
 287 
 288 void HeapRegion::note_self_forwarding_removal_start(bool during_initial_mark,
 289                                                     bool during_conc_mark) {
 290   // We always recreate the prev marking info and we'll explicitly
 291   // mark all objects we find to be self-forwarded on the prev
 292   // bitmap. So all objects need to be below PTAMS.
 293   _prev_marked_bytes = 0;
 294 
 295   if (during_initial_mark) {
 296     // During initial-mark, we'll also explicitly mark all objects
 297     // we find to be self-forwarded on the next bitmap. So all
 298     // objects need to be below NTAMS.
 299     _next_top_at_mark_start = top();
 300     _next_marked_bytes = 0;
 301   } else if (during_conc_mark) {
 302     // During concurrent mark, all objects in the CSet (including
 303     // the ones we find to be self-forwarded) are implicitly live.
 304     // So all objects need to be above NTAMS.
 305     _next_top_at_mark_start = bottom();
 306     _next_marked_bytes = 0;
 307   }
 308 }
 309 
 310 void HeapRegion::note_self_forwarding_removal_end(size_t marked_bytes) {
 311   assert(marked_bytes <= used(),
 312          "marked: " SIZE_FORMAT " used: " SIZE_FORMAT, marked_bytes, used());
 313   _prev_top_at_mark_start = top();
 314   _prev_marked_bytes = marked_bytes;
 315 }
 316 




 268 
 269   if (clear_space) {
 270     clear(mangle_space);
 271   }
 272 
 273   set_top(bottom());
 274   set_compaction_top(bottom());
 275   reset_bot();
 276 
 277   hr_clear(false /*clear_space*/);
 278 }
 279 
 280 void HeapRegion::report_region_type_change(G1HeapRegionTraceType::Type to) {
 281   HeapRegionTracer::send_region_type_change(_hrm_index,
 282                                             get_trace_type(),
 283                                             to,
 284                                             (uintptr_t)bottom(),
 285                                             used());
 286 }
 287 
 288 void HeapRegion::note_self_forwarding_removal_start(bool during_concurrent_start,
 289                                                     bool during_conc_mark) {
 290   // We always recreate the prev marking info and we'll explicitly
 291   // mark all objects we find to be self-forwarded on the prev
 292   // bitmap. So all objects need to be below PTAMS.
 293   _prev_marked_bytes = 0;
 294 
 295   if (during_concurrent_start) {
 296     // During concurrent start, we'll also explicitly mark all objects
 297     // we find to be self-forwarded on the next bitmap. So all
 298     // objects need to be below NTAMS.
 299     _next_top_at_mark_start = top();
 300     _next_marked_bytes = 0;
 301   } else if (during_conc_mark) {
 302     // During concurrent mark, all objects in the CSet (including
 303     // the ones we find to be self-forwarded) are implicitly live.
 304     // So all objects need to be above NTAMS.
 305     _next_top_at_mark_start = bottom();
 306     _next_marked_bytes = 0;
 307   }
 308 }
 309 
 310 void HeapRegion::note_self_forwarding_removal_end(size_t marked_bytes) {
 311   assert(marked_bytes <= used(),
 312          "marked: " SIZE_FORMAT " used: " SIZE_FORMAT, marked_bytes, used());
 313   _prev_top_at_mark_start = top();
 314   _prev_marked_bytes = marked_bytes;
 315 }
 316 
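For context on the comments above: the TAMS values being set (PTAMS/NTAMS) are what decide how liveness is answered against each marking bitmap. Below is a minimal sketch of that rule only; the helper name is hypothetical and not a HotSpot function, and it is not part of this webrev.

// Sketch (illustrative only, not part of this change): liveness with respect
// to one marking is decided by that marking's TAMS. Objects at or above TAMS
// are treated as allocated after the mark start and are implicitly live;
// objects below TAMS are live only if marked on that marking's bitmap. This
// is why setting NTAMS to bottom() makes every object implicitly live, while
// setting it to top() requires the self-forwarded objects to be marked.
bool is_live_wrt_marking(HeapWord* obj_addr,
                         HeapWord* tams,              // PTAMS or NTAMS
                         const G1CMBitMap* bitmap) {  // prev or next bitmap
  if (obj_addr >= tams) {
    return true;                      // above TAMS: implicitly live
  }
  return bitmap->is_marked(obj_addr); // below TAMS: the bitmap decides
}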

