src/share/vm/gc/g1/g1MarkSweep.cpp (old version)

 262 
 263   GenMarkSweep::adjust_marks();
 264 
 265   G1AdjustPointersClosure blk;
 266   g1h->heap_region_iterate(&blk);
 267 }
 268 
 269 class G1SpaceCompactClosure: public HeapRegionClosure {
 270 public:
 271   G1SpaceCompactClosure() {}
 272 
 273   bool doHeapRegion(HeapRegion* hr) {
 274     if (hr->is_humongous()) {
 275       if (hr->is_starts_humongous()) {
 276         oop obj = oop(hr->bottom());
 277         if (obj->is_gc_marked()) {
 278           obj->init_mark();
 279         } else {
 280           assert(hr->is_empty(), "Should have been cleared in phase 2.");
 281         }
 282         hr->reset_during_compaction();
 283       }

 284     } else if (!hr->is_pinned()) {
 285       hr->compact();
 286     }
 287     return false;
 288   }
 289 };
 290 
 291 void G1MarkSweep::mark_sweep_phase4() {
 292   // All pointers are now adjusted, move objects accordingly
 293 
 294   // The ValidateMarkSweep live oops tracking expects us to traverse spaces
 295   // in the same order in phase2, phase3 and phase4. We don't quite do that
 296   // here (code and comment not fixed for perm removal), so we tell the validate code
 297   // to use a higher index (saved from phase2) when verifying perm_gen.
 298   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 299 
 300   GCTraceTime tm("phase 4", G1Log::fine() && Verbose, true, gc_timer());
 301 
 302   G1SpaceCompactClosure blk;
 303   g1h->heap_region_iterate(&blk);


 317   assert(_archive_check_enabled, "archive range check not enabled");
 318   _archive_region_map.set_by_address(range, is_archive);
 319 }
 320 
 321 bool G1MarkSweep::in_archive_range(oop object) {
 322   // This is the out-of-line part of is_archive_object test, done separately
 323   // to avoid additional performance impact when the check is not enabled.
 324   return _archive_region_map.get_by_address((HeapWord*)object);
 325 }
 326 
 327 void G1MarkSweep::prepare_compaction_work(G1PrepareCompactClosure* blk) {
 328   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 329   g1h->heap_region_iterate(blk);
 330   blk->update_sets();
 331 }
 332 
 333 void G1PrepareCompactClosure::free_humongous_region(HeapRegion* hr) {
 334   HeapWord* end = hr->end();
 335   FreeRegionList dummy_free_list("Dummy Free List for G1MarkSweep");
 336 
 337   assert(hr->is_starts_humongous(),
 338          "Only the start of a humongous region should be freed.");
 339 
 340   hr->set_containing_set(NULL);
 341   _humongous_regions_removed.increment(1u, hr->capacity());
 342 
 343   _g1h->free_humongous_region(hr, &dummy_free_list, false /* par */);
 344   prepare_for_compaction(hr, end);
 345   dummy_free_list.remove_all();
 346 }
 347 
 348 void G1PrepareCompactClosure::prepare_for_compaction(HeapRegion* hr, HeapWord* end) {
 349   // If this is the first live region that we came across which we can compact,
 350   // initialize the CompactPoint.
 351   if (!is_cp_initialized()) {
 352     _cp.space = hr;
 353     _cp.threshold = hr->initialize_threshold();
 354   }
 355   prepare_for_compaction_work(&_cp, hr, end);
 356 }
 357 
 358 void G1PrepareCompactClosure::prepare_for_compaction_work(CompactPoint* cp,
 359                                                           HeapRegion* hr,
 360                                                           HeapWord* end) {
 361   hr->prepare_for_compaction(cp);
 362   // Also clear the part of the card table that will be unused after
 363   // compaction.
 364   _mrbs->clear(MemRegion(hr->compaction_top(), end));
 365 }
 366 
 367 void G1PrepareCompactClosure::update_sets() {
 368   // We'll recalculate total used bytes and recreate the free list
 369   // at the end of the GC, so no point in updating those values here.
 370   HeapRegionSetCount empty_set;
 371   _g1h->remove_from_old_sets(empty_set, _humongous_regions_removed);
 372 }
 373 
 374 bool G1PrepareCompactClosure::doHeapRegion(HeapRegion* hr) {
 375   if (hr->is_humongous()) {

 376     if (hr->is_starts_humongous()) {
 377       oop obj = oop(hr->bottom());
 378       if (obj->is_gc_marked()) {
 379         obj->forward_to(obj);
 380       } else {
 381         free_humongous_region(hr);
 382       }
 383     } else {
 384       assert(hr->is_continues_humongous(), "Invalid humongous.");




 385     }
 386   } else if (!hr->is_pinned()) {
 387     prepare_for_compaction(hr, hr->end());
 388   }
 389   return false;
 390 }

src/share/vm/gc/g1/g1MarkSweep.cpp (new version)

 262 
 263   GenMarkSweep::adjust_marks();
 264 
 265   G1AdjustPointersClosure blk;
 266   g1h->heap_region_iterate(&blk);
 267 }
 268 
 269 class G1SpaceCompactClosure: public HeapRegionClosure {
 270 public:
 271   G1SpaceCompactClosure() {}
 272 
 273   bool doHeapRegion(HeapRegion* hr) {
 274     if (hr->is_humongous()) {
 275       if (hr->is_starts_humongous()) {
 276         oop obj = oop(hr->bottom());
 277         if (obj->is_gc_marked()) {
 278           obj->init_mark();
 279         } else {
 280           assert(hr->is_empty(), "Should have been cleared in phase 2.");
 281         }

 282       }
 283       hr->reset_during_compaction();
 284     } else if (!hr->is_pinned()) {
 285       hr->compact();
 286     }
 287     return false;
 288   }
 289 };
 290 
 291 void G1MarkSweep::mark_sweep_phase4() {
 292   // All pointers are now adjusted, move objects accordingly
 293 
 294   // The ValidateMarkSweep live oops tracking expects us to traverse spaces
 295   // in the same order in phase2, phase3 and phase4. We don't quite do that
 296   // here (code and comment not fixed for perm removal), so we tell the validate code
 297   // to use a higher index (saved from phase2) when verifying perm_gen.
 298   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 299 
 300   GCTraceTime tm("phase 4", G1Log::fine() && Verbose, true, gc_timer());
 301 
 302   G1SpaceCompactClosure blk;
 303   g1h->heap_region_iterate(&blk);


 317   assert(_archive_check_enabled, "archive range check not enabled");
 318   _archive_region_map.set_by_address(range, is_archive);
 319 }
 320 
 321 bool G1MarkSweep::in_archive_range(oop object) {
 322   // This is the out-of-line part of is_archive_object test, done separately
 323   // to avoid additional performance impact when the check is not enabled.
 324   return _archive_region_map.get_by_address((HeapWord*)object);
 325 }
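
The comment above describes a common fast-path/slow-path split: the cheap
"is the check enabled" test stays inline, while the per-address map lookup is
kept out of line so the common case pays nothing. A minimal compilable sketch
of that pattern follows; ArchiveRegionMap and the flag wiring are hypothetical
stand-ins, and only the names in_archive_range and is_archive_object mirror
names that appear in the source above.

    // Hypothetical stand-in for the real archive region map, which maps
    // an address to a per-region "is archive" flag.
    struct ArchiveRegionMap {
      bool get_by_address(const void* addr) const {
        (void)addr;   // a real map would index a bitmap by region number
        return false;
      }
    };

    static bool _archive_check_enabled = false;
    static ArchiveRegionMap _archive_region_map;

    // Out-of-line slow path: only reached when archive checking is enabled.
    bool in_archive_range(const void* obj) {
      return _archive_region_map.get_by_address(obj);
    }

    // Inline fast path: when the feature is off, the && short-circuits
    // and the map lookup is never paid for.
    inline bool is_archive_object(const void* obj) {
      return _archive_check_enabled && in_archive_range(obj);
    }

    int main() {
      char obj[16];
      return is_archive_object(obj) ? 1 : 0;  // flag off: fast path only
    }
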
 326 
 327 void G1MarkSweep::prepare_compaction_work(G1PrepareCompactClosure* blk) {
 328   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 329   g1h->heap_region_iterate(blk);
 330   blk->update_sets();
 331 }
 332 
 333 void G1PrepareCompactClosure::free_humongous_region(HeapRegion* hr) {
 334   HeapWord* end = hr->end();
 335   FreeRegionList dummy_free_list("Dummy Free List for G1MarkSweep");
 336 



 337   hr->set_containing_set(NULL);
 338   _humongous_regions_removed.increment(1u, hr->capacity());
 339 
 340   _g1h->free_humongous_region(hr, &dummy_free_list, false /* par */);
 341   prepare_for_compaction(hr, end);
 342   dummy_free_list.remove_all();
 343 }
 344 
 345 void G1PrepareCompactClosure::prepare_for_compaction(HeapRegion* hr, HeapWord* end) {
 346   // If this is the first live region that we came across which we can compact,
 347   // initialize the CompactPoint.
 348   if (!is_cp_initialized()) {
 349     _cp.space = hr;
 350     _cp.threshold = hr->initialize_threshold();
 351   }
 352   prepare_for_compaction_work(&_cp, hr, end);
 353 }
 354 
 355 void G1PrepareCompactClosure::prepare_for_compaction_work(CompactPoint* cp,
 356                                                           HeapRegion* hr,
 357                                                           HeapWord* end) {
 358   hr->prepare_for_compaction(cp);
 359   // Also clear the part of the card table that will be unused after
 360   // compaction.
 361   _mrbs->clear(MemRegion(hr->compaction_top(), end));
 362 }
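
For context on the card-table clear above: a card table keeps one byte per
fixed-size card of the heap, so clearing the MemRegion covering
[compaction_top, end) amounts to resetting the bytes for the cards that span
that range. A simplified sketch, assuming the usual 512-byte HotSpot card
size; CardTable here is a hypothetical stand-in, not the real barrier-set
type behind _mrbs.

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Simplified stand-in card table: one byte per 512-byte heap card.
    struct CardTable {
      static const size_t card_size = 512;
      uint8_t*  bytes;      // one entry per card
      uintptr_t heap_base;  // address where card 0 starts

      // Reset every card covering [start, end) to the clean value (0 here),
      // the moral equivalent of _mrbs->clear(MemRegion(start, end)).
      void clear(uintptr_t start, uintptr_t end) {
        size_t first = (start - heap_base) / card_size;
        size_t last  = (end - heap_base + card_size - 1) / card_size;
        std::memset(bytes + first, 0, last - first);
      }
    };

    int main() {
      std::vector<uint8_t> cards(16, 1);   // 16 dirty cards
      CardTable ct{cards.data(), 0};
      ct.clear(1024, 4096);                // resets cards 2..7
      return (cards[2] == 0 && cards[1] == 1) ? 0 : 1;
    }
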
 363 
 364 void G1PrepareCompactClosure::update_sets() {
 365   // We'll recalculate total used bytes and recreate the free list
 366   // at the end of the GC, so no point in updating those values here.
 367   HeapRegionSetCount empty_set;
 368   _g1h->remove_from_old_sets(empty_set, _humongous_regions_removed);
 369 }
 370 
 371 bool G1PrepareCompactClosure::doHeapRegion(HeapRegion* hr) {
 372   if (hr->is_humongous()) {
 373     oop obj;
 374     if (hr->is_starts_humongous()) {
 375       obj = oop(hr->bottom());
 376       if (obj->is_gc_marked()) {
 377         obj->forward_to(obj);
 378       } else {
 379         free_humongous_region(hr);
 380       }
 381     } else {
 382       assert(hr->is_continues_humongous(), "Invalid humongous.");
 383       obj = oop(hr->humongous_start_region()->bottom());
 384       if (!obj->is_gc_marked()) {
 385         free_humongous_region(hr);
 386       }
 387     }
 388   } else if (!hr->is_pinned()) {
 389     prepare_for_compaction(hr, hr->end());
 390   }
 391   return false;
 392 }
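
Both versions above drive the same visitor pattern: heap_region_iterate
applies a HeapRegionClosure to each region in turn, and a doHeapRegion return
value of true aborts the walk early, which is why the closures here always
return false. A standalone sketch of that contract; Region, RegionClosure,
region_iterate, and PrintClosure are simplified hypothetical stand-ins for
the HotSpot types, not their actual definitions.

    #include <cstdio>
    #include <vector>

    // Hypothetical, simplified stand-in for HeapRegion.
    struct Region {
      int  index;
      bool humongous;
    };

    // Stand-in for HeapRegionClosure: return true to abort the iteration.
    struct RegionClosure {
      virtual bool do_region(Region* r) = 0;
      virtual ~RegionClosure() {}
    };

    // Mirrors the shape of G1CollectedHeap::heap_region_iterate:
    // visit every region in order, stopping if the closure asks to.
    void region_iterate(std::vector<Region>& regions, RegionClosure* cl) {
      for (Region& r : regions) {
        if (cl->do_region(&r)) {
          return;  // closure requested early termination
        }
      }
    }

    // A closure in the style of G1SpaceCompactClosure: act on each region
    // and always return false so the whole heap is visited.
    struct PrintClosure : RegionClosure {
      bool do_region(Region* r) override {
        std::printf("region %d humongous=%d\n", r->index, (int)r->humongous);
        return false;
      }
    };

    int main() {
      std::vector<Region> heap = {{0, false}, {1, true}, {2, false}};
      PrintClosure cl;
      region_iterate(heap, &cl);
      return 0;
    }
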