src/share/vm/gc_implementation/g1/g1MarkSweep.cpp

Print this page
rev 3463 : 7114678: G1: various small fixes, code cleanup, and refactoring
Summary: Various cleanups as a prelude to introducing iterators for HeapRegions.
Reviewed-by: johnc
Contributed-by: tonyp


 245       if (hr->startsHumongous()) {
 246         oop obj = oop(hr->bottom());
 247         if (obj->is_gc_marked()) {
 248           obj->forward_to(obj);
 249         } else  {
 250           free_humongous_region(hr);
 251         }
 252       } else {
 253         assert(hr->continuesHumongous(), "Invalid humongous.");
 254       }
 255     } else {
 256       hr->prepare_for_compaction(&_cp);
 257       // Also clear the part of the card table that will be unused after
 258       // compaction.
 259       _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
 260     }
 261     return false;
 262   }
 263 };
 264 
 265 // Finds the first HeapRegion.
 266 class FindFirstRegionClosure: public HeapRegionClosure {
 267   HeapRegion* _a_region;
 268 public:
 269   FindFirstRegionClosure() : _a_region(NULL) {}
 270   bool doHeapRegion(HeapRegion* r) {
 271     _a_region = r;
 272     return true;
 273   }
 274   HeapRegion* result() { return _a_region; }
 275 };
 276 
 277 void G1MarkSweep::mark_sweep_phase2() {
 278   // Now all live objects are marked, compute the new object addresses.
 279 
 280   // It is imperative that we traverse perm_gen LAST. If dead space is
 281   // allowed a range of dead object may get overwritten by a dead int
 282   // array. If perm_gen is not traversed last a klassOop may get
 283   // overwritten. This is fine since it is dead, but if the class has dead
 284   // instances we have to skip them, and in order to find their size we
 285   // need the klassOop!
 286   //
 287   // It is not required that we traverse spaces in the same order in
 288   // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
 289   // tracking expects us to do so. See comment under phase4.
 290 
 291   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 292   Generation* pg = g1h->perm_gen();
 293 
 294   TraceTime tm("phase 2", G1Log::fine() && Verbose, true, gclog_or_tty);
 295   GenMarkSweep::trace("2");
 296 
 297   FindFirstRegionClosure cl;
 298   g1h->heap_region_iterate(&cl);
 299   HeapRegion *r = cl.result();
 300   CompactibleSpace* sp = r;
 301   if (r->isHumongous() && oop(r->bottom())->is_gc_marked()) {
 302     sp = r->next_compaction_space();
 303   }
 304 
 305   G1PrepareCompactClosure blk(sp);
 306   g1h->heap_region_iterate(&blk);
 307   blk.update_sets();
 308 
 309   CompactPoint perm_cp(pg, NULL, NULL);
 310   pg->prepare_for_compaction(&perm_cp);
 311 }
 312 
 313 class G1AdjustPointersClosure: public HeapRegionClosure {
 314  public:
 315   bool doHeapRegion(HeapRegion* r) {
 316     if (r->isHumongous()) {
 317       if (r->startsHumongous()) {
 318         // We must adjust the pointers on the single H object.
 319         oop obj = oop(r->bottom());


 391   // classes must be allocated earlier than their instances, and traversing
 392   // perm_gen first makes sure that all klassOops have moved to their new
 393   // location before any instance does a dispatch through its klass!
 394 
 395   // The ValidateMarkSweep live oops tracking expects us to traverse spaces
 396   // in the same order in phase2, phase3 and phase4. We don't quite do that
 397   // here (perm_gen first rather than last), so we tell the validate code
 398   // to use a higher index (saved from phase2) when verifying perm_gen.
 399   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 400   Generation* pg = g1h->perm_gen();
 401 
 402   TraceTime tm("phase 4", G1Log::fine() && Verbose, true, gclog_or_tty);
 403   GenMarkSweep::trace("4");
 404 
 405   pg->compact();
 406 
 407   G1SpaceCompactClosure blk;
 408   g1h->heap_region_iterate(&blk);
 409 
 410 }
 411 
 412 // Local Variables: ***
 413 // c-indentation-style: gnu ***
 414 // End: ***


 245       if (hr->startsHumongous()) {
 246         oop obj = oop(hr->bottom());
 247         if (obj->is_gc_marked()) {
 248           obj->forward_to(obj);
 249         } else  {
 250           free_humongous_region(hr);
 251         }
 252       } else {
 253         assert(hr->continuesHumongous(), "Invalid humongous.");
 254       }
 255     } else {
 256       hr->prepare_for_compaction(&_cp);
 257       // Also clear the part of the card table that will be unused after
 258       // compaction.
 259       _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
 260     }
 261     return false;
 262   }
 263 };
 264 












 265 void G1MarkSweep::mark_sweep_phase2() {
 266   // Now all live objects are marked, compute the new object addresses.
 267 
 268   // It is imperative that we traverse perm_gen LAST. If dead space is
 269   // allowed a range of dead object may get overwritten by a dead int
 270   // array. If perm_gen is not traversed last a klassOop may get
 271   // overwritten. This is fine since it is dead, but if the class has dead
 272   // instances we have to skip them, and in order to find their size we
 273   // need the klassOop!
 274   //
 275   // It is not required that we traverse spaces in the same order in
 276   // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
 277   // tracking expects us to do so. See comment under phase4.
 278 
 279   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 280   Generation* pg = g1h->perm_gen();
 281 
 282   TraceTime tm("phase 2", G1Log::fine() && Verbose, true, gclog_or_tty);
 283   GenMarkSweep::trace("2");
 284 
 285   // find the first region
 286   HeapRegion* r = g1h->region_at(0);

 287   CompactibleSpace* sp = r;
 288   if (r->isHumongous() && oop(r->bottom())->is_gc_marked()) {
 289     sp = r->next_compaction_space();
 290   }
 291 
 292   G1PrepareCompactClosure blk(sp);
 293   g1h->heap_region_iterate(&blk);
 294   blk.update_sets();
 295 
 296   CompactPoint perm_cp(pg, NULL, NULL);
 297   pg->prepare_for_compaction(&perm_cp);
 298 }
 299 
 300 class G1AdjustPointersClosure: public HeapRegionClosure {
 301  public:
 302   bool doHeapRegion(HeapRegion* r) {
 303     if (r->isHumongous()) {
 304       if (r->startsHumongous()) {
 305         // We must adjust the pointers on the single H object.
 306         oop obj = oop(r->bottom());


 378   // classes must be allocated earlier than their instances, and traversing
 379   // perm_gen first makes sure that all klassOops have moved to their new
 380   // location before any instance does a dispatch through its klass!
 381 
 382   // The ValidateMarkSweep live oops tracking expects us to traverse spaces
 383   // in the same order in phase2, phase3 and phase4. We don't quite do that
 384   // here (perm_gen first rather than last), so we tell the validate code
 385   // to use a higher index (saved from phase2) when verifying perm_gen.
 386   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 387   Generation* pg = g1h->perm_gen();
 388 
 389   TraceTime tm("phase 4", G1Log::fine() && Verbose, true, gclog_or_tty);
 390   GenMarkSweep::trace("4");
 391 
 392   pg->compact();
 393 
 394   G1SpaceCompactClosure blk;
 395   g1h->heap_region_iterate(&blk);
 396 
 397 }