
src/share/vm/gc/g1/g1MarkSweep.cpp





 104   BiasedLocking::restore_marks();
 105   GenMarkSweep::deallocate_stacks();
 106 
 107   CodeCache::gc_epilogue();
 108   JvmtiExport::gc_epilogue();
 109 
 110   // refs processing: clean slate
 111   GenMarkSweep::set_ref_processor(NULL);
 112 }
 113 
 114 
 115 void G1MarkSweep::allocate_stacks() {
 116   GenMarkSweep::_preserved_count_max = 0;
 117   GenMarkSweep::_preserved_marks = NULL;
 118   GenMarkSweep::_preserved_count = 0;
 119 }
 120 
 121 void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
 122                                     bool clear_all_softrefs) {
 123   // Recursively traverse all live objects and mark them
 124   GCTraceTime tm("phase 1", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
 125 
 126   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 127 
 128   // Need cleared claim bits for the roots processing
 129   ClassLoaderDataGraph::clear_claimed_marks();
 130 
 131   MarkingCodeBlobClosure follow_code_closure(&GenMarkSweep::follow_root_closure, !CodeBlobToOopClosure::FixRelocations);
 132   {
 133     G1RootProcessor root_processor(g1h, 1);
 134     root_processor.process_strong_roots(&GenMarkSweep::follow_root_closure,
 135                                         &GenMarkSweep::follow_cld_closure,
 136                                         &follow_code_closure);
 137   }
 138 
 139   // Process reference objects found during marking
 140   ReferenceProcessor* rp = GenMarkSweep::ref_processor();
 141   assert(rp == g1h->ref_processor_stw(), "Sanity");
 142 
 143   rp->setup_policy(clear_all_softrefs);
 144   const ReferenceProcessorStats& stats =
 145     rp->process_discovered_references(&GenMarkSweep::is_alive,
 146                                       &GenMarkSweep::keep_alive,
 147                                       &GenMarkSweep::follow_stack_closure,
 148                                       NULL,
 149                                       gc_timer(),
 150                                       gc_tracer()->gc_id());
 151   gc_tracer()->report_gc_reference_stats(stats);
 152 
 153 
 154   // This is the point where the entire marking should have completed.
 155   assert(GenMarkSweep::_marking_stack.is_empty(), "Marking should have completed");
 156 
 157   // Unload classes and purge the SystemDictionary.
 158   bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);
 159 
 160   // Unload nmethods.
 161   CodeCache::do_unloading(&GenMarkSweep::is_alive, purged_class);
 162 
 163   // Prune dead klasses from subklass/sibling/implementor lists.
 164   Klass::clean_weak_klass_links(&GenMarkSweep::is_alive);
 165 
 166   // Delete entries for dead interned strings and clean up unreferenced symbols in the symbol table.
 167   g1h->unlink_string_and_symbol_table(&GenMarkSweep::is_alive);
 168 
 169   if (VerifyDuringGC) {
 170     HandleMark hm;  // handle scope


 183     if (!VerifySilently) {
 184       gclog_or_tty->print(" VerifyDuringGC:(full)[Verifying ");
 185     }
 186     g1h->verify(VerifySilently, VerifyOption_G1UseMarkWord);
 187     if (!VerifySilently) {
 188       gclog_or_tty->print_cr("]");
 189     }
 190   }
 191 
 192   gc_tracer()->report_object_count_after_gc(&GenMarkSweep::is_alive);
 193 }
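The is_alive / keep_alive pair handed to process_discovered_references() above follows the usual ReferenceProcessor contract: is_alive reports whether marking already reached an object, keep_alive marks and enqueues a referent that must survive, and the complete closure drains the marking stack. A minimal sketch of the is_alive side, assuming the conventional mark-word check (illustrative only, not the actual GenMarkSweep closure):

    // Illustrative sketch, not part of g1MarkSweep.cpp: a BoolObjectClosure
    // in the style of GenMarkSweep::is_alive that answers whether marking
    // reached the object.
    class IsAliveSketch : public BoolObjectClosure {
     public:
      bool do_object_b(oop p) { return p->is_gc_marked(); }
    };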
 194 
 195 
 196 void G1MarkSweep::mark_sweep_phase2() {
 197   // Now all live objects are marked, compute the new object addresses.
 198 
 199   // It is not required that we traverse spaces in the same order in
 200   // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
 201   // tracking expects us to do so. See comment under phase4.
 202 
 203   GCTraceTime tm("phase 2", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
 204 
 205   prepare_compaction();
 206 }
 207 
 208 class G1AdjustPointersClosure: public HeapRegionClosure {
 209  public:
 210   bool doHeapRegion(HeapRegion* r) {
 211     if (r->is_humongous()) {
 212       if (r->is_starts_humongous()) {
 213         // We must adjust the pointers on the single H object.
 214         oop obj = oop(r->bottom());
 215         // point all the oops to the new location
 216         MarkSweep::adjust_pointers(obj);
 217       }
 218     } else if (!r->is_pinned()) {
 219       // This really ought to be "as_CompactibleSpace"...
 220       r->adjust_pointers();
 221     }
 222     return false;
 223   }
 224 };
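Assuming the same pattern phase 4 uses below with G1SpaceCompactClosure, a HeapRegionClosure like the one above is driven over every region through heap_region_iterate, and returning false from doHeapRegion lets the iteration continue across all regions. A sketch of the presumed phase-3 call site, which is not among the lines shown here:

    // Sketch only: presumed driver for G1AdjustPointersClosure once roots
    // and weak roots have had their pointers adjusted (call site not shown).
    G1AdjustPointersClosure blk;
    G1CollectedHeap::heap()->heap_region_iterate(&blk);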
 225 
 226 class G1AlwaysTrueClosure: public BoolObjectClosure {
 227 public:
 228   bool do_object_b(oop p) { return true; }
 229 };
 230 static G1AlwaysTrueClosure always_true;
 231 
 232 void G1MarkSweep::mark_sweep_phase3() {
 233   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 234 
 235   // Adjust the pointers to reflect the new locations
 236   GCTraceTime tm("phase 3", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
 237 
 238   // Need cleared claim bits for the roots processing
 239   ClassLoaderDataGraph::clear_claimed_marks();
 240 
 241   CodeBlobToOopClosure adjust_code_closure(&GenMarkSweep::adjust_pointer_closure, CodeBlobToOopClosure::FixRelocations);
 242   {
 243     G1RootProcessor root_processor(g1h, 1);
 244     root_processor.process_all_roots(&GenMarkSweep::adjust_pointer_closure,
 245                                      &GenMarkSweep::adjust_cld_closure,
 246                                      &adjust_code_closure);
 247   }
 248 
 249   assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
 250   g1h->ref_processor_stw()->weak_oops_do(&GenMarkSweep::adjust_pointer_closure);
 251 
 252   // Now adjust pointers in remaining weak roots.  (All of which should
 253   // have been cleared if they pointed to non-surviving objects.)
 254   JNIHandles::weak_oops_do(&always_true, &GenMarkSweep::adjust_pointer_closure);
 255 
 256   if (G1StringDedup::is_enabled()) {


 277           assert(hr->is_empty(), "Should have been cleared in phase 2.");
 278         }
 279         hr->reset_during_compaction();
 280       }
 281     } else if (!hr->is_pinned()) {
 282       hr->compact();
 283     }
 284     return false;
 285   }
 286 };
 287 
 288 void G1MarkSweep::mark_sweep_phase4() {
 289   // All pointers are now adjusted, move objects accordingly
 290 
 291   // The ValidateMarkSweep live oops tracking expects us to traverse spaces
 292   // in the same order in phase2, phase3 and phase4. We don't quite do that
 293   // here (code and comment not fixed for perm removal), so we tell the validate code
 294   // to use a higher index (saved from phase2) when verifying perm_gen.
 295   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 296 
 297   GCTraceTime tm("phase 4", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
 298 
 299   G1SpaceCompactClosure blk;
 300   g1h->heap_region_iterate(&blk);
 301 
 302 }
 303 
 304 void G1MarkSweep::enable_archive_object_check() {
 305   assert(!_archive_check_enabled, "archive range check already enabled");
 306   _archive_check_enabled = true;
 307   size_t length = Universe::heap()->max_capacity();
 308   _archive_region_map.initialize((HeapWord*)Universe::heap()->base(),
 309                                  (HeapWord*)Universe::heap()->base() + length,
 310                                  HeapRegion::GrainBytes);
 311 }
 312 
 313 void G1MarkSweep::set_range_archive(MemRegion range, bool is_archive) {
 314   assert(_archive_check_enabled, "archive range check not enabled");
 315   _archive_region_map.set_by_address(range, is_archive);
 316 }
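Taken together, enable_archive_object_check() and set_range_archive() let the full-GC code flag region-aligned address ranges as archive space. A hypothetical usage sketch; the range bounds are illustrative placeholders rather than values from this file:

    // Hypothetical usage: enable the archive map, then flag one
    // region-aligned MemRegion as archive space. 'archive_bottom' and
    // 'archive_end' stand in for real HeapWord* bounds.
    MemRegion archive_range(archive_bottom, archive_end);
    G1MarkSweep::enable_archive_object_check();
    G1MarkSweep::set_range_archive(archive_range, true /* is_archive */);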
 317 




 104   BiasedLocking::restore_marks();
 105   GenMarkSweep::deallocate_stacks();
 106 
 107   CodeCache::gc_epilogue();
 108   JvmtiExport::gc_epilogue();
 109 
 110   // refs processing: clean slate
 111   GenMarkSweep::set_ref_processor(NULL);
 112 }
 113 
 114 
 115 void G1MarkSweep::allocate_stacks() {
 116   GenMarkSweep::_preserved_count_max = 0;
 117   GenMarkSweep::_preserved_marks = NULL;
 118   GenMarkSweep::_preserved_count = 0;
 119 }
 120 
 121 void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
 122                                     bool clear_all_softrefs) {
 123   // Recursively traverse all live objects and mark them
 124   GCTraceTime tm("phase 1", G1Log::fine() && Verbose, true, gc_timer());
 125 
 126   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 127 
 128   // Need cleared claim bits for the roots processing
 129   ClassLoaderDataGraph::clear_claimed_marks();
 130 
 131   MarkingCodeBlobClosure follow_code_closure(&GenMarkSweep::follow_root_closure, !CodeBlobToOopClosure::FixRelocations);
 132   {
 133     G1RootProcessor root_processor(g1h, 1);
 134     root_processor.process_strong_roots(&GenMarkSweep::follow_root_closure,
 135                                         &GenMarkSweep::follow_cld_closure,
 136                                         &follow_code_closure);
 137   }
 138 
 139   // Process reference objects found during marking
 140   ReferenceProcessor* rp = GenMarkSweep::ref_processor();
 141   assert(rp == g1h->ref_processor_stw(), "Sanity");
 142 
 143   rp->setup_policy(clear_all_softrefs);
 144   const ReferenceProcessorStats& stats =
 145     rp->process_discovered_references(&GenMarkSweep::is_alive,
 146                                       &GenMarkSweep::keep_alive,
 147                                       &GenMarkSweep::follow_stack_closure,
 148                                       NULL,
 149                                       gc_timer());

 150   gc_tracer()->report_gc_reference_stats(stats);
 151 
 152 
 153   // This is the point where the entire marking should have completed.
 154   assert(GenMarkSweep::_marking_stack.is_empty(), "Marking should have completed");
 155 
 156   // Unload classes and purge the SystemDictionary.
 157   bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);
 158 
 159   // Unload nmethods.
 160   CodeCache::do_unloading(&GenMarkSweep::is_alive, purged_class);
 161 
 162   // Prune dead klasses from subklass/sibling/implementor lists.
 163   Klass::clean_weak_klass_links(&GenMarkSweep::is_alive);
 164 
 165   // Delete entries for dead interned strings and clean up unreferenced symbols in the symbol table.
 166   g1h->unlink_string_and_symbol_table(&GenMarkSweep::is_alive);
 167 
 168   if (VerifyDuringGC) {
 169     HandleMark hm;  // handle scope


 182     if (!VerifySilently) {
 183       gclog_or_tty->print(" VerifyDuringGC:(full)[Verifying ");
 184     }
 185     g1h->verify(VerifySilently, VerifyOption_G1UseMarkWord);
 186     if (!VerifySilently) {
 187       gclog_or_tty->print_cr("]");
 188     }
 189   }
 190 
 191   gc_tracer()->report_object_count_after_gc(&GenMarkSweep::is_alive);
 192 }
 193 
 194 
 195 void G1MarkSweep::mark_sweep_phase2() {
 196   // Now all live objects are marked, compute the new object addresses.
 197 
 198   // It is not required that we traverse spaces in the same order in
 199   // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
 200   // tracking expects us to do so. See comment under phase4.
 201 
 202   GCTraceTime tm("phase 2", G1Log::fine() && Verbose, true, gc_timer());
 203 
 204   prepare_compaction();
 205 }
 206 
 207 class G1AdjustPointersClosure: public HeapRegionClosure {
 208  public:
 209   bool doHeapRegion(HeapRegion* r) {
 210     if (r->is_humongous()) {
 211       if (r->is_starts_humongous()) {
 212         // We must adjust the pointers on the single H object.
 213         oop obj = oop(r->bottom());
 214         // point all the oops to the new location
 215         MarkSweep::adjust_pointers(obj);
 216       }
 217     } else if (!r->is_pinned()) {
 218       // This really ought to be "as_CompactibleSpace"...
 219       r->adjust_pointers();
 220     }
 221     return false;
 222   }
 223 };
 224 
 225 class G1AlwaysTrueClosure: public BoolObjectClosure {
 226 public:
 227   bool do_object_b(oop p) { return true; }
 228 };
 229 static G1AlwaysTrueClosure always_true;
 230 
 231 void G1MarkSweep::mark_sweep_phase3() {
 232   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 233 
 234   // Adjust the pointers to reflect the new locations
 235   GCTraceTime tm("phase 3", G1Log::fine() && Verbose, true, gc_timer());
 236 
 237   // Need cleared claim bits for the roots processing
 238   ClassLoaderDataGraph::clear_claimed_marks();
 239 
 240   CodeBlobToOopClosure adjust_code_closure(&GenMarkSweep::adjust_pointer_closure, CodeBlobToOopClosure::FixRelocations);
 241   {
 242     G1RootProcessor root_processor(g1h, 1);
 243     root_processor.process_all_roots(&GenMarkSweep::adjust_pointer_closure,
 244                                      &GenMarkSweep::adjust_cld_closure,
 245                                      &adjust_code_closure);
 246   }
 247 
 248   assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
 249   g1h->ref_processor_stw()->weak_oops_do(&GenMarkSweep::adjust_pointer_closure);
 250 
 251   // Now adjust pointers in remaining weak roots.  (All of which should
 252   // have been cleared if they pointed to non-surviving objects.)
 253   JNIHandles::weak_oops_do(&always_true, &GenMarkSweep::adjust_pointer_closure);
 254 
 255   if (G1StringDedup::is_enabled()) {


 276           assert(hr->is_empty(), "Should have been cleared in phase 2.");
 277         }
 278         hr->reset_during_compaction();
 279       }
 280     } else if (!hr->is_pinned()) {
 281       hr->compact();
 282     }
 283     return false;
 284   }
 285 };
 286 
 287 void G1MarkSweep::mark_sweep_phase4() {
 288   // All pointers are now adjusted, move objects accordingly
 289 
 290   // The ValidateMarkSweep live oops tracking expects us to traverse spaces
 291   // in the same order in phase2, phase3 and phase4. We don't quite do that
 292   // here (code and comment not fixed for perm removal), so we tell the validate code
 293   // to use a higher index (saved from phase2) when verifying perm_gen.
 294   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 295 
 296   GCTraceTime tm("phase 4", G1Log::fine() && Verbose, true, gc_timer());
 297 
 298   G1SpaceCompactClosure blk;
 299   g1h->heap_region_iterate(&blk);
 300 
 301 }
 302 
 303 void G1MarkSweep::enable_archive_object_check() {
 304   assert(!_archive_check_enabled, "archive range check already enabled");
 305   _archive_check_enabled = true;
 306   size_t length = Universe::heap()->max_capacity();
 307   _archive_region_map.initialize((HeapWord*)Universe::heap()->base(),
 308                                  (HeapWord*)Universe::heap()->base() + length,
 309                                  HeapRegion::GrainBytes);
 310 }
 311 
 312 void G1MarkSweep::set_range_archive(MemRegion range, bool is_archive) {
 313   assert(_archive_check_enabled, "archive range check not enabled");
 314   _archive_region_map.set_by_address(range, is_archive);
 315 }
 316 

