
src/share/vm/gc/serial/genMarkSweep.cpp

  53 #include "utilities/copy.hpp"
  54 #include "utilities/events.hpp"
  55 #include "utilities/stack.inline.hpp"
  56 
  57 void GenMarkSweep::invoke_at_safepoint(ReferenceProcessor* rp, bool clear_all_softrefs) {
  58   assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
  59 
  60   GenCollectedHeap* gch = GenCollectedHeap::heap();
  61 #ifdef ASSERT
  62   if (gch->collector_policy()->should_clear_all_soft_refs()) {
  63     assert(clear_all_softrefs, "Policy should have been checked earlier");
  64   }
  65 #endif
  66 
  67   // hook up weak ref data so it can be used during Mark-Sweep
  68   assert(ref_processor() == NULL, "no stomping");
  69   assert(rp != NULL, "should be non-NULL");
  70   set_ref_processor(rp);
  71   rp->setup_policy(clear_all_softrefs);
  72 
  73   GCTraceTime t1(GCCauseString("Full GC", gch->gc_cause()), PrintGC && !PrintGCDetails, true, NULL, _gc_tracer->gc_id());
  74 
  75   gch->trace_heap_before_gc(_gc_tracer);
  76 
  77   // When collecting the permanent generation, Method*s may be moving,
  78   // so we either have to flush all bcp data or convert it into bci.
  79   CodeCache::gc_prologue();
  80 
  81   // Increment the invocation count
  82   _total_invocations++;
  83 
  84   // Capture heap size before collection for printing.
  85   size_t gch_prev_used = gch->used();
  86 
  87   // Capture used regions for each generation that will be
  88   // subject to collection, so that card table adjustments can
  89   // be made intelligently (see clear / invalidate further below).
  90   gch->save_used_regions();
  91 
  92   allocate_stacks();
  93 


 169 
 170   _preserved_marks = (PreservedMark*)scratch;
 171   _preserved_count = 0;
 172 }
 173 
 174 
 175 void GenMarkSweep::deallocate_stacks() {
 176   if (!UseG1GC) {
 177     GenCollectedHeap* gch = GenCollectedHeap::heap();
 178     gch->release_scratch();
 179   }
 180 
 181   _preserved_mark_stack.clear(true);
 182   _preserved_oop_stack.clear(true);
 183   _marking_stack.clear();
 184   _objarray_stack.clear(true);
 185 }
 186 
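The preserved-mark and preserved-oop stacks cleared above record object header words that the collector saved before reusing them, so the originals can be put back after the collection. A minimal, self-contained sketch of that save/restore idea (toy types, not HotSpot's PreservedMark or Stack classes) might look like this:

    #include <cstdint>
    #include <vector>

    struct ToyObj { std::uintptr_t header; };

    // One saved (object, original header word) pair.
    struct SavedMark {
      ToyObj*        obj;
      std::uintptr_t saved_header;
    };

    static std::vector<SavedMark> saved_marks;

    // Called before the collector overwrites obj's header for its own use.
    static void preserve_mark(ToyObj* obj) {
      saved_marks.push_back(SavedMark{obj, obj->header});
    }

    // Called after the collection: put the original header words back.
    static void restore_marks() {
      for (const SavedMark& sm : saved_marks) {
        sm.obj->header = sm.saved_header;
      }
      saved_marks.clear();   // analogous to clearing the stacks above
    }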
 187 void GenMarkSweep::mark_sweep_phase1(bool clear_all_softrefs) {
 188   // Recursively traverse all live objects and mark them
 189   GCTraceTime tm("phase 1", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
 190 
 191   GenCollectedHeap* gch = GenCollectedHeap::heap();
 192 
 193   // Because follow_root_closure is created statically, we cannot
 194   // use OopsInGenClosure constructor which takes a generation,
 195   // as the Universe has not been created when the static constructors
 196   // are run.
 197   follow_root_closure.set_orig_generation(gch->old_gen());
 198 
 199   // Need new claim bits before marking starts.
 200   ClassLoaderDataGraph::clear_claimed_marks();
 201 
 202   {
 203     StrongRootsScope srs(1);
 204 
 205     gch->gen_process_roots(&srs,
 206                            GenCollectedHeap::OldGen,
 207                            false, // Younger gens are not roots.
 208                            GenCollectedHeap::SO_None,
 209                            ClassUnloading,
 210                            &follow_root_closure,
 211                            &follow_root_closure,
 212                            &follow_cld_closure);
 213   }
 214 
 215   // Process reference objects found during marking
 216   {
 217     ref_processor()->setup_policy(clear_all_softrefs);
 218     const ReferenceProcessorStats& stats =
 219       ref_processor()->process_discovered_references(
 220         &is_alive, &keep_alive, &follow_stack_closure, NULL, _gc_timer, _gc_tracer->gc_id());
 221     gc_tracer()->report_gc_reference_stats(stats);
 222   }
 223 
 224   // This is the point where the entire marking should have completed.
 225   assert(_marking_stack.is_empty(), "Marking should have completed");
 226 
 227   // Unload classes and purge the SystemDictionary.
 228   bool purged_class = SystemDictionary::do_unloading(&is_alive);
 229 
 230   // Unload nmethods.
 231   CodeCache::do_unloading(&is_alive, purged_class);
 232 
 233   // Prune dead klasses from subklass/sibling/implementor lists.
 234   Klass::clean_weak_klass_links(&is_alive);
 235 
 236   // Delete entries for dead interned strings.
 237   StringTable::unlink(&is_alive);
 238 
 239   // Clean up unreferenced symbols in symbol table.
 240   SymbolTable::unlink();


 242   gc_tracer()->report_object_count_after_gc(&is_alive);
 243 }
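Phase 1 above marks every object reachable from the roots, draining an explicit marking stack rather than recursing (hence the assert that _marking_stack is empty when marking finishes). A self-contained toy version of that traversal, with plain structs standing in for oops and closures, could look like this:

    #include <vector>

    struct Obj {
      bool marked = false;
      std::vector<Obj*> refs;   // outgoing references
    };

    // Mark everything reachable from the given roots using an explicit stack.
    static void mark_from_roots(const std::vector<Obj*>& roots) {
      std::vector<Obj*> marking_stack;
      for (Obj* r : roots) {
        if (r != nullptr && !r->marked) {
          r->marked = true;
          marking_stack.push_back(r);
        }
      }
      while (!marking_stack.empty()) {
        Obj* o = marking_stack.back();
        marking_stack.pop_back();
        for (Obj* ref : o->refs) {
          if (ref != nullptr && !ref->marked) {
            ref->marked = true;
            marking_stack.push_back(ref);
          }
        }
      }
      // The stack is empty here, mirroring the assert above.
    }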
 244 
 245 
 246 void GenMarkSweep::mark_sweep_phase2() {
 247   // Now all live objects are marked, compute the new object addresses.
 248 
 249   // It is imperative that we traverse perm_gen LAST. If dead space is
 250   // allowed, a range of dead objects may get overwritten by a dead int
 251   // array. If perm_gen is not traversed last, a Klass* may get
 252   // overwritten. This is fine since it is dead, but if the class has dead
 253   // instances we have to skip them, and in order to find their size we
 254   // need the Klass*!
 255   //
 256   // It is not required that we traverse spaces in the same order in
 257   // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
 258   // tracking expects us to do so. See comment under phase4.
 259 
 260   GenCollectedHeap* gch = GenCollectedHeap::heap();
 261 
 262   GCTraceTime tm("phase 2", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
 263 
 264   gch->prepare_for_compaction();
 265 }
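"Compute the new object addresses" here means a sliding-compaction plan: live objects are visited in address order and each is assigned the next free offset, so live data ends up packed at the bottom of the space. A hedged, self-contained sketch of that bookkeeping (a toy layout, not what prepare_for_compaction actually touches) is:

    #include <vector>

    struct HeapObj {
      bool   marked;    // set during phase 1
      size_t size;      // object size in words
      size_t forward;   // new offset, filled in below
    };

    // 'heap' models objects laid out contiguously from offset 0,
    // already ordered by address.
    static void compute_forwarding(std::vector<HeapObj>& heap) {
      size_t compact_top = 0;             // next free offset after compaction
      for (HeapObj& o : heap) {
        if (o.marked) {
          o.forward = compact_top;        // o will slide down to compact_top
          compact_top += o.size;
        }
        // Dead objects get no forwarding address; their space is reclaimed.
      }
    }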
 266 
 267 class GenAdjustPointersClosure: public GenCollectedHeap::GenClosure {
 268 public:
 269   void do_generation(Generation* gen) {
 270     gen->adjust_pointers();
 271   }
 272 };
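GenAdjustPointersClosure drives phase 3: once every live object has a forwarding address, each reference field has to be rewritten to point at the referent's new location. Schematically (toy structs, not the real closure machinery):

    #include <vector>

    struct Node {
      Node* forward = nullptr;      // destination chosen in phase 2
      std::vector<Node*> refs;      // reference fields of this object
    };

    // After marking, every remaining referent is live and has a forwarding
    // address, so retargeting is a simple field rewrite.
    static void adjust_pointers(const std::vector<Node*>& live_objects) {
      for (Node* o : live_objects) {
        for (Node*& ref : o->refs) {
          if (ref != nullptr) {
            ref = ref->forward;     // point at the post-compaction location
          }
        }
      }
    }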
 273 
 274 void GenMarkSweep::mark_sweep_phase3() {
 275   GenCollectedHeap* gch = GenCollectedHeap::heap();
 276 
 277   // Adjust the pointers to reflect the new locations
 278   GCTraceTime tm("phase 3", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
 279 
 280   // Need new claim bits for the pointer adjustment tracing.
 281   ClassLoaderDataGraph::clear_claimed_marks();
 282 
 283   // Because the closure below is created statically, we cannot
 284   // use OopsInGenClosure constructor which takes a generation,
 285   // as the Universe has not been created when the static constructors
 286   // are run.
 287   adjust_pointer_closure.set_orig_generation(gch->old_gen());
 288 
 289   {
 290     StrongRootsScope srs(1);
 291 
 292     gch->gen_process_roots(&srs,
 293                            GenCollectedHeap::OldGen,
 294                            false, // Younger gens are not roots.
 295                            GenCollectedHeap::SO_AllCodeCache,
 296                            GenCollectedHeap::StrongAndWeakRoots,
 297                            &adjust_pointer_closure,
 298                            &adjust_pointer_closure,


 310 public:
 311   void do_generation(Generation* gen) {
 312     gen->compact();
 313   }
 314 };
 315 
 316 void GenMarkSweep::mark_sweep_phase4() {
 317   // All pointers are now adjusted, move objects accordingly
 318 
 319   // It is imperative that we traverse perm_gen first in phase4. All
 320   // classes must be allocated earlier than their instances, and traversing
 321   // perm_gen first makes sure that all Klass*s have moved to their new
 322   // location before any instance does a dispatch through its klass!
 323 
 324   // The ValidateMarkSweep live oops tracking expects us to traverse spaces
 325   // in the same order in phase2, phase3 and phase4. We don't quite do that
 326   // here (perm_gen first rather than last), so we tell the validate code
 327   // to use a higher index (saved from phase2) when verifying perm_gen.
 328   GenCollectedHeap* gch = GenCollectedHeap::heap();
 329 
 330   GCTraceTime tm("phase 4", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
 331 
 332   GenCompactClosure blk;
 333   gch->generation_iterate(&blk, true);
 334 }
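Phase 4 performs the actual move: with every pointer already adjusted, each live object is copied down to the forwarding address picked in phase 2, generation by generation. A self-contained sketch of that final slide over a flat byte buffer (not the real Generation::compact code) could be:

    #include <cstring>
    #include <vector>

    struct MovObj {
      bool   marked;
      size_t offset;    // current offset in the heap buffer
      size_t size;      // size in bytes
      size_t forward;   // destination offset from phase 2
    };

    // Objects are visited in address order and only slide toward lower
    // addresses, so a destination is never clobbered before it is copied.
    static void compact(std::vector<MovObj>& objects,
                        std::vector<unsigned char>& heap) {
      for (MovObj& o : objects) {
        if (o.marked && o.forward != o.offset) {
          std::memmove(&heap[o.forward], &heap[o.offset], o.size);
          o.offset = o.forward;
        }
      }
    }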


  53 #include "utilities/copy.hpp"
  54 #include "utilities/events.hpp"
  55 #include "utilities/stack.inline.hpp"
  56 
  57 void GenMarkSweep::invoke_at_safepoint(ReferenceProcessor* rp, bool clear_all_softrefs) {
  58   assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
  59 
  60   GenCollectedHeap* gch = GenCollectedHeap::heap();
  61 #ifdef ASSERT
  62   if (gch->collector_policy()->should_clear_all_soft_refs()) {
  63     assert(clear_all_softrefs, "Policy should have been checked earlier");
  64   }
  65 #endif
  66 
  67   // hook up weak ref data so it can be used during Mark-Sweep
  68   assert(ref_processor() == NULL, "no stomping");
  69   assert(rp != NULL, "should be non-NULL");
  70   set_ref_processor(rp);
  71   rp->setup_policy(clear_all_softrefs);
  72 
  73   GCTraceTime t1(GCCauseString("Full GC", gch->gc_cause()), PrintGC && !PrintGCDetails, true, NULL);
  74 
  75   gch->trace_heap_before_gc(_gc_tracer);
  76 
  77   // When collecting the permanent generation, Method*s may be moving,
  78   // so we either have to flush all bcp data or convert it into bci.
  79   CodeCache::gc_prologue();
  80 
  81   // Increment the invocation count
  82   _total_invocations++;
  83 
  84   // Capture heap size before collection for printing.
  85   size_t gch_prev_used = gch->used();
  86 
  87   // Capture used regions for each generation that will be
  88   // subject to collection, so that card table adjustments can
  89   // be made intelligently (see clear / invalidate further below).
  90   gch->save_used_regions();
  91 
  92   allocate_stacks();
  93 


 169 
 170   _preserved_marks = (PreservedMark*)scratch;
 171   _preserved_count = 0;
 172 }
 173 
 174 
 175 void GenMarkSweep::deallocate_stacks() {
 176   if (!UseG1GC) {
 177     GenCollectedHeap* gch = GenCollectedHeap::heap();
 178     gch->release_scratch();
 179   }
 180 
 181   _preserved_mark_stack.clear(true);
 182   _preserved_oop_stack.clear(true);
 183   _marking_stack.clear();
 184   _objarray_stack.clear(true);
 185 }
 186 
 187 void GenMarkSweep::mark_sweep_phase1(bool clear_all_softrefs) {
 188   // Recursively traverse all live objects and mark them
 189   GCTraceTime tm("phase 1", PrintGC && Verbose, true, _gc_timer);
 190 
 191   GenCollectedHeap* gch = GenCollectedHeap::heap();
 192 
 193   // Because follow_root_closure is created statically, we cannot
 194   // use OopsInGenClosure constructor which takes a generation,
 195   // as the Universe has not been created when the static constructors
 196   // are run.
 197   follow_root_closure.set_orig_generation(gch->old_gen());
 198 
 199   // Need new claim bits before marking starts.
 200   ClassLoaderDataGraph::clear_claimed_marks();
 201 
 202   {
 203     StrongRootsScope srs(1);
 204 
 205     gch->gen_process_roots(&srs,
 206                            GenCollectedHeap::OldGen,
 207                            false, // Younger gens are not roots.
 208                            GenCollectedHeap::SO_None,
 209                            ClassUnloading,
 210                            &follow_root_closure,
 211                            &follow_root_closure,
 212                            &follow_cld_closure);
 213   }
 214 
 215   // Process reference objects found during marking
 216   {
 217     ref_processor()->setup_policy(clear_all_softrefs);
 218     const ReferenceProcessorStats& stats =
 219       ref_processor()->process_discovered_references(
 220         &is_alive, &keep_alive, &follow_stack_closure, NULL, _gc_timer);
 221     gc_tracer()->report_gc_reference_stats(stats);
 222   }
 223 
 224   // This is the point where the entire marking should have completed.
 225   assert(_marking_stack.is_empty(), "Marking should have completed");
 226 
 227   // Unload classes and purge the SystemDictionary.
 228   bool purged_class = SystemDictionary::do_unloading(&is_alive);
 229 
 230   // Unload nmethods.
 231   CodeCache::do_unloading(&is_alive, purged_class);
 232 
 233   // Prune dead klasses from subklass/sibling/implementor lists.
 234   Klass::clean_weak_klass_links(&is_alive);
 235 
 236   // Delete entries for dead interned strings.
 237   StringTable::unlink(&is_alive);
 238 
 239   // Clean up unreferenced symbols in symbol table.
 240   SymbolTable::unlink();


 242   gc_tracer()->report_object_count_after_gc(&is_alive);
 243 }
 244 
 245 
 246 void GenMarkSweep::mark_sweep_phase2() {
 247   // Now all live objects are marked, compute the new object addresses.
 248 
 249   // It is imperative that we traverse perm_gen LAST. If dead space is
 250   // allowed, a range of dead objects may get overwritten by a dead int
 251   // array. If perm_gen is not traversed last, a Klass* may get
 252   // overwritten. This is fine since it is dead, but if the class has dead
 253   // instances we have to skip them, and in order to find their size we
 254   // need the Klass*!
 255   //
 256   // It is not required that we traverse spaces in the same order in
 257   // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
 258   // tracking expects us to do so. See comment under phase4.
 259 
 260   GenCollectedHeap* gch = GenCollectedHeap::heap();
 261 
 262   GCTraceTime tm("phase 2", PrintGC && Verbose, true, _gc_timer);
 263 
 264   gch->prepare_for_compaction();
 265 }
 266 
 267 class GenAdjustPointersClosure: public GenCollectedHeap::GenClosure {
 268 public:
 269   void do_generation(Generation* gen) {
 270     gen->adjust_pointers();
 271   }
 272 };
 273 
 274 void GenMarkSweep::mark_sweep_phase3() {
 275   GenCollectedHeap* gch = GenCollectedHeap::heap();
 276 
 277   // Adjust the pointers to reflect the new locations
 278   GCTraceTime tm("phase 3", PrintGC && Verbose, true, _gc_timer);
 279 
 280   // Need new claim bits for the pointer adjustment tracing.
 281   ClassLoaderDataGraph::clear_claimed_marks();
 282 
 283   // Because the closure below is created statically, we cannot
 284   // use OopsInGenClosure constructor which takes a generation,
 285   // as the Universe has not been created when the static constructors
 286   // are run.
 287   adjust_pointer_closure.set_orig_generation(gch->old_gen());
 288 
 289   {
 290     StrongRootsScope srs(1);
 291 
 292     gch->gen_process_roots(&srs,
 293                            GenCollectedHeap::OldGen,
 294                            false, // Younger gens are not roots.
 295                            GenCollectedHeap::SO_AllCodeCache,
 296                            GenCollectedHeap::StrongAndWeakRoots,
 297                            &adjust_pointer_closure,
 298                            &adjust_pointer_closure,


 310 public:
 311   void do_generation(Generation* gen) {
 312     gen->compact();
 313   }
 314 };
 315 
 316 void GenMarkSweep::mark_sweep_phase4() {
 317   // All pointers are now adjusted, move objects accordingly
 318 
 319   // It is imperative that we traverse perm_gen first in phase4. All
 320   // classes must be allocated earlier than their instances, and traversing
 321   // perm_gen first makes sure that all Klass*s have moved to their new
 322   // location before any instance does a dispatch through its klass!
 323 
 324   // The ValidateMarkSweep live oops tracking expects us to traverse spaces
 325   // in the same order in phase2, phase3 and phase4. We don't quite do that
 326   // here (perm_gen first rather than last), so we tell the validate code
 327   // to use a higher index (saved from phase2) when verifying perm_gen.
 328   GenCollectedHeap* gch = GenCollectedHeap::heap();
 329 
 330   GCTraceTime tm("phase 4", PrintGC && Verbose, true, _gc_timer);
 331 
 332   GenCompactClosure blk;
 333   gch->generation_iterate(&blk, true);
 334 }