< prev index next >

src/share/vm/gc/g1/g1MarkSweep.cpp

Print this page




  57 
  58 class HeapRegion;
  59 
     // Static member definitions for archive-region tracking.
     // Checking starts disabled and the map starts empty; both are
     // presumably enabled/populated when archive regions are created --
     // confirm against the rest of g1MarkSweep.cpp.
  60 bool G1MarkSweep::_archive_check_enabled = false;
  61 G1ArchiveRegionMap G1MarkSweep::_archive_region_map;
  62 
  63 void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
  64                                       bool clear_all_softrefs) {
     // Entry point for a full, serial mark-sweep collection of the G1 heap.
     // Runs the four mark-sweep phases in order, bracketed by code-cache /
     // biased-locking prologue and epilogue work. Must be called at a
     // safepoint; 'rp' must be the heap's STW reference processor, and
     // 'clear_all_softrefs' selects the soft-reference clearing policy.
  65   assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
  66 
  67 #ifdef ASSERT
  68   if (G1CollectedHeap::heap()->collector_policy()->should_clear_all_soft_refs()) {
     // NOTE(review): the message below has a typo ("earler" -> "earlier");
     // worth fixing in the actual source.
  69     assert(clear_all_softrefs, "Policy should have been checked earler");
  70   }
  71 #endif
  72   // hook up weak ref data so it can be used during Mark-Sweep
  73   assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
  74   assert(rp != NULL, "should be non-NULL");
  75   assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Precondition");
  76 
  77   GenMarkSweep::_ref_processor = rp;
  78   rp->setup_policy(clear_all_softrefs);
  79 
  80   // When collecting the permanent generation Method*s may be moving,
  81   // so we either have to flush all bcp data or convert it into bci.
  82   CodeCache::gc_prologue();
  83 
  84   bool marked_for_unloading = false;
  85 
  86   allocate_stacks();
  87 
  88   // We should save the marks of the currently locked biased monitors.
  89   // The marking doesn't preserve the marks of biased objects.
  90   BiasedLocking::preserve_marks();
  91 
     // Phase 1 recursively traverses and marks all live objects
     // (see mark_sweep_phase1 below).
  92   mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);
  93 
     // NOTE(review): phases 2-4 presumably follow the standard mark-compact
     // sequence (compute forwarding addresses, adjust pointers, compact) --
     // confirm against the phase implementations, which are outside this view.
  94   mark_sweep_phase2();
  95 
  96   // Don't add any more derived pointers during phase3
  97   COMPILER2_PRESENT(DerivedPointerTable::set_active(false));
  98 
  99   mark_sweep_phase3();
 100 
 101   mark_sweep_phase4();
 102 
     // Undo the state saved before marking (preserved headers, biased-lock
     // marks), then release the marking stacks.
 103   GenMarkSweep::restore_marks();
 104   BiasedLocking::restore_marks();
 105   GenMarkSweep::deallocate_stacks();
 106 
 107   CodeCache::gc_epilogue();
 108   JvmtiExport::gc_epilogue();
 109 
 110   // refs processing: clean slate
 111   GenMarkSweep::_ref_processor = NULL;
 112 }
 113 
 114 
 115 void G1MarkSweep::allocate_stacks() {
     // Despite the name, nothing is allocated here: this only clears
     // GenMarkSweep's preserved-mark bookkeeping so no stale state is
     // carried into this collection. NOTE(review): presumably G1 preserves
     // object headers through some other mechanism -- confirm.
 116   GenMarkSweep::_preserved_count_max = 0;
 117   GenMarkSweep::_preserved_marks = NULL;
 118   GenMarkSweep::_preserved_count = 0;
 119 }
 120 
 121 void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
 122                                     bool clear_all_softrefs) {
 123   // Recursively traverse all live objects and mark them
 124   GCTraceTime tm("phase 1", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
 125 
 126   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 127 
 128   // Need cleared claim bits for the roots processing
 129   ClassLoaderDataGraph::clear_claimed_marks();
 130 
 131   MarkingCodeBlobClosure follow_code_closure(&GenMarkSweep::follow_root_closure, !CodeBlobToOopClosure::FixRelocations);




  57 
  58 class HeapRegion;
  59 
     // Static member definitions for archive-region tracking.
     // Checking starts disabled and the map starts empty; both are
     // presumably enabled/populated when archive regions are created --
     // confirm against the rest of g1MarkSweep.cpp.
  60 bool G1MarkSweep::_archive_check_enabled = false;
  61 G1ArchiveRegionMap G1MarkSweep::_archive_region_map;
  62 
  63 void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
  64                                       bool clear_all_softrefs) {
     // Entry point for a full, serial mark-sweep collection of the G1 heap.
     // Runs the four mark-sweep phases in order, bracketed by code-cache /
     // biased-locking prologue and epilogue work. Must be called at a
     // safepoint; 'rp' must be the heap's STW reference processor, and
     // 'clear_all_softrefs' selects the soft-reference clearing policy.
  65   assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
  66 
  67 #ifdef ASSERT
  68   if (G1CollectedHeap::heap()->collector_policy()->should_clear_all_soft_refs()) {
     // NOTE(review): the message below has a typo ("earler" -> "earlier");
     // worth fixing in the actual source.
  69     assert(clear_all_softrefs, "Policy should have been checked earler");
  70   }
  71 #endif
  72   // hook up weak ref data so it can be used during Mark-Sweep
  73   assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
  74   assert(rp != NULL, "should be non-NULL");
  75   assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Precondition");
  76 
     // Install via the accessor rather than assigning the field directly.
  77   GenMarkSweep::set_ref_processor(rp);
  78   rp->setup_policy(clear_all_softrefs);
  79 
  80   // When collecting the permanent generation Method*s may be moving,
  81   // so we either have to flush all bcp data or convert it into bci.
  82   CodeCache::gc_prologue();
  83 
  84   bool marked_for_unloading = false;
  85 
  86   allocate_stacks();
  87 
  88   // We should save the marks of the currently locked biased monitors.
  89   // The marking doesn't preserve the marks of biased objects.
  90   BiasedLocking::preserve_marks();
  91 
     // Phase 1 recursively traverses and marks all live objects
     // (see mark_sweep_phase1 below).
  92   mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);
  93 
     // NOTE(review): phases 2-4 presumably follow the standard mark-compact
     // sequence (compute forwarding addresses, adjust pointers, compact) --
     // confirm against the phase implementations, which are outside this view.
  94   mark_sweep_phase2();
  95 
  96   // Don't add any more derived pointers during phase3
  97   COMPILER2_PRESENT(DerivedPointerTable::set_active(false));
  98 
  99   mark_sweep_phase3();
 100 
 101   mark_sweep_phase4();
 102 
     // Undo the state saved before marking (preserved headers, biased-lock
     // marks), then release the marking stacks.
 103   GenMarkSweep::restore_marks();
 104   BiasedLocking::restore_marks();
 105   GenMarkSweep::deallocate_stacks();
 106 
 107   CodeCache::gc_epilogue();
 108   JvmtiExport::gc_epilogue();
 109 
 110   // refs processing: clean slate
 111   GenMarkSweep::set_ref_processor(NULL);
 112 }
 113 
 114 
 115 void G1MarkSweep::allocate_stacks() {
     // Despite the name, nothing is allocated here: this only clears
     // GenMarkSweep's preserved-mark bookkeeping so no stale state is
     // carried into this collection. NOTE(review): presumably G1 preserves
     // object headers through some other mechanism -- confirm.
 116   GenMarkSweep::_preserved_count_max = 0;
 117   GenMarkSweep::_preserved_marks = NULL;
 118   GenMarkSweep::_preserved_count = 0;
 119 }
 120 
 121 void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
 122                                     bool clear_all_softrefs) {
 123   // Recursively traverse all live objects and mark them
 124   GCTraceTime tm("phase 1", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
 125 
 126   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 127 
 128   // Need cleared claim bits for the roots processing
 129   ClassLoaderDataGraph::clear_claimed_marks();
 130 
 131   MarkingCodeBlobClosure follow_code_closure(&GenMarkSweep::follow_root_closure, !CodeBlobToOopClosure::FixRelocations);


< prev index next >