< prev index next >

src/share/vm/gc/g1/g1MarkSweep.hpp

Print this page




  27 
  28 #include "gc/g1/g1CollectedHeap.hpp"
  29 #include "gc/g1/heapRegion.hpp"
  30 #include "gc/serial/genMarkSweep.hpp"
  31 #include "gc/shared/generation.hpp"
  32 #include "memory/universe.hpp"
  33 #include "oops/markOop.hpp"
  34 #include "oops/oop.hpp"
  35 #include "runtime/timer.hpp"
  36 #include "utilities/growableArray.hpp"
  37 
  38 class ReferenceProcessor;
  39 
  40 // G1MarkSweep takes care of global mark-compact garbage collection for a
  41 // G1CollectedHeap using a four-phase pointer forwarding algorithm.  All
  42 // generations are assumed to support marking; those that can also support
  43 // compaction.
  44 //
  45 // Class unloading will only occur when a full gc is invoked.
  46 class G1PrepareCompactClosure;

  47 
  48 class G1MarkSweep : AllStatic {
  49  public:
  50 
       // Perform a full, stop-the-world mark-compact collection of the G1
       // heap.  Must be invoked at a safepoint.  'rp' is the
       // ReferenceProcessor used during marking; 'clear_all_softrefs'
       // requests that all soft references be cleared as well.
  51   static void invoke_at_safepoint(ReferenceProcessor* rp,
  52                                   bool clear_all_softrefs);
  53 
       // Timing/tracing state is shared with the serial collector's
       // GenMarkSweep rather than duplicated here.
  54   static STWGCTimer* gc_timer() { return GenMarkSweep::_gc_timer; }
  55   static SerialOldTracer* gc_tracer() { return GenMarkSweep::_gc_tracer; }
  56 







  57  private:


  58 
  59   // Phase 1: mark live objects.
       // NOTE(review): 'marked_for_deopt' appears to be an out-parameter
       // signalling that compiled code must be deoptimized -- confirm
       // against the implementation in the corresponding .cpp file.
  60   static void mark_sweep_phase1(bool& marked_for_deopt,
  61                                 bool clear_all_softrefs);
  62   // Phase 2: calculate new (compacted) addresses for live objects.
  63   static void mark_sweep_phase2();
  64   // Phase 3: update pointers to refer to the objects' new locations.
  65   static void mark_sweep_phase3();
  66   // Phase 4: move objects to their new positions.
  67   static void mark_sweep_phase4();
  68 
       // Presumably sets up the working stacks used by the phases above --
       // confirm in the .cpp file.
  69   static void allocate_stacks();
       // Compute compaction targets for the heap's regions; the _work
       // variant drives the supplied closure over the heap.
  70   static void prepare_compaction();
  71   static void prepare_compaction_work(G1PrepareCompactClosure* blk);
  72 };
  73 
// Heap-region closure used in G1MarkSweep's compaction-preparation step
// (see G1MarkSweep::prepare_compaction_work).  Tracks the current
// compaction target in '_cp' and counts humongous regions removed along
// the way.
  74 class G1PrepareCompactClosure : public HeapRegionClosure {
  75  protected:
  76   G1CollectedHeap* _g1h;
  77   ModRefBarrierSet* _mrbs;
       // Current compaction point; only meaningful once is_cp_initialized().
  78   CompactPoint _cp;
  79   HeapRegionSetCount _humongous_regions_removed;
  80 
  81   virtual void prepare_for_compaction(HeapRegion* hr, HeapWord* end);
  82   void prepare_for_compaction_work(CompactPoint* cp, HeapRegion* hr, HeapWord* end);
  83   void free_humongous_region(HeapRegion* hr);
       // _cp is default-constructed; its 'space' field is NULL until first use.
  84   bool is_cp_initialized() const { return _cp.space != NULL; }
  85 
  86  public:
  87   G1PrepareCompactClosure() :
  88     _g1h(G1CollectedHeap::heap()),
  89     _mrbs(_g1h->g1_barrier_set()),
  90     _humongous_regions_removed() { }
  91 
       // NOTE(review): presumably flushes '_humongous_regions_removed' back
       // into the heap's region sets -- confirm in the .cpp file.
  92   void update_sets();
       // Per-region callback invoked during heap iteration
       // (HeapRegionClosure interface).
  93   bool doHeapRegion(HeapRegion* hr);





  94 };
  95 
  96 #endif // SHARE_VM_GC_G1_G1MARKSWEEP_HPP


  27 
  28 #include "gc/g1/g1CollectedHeap.hpp"
  29 #include "gc/g1/heapRegion.hpp"
  30 #include "gc/serial/genMarkSweep.hpp"
  31 #include "gc/shared/generation.hpp"
  32 #include "memory/universe.hpp"
  33 #include "oops/markOop.hpp"
  34 #include "oops/oop.hpp"
  35 #include "runtime/timer.hpp"
  36 #include "utilities/growableArray.hpp"
  37 
  38 class ReferenceProcessor;
  39 
  40 // G1MarkSweep takes care of global mark-compact garbage collection for a
  41 // G1CollectedHeap using a four-phase pointer forwarding algorithm.  All
  42 // generations are assumed to support marking; those that can also support
  43 // compaction.
  44 //
  45 // Class unloading will only occur when a full gc is invoked.
  46 class G1PrepareCompactClosure;
  47 class G1ArchiveRegionMap;
  48 
  49 class G1MarkSweep : AllStatic {
  50  public:
  51 
       // Perform a full, stop-the-world mark-compact collection of the G1
       // heap.  Must be invoked at a safepoint.  'rp' is the
       // ReferenceProcessor used during marking; 'clear_all_softrefs'
       // requests that all soft references be cleared as well.
  52   static void invoke_at_safepoint(ReferenceProcessor* rp,
  53                                   bool clear_all_softrefs);
  54 
       // Timing/tracing state is shared with the serial collector's
       // GenMarkSweep rather than duplicated here.
  55   static STWGCTimer* gc_timer() { return GenMarkSweep::_gc_timer; }
  56   static SerialOldTracer* gc_tracer() { return GenMarkSweep::_gc_tracer; }
  57 
  58   // Support for 'archive' objects, to prevent objects in archive regions
  59   // from being marked by full GCs.
       // Turn the archive-range check on; archive_check_enabled() reports
       // whether it has been enabled.
  60   static void enable_archive_object_check();
       // Record the address range from 'start' to 'end' as holding archive
       // objects (boundary inclusivity: confirm in the .cpp file).
  61   static void mark_range_archive(HeapWord* start, HeapWord* end);
       // Query whether 'object' lies within a range marked as archive.
  62   static bool in_archive_range(oop object);
  63   static bool archive_check_enabled() { return G1MarkSweep::_archive_check_enabled; }
  64 
  65  private:
       // True once enable_archive_object_check() has been called.
  66   static bool _archive_check_enabled;
       // Records which heap address ranges hold archive objects; populated
       // by mark_range_archive().
  67   static G1ArchiveRegionMap  _archive_region_map;
  68 
  69   // Phase 1: mark live objects.
       // NOTE(review): 'marked_for_deopt' appears to be an out-parameter
       // signalling that compiled code must be deoptimized -- confirm
       // against the implementation in the corresponding .cpp file.
  70   static void mark_sweep_phase1(bool& marked_for_deopt,
  71                                 bool clear_all_softrefs);
  72   // Phase 2: calculate new (compacted) addresses for live objects.
  73   static void mark_sweep_phase2();
  74   // Phase 3: update pointers to refer to the objects' new locations.
  75   static void mark_sweep_phase3();
  76   // Phase 4: move objects to their new positions.
  77   static void mark_sweep_phase4();
  78 
       // Presumably sets up the working stacks used by the phases above --
       // confirm in the .cpp file.
  79   static void allocate_stacks();
       // Compute compaction targets for the heap's regions; the _work
       // variant drives the supplied closure over the heap.
  80   static void prepare_compaction();
  81   static void prepare_compaction_work(G1PrepareCompactClosure* blk);
  82 };
  83 
// Heap-region closure used in G1MarkSweep's compaction-preparation step
// (see G1MarkSweep::prepare_compaction_work).  Tracks the current
// compaction target in '_cp' and counts humongous regions removed along
// the way.
  84 class G1PrepareCompactClosure : public HeapRegionClosure {
  85  protected:
  86   G1CollectedHeap* _g1h;
  87   ModRefBarrierSet* _mrbs;
       // Current compaction point; only meaningful once is_cp_initialized().
  88   CompactPoint _cp;
  89   HeapRegionSetCount _humongous_regions_removed;
  90 
  91   virtual void prepare_for_compaction(HeapRegion* hr, HeapWord* end);
  92   void prepare_for_compaction_work(CompactPoint* cp, HeapRegion* hr, HeapWord* end);
  93   void free_humongous_region(HeapRegion* hr);
       // _cp is default-constructed; its 'space' field is NULL until first use.
  94   bool is_cp_initialized() const { return _cp.space != NULL; }
  95 
  96  public:
  97   G1PrepareCompactClosure() :
  98     _g1h(G1CollectedHeap::heap()),
  99     _mrbs(_g1h->g1_barrier_set()),
 100     _humongous_regions_removed() { }
 101 
       // NOTE(review): presumably flushes '_humongous_regions_removed' back
       // into the heap's region sets -- confirm in the .cpp file.
 102   void update_sets();
       // Per-region callback invoked during heap iteration
       // (HeapRegionClosure interface).
 103   bool doHeapRegion(HeapRegion* hr);
 104 };
 105 
// Boolean map over the heap (biased array, presumably indexable by heap
// address -- see G1BiasedMappedArray) recording which parts of the heap
// hold archive objects; entries default to false.
 106 class G1ArchiveRegionMap : public G1BiasedMappedArray<bool> {
 107 protected:
       // Unmarked entries read as 'not archive'.
 108   bool default_value() const { return false; }
 109 };
 110 
 111 #endif // SHARE_VM_GC_G1_G1MARKSWEEP_HPP
< prev index next >