
src/share/vm/gc/g1/g1MarkSweep.hpp

rev 12504 : 8171235: Move archive object code from G1MarkSweep into G1ArchiveAllocator
Reviewed-by:


  38 class ReferenceProcessor;
  39 
  40 // G1MarkSweep takes care of global mark-compact garbage collection for a
  41 // G1CollectedHeap using a four-phase pointer forwarding algorithm.  All
  42 // generations are assumed to support marking; those that can also support
  43 // compaction.
  44 //
  45 // Class unloading will only occur when a full gc is invoked.
  46 class G1PrepareCompactClosure;
  47 class G1ArchiveRegionMap;
  48 
  49 class G1MarkSweep : AllStatic {
  50  public:
  51 
  52   static void invoke_at_safepoint(ReferenceProcessor* rp,
  53                                   bool clear_all_softrefs);
  54 
  55   static STWGCTimer* gc_timer() { return GenMarkSweep::_gc_timer; }
  56   static SerialOldTracer* gc_tracer() { return GenMarkSweep::_gc_tracer; }
  57 
  58   // Create the _archive_region_map which is used to identify archive objects.
  59   static void enable_archive_object_check();
  60 
  61   // Set the regions containing the specified address range as archive/non-archive.
  62   static void set_range_archive(MemRegion range, bool is_archive);
  63 
  64   // Check if an object is in an archive region using the _archive_region_map.
  65   static bool in_archive_range(oop object);
  66 
  67   // Check if archive object checking is enabled, to avoid calling in_archive_range
  68   // unnecessarily.
  69   static bool archive_check_enabled() { return G1MarkSweep::_archive_check_enabled; }
  70 
  71  private:
  72   static bool _archive_check_enabled;
  73   static G1ArchiveRegionMap  _archive_region_map;
  74 
  75   // Mark live objects
  76   static void mark_sweep_phase1(bool& marked_for_deopt,
  77                                 bool clear_all_softrefs);
  78   // Calculate new addresses
  79   static void mark_sweep_phase2();
  80   // Update pointers
  81   static void mark_sweep_phase3();
  82   // Move objects to new positions
  83   static void mark_sweep_phase4();
  84 
  85   static void allocate_stacks();
  86   static void prepare_compaction();
  87   static void prepare_compaction_work(G1PrepareCompactClosure* blk);
  88 };
  89 
  90 class G1PrepareCompactClosure : public HeapRegionClosure {
  91  protected:
  92   G1CollectedHeap* _g1h;
  93   ModRefBarrierSet* _mrbs;
  94   CompactPoint _cp;
  95   uint _humongous_regions_removed;
  96 
  97   virtual void prepare_for_compaction(HeapRegion* hr, HeapWord* end);
  98   void prepare_for_compaction_work(CompactPoint* cp, HeapRegion* hr, HeapWord* end);
  99   void free_humongous_region(HeapRegion* hr);
 100   bool is_cp_initialized() const { return _cp.space != NULL; }
 101 
 102  public:
 103   G1PrepareCompactClosure() :
 104     _g1h(G1CollectedHeap::heap()),
 105     _mrbs(_g1h->g1_barrier_set()),
 106     _humongous_regions_removed(0) { }
 107 
 108   void update_sets();
 109   bool doHeapRegion(HeapRegion* hr);
 110 };
 111 
 112 // G1ArchiveRegionMap is a boolean array used to mark G1 regions as
 113 // archive regions.  This allows a quick check for whether an object
 114 // should not be marked because it is in an archive region.
 115 class G1ArchiveRegionMap : public G1BiasedMappedArray<bool> {
 116 protected:
 117   bool default_value() const { return false; }
 118 };
 119 
 120 #endif // SHARE_VM_GC_G1_G1MARKSWEEP_HPP
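The archive-check members above are the code this change relocates out of G1MarkSweep. Together with G1ArchiveRegionMap at the bottom of the file they amount to one boolean per region: set_range_archive() flags the regions covering a pinned range, and in_archive_range() answers "is this object in an archive region?" in constant time so the marking code can skip such objects cheaply. A minimal standalone sketch of that idea, with a plain std::vector standing in for the real G1BiasedMappedArray and all names hypothetical:

#include <cstdint>
#include <cstdio>
#include <vector>

// Toy stand-in for G1ArchiveRegionMap: one bool per fixed-size region,
// indexed by (addr - heap_base) >> log2(region_size). A simplified model,
// not HotSpot code.
class ToyArchiveRegionMap {
 public:
  ToyArchiveRegionMap(uintptr_t heap_base, size_t heap_bytes, size_t region_bytes)
    : _base(heap_base),
      _log_region(log2_exact(region_bytes)),   // region size assumed a power of two
      _flags(heap_bytes / region_bytes, false) {}

  // Flag every region overlapping the non-empty range [start, end) as
  // archive / non-archive, as set_range_archive() does per region.
  void set_range(uintptr_t start, uintptr_t end, bool is_archive) {
    for (size_t i = index_for(start); i <= index_for(end - 1); i++) {
      _flags[i] = is_archive;
    }
  }

  // Constant-time lookup on the marking fast path (cf. in_archive_range()).
  bool contains(uintptr_t addr) const { return _flags[index_for(addr)]; }

 private:
  static unsigned log2_exact(size_t v) {
    unsigned r = 0;
    while ((size_t(1) << r) < v) r++;
    return r;
  }
  size_t index_for(uintptr_t addr) const { return (addr - _base) >> _log_region; }

  uintptr_t _base;
  unsigned _log_region;
  std::vector<bool> _flags;
};

int main() {
  const uintptr_t heap_base    = 0x10000000;       // pretend heap start
  const size_t    heap_bytes   = 64 * 1024 * 1024;
  const size_t    region_bytes =  1 * 1024 * 1024;

  ToyArchiveRegionMap map(heap_base, heap_bytes, region_bytes);

  // Mark the first two regions as archive, as would happen for a pinned
  // archive range at the bottom of the heap.
  map.set_range(heap_base, heap_base + 2 * region_bytes, true);

  printf("object in region 0 is archive: %d\n", (int)map.contains(heap_base + 100));
  printf("object in region 5 is archive: %d\n", (int)map.contains(heap_base + 5 * region_bytes + 8));
  return 0;
}

The real map is biased by the heap base so it can be indexed directly from an address; the shift-by-log-region-size arithmetic in index_for() above is the assumed equivalent of that lookup.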


  38 class ReferenceProcessor;
  39 
  40 // G1MarkSweep takes care of global mark-compact garbage collection for a
  41 // G1CollectedHeap using a four-phase pointer forwarding algorithm.  All
  42 // generations are assumed to support marking; those that can also support
  43 // compaction.
  44 //
  45 // Class unloading will only occur when a full gc is invoked.
  46 class G1PrepareCompactClosure;
  47 class G1ArchiveRegionMap;
  48 
  49 class G1MarkSweep : AllStatic {
  50  public:
  51 
  52   static void invoke_at_safepoint(ReferenceProcessor* rp,
  53                                   bool clear_all_softrefs);
  54 
  55   static STWGCTimer* gc_timer() { return GenMarkSweep::_gc_timer; }
  56   static SerialOldTracer* gc_tracer() { return GenMarkSweep::_gc_tracer; }
  57 
  58 private:
  59   // Mark live objects
  60   static void mark_sweep_phase1(bool& marked_for_deopt,
  61                                 bool clear_all_softrefs);
  62   // Calculate new addresses
  63   static void mark_sweep_phase2();
  64   // Update pointers
  65   static void mark_sweep_phase3();
  66   // Move objects to new positions
  67   static void mark_sweep_phase4();
  68 
  69   static void allocate_stacks();
  70   static void prepare_compaction();
  71   static void prepare_compaction_work(G1PrepareCompactClosure* blk);
  72 };
  73 
  74 class G1PrepareCompactClosure : public HeapRegionClosure {
  75  protected:
  76   G1CollectedHeap* _g1h;
  77   ModRefBarrierSet* _mrbs;
  78   CompactPoint _cp;
  79   uint _humongous_regions_removed;
  80 
  81   virtual void prepare_for_compaction(HeapRegion* hr, HeapWord* end);
  82   void prepare_for_compaction_work(CompactPoint* cp, HeapRegion* hr, HeapWord* end);
  83   void free_humongous_region(HeapRegion* hr);
  84   bool is_cp_initialized() const { return _cp.space != NULL; }
  85 
  86  public:
  87   G1PrepareCompactClosure() :
  88     _g1h(G1CollectedHeap::heap()),
  89     _mrbs(_g1h->g1_barrier_set()),
  90     _humongous_regions_removed(0) { }
  91 
  92   void update_sets();
  93   bool doHeapRegion(HeapRegion* hr);
  94 };
  95 
  96 #endif // SHARE_VM_GC_G1_G1MARKSWEEP_HPP
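The post-change copy of the header above drops the archive members but keeps the four mark_sweep_phase*() declarations, matching the "four-phase pointer forwarding algorithm" named in the class comment. As a rough, self-contained illustration of what those phases do (a toy Lisp-2 style collector over a vector of single-reference objects, not G1 code), consider:

#include <cstdio>
#include <vector>

// Toy object: one reference field (an index into the heap vector, -1 for
// null), a mark bit, and a forwarding "address" (the index it will move to).
struct Obj {
  int  ref;
  bool marked;
  int  forward;
};

// Phase 1: mark everything reachable from the roots.
static void phase1_mark(std::vector<Obj>& heap, const std::vector<int>& roots) {
  std::vector<int> stack(roots);
  while (!stack.empty()) {
    int i = stack.back(); stack.pop_back();
    if (i < 0 || heap[i].marked) continue;
    heap[i].marked = true;
    stack.push_back(heap[i].ref);
  }
}

// Phase 2: compute each live object's new address (slide left, keep order).
static void phase2_forward(std::vector<Obj>& heap) {
  int next = 0;
  for (size_t i = 0; i < heap.size(); i++) {
    if (heap[i].marked) heap[i].forward = next++;
  }
}

// Phase 3: rewrite every reference, including the roots, via the forwarding info.
static void phase3_adjust(std::vector<Obj>& heap, std::vector<int>& roots) {
  for (size_t i = 0; i < heap.size(); i++) {
    if (heap[i].marked && heap[i].ref >= 0) heap[i].ref = heap[heap[i].ref].forward;
  }
  for (size_t r = 0; r < roots.size(); r++) {
    if (roots[r] >= 0) roots[r] = heap[roots[r]].forward;
  }
}

// Phase 4: slide live objects to their new addresses and trim the heap.
static void phase4_compact(std::vector<Obj>& heap) {
  int next = 0;
  for (size_t i = 0; i < heap.size(); i++) {
    if (heap[i].marked) { heap[next] = heap[i]; heap[next].marked = false; next++; }
  }
  heap.resize(next);
}

int main() {
  // Heap layout: 0 -> 2 -> 4; objects 1 and 3 are garbage; one root points at 0.
  std::vector<Obj> heap = {
    { 2, false, -1}, {-1, false, -1}, { 4, false, -1}, {-1, false, -1}, {-1, false, -1},
  };
  std::vector<int> roots = {0};

  phase1_mark(heap, roots);      // mark live objects
  phase2_forward(heap);          // calculate new addresses
  phase3_adjust(heap, roots);    // update pointers
  phase4_compact(heap);          // move objects to new positions

  printf("live objects after compaction: %zu\n", heap.size());          // 3
  printf("root chain: %d -> %d -> %d\n",
         roots[0], heap[roots[0]].ref, heap[heap[roots[0]].ref].ref);   // 0 -> 1 -> 2
  return 0;
}

Running it leaves three live objects and a root chain whose indices reflect the slid-down positions; that division of work is exactly what the header splits across phase 2 (compute forwards), phase 3 (rewrite references), and phase 4 (move objects).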