
src/share/vm/gc/g1/g1MarkSweep.hpp

rev 8461 : imported patch webrev1.patch
rev 8462 : [mq]: version3


  38 class ReferenceProcessor;
  39 
  40 // G1MarkSweep takes care of global mark-compact garbage collection for a
  41 // G1CollectedHeap using a four-phase pointer forwarding algorithm.  All
  42 // generations are assumed to support marking; those that can also support
  43 // compaction.
  44 //
  45 // Class unloading will only occur when a full gc is invoked.
  46 class G1PrepareCompactClosure;
  47 class G1ArchiveRegionMap;
  48 
  49 class G1MarkSweep : AllStatic {
  50  public:
  51 
  52   static void invoke_at_safepoint(ReferenceProcessor* rp,
  53                                   bool clear_all_softrefs);
  54 
  55   static STWGCTimer* gc_timer() { return GenMarkSweep::_gc_timer; }
  56   static SerialOldTracer* gc_tracer() { return GenMarkSweep::_gc_tracer; }
  57 
  58   // Support for 'archive' objects, to prevent objects in archive regions
  59   // from being marked by full GCs.
  60   static void enable_archive_object_check();
  61   static void mark_range_archive(HeapWord* start, HeapWord* end);
  62   static bool in_archive_range(oop object);
  63   static bool archive_check_enabled() { return G1MarkSweep::_archive_check_enabled; }
  64 
  65  private:
  66   static bool _archive_check_enabled;
  67   static G1ArchiveRegionMap  _archive_region_map;
  68 
  69   // Mark live objects
  70   static void mark_sweep_phase1(bool& marked_for_deopt,
  71                                 bool clear_all_softrefs);
  72   // Calculate new addresses
  73   static void mark_sweep_phase2();
  74   // Update pointers
  75   static void mark_sweep_phase3();
  76   // Move objects to new positions
  77   static void mark_sweep_phase4();
  78 
  79   static void allocate_stacks();
  80   static void prepare_compaction();
  81   static void prepare_compaction_work(G1PrepareCompactClosure* blk);
  82 };


  86   G1CollectedHeap* _g1h;
  87   ModRefBarrierSet* _mrbs;
  88   CompactPoint _cp;
  89   HeapRegionSetCount _humongous_regions_removed;
  90 
  91   virtual void prepare_for_compaction(HeapRegion* hr, HeapWord* end);
  92   void prepare_for_compaction_work(CompactPoint* cp, HeapRegion* hr, HeapWord* end);
  93   void free_humongous_region(HeapRegion* hr);
  94   bool is_cp_initialized() const { return _cp.space != NULL; }
  95 
  96  public:
  97   G1PrepareCompactClosure() :
  98     _g1h(G1CollectedHeap::heap()),
  99     _mrbs(_g1h->g1_barrier_set()),
 100     _humongous_regions_removed() { }
 101 
 102   void update_sets();
 103   bool doHeapRegion(HeapRegion* hr);
 104 };
 105 
 106 class G1ArchiveRegionMap : public G1BiasedMappedArray<bool> {
 107 protected:
 108   bool default_value() const { return false; }
 109 };
 110 
 111 #endif // SHARE_VM_GC_G1_G1MARKSWEEP_HPP


  38 class ReferenceProcessor;
  39 
  40 // G1MarkSweep takes care of global mark-compact garbage collection for a
  41 // G1CollectedHeap using a four-phase pointer forwarding algorithm.  All
  42 // generations are assumed to support marking; those that can also support
  43 // compaction.
  44 //
  45 // Class unloading will only occur when a full gc is invoked.
  46 class G1PrepareCompactClosure;
  47 class G1ArchiveRegionMap;
  48 
  49 class G1MarkSweep : AllStatic {
  50  public:
  51 
  52   static void invoke_at_safepoint(ReferenceProcessor* rp,
  53                                   bool clear_all_softrefs);
  54 
  55   static STWGCTimer* gc_timer() { return GenMarkSweep::_gc_timer; }
  56   static SerialOldTracer* gc_tracer() { return GenMarkSweep::_gc_tracer; }
  57 
  58   // Create the _archive_region_map which is used to identify archive objects.
  59   static void enable_archive_object_check();
  60 
  61   // Mark the regions containing the specified address range as archive regions.
  62   static void mark_range_archive(MemRegion range);
  63 
  64   // Check if an object is in an archive region using the _archive_region_map.
  65   static bool in_archive_range(oop object);
  66 
  67   // Check if archive object checking is enabled, to avoid calling in_archive_range
  68   // unnecessarily.
  69   static bool archive_check_enabled() { return G1MarkSweep::_archive_check_enabled; }
  70 
  71  private:
  72   static bool _archive_check_enabled;
  73   static G1ArchiveRegionMap  _archive_region_map;
  74 
  75   // Mark live objects
  76   static void mark_sweep_phase1(bool& marked_for_deopt,
  77                                 bool clear_all_softrefs);
  78   // Calculate new addresses
  79   static void mark_sweep_phase2();
  80   // Update pointers
  81   static void mark_sweep_phase3();
  82   // Move objects to new positions
  83   static void mark_sweep_phase4();
  84 
  85   static void allocate_stacks();
  86   static void prepare_compaction();
  87   static void prepare_compaction_work(G1PrepareCompactClosure* blk);
  88 };
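
The four private phases declared above are run in sequence from invoke_at_safepoint(). A simplified sketch of that sequencing (not the actual implementation: reference-processor wiring, tracing, code-cache and class-unloading handling are omitted):

    // Sketch only; rp is the reference processor used during phase 1.
    void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
                                          bool clear_all_softrefs) {
      assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

      allocate_stacks();                   // working stacks used while marking

      bool marked_for_deopt = false;
      mark_sweep_phase1(marked_for_deopt, clear_all_softrefs); // 1: mark live objects
      mark_sweep_phase2();                 // 2: compute forwarding addresses
      mark_sweep_phase3();                 // 3: update pointers to forwarded addresses
      mark_sweep_phase4();                 // 4: move objects to their new positions
    }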


  92   G1CollectedHeap* _g1h;
  93   ModRefBarrierSet* _mrbs;
  94   CompactPoint _cp;
  95   HeapRegionSetCount _humongous_regions_removed;
  96 
  97   virtual void prepare_for_compaction(HeapRegion* hr, HeapWord* end);
  98   void prepare_for_compaction_work(CompactPoint* cp, HeapRegion* hr, HeapWord* end);
  99   void free_humongous_region(HeapRegion* hr);
 100   bool is_cp_initialized() const { return _cp.space != NULL; }
 101 
 102  public:
 103   G1PrepareCompactClosure() :
 104     _g1h(G1CollectedHeap::heap()),
 105     _mrbs(_g1h->g1_barrier_set()),
 106     _humongous_regions_removed() { }
 107 
 108   void update_sets();
 109   bool doHeapRegion(HeapRegion* hr);
 110 };
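
For reference, prepare_compaction()/prepare_compaction_work() declared on G1MarkSweep are expected to drive this closure over every region before the compaction phase. A minimal sketch, assuming G1CollectedHeap::heap_region_iterate() calls doHeapRegion() on each region in address order:

    // Sketch only; the real prepare_compaction code may differ in detail.
    void G1MarkSweep::prepare_compaction_work(G1PrepareCompactClosure* blk) {
      G1CollectedHeap* g1h = G1CollectedHeap::heap();
      g1h->heap_region_iterate(blk);  // doHeapRegion(hr) for each heap region
      blk->update_sets();             // fold freed humongous regions back into the region sets
    }

    void G1MarkSweep::prepare_compaction() {
      G1PrepareCompactClosure blk;
      prepare_compaction_work(&blk);
    }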
 111 
 112 // G1ArchiveRegionMap is a boolean array used to mark G1 regions as 
 113 // archive regions.  This allows a quick check for whether an object
 114 // should not be marked because it is in an archive region.
 115 class G1ArchiveRegionMap : public G1BiasedMappedArray<bool> {
 116 protected:
 117   bool default_value() const { return false; }
 118 };
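
A rough sketch of how this map could back the archive API on G1MarkSweep, assuming enable_archive_object_check() initializes it over the reserved heap at HeapRegion::GrainBytes granularity and that archive ranges start on a region boundary (the actual .cpp implementation may differ):

    // Sketch only, using G1BiasedMappedArray's address-keyed accessors.
    void G1MarkSweep::mark_range_archive(MemRegion range) {
      assert(_archive_check_enabled, "archive range check not enabled");
      for (HeapWord* p = range.start(); p < range.end(); p += HeapRegion::GrainWords) {
        _archive_region_map.set_by_address(p, true);  // flag each region the range touches
      }
    }

    bool G1MarkSweep::in_archive_range(oop object) {
      // Constant-time lookup keyed by the object's address.
      return _archive_region_map.get_by_address((HeapWord*)object);
    }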
 119 
 120 #endif // SHARE_VM_GC_G1_G1MARKSWEEP_HPP
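
Finally, the comment on archive_check_enabled() spells out the intended caller-side pattern: test the cheap static flag before doing the map lookup. A hypothetical caller on the marking path (not part of this patch) would look like:

    // Hypothetical helper: true if obj must not be marked or moved because it
    // lives in an archive region.
    static bool is_archive_object(oop obj) {
      return G1MarkSweep::archive_check_enabled() &&  // cheap flag test first
             G1MarkSweep::in_archive_range(obj);      // map lookup only when enabled
    }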