src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.hpp

rev 4773 : 8005849: JEP 167: Event-Based JVM Tracing
Reviewed-by: acorn, coleenp, sla
Contributed-by: Karen Kinnear <karen.kinnear@oracle.com>, Bengt Rutisson <bengt.rutisson@oracle.com>, Calvin Cheung <calvin.cheung@oracle.com>, Erik Gahlin <erik.gahlin@oracle.com>, Erik Helin <erik.helin@oracle.com>, Jesper Wilhelmsson <jesper.wilhelmsson@oracle.com>, Keith McGuigan <keith.mcguigan@oracle.com>, Mattias Tobiasson <mattias.tobiasson@oracle.com>, Markus Gronlund <markus.gronlund@oracle.com>, Mikael Auno <mikael.auno@oracle.com>, Nils Eliasson <nils.eliasson@oracle.com>, Nils Loodin <nils.loodin@oracle.com>, Rickard Backman <rickard.backman@oracle.com>, Staffan Larsen <staffan.larsen@oracle.com>, Stefan Karlsson <stefan.karlsson@oracle.com>, Yekaterina Kantserova <yekaterina.kantserova@oracle.com>


  29 #include "gc_implementation/parallelScavenge/parMarkBitMap.hpp"
  30 #include "gc_implementation/parallelScavenge/psCompactionManager.hpp"
  31 #include "gc_implementation/shared/collectorCounters.hpp"
  32 #include "gc_implementation/shared/markSweep.hpp"
  33 #include "gc_implementation/shared/mutableSpace.hpp"
  34 #include "memory/sharedHeap.hpp"
  35 #include "oops/oop.hpp"
  36 
  37 class ParallelScavengeHeap;
  38 class PSAdaptiveSizePolicy;
  39 class PSYoungGen;
  40 class PSOldGen;
  41 class ParCompactionManager;
  42 class ParallelTaskTerminator;
  43 class PSParallelCompact;
  44 class GCTaskManager;
  45 class GCTaskQueue;
  46 class PreGCValues;
  47 class MoveAndUpdateClosure;
  48 class RefProcTaskExecutor;
  49 
  50 // The SplitInfo class holds the information needed to 'split' a source region
  51 // so that the live data can be copied to two destination *spaces*.  Normally,
  52 // all the live data in a region is copied to a single destination space (e.g.,
  53 // everything live in a region in eden is copied entirely into the old gen).
  54 // However, when the heap is nearly full, all the live data in eden may not fit
  55 // into the old gen.  Copying only some of the regions from eden to old gen
  56 // requires finding a region that does not contain a partial object (i.e., no
  57 // live object crosses the region boundary) somewhere near the last object that
  58 // does fit into the old gen.  Since it's not always possible to find such a
  59 // region, splitting is necessary for predictable behavior.
  60 //
  61 // A region is always split at the end of the partial object.  This avoids
  62 // additional tests when calculating the new location of a pointer, which is a
  63 // very hot code path.  The partial object and everything to its left will be
  64 // copied to another space (call it dest_space_1).  The live data to the right
  65 // of the partial object will be copied either within the space itself, or to a
  66 // different destination space (distinct from dest_space_1).
  67 //
  68 // Split points are identified during the summary phase, when region


 813     virtual void do_oop(oop* p);
 814     virtual void do_oop(narrowOop* p);
  815     // do not walk from thread stacks to the code cache in this phase
 816     virtual void do_code_blob(CodeBlob* cb) const { }
 817   };
 818 
 819   class AdjustKlassClosure : public KlassClosure {
 820    public:
 821     void do_klass(Klass* klass);
 822   };
 823 
 824   friend class KeepAliveClosure;
 825   friend class FollowStackClosure;
 826   friend class AdjustPointerClosure;
 827   friend class AdjustKlassClosure;
 828   friend class FollowKlassClosure;
 829   friend class InstanceClassLoaderKlass;
 830   friend class RefProcTaskProxy;
 831 
 832  private:
 833   static elapsedTimer         _accumulated_time;
 834   static unsigned int         _total_invocations;
 835   static unsigned int         _maximum_compaction_gc_num;
 836   static jlong                _time_of_last_gc;   // ms
 837   static CollectorCounters*   _counters;
 838   static ParMarkBitMap        _mark_bitmap;
 839   static ParallelCompactData  _summary_data;
 840   static IsAliveClosure       _is_alive_closure;
 841   static SpaceInfo            _space_info[last_space_id];
 842   static bool                 _print_phases;
 843   static AdjustPointerClosure _adjust_pointer_closure;
 844   static AdjustKlassClosure   _adjust_klass_closure;
 845 
 846   // Reference processing (used in ...follow_contents)
 847   static ReferenceProcessor*  _ref_processor;
 848 
 849   // Updated location of intArrayKlassObj.
 850   static Klass* _updated_int_array_klass_obj;
 851 
 852   // Values computed at initialization and used by dead_wood_limiter().


 856   static double _dwl_adjustment;
 857 #ifdef  ASSERT
 858   static bool   _dwl_initialized;
 859 #endif  // #ifdef ASSERT
 860 
 861  private:
 862 
 863   static void initialize_space_info();
 864 
 865   // Return true if details about individual phases should be printed.
 866   static inline bool print_phases();
 867 
 868   // Clear the marking bitmap and summary data that cover the specified space.
 869   static void clear_data_covering_space(SpaceId id);
 870 
 871   static void pre_compact(PreGCValues* pre_gc_values);
 872   static void post_compact();
 873 
 874   // Mark live objects
 875   static void marking_phase(ParCompactionManager* cm,
 876                             bool maximum_heap_compaction);
 877 
 878   template <class T>
 879   static inline void follow_root(ParCompactionManager* cm, T* p);
 880 
 881   // Compute the dense prefix for the designated space.  This is an experimental
 882   // implementation currently not used in production.
 883   static HeapWord* compute_dense_prefix_via_density(const SpaceId id,
 884                                                     bool maximum_compaction);
 885 
 886   // Methods used to compute the dense prefix.
 887 
 888   // Compute the value of the normal distribution at x = density.  The mean and
 889   // standard deviation are values saved by initialize_dead_wood_limiter().
 890   static inline double normal_distribution(double density);
 891 
 892   // Initialize the static vars used by dead_wood_limiter().
 893   static void initialize_dead_wood_limiter();
 894 
 895   // Return the percentage of space that can be treated as "dead wood" (i.e.,
 896   // not reclaimed).


1120   static void decrement_destination_counts(ParCompactionManager* cm,
1121                                            SpaceId src_space_id,
1122                                            size_t beg_region,
1123                                            HeapWord* end_addr);
1124 
1125   // Fill a region, copying objects from one or more source regions.
1126   static void fill_region(ParCompactionManager* cm, size_t region_idx);
1127   static void fill_and_update_region(ParCompactionManager* cm, size_t region) {
1128     fill_region(cm, region);
1129   }
1130 
1131   // Update the deferred objects in the space.
1132   static void update_deferred_objects(ParCompactionManager* cm, SpaceId id);
1133 
1134   static ParMarkBitMap* mark_bitmap() { return &_mark_bitmap; }
1135   static ParallelCompactData& summary_data() { return _summary_data; }
1136 
1137   // Reference Processing
1138   static ReferenceProcessor* const ref_processor() { return _ref_processor; }
1139 
1140   // Return the SpaceId for the given address.
1141   static SpaceId space_id(HeapWord* addr);
1142 
1143   // Time since last full gc (in milliseconds).
1144   static jlong millis_since_last_gc();
1145 
1146   static void print_on_error(outputStream* st);
1147 
1148 #ifndef PRODUCT
1149   // Debugging support.
1150   static const char* space_names[last_space_id];
1151   static void print_region_ranges();
1152   static void print_dense_prefix_stats(const char* const algorithm,
1153                                        const SpaceId id,
1154                                        const bool maximum_compaction,
1155                                        HeapWord* const addr);
1156   static void summary_phase_msg(SpaceId dst_space_id,
1157                                 HeapWord* dst_beg, HeapWord* dst_end,
1158                                 SpaceId src_space_id,
1159                                 HeapWord* src_beg, HeapWord* src_end);




  29 #include "gc_implementation/parallelScavenge/parMarkBitMap.hpp"
  30 #include "gc_implementation/parallelScavenge/psCompactionManager.hpp"
  31 #include "gc_implementation/shared/collectorCounters.hpp"
  32 #include "gc_implementation/shared/markSweep.hpp"
  33 #include "gc_implementation/shared/mutableSpace.hpp"
  34 #include "memory/sharedHeap.hpp"
  35 #include "oops/oop.hpp"
  36 
  37 class ParallelScavengeHeap;
  38 class PSAdaptiveSizePolicy;
  39 class PSYoungGen;
  40 class PSOldGen;
  41 class ParCompactionManager;
  42 class ParallelTaskTerminator;
  43 class PSParallelCompact;
  44 class GCTaskManager;
  45 class GCTaskQueue;
  46 class PreGCValues;
  47 class MoveAndUpdateClosure;
  48 class RefProcTaskExecutor;
  49 class ParallelOldTracer;
  50 class STWGCTimer;
  51 
  52 // The SplitInfo class holds the information needed to 'split' a source region
  53 // so that the live data can be copied to two destination *spaces*.  Normally,
  54 // all the live data in a region is copied to a single destination space (e.g.,
  55 // everything live in a region in eden is copied entirely into the old gen).
  56 // However, when the heap is nearly full, all the live data in eden may not fit
  57 // into the old gen.  Copying only some of the regions from eden to old gen
  58 // requires finding a region that does not contain a partial object (i.e., no
  59 // live object crosses the region boundary) somewhere near the last object that
  60 // does fit into the old gen.  Since it's not always possible to find such a
  61 // region, splitting is necessary for predictable behavior.
  62 //
  63 // A region is always split at the end of the partial object.  This avoids
  64 // additional tests when calculating the new location of a pointer, which is a
  65 // very hot code path.  The partial object and everything to its left will be
  66 // copied to another space (call it dest_space_1).  The live data to the right
  67 // of the partial object will be copied either within the space itself, or to a
  68 // different destination space (distinct from dest_space_1).
  69 //
  70 // Split points are identified during the summary phase, when region
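A minimal sketch of the splitting idea described above, with hypothetical names rather than the actual SplitInfo interface: because a region is always split at the end of the partial object, relocating an address needs only one test to decide which destination it moves to.

    // Illustration only; simplified stand-in for SplitInfo.  HeapWord comes from
    // the surrounding code; all other names are invented for this sketch.
    struct SplitSketch {
      HeapWord* region_start;  // first word of the source region
      HeapWord* split_point;   // end of the partial object; the region splits here
      HeapWord* dest_1;        // where region_start..split_point is copied
      HeapWord* dest_2;        // where split_point..region_end is copied
    };

    // New location of a live word at addr inside the split region: a single
    // compare, with no special case for the partial object, because the split
    // point never falls in the middle of it.
    inline HeapWord* split_new_location(const SplitSketch& s, HeapWord* addr) {
      return (addr < s.split_point)
          ? s.dest_1 + (addr - s.region_start)
          : s.dest_2 + (addr - s.split_point);
    }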


 815     virtual void do_oop(oop* p);
 816     virtual void do_oop(narrowOop* p);
  817     // do not walk from thread stacks to the code cache in this phase
 818     virtual void do_code_blob(CodeBlob* cb) const { }
 819   };
 820 
 821   class AdjustKlassClosure : public KlassClosure {
 822    public:
 823     void do_klass(Klass* klass);
 824   };
 825 
 826   friend class KeepAliveClosure;
 827   friend class FollowStackClosure;
 828   friend class AdjustPointerClosure;
 829   friend class AdjustKlassClosure;
 830   friend class FollowKlassClosure;
 831   friend class InstanceClassLoaderKlass;
 832   friend class RefProcTaskProxy;
 833 
 834  private:
 835   static STWGCTimer           _gc_timer;
 836   static ParallelOldTracer    _gc_tracer;
 837   static elapsedTimer         _accumulated_time;
 838   static unsigned int         _total_invocations;
 839   static unsigned int         _maximum_compaction_gc_num;
 840   static jlong                _time_of_last_gc;   // ms
 841   static CollectorCounters*   _counters;
 842   static ParMarkBitMap        _mark_bitmap;
 843   static ParallelCompactData  _summary_data;
 844   static IsAliveClosure       _is_alive_closure;
 845   static SpaceInfo            _space_info[last_space_id];
 846   static bool                 _print_phases;
 847   static AdjustPointerClosure _adjust_pointer_closure;
 848   static AdjustKlassClosure   _adjust_klass_closure;
 849 
 850   // Reference processing (used in ...follow_contents)
 851   static ReferenceProcessor*  _ref_processor;
 852 
 853   // Updated location of intArrayKlassObj.
 854   static Klass* _updated_int_array_klass_obj;
 855 
 856   // Values computed at initialization and used by dead_wood_limiter().
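The new _gc_timer and _gc_tracer statics above are the JEP 167 hooks for this collector: a full collection brackets its stop-the-world work with the timer and reports it as tracing events through the tracer. A rough sketch of that pattern, using simplified stand-in classes rather than the real STWGCTimer and ParallelOldTracer interfaces (their method names are not visible in this excerpt):

    // Hypothetical stand-ins, for illustration only.
    class SketchGCTimer {
     public:
      void pause_start() { /* record the stop-the-world start timestamp */ }
      void pause_end()   { /* record the stop-the-world end timestamp */ }
    };

    class SketchGCTracer {
     public:
      void report_start() { /* emit a "full GC started" event */ }
      void report_end()   { /* emit a "full GC finished" event with phase timings */ }
    };

    // How a collection pause might be bracketed (sketch only):
    static void bracketed_collection(SketchGCTimer& timer, SketchGCTracer& tracer) {
      timer.pause_start();
      tracer.report_start();
      // ... marking phase, summary phase, compaction phases ...
      tracer.report_end();
      timer.pause_end();
    }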


 860   static double _dwl_adjustment;
 861 #ifdef  ASSERT
 862   static bool   _dwl_initialized;
 863 #endif  // #ifdef ASSERT
 864 
 865  private:
 866 
 867   static void initialize_space_info();
 868 
 869   // Return true if details about individual phases should be printed.
 870   static inline bool print_phases();
 871 
 872   // Clear the marking bitmap and summary data that cover the specified space.
 873   static void clear_data_covering_space(SpaceId id);
 874 
 875   static void pre_compact(PreGCValues* pre_gc_values);
 876   static void post_compact();
 877 
 878   // Mark live objects
 879   static void marking_phase(ParCompactionManager* cm,
 880                             bool maximum_heap_compaction,
 881                             ParallelOldTracer *gc_tracer);
 882 
 883   template <class T>
 884   static inline void follow_root(ParCompactionManager* cm, T* p);
 885 
 886   // Compute the dense prefix for the designated space.  This is an experimental
 887   // implementation currently not used in production.
 888   static HeapWord* compute_dense_prefix_via_density(const SpaceId id,
 889                                                     bool maximum_compaction);
 890 
 891   // Methods used to compute the dense prefix.
 892 
 893   // Compute the value of the normal distribution at x = density.  The mean and
 894   // standard deviation are values saved by initialize_dead_wood_limiter().
 895   static inline double normal_distribution(double density);
 896 
 897   // Initialize the static vars used by dead_wood_limiter().
 898   static void initialize_dead_wood_limiter();
 899 
 900   // Return the percentage of space that can be treated as "dead wood" (i.e.,
 901   // not reclaimed).
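normal_distribution(density) evaluates a Gaussian density at x = density, using the mean and standard deviation that initialize_dead_wood_limiter() caches in the fields elided above. A minimal sketch of that formula, with the parameters passed explicitly instead of read from the real fields:

    #include <math.h>

    // Gaussian density: f(x) = exp(-(x - mu)^2 / (2 sigma^2)) / (sigma * sqrt(2 pi))
    inline double normal_density(double x, double mu, double sigma) {
      const double two_pi = 6.283185307179586;
      const double d = x - mu;
      return exp(-(d * d) / (2.0 * sigma * sigma)) / (sigma * sqrt(two_pi));
    }

    // dead_wood_limiter() would use something along the lines of
    // normal_density(density, mean, std_dev), with the saved values supplying
    // mean and std_dev.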


1125   static void decrement_destination_counts(ParCompactionManager* cm,
1126                                            SpaceId src_space_id,
1127                                            size_t beg_region,
1128                                            HeapWord* end_addr);
1129 
1130   // Fill a region, copying objects from one or more source regions.
1131   static void fill_region(ParCompactionManager* cm, size_t region_idx);
1132   static void fill_and_update_region(ParCompactionManager* cm, size_t region) {
1133     fill_region(cm, region);
1134   }
1135 
1136   // Update the deferred objects in the space.
1137   static void update_deferred_objects(ParCompactionManager* cm, SpaceId id);
1138 
1139   static ParMarkBitMap* mark_bitmap() { return &_mark_bitmap; }
1140   static ParallelCompactData& summary_data() { return _summary_data; }
1141 
1142   // Reference Processing
1143   static ReferenceProcessor* const ref_processor() { return _ref_processor; }
1144 
1145   static STWGCTimer* gc_timer() { return &_gc_timer; }
1146 
1147   // Return the SpaceId for the given address.
1148   static SpaceId space_id(HeapWord* addr);
1149 
1150   // Time since last full gc (in milliseconds).
1151   static jlong millis_since_last_gc();
1152 
1153   static void print_on_error(outputStream* st);
1154 
1155 #ifndef PRODUCT
1156   // Debugging support.
1157   static const char* space_names[last_space_id];
1158   static void print_region_ranges();
1159   static void print_dense_prefix_stats(const char* const algorithm,
1160                                        const SpaceId id,
1161                                        const bool maximum_compaction,
1162                                        HeapWord* const addr);
1163   static void summary_phase_msg(SpaceId dst_space_id,
1164                                 HeapWord* dst_beg, HeapWord* dst_end,
1165                                 SpaceId src_space_id,
1166                                 HeapWord* src_beg, HeapWord* src_end);
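millis_since_last_gc() pairs with the _time_of_last_gc field above, which is stored in milliseconds; a sketch of the obvious shape, assuming a millisecond wall-clock source such as HotSpot's os::javaTimeMillis():

    // Sketch only, not necessarily the actual implementation: current wall-clock
    // time minus the timestamp recorded at the end of the previous full collection.
    static jlong millis_since_last_gc_sketch() {
      return os::javaTimeMillis() - _time_of_last_gc;   // both values in milliseconds
    }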