
src/hotspot/share/gc/g1/g1ParScanThreadState.hpp

rev 56830 : imported patch 8220311.sur.2
rev 56831 : imported patch 8220311.sur.3
rev 56832 : imported patch 8220311.sur.4
rev 56834 : imported patch 8220312.stat.2
rev 56836 : imported patch 8220312.stat.4


  78   // Indicates whether the last generation (old) has no more space available
  79   // for allocation.
  80   bool _old_gen_is_full;
  81 
  82 #define PADDING_ELEM_NUM (DEFAULT_CACHE_LINE_SIZE / sizeof(size_t))
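
The define above computes how many size_t elements fit in one cache line. Below is a minimal standalone sketch of the padding idiom such a constant typically serves; the 64-byte line size, the helper name and the counter array are illustrative assumptions, not fields of this class.

    #include <cstddef>

    // Illustrative stand-in for DEFAULT_CACHE_LINE_SIZE on a typical x86 machine.
    static const size_t kCacheLineSize = 64;
    static const size_t kPaddingElems  = kCacheLineSize / sizeof(size_t);

    // Hypothetical helper: over-allocate a per-thread counter array by one cache
    // line on each side so its hot elements cannot share a line with neighboring
    // allocations. The caller frees the returned pointer minus kPaddingElems.
    static size_t* allocate_padded_counters(size_t num_counters) {
      size_t* base = new size_t[num_counters + 2 * kPaddingElems]();  // zero-initialized
      return base + kPaddingElems;  // hand out the padded interior
    }
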
  83 
  84   G1RedirtyCardsQueue& redirty_cards_queue()     { return _rdcq; }
  85   G1CardTable* ct()                              { return _ct; }
  86 
  87   G1HeapRegionAttr dest(G1HeapRegionAttr original) const {
  88     assert(original.is_valid(),
  89            "Original region attr invalid: %s", original.get_type_str());
  90     assert(_dest[original.type()].is_valid_gen(),
  91            "Dest region attr is invalid: %s", _dest[original.type()].get_type_str());
  92     return _dest[original.type()];
  93   }
  94 
  95   size_t _num_optional_regions;
  96   G1OopStarChunkedList* _oops_into_optional_regions;
  97 
  98 public:
  99   G1ParScanThreadState(G1CollectedHeap* g1h,
 100                        G1RedirtyCardsQueueSet* rdcqs,
 101                        uint worker_id,
 102                        size_t young_cset_length,
 103                        size_t optional_cset_length);
 104   virtual ~G1ParScanThreadState();
 105 
 106   void set_ref_discoverer(ReferenceDiscoverer* rd) { _scanner.set_ref_discoverer(rd); }
 107 
 108 #ifdef ASSERT
 109   bool queue_is_empty() const { return _refs->is_empty(); }
 110 
 111   bool verify_ref(narrowOop* ref) const;
 112   bool verify_ref(oop* ref) const;
 113   bool verify_task(StarTask ref) const;
 114 #endif // ASSERT
 115 
 116   template <class T> void do_oop_ext(T* ref);
 117   template <class T> void push_on_queue(T* ref);


 190   // allocate into dest. Previous_plab_refill_failed indicates whether previous
 191   // PLAB refill for the original (source) object failed.
 192   // Returns a non-NULL pointer if successful, and updates dest if required.
 193   // Also determines whether we should continue to try to allocate into the various
 194   // generations or just end trying to allocate.
 195   HeapWord* allocate_in_next_plab(G1HeapRegionAttr* dest,
 196                                   size_t word_sz,
 197                                   bool previous_plab_refill_failed,
 198                                   uint node_index);
 199 
 200   inline G1HeapRegionAttr next_region_attr(G1HeapRegionAttr const region_attr, markWord const m, uint& age);
 201 
 202   void report_promotion_event(G1HeapRegionAttr const dest_attr,
 203                               oop const old, size_t word_sz, uint age,
 204                               HeapWord * const obj_ptr, uint node_index) const;
 205 
 206   inline bool needs_partial_trimming() const;
 207   inline bool is_partially_trimmed() const;
 208 
 209   inline void trim_queue_to_threshold(uint threshold);
 210 public:
 211   oop copy_to_survivor_space(G1HeapRegionAttr const region_attr, oop const obj, markWord const old_mark);
 212 
 213   void trim_queue();
 214   void trim_queue_partially();
 215 
 216   Tickspan trim_ticks() const;
 217   void reset_trim_ticks();
 218 
 219   inline void steal_and_trim_queue(RefToScanQueueSet *task_queues);
 220 
 221   // An attempt to evacuate "obj" has failed; take necessary steps.
 222   oop handle_evacuation_failure_par(oop obj, markWord m);
 223 
 224   template <typename T>
 225   inline void remember_root_into_optional_region(T* p);
 226   template <typename T>
 227   inline void remember_reference_into_optional_region(T* p);
 228 
 229   inline G1OopStarChunkedList* oops_into_optional_region(const HeapRegion* hr);




  78   // Indicates whether the last generation (old) has no more space available
  79   // for allocation.
  80   bool _old_gen_is_full;
  81 
  82 #define PADDING_ELEM_NUM (DEFAULT_CACHE_LINE_SIZE / sizeof(size_t))
  83 
  84   G1RedirtyCardsQueue& redirty_cards_queue()     { return _rdcq; }
  85   G1CardTable* ct()                              { return _ct; }
  86 
  87   G1HeapRegionAttr dest(G1HeapRegionAttr original) const {
  88     assert(original.is_valid(),
  89            "Original region attr invalid: %s", original.get_type_str());
  90     assert(_dest[original.type()].is_valid_gen(),
  91            "Dest region attr is invalid: %s", _dest[original.type()].get_type_str());
  92     return _dest[original.type()];
  93   }
  94 
  95   size_t _num_optional_regions;
  96   G1OopStarChunkedList* _oops_into_optional_regions;
  97 
  98   G1NUMA* _numa;
  99 
 100   // Records how many object allocations happened on each NUMA node during copy to survivor.
 101   // Recording only happens when gc+heap+numa logging is enabled; the data is
 102   // transferred when the statistics are flushed.
 103   size_t* _obj_alloc_stat;
 104 
 105 public:
 106   G1ParScanThreadState(G1CollectedHeap* g1h,
 107                        G1RedirtyCardsQueueSet* rdcqs,
 108                        uint worker_id,
 109                        size_t young_cset_length,
 110                        size_t optional_cset_length);
 111   virtual ~G1ParScanThreadState();
 112 
 113   void set_ref_discoverer(ReferenceDiscoverer* rd) { _scanner.set_ref_discoverer(rd); }
 114 
 115 #ifdef ASSERT
 116   bool queue_is_empty() const { return _refs->is_empty(); }
 117 
 118   bool verify_ref(narrowOop* ref) const;
 119   bool verify_ref(oop* ref) const;
 120   bool verify_task(StarTask ref) const;
 121 #endif // ASSERT
 122 
 123   template <class T> void do_oop_ext(T* ref);
 124   template <class T> void push_on_queue(T* ref);


 197   // allocate into dest. Previous_plab_refill_failed indicates whether previous
 198   // PLAB refill for the original (source) object failed.
 199   // Returns a non-NULL pointer if successful, and updates dest if required.
 200   // Also determines whether we should continue trying to allocate into the various
 201   // generations or stop trying altogether.
 202   HeapWord* allocate_in_next_plab(G1HeapRegionAttr* dest,
 203                                   size_t word_sz,
 204                                   bool previous_plab_refill_failed,
 205                                   uint node_index);
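
A hedged sketch of the caller-side fallback this declaration supports: allocation is first tried in the PLAB for the intended destination, and an allocate_in_next_plab-style helper then retries in the next generation, possibly updating dest. All names and types below are simplified stand-ins for illustration, not the actual copy_to_survivor_space code.

    #include <cstddef>
    #include <cstdint>

    // Simplified stand-ins for the HotSpot types; illustration only.
    enum class DestGen { Young, Old };
    using Word = uintptr_t;

    // Hypothetical PLAB allocator for the current destination; returns null when
    // the buffer is exhausted (stubbed out here to always fail).
    static Word* try_plab_allocate(DestGen /*dest*/, size_t /*word_sz*/) {
      return nullptr;
    }

    // Stand-in for allocate_in_next_plab(): may redirect 'dest' to the old
    // generation and retry there; returns null if that also fails.
    static Word* allocate_in_next_plab_sketch(DestGen* dest, size_t word_sz,
                                              bool /*previous_plab_refill_failed*/,
                                              unsigned /*node_index*/) {
      *dest = DestGen::Old;        // "updates dest if required"
      return new Word[word_sz]();  // stands in for a successful PLAB refill
    }

    // Caller-side fallback pattern: try the PLAB for the intended destination,
    // then let the next-PLAB helper decide where (or whether) to keep allocating.
    static Word* allocate_copy_slot(DestGen* dest, size_t word_sz, unsigned node_index) {
      Word* obj_ptr = try_plab_allocate(*dest, word_sz);
      if (obj_ptr == nullptr) {
        obj_ptr = allocate_in_next_plab_sketch(dest, word_sz,
                                               /*previous_plab_refill_failed*/ true,
                                               node_index);
      }
      return obj_ptr;  // non-null on success, null if all destinations are full
    }
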
 206 
 207   inline G1HeapRegionAttr next_region_attr(G1HeapRegionAttr const region_attr, markWord const m, uint& age);
 208 
 209   void report_promotion_event(G1HeapRegionAttr const dest_attr,
 210                               oop const old, size_t word_sz, uint age,
 211                               HeapWord * const obj_ptr, uint node_index) const;
 212 
 213   inline bool needs_partial_trimming() const;
 214   inline bool is_partially_trimmed() const;
 215 
 216   inline void trim_queue_to_threshold(uint threshold);
 217 
 218   // NUMA statistics related methods.
 219   inline void initialize_numa_stats();
 220   inline void flush_numa_stats();
 221   inline void update_numa_stats(uint node_index);
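
A simplified, self-contained sketch of how per-node allocation counters like _obj_alloc_stat could be initialized, updated and flushed. The real methods are inline members defined elsewhere and may differ in detail; the struct, method names and the sink array below are assumptions for illustration.

    #include <cstddef>

    // Plays the role of _obj_alloc_stat plus the three methods above; illustration only.
    struct NumaAllocStats {
      size_t*  per_node  = nullptr;   // one counter per NUMA node
      unsigned num_nodes = 0;

      // Roughly initialize_numa_stats(): allocate counters only if anything will be recorded.
      void initialize(unsigned nodes, bool logging_enabled) {
        if (!logging_enabled) return;
        num_nodes = nodes;
        per_node  = new size_t[nodes]();   // zero-initialized
      }

      // Roughly update_numa_stats(node_index): one object was copied to survivor on this node.
      void update(unsigned node_index) {
        if (per_node != nullptr) per_node[node_index]++;
      }

      // Roughly flush_numa_stats(): transfer the counters to a global sink and reset them.
      void flush(size_t* global_sink) {
        if (per_node == nullptr) return;
        for (unsigned i = 0; i < num_nodes; i++) {
          global_sink[i] += per_node[i];
          per_node[i] = 0;
        }
      }
    };
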
 222 
 223 public:
 224   oop copy_to_survivor_space(G1HeapRegionAttr const region_attr, oop const obj, markWord const old_mark);
 225 
 226   void trim_queue();
 227   void trim_queue_partially();
 228 
 229   Tickspan trim_ticks() const;
 230   void reset_trim_ticks();
 231 
 232   inline void steal_and_trim_queue(RefToScanQueueSet *task_queues);
 233 
 234   // An attempt to evacuate "obj" has failed; take necessary steps.
 235   oop handle_evacuation_failure_par(oop obj, markWord m);
 236 
 237   template <typename T>
 238   inline void remember_root_into_optional_region(T* p);
 239   template <typename T>
 240   inline void remember_reference_into_optional_region(T* p);
 241 
 242   inline G1OopStarChunkedList* oops_into_optional_region(const HeapRegion* hr);

