
src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.hpp

rev 50076 : Fold Partial GC into Traversal GC


 204   static size_t RegionSizeWordsShift;
 205   static size_t RegionSizeBytesMask;
 206   static size_t RegionSizeWordsMask;
 207   static size_t HumongousThresholdBytes;
 208   static size_t HumongousThresholdWords;
 209   static size_t MaxTLABSizeBytes;
 210 
 211   // Global allocation counter, incremented for each allocation
 212   // under the Shenandoah heap lock
 213   static uint64_t AllocSeqNum;
 214 
 215   ShenandoahHeap* _heap;
 216   size_t _region_number;
 217   volatile size_t _live_data;
 218   MemRegion _reserved;
 219 
 220   size_t _tlab_allocs;
 221   size_t _gclab_allocs;
 222   size_t _shared_allocs;
 223 
 224   bool _root;
 225 
 226   HeapWord* _new_top;
 227 
 228   size_t _critical_pins;
 229 
 230   // Seq numbers are used to drive heuristic decisions for collection.
 231   // They are set when the region is used for allocation.
 232   uint64_t  _seqnum_first_alloc_mutator;
 233   uint64_t  _seqnum_first_alloc_gc;
 234   uint64_t  _seqnum_last_alloc_mutator;
 235   uint64_t  _seqnum_last_alloc_gc;
 236 
 237   RegionState _state;
 238   double _empty_time;
 239 
 240   // Whether the region has been initially committed. A region must be
 241   // committed before it can be idled
 242   bool   _initialized;
 243 
 244   ShenandoahPacer* _pacer;
 245 


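The AllocSeqNum counter and the per-region _seqnum_* fields above cooperate: every allocation made under the heap lock draws a fresh sequence number and stamps the region it lands in, recording both the first and the most recent allocation per stream (mutator vs. GC). A minimal standalone sketch of that bookkeeping follows; the names RegionSeqnums, g_alloc_seq_num, and record_alloc are illustrative stand-ins, not the actual HotSpot code.

    #include <cstdint>

    // Illustrative model of the per-region fields declared above;
    // a sketch, not the real ShenandoahHeapRegion implementation.
    struct RegionSeqnums {
      uint64_t first_alloc_mutator = 0;  // 0 means "never allocated to"
      uint64_t first_alloc_gc      = 0;
      uint64_t last_alloc_mutator  = 0;
      uint64_t last_alloc_gc       = 0;
    };

    // Models AllocSeqNum. In the real code every allocation happens
    // under the Shenandoah heap lock, so a plain increment suffices here.
    static uint64_t g_alloc_seq_num = 1;

    void record_alloc(RegionSeqnums& r, bool gc_alloc) {
      uint64_t seq = g_alloc_seq_num++;
      if (gc_alloc) {
        if (r.first_alloc_gc == 0) r.first_alloc_gc = seq;
        r.last_alloc_gc = seq;
      } else {
        if (r.first_alloc_mutator == 0) r.first_alloc_mutator = seq;
        r.last_alloc_mutator = seq;
      }
    }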
 358 
 359   HeapWord* block_start_const(const void* p) const;
 360 
 361   // Just before GC, we need to fill the current region.
 362   void fill_region();
 363 
 364   bool in_collection_set() const;
 365 
 366   // Find the humongous start region that this region belongs to
 367   ShenandoahHeapRegion* humongous_start_region() const;
 368 
 369   virtual CompactibleSpace* next_compaction_space() const;
 370 
 371   // Override for scan_and_forward support.
 372   void prepare_for_compaction(CompactPoint* cp);
 373   void adjust_pointers();
 374   void compact();
 375 
 376   void set_new_top(HeapWord* new_top) { _new_top = new_top; }
 377   HeapWord* new_top() const { return _new_top; }
 378 
 379   void set_root(bool r) {
 380     _root = r;
 381   }
 382   bool is_root() const {
 383     return _root;
 384   }
 385 
 386   inline void adjust_alloc_metadata(ShenandoahHeap::AllocType type, size_t);
 387   void reset_alloc_metadata_to_shared();
 388   void reset_alloc_metadata();
 389   size_t get_shared_allocs() const;
 390   size_t get_tlab_allocs() const;
 391   size_t get_gclab_allocs() const;
 392 
 393   uint64_t seqnum_first_alloc() const {
 394     if (_seqnum_first_alloc_mutator == 0) return _seqnum_first_alloc_gc;
 395     if (_seqnum_first_alloc_gc == 0)      return _seqnum_first_alloc_mutator;
 396     return MIN2(_seqnum_first_alloc_mutator, _seqnum_first_alloc_gc);
 397   }
 398 
 399   uint64_t seqnum_last_alloc() const {
 400     return MAX2(_seqnum_last_alloc_mutator, _seqnum_last_alloc_gc);
 401   }
 402 
 403   uint64_t seqnum_first_alloc_mutator() const {
 404     return _seqnum_first_alloc_mutator;




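The seqnum_first_alloc()/seqnum_last_alloc() accessors above merge the mutator and GC allocation streams, treating 0 as "never allocated": the first-alloc value is the minimum of the non-zero pair, the last-alloc value the maximum. One way a heuristic could consume this is to rank candidate regions by how long ago they last saw an allocation. The sketch below is hypothetical; the ranking policy and all names are assumptions, not taken from this header.

    #include <cstdint>
    #include <cstddef>
    #include <vector>
    #include <algorithm>

    // Hypothetical snapshot of what a heuristic would read per region.
    struct RegionSnapshot {
      size_t   region_number;
      uint64_t seqnum_last_alloc;  // as computed by seqnum_last_alloc()
    };

    // Assumed policy for illustration: regions untouched for the longest
    // time rank first, on the guess their contents are most likely dead.
    void rank_by_staleness(std::vector<RegionSnapshot>& regions) {
      std::sort(regions.begin(), regions.end(),
                [](const RegionSnapshot& a, const RegionSnapshot& b) {
                  return a.seqnum_last_alloc < b.seqnum_last_alloc;
                });
    }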
 204   static size_t RegionSizeWordsShift;
 205   static size_t RegionSizeBytesMask;
 206   static size_t RegionSizeWordsMask;
 207   static size_t HumongousThresholdBytes;
 208   static size_t HumongousThresholdWords;
 209   static size_t MaxTLABSizeBytes;
 210 
 211   // Global allocation counter, incremented for each allocation
 212   // under the Shenandoah heap lock
 213   static uint64_t AllocSeqNum;
 214 
 215   ShenandoahHeap* _heap;
 216   size_t _region_number;
 217   volatile size_t _live_data;
 218   MemRegion _reserved;
 219 
 220   size_t _tlab_allocs;
 221   size_t _gclab_allocs;
 222   size_t _shared_allocs;
 223 


 224   HeapWord* _new_top;
 225 
 226   size_t _critical_pins;
 227 
 228   // Seq numbers are used to drive heuristic decisions for collection.
 229   // They are set when the region is used for allocation.
 230   uint64_t  _seqnum_first_alloc_mutator;
 231   uint64_t  _seqnum_first_alloc_gc;
 232   uint64_t  _seqnum_last_alloc_mutator;
 233   uint64_t  _seqnum_last_alloc_gc;
 234 
 235   RegionState _state;
 236   double _empty_time;
 237 
 238   // Whether the region has been initially committed. A region must be
 239   // committed before it can be idled
 240   bool   _initialized;
 241 
 242   ShenandoahPacer* _pacer;
 243 


 356 
 357   HeapWord* block_start_const(const void* p) const;
 358 
 359   // Just before GC, we need to fill the current region.
 360   void fill_region();
 361 
 362   bool in_collection_set() const;
 363 
 364   // Find the humongous start region that this region belongs to
 365   ShenandoahHeapRegion* humongous_start_region() const;
 366 
 367   virtual CompactibleSpace* next_compaction_space() const;
 368 
 369   // Override for scan_and_forward support.
 370   void prepare_for_compaction(CompactPoint* cp);
 371   void adjust_pointers();
 372   void compact();
 373 
 374   void set_new_top(HeapWord* new_top) { _new_top = new_top; }
 375   HeapWord* new_top() const { return _new_top; }







 376 
 377   inline void adjust_alloc_metadata(ShenandoahHeap::AllocType type, size_t);
 378   void reset_alloc_metadata_to_shared();
 379   void reset_alloc_metadata();
 380   size_t get_shared_allocs() const;
 381   size_t get_tlab_allocs() const;
 382   size_t get_gclab_allocs() const;
 383 
 384   uint64_t seqnum_first_alloc() const {
 385     if (_seqnum_first_alloc_mutator == 0) return _seqnum_first_alloc_gc;
 386     if (_seqnum_first_alloc_gc == 0)      return _seqnum_first_alloc_mutator;
 387     return MIN2(_seqnum_first_alloc_mutator, _seqnum_first_alloc_gc);
 388   }
 389 
 390   uint64_t seqnum_last_alloc() const {
 391     return MAX2(_seqnum_last_alloc_mutator, _seqnum_last_alloc_gc);
 392   }
 393 
 394   uint64_t seqnum_first_alloc_mutator() const {
 395     return _seqnum_first_alloc_mutator;


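fill_region() is only declared in this header. The idea behind it is that, just before GC, the unused tail of the current allocation region gets claimed by a filler so linear heap walks never encounter unparsable free space. Below is a toy model of that idea; the filler layout (tag word plus length word) and the names ToyRegion and kFillerTag are assumptions for illustration, whereas the real code installs a proper filler object.

    #include <cstdint>
    #include <cstddef>

    // Toy region: a fixed array of words with an allocation top.
    struct ToyRegion {
      static const size_t kWords = 1024;
      uint64_t words[kWords];
      size_t   top = 0;  // index of the next free word
    };

    // Assumed filler layout: a tag word followed by the filler length in
    // words, so a linear walk can recognize and skip the dead tail.
    const uint64_t kFillerTag = UINT64_C(0xF111E4);

    void fill_region(ToyRegion& r) {
      size_t remaining = ToyRegion::kWords - r.top;
      if (remaining >= 2) {
        r.words[r.top]     = kFillerTag;  // pseudo-header marking filler
        r.words[r.top + 1] = remaining;   // length lets a scan jump over it
      }
      r.top = ToyRegion::kWords;          // region now reads as fully used
    }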