< prev index next >

src/hotspot/share/gc/shared/generation.hpp

Print this page
rev 60257 : [mq]: 8248401-unify-millis-since-last-gc


  58 class OopsInGenClosure;
  59 class OopClosure;
  60 class ScanClosure;
  61 class FastScanClosure;
  62 class GenCollectedHeap;
  63 class GCStats;
  64 
  65 // A "ScratchBlock" represents a block of memory in one generation usable by
  66 // another.  It represents "num_words" free words, starting at and including
  67 // the address of "this".
  68 struct ScratchBlock {
  69   ScratchBlock* next;
  70   size_t num_words;
       // Pre-C99 "struct hack": declared with a single element, but the
       // block actually extends for num_words words from "this" (see the
       // comment below); blocks are chained through "next".
  71   HeapWord scratch_space[1];  // Actually, of size "num_words-2" (assuming
  72                               // first two fields are word-sized.)
  73 };
  74 
  75 class Generation: public CHeapObj<mtGC> {
  76   friend class VMStructs;
  77  private:
  78   jlong _time_of_last_gc; // time when last gc on this generation happened (ms)
  79   MemRegion _prev_used_region; // for collectors that want to "remember" a value for
  80                                // used region at some specific point during collection.
  81 
  82   GCMemoryManager* _gc_manager;
  83 
  84  protected:
  85   // Minimum and maximum addresses for memory reserved (not necessarily
  86   // committed) for generation.
  87   // Used by card marking code. Must not overlap with address ranges of
  88   // other generations.
  89   MemRegion _reserved;
  90 
  91   // Memory area reserved for generation
  92   VirtualSpace _virtual_space;
  93 
  94   // ("Weak") Reference processing support
  95   SpanSubjectToDiscoveryClosure _span_based_discoverer;
  96   ReferenceProcessor* _ref_processor;
  97 
  98   // Performance Counters


 345   // "oop" (initializing the allocated block). If the allocation is
 346   // still unsuccessful, return "NULL".
 347   virtual HeapWord* expand_and_allocate(size_t word_size,
 348                                         bool is_tlab,
 349                                         bool parallel = false) = 0;
 350 
 351   // Some generations may require some cleanup or preparation actions before
 352   // allowing a collection.  The default is to do nothing.
       // NOTE(review): "full" presumably distinguishes a full collection from
       // a young-only one — confirm at call sites; not derivable from here.
 353   virtual void gc_prologue(bool full) {}
 354 
 355   // Some generations may require some cleanup actions after a collection.
 356   // The default is to do nothing.
       // NOTE(review): "full" presumably mirrors the gc_prologue(full)
       // argument for the same collection — confirm at call sites.
 357   virtual void gc_epilogue(bool full) {}
 358 
 359   // Save the high water marks for the used space in a generation.
       // Default is a no-op; generations that track per-space tops override it.
 360   virtual void record_spaces_top() {}
 361 
 362   // Some generations may need to be "fixed-up" after some allocation
 363   // activity to make them parsable again. The default is to do nothing.
       // NOTE(review): overriders presumably retire/fill partially-allocated
       // areas (e.g. TLABs) so the heap can be walked — confirm in subclasses.
 364   virtual void ensure_parsability() {}
 365 
 366   // Time (in ms) when we were last collected or now if a collection is
 367   // in progress.
 368   virtual jlong time_of_last_gc(jlong now) {
 369     // Both _time_of_last_gc and now are set using a time source
 370     // that guarantees monotonically non-decreasing values provided
 371     // the underlying platform provides such a source. So we still
 372     // have to guard against non-monotonicity.
         // "now" is consulted only by this non-product sanity check; the
         // stored timestamp is returned unconditionally in all build flavors.
 373     NOT_PRODUCT(
 374       if (now < _time_of_last_gc) {
 375         log_warning(gc)("time warp: " JLONG_FORMAT " to " JLONG_FORMAT, _time_of_last_gc, now);
 376       }
 377     )
 378     return _time_of_last_gc;
 379   }
 380 
 381   virtual void update_time_of_last_gc(jlong now)  {
         // Record "now" as the timestamp of the most recent collection of this
         // generation; it is the value returned by time_of_last_gc().
 382     _time_of_last_gc = now;
 383   }
 384 
 385   // Generations may keep statistics about collection. This method
 386   // updates those statistics. current_generation is the generation
 387   // that was most recently collected. This allows the generation to
 388   // decide what statistics are valid to collect. For example, the
 389   // generation can decide to gather the amount of promoted data if
 390   // the collection of the young generation has completed.
       // (The paragraph above documents update_gc_stats() below;
       // gc_stats() is simply an accessor for the _gc_stats member.)
 391   GCStats* gc_stats() const { return _gc_stats; }
 392   virtual void update_gc_stats(Generation* current_generation, bool full) {}
 393 
 394 #if INCLUDE_SERIALGC
 395   // Mark sweep support phase2
 396   virtual void prepare_for_compaction(CompactPoint* cp);
 397   // Mark sweep support phase3
 398   virtual void adjust_pointers();
 399   // Mark sweep support phase4
 400   virtual void compact();
       // Reaching the default implementation is a bug (ShouldNotReachHere):
       // generations that participate in compaction must override this.
 401   virtual void post_compact() { ShouldNotReachHere(); }
 402 #endif
 403 




  58 class OopsInGenClosure;
  59 class OopClosure;
  60 class ScanClosure;
  61 class FastScanClosure;
  62 class GenCollectedHeap;
  63 class GCStats;
  64 
  65 // A "ScratchBlock" represents a block of memory in one generation usable by
  66 // another.  It represents "num_words" free words, starting at and including
  67 // the address of "this".
  68 struct ScratchBlock {
  69   ScratchBlock* next;
  70   size_t num_words;
       // Pre-C99 "struct hack": declared with a single element, but the
       // block actually extends for num_words words from "this" (see the
       // comment below); blocks are chained through "next".
  71   HeapWord scratch_space[1];  // Actually, of size "num_words-2" (assuming
  72                               // first two fields are word-sized.)
  73 };
  74 
  75 class Generation: public CHeapObj<mtGC> {
  76   friend class VMStructs;
  77  private:

  78   MemRegion _prev_used_region; // for collectors that want to "remember" a value for
  79                                // used region at some specific point during collection.
  80 
  81   GCMemoryManager* _gc_manager;
  82 
  83  protected:
  84   // Minimum and maximum addresses for memory reserved (not necessarily
  85   // committed) for generation.
  86   // Used by card marking code. Must not overlap with address ranges of
  87   // other generations.
  88   MemRegion _reserved;
  89 
  90   // Memory area reserved for generation
  91   VirtualSpace _virtual_space;
  92 
  93   // ("Weak") Reference processing support
  94   SpanSubjectToDiscoveryClosure _span_based_discoverer;
  95   ReferenceProcessor* _ref_processor;
  96 
  97   // Performance Counters


 344   // "oop" (initializing the allocated block). If the allocation is
 345   // still unsuccessful, return "NULL".
 346   virtual HeapWord* expand_and_allocate(size_t word_size,
 347                                         bool is_tlab,
 348                                         bool parallel = false) = 0;
 349 
 350   // Some generations may require some cleanup or preparation actions before
 351   // allowing a collection.  The default is to do nothing.
       // NOTE(review): "full" presumably distinguishes a full collection from
       // a young-only one — confirm at call sites; not derivable from here.
 352   virtual void gc_prologue(bool full) {}
 353 
 354   // Some generations may require some cleanup actions after a collection.
 355   // The default is to do nothing.
       // NOTE(review): "full" presumably mirrors the gc_prologue(full)
       // argument for the same collection — confirm at call sites.
 356   virtual void gc_epilogue(bool full) {}
 357 
 358   // Save the high water marks for the used space in a generation.
       // Default is a no-op; generations that track per-space tops override it.
 359   virtual void record_spaces_top() {}
 360 
 361   // Some generations may need to be "fixed-up" after some allocation
 362   // activity to make them parsable again. The default is to do nothing.
       // NOTE(review): overriders presumably retire/fill partially-allocated
       // areas (e.g. TLABs) so the heap can be walked — confirm in subclasses.
 363   virtual void ensure_parsability() {}



















 364 
 365   // Generations may keep statistics about collection. This method
 366   // updates those statistics. current_generation is the generation
 367   // that was most recently collected. This allows the generation to
 368   // decide what statistics are valid to collect. For example, the
 369   // generation can decide to gather the amount of promoted data if
 370   // the collection of the young generation has completed.
       // (The paragraph above documents update_gc_stats() below;
       // gc_stats() is simply an accessor for the _gc_stats member.)
 371   GCStats* gc_stats() const { return _gc_stats; }
 372   virtual void update_gc_stats(Generation* current_generation, bool full) {}
 373 
 374 #if INCLUDE_SERIALGC
 375   // Mark sweep support phase2
 376   virtual void prepare_for_compaction(CompactPoint* cp);
 377   // Mark sweep support phase3
 378   virtual void adjust_pointers();
 379   // Mark sweep support phase4
 380   virtual void compact();
       // Reaching the default implementation is a bug (ShouldNotReachHere):
       // generations that participate in compaction must override this.
 381   virtual void post_compact() { ShouldNotReachHere(); }
 382 #endif
 383 


< prev index next >