< prev index next >

src/share/vm/gc_implementation/concurrentMarkSweep/concurrentMarkSweepGeneration.hpp

Print this page
rev 7318 : imported patch foreground
rev 7319 : [mq]: foreground-review-stefank
rev 7320 : [mq]: foreground-review-kim


1103 
1104   // Grow generation by specified size (returns false if unable to grow)
1105   bool grow_by(size_t bytes);
1106   // Grow generation to reserved size.
1107   bool grow_to_reserved();
1108 
1109   void clear_expansion_cause() { _expansion_cause = CMSExpansionCause::_no_expansion; }
1110 
1111   // Space enquiries
1112   size_t capacity() const;
1113   size_t used() const;
1114   size_t free() const;
1115   double occupancy() const { return ((double)used())/((double)capacity()); }
1116   size_t contiguous_available() const;
1117   size_t unsafe_max_alloc_nogc() const;
1118 
1119   // Overrides
1120   MemRegion used_region() const;
1121   MemRegion used_region_at_save_marks() const;
1122 




1123   virtual bool full_collects_younger_generations() const {
1124     return !ScavengeBeforeFullGC;
1125   }
1126 
1127   void space_iterate(SpaceClosure* blk, bool usedOnly = false);
1128 
1129   // Support for compaction
1130   CompactibleSpace* first_compaction_space() const;
1131   // Adjust quantities in the generation affected by
1132   // the compaction.
1133   void reset_after_compaction();
1134 
1135   // Allocation support
1136   HeapWord* allocate(size_t size, bool tlab);
1137   HeapWord* have_lock_and_allocate(size_t size, bool tlab);
1138   oop       promote(oop obj, size_t obj_size);
1139   HeapWord* par_allocate(size_t size, bool tlab) {
1140     return allocate(size, tlab);
1141   }
1142 




1103 
1104   // Grow generation by specified size (returns false if unable to grow)
1105   bool grow_by(size_t bytes);
1106   // Grow generation to reserved size.
1107   bool grow_to_reserved();
1108 
1109   void clear_expansion_cause() { _expansion_cause = CMSExpansionCause::_no_expansion; }
1110 
1111   // Space enquiries
1112   size_t capacity() const;
1113   size_t used() const;
1114   size_t free() const;
1115   double occupancy() const { return ((double)used())/((double)capacity()); }
1116   size_t contiguous_available() const;
1117   size_t unsafe_max_alloc_nogc() const;
1118 
1119   // Overrides
1120   MemRegion used_region() const;
1121   MemRegion used_region_at_save_marks() const;
1122 
1123   // Does a "full" (forced) collection invoked on this generation collect
1124   // all younger generations as well? Note that returning false when
1125   // ScavengeBeforeFullGC is set is a hack to allow the younger gen to
1126   // be collected first if that flag is set.
1127   virtual bool full_collects_younger_generations() const {
1128     return !ScavengeBeforeFullGC;
1129   }
1130 
1131   void space_iterate(SpaceClosure* blk, bool usedOnly = false);
1132 
1133   // Support for compaction
1134   CompactibleSpace* first_compaction_space() const;
1135   // Adjust quantities in the generation affected by
1136   // the compaction.
1137   void reset_after_compaction();
1138 
1139   // Allocation support
1140   HeapWord* allocate(size_t size, bool tlab);
1141   HeapWord* have_lock_and_allocate(size_t size, bool tlab);
1142   oop       promote(oop obj, size_t obj_size);
1143   HeapWord* par_allocate(size_t size, bool tlab) {
1144     return allocate(size, tlab);
1145   }
1146 


< prev index next >