
src/hotspot/share/gc/shared/genCollectedHeap.hpp

--- old

 217 
 218   // We may support a shared contiguous allocation area, if the youngest
 219   // generation does.
 220   bool supports_inline_contig_alloc() const;
 221   HeapWord* volatile* top_addr() const;
 222   HeapWord** end_addr() const;
 223 
 224   // Perform a full collection of the heap; intended for use in implementing
 225   // "System.gc". This implies as full a collection as the CollectedHeap
 226   // supports. Caller does not hold the Heap_lock on entry.
 227   virtual void collect(GCCause::Cause cause);
 228 
 229   // The same as above, but assumes that the caller holds the Heap_lock.
 230   void collect_locked(GCCause::Cause cause);
 231 
 232   // Perform a full collection of generations up to and including max_generation.
 233   // Mostly used for testing purposes. Caller does not hold the Heap_lock on entry.
 234   void collect(GCCause::Cause cause, GenerationType max_generation);
 235 
 236   // Returns "TRUE" iff "p" points into the committed areas of the heap.
 237   // The methods is_in(), is_in_closed_subset() and is_in_youngest() may
 238   // be expensive to compute in general, so, to prevent
 239   // their inadvertent use in product JVMs, we restrict their use to
 240   // assertion checking or verification only.
 241   bool is_in(const void* p) const;
 242 
 243   // Returns true if the reference is to an object in the reserved space
 244   // for the young generation.
 245   // Assumes the young gen address range is less than that of the old gen.
 246   bool is_in_young(oop p);
 247 
 248 #ifdef ASSERT
 249   bool is_in_partial_collection(const void* p);
 250 #endif
 251 
 252   // Optimized nmethod scanning support routines
 253   virtual void register_nmethod(nmethod* nm);
 254   virtual void unregister_nmethod(nmethod* nm);
 255   virtual void verify_nmethod(nmethod* nm);
 256   virtual void flush_nmethod(nmethod* nm);
 257 
 258   void prune_scavengable_nmethods();
 259 
 260   // Iteration functions.


+++ new

 217 
 218   // We may support a shared contiguous allocation area, if the youngest
 219   // generation does.
 220   bool supports_inline_contig_alloc() const;
 221   HeapWord* volatile* top_addr() const;
 222   HeapWord** end_addr() const;
 223 
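These accessors exist so that a JIT-compiled fast path can allocate straight out of the shared contiguous area by bumping the top pointer until it reaches end. A minimal sketch of that fast path follows; cas(addr, expected, desired) is a hypothetical compare-and-swap helper standing in for HotSpot's own atomic primitive:

    // Sketch only: bump-pointer allocation against the contiguous area
    // exposed by top_addr()/end_addr(). cas() is hypothetical and is
    // assumed to return the value previously stored at addr.
    HeapWord* try_inline_alloc(GenCollectedHeap* gch, size_t word_size) {
      HeapWord* volatile* top = gch->top_addr();
      HeapWord** end = gch->end_addr();
      while (true) {
        HeapWord* old_top = *top;
        HeapWord* new_top = old_top + word_size;
        if (new_top > *end) {
          return NULL;                  // out of space: take the slow path
        }
        if (cas(top, old_top, new_top) == old_top) {
          return old_top;               // success: new object starts here
        }
        // Lost the race: another thread bumped top first; retry.
      }
    }
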
 224   // Perform a full collection of the heap; intended for use in implementing
 225   // "System.gc". This implies as full a collection as the CollectedHeap
 226   // supports. Caller does not hold the Heap_lock on entry.
 227   virtual void collect(GCCause::Cause cause);
 228 
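For context, an explicit "System.gc" reaches this entry point roughly as follows (a sketch modeled on the JVM_GC entry in jvm.cpp; note the caller holds no Heap_lock, matching the comment above):

    // Sketch: the runtime entry behind java.lang.System.gc().
    if (!DisableExplicitGC) {
      Universe::heap()->collect(GCCause::_java_lang_system_gc);
    }
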
 229   // The same as above, but assumes that the caller holds the Heap_lock.
 230   void collect_locked(GCCause::Cause cause);
 231 
 232   // Perform a full collection of generations up to and including max_generation.
 233   // Mostly used for testing purposes. Caller does not hold the Heap_lock on entry.
 234   void collect(GCCause::Cause cause, GenerationType max_generation);
 235 
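A hedged example of this testing overload, assuming the GenerationType enum declared earlier in this class; the cause value here is illustrative only:

    // Sketch: collect generations up to and including the young gen,
    // as a test might request.
    GenCollectedHeap* gch = GenCollectedHeap::heap();
    gch->collect(GCCause::_allocation_failure, GenCollectedHeap::YoungGen);
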
 236   // Returns "TRUE" iff "p" points into the committed areas of the heap.
 237   // The methods is_in() and is_in_youngest() may be expensive to compute
 238   // in general, so, to prevent their inadvertent use in product JVMs, we
 239   // restrict their use to assertion checking or verification only.
 240   bool is_in(const void* p) const;
 241 
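Because of that restriction, callers wrap the check in an assertion so product builds pay nothing for it; a typical (illustrative) use:

    // Sketch: debug-only containment check, compiled out of product builds.
    assert(GenCollectedHeap::heap()->is_in(p),
           "p must point into the committed heap");
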
 242   // Returns true if the reference is to an object in the reserved space
 243   // for the young generation.
 244   // Assumes the young gen address range is less than that of the old gen.
 245   bool is_in_young(oop p);
 246 
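Given the address-ordering assumption just stated, the check can reduce to a single boundary comparison. A minimal sketch (the real implementation may additionally assert agreement with the young generation's reserved region):

    // Sketch: with the young range below the old range, "in young" is
    // one compare against the old generation's reserved base.
    bool GenCollectedHeap::is_in_young(oop p) {
      return cast_from_oop<HeapWord*>(p) < _old_gen->reserved().start();
    }
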
 247 #ifdef ASSERT
 248   bool is_in_partial_collection(const void* p);
 249 #endif
 250 
 251   // Optimized nmethod scanning support routines
 252   virtual void register_nmethod(nmethod* nm);
 253   virtual void unregister_nmethod(nmethod* nm);
 254   virtual void verify_nmethod(nmethod* nm);
 255   virtual void flush_nmethod(nmethod* nm);
 256 
 257   void prune_scavengable_nmethods();
 258 
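These hooks let the heap keep a side list of nmethods whose oops point into the young generation, so a scavenge scans only those code roots. A plausible sketch of the delegation, assuming the ScavengableNMethods helper used by the generational collectors (names not verified against this exact revision):

    // Sketch: forward the hooks to the scavengable-nmethods bookkeeping.
    void GenCollectedHeap::register_nmethod(nmethod* nm) {
      ScavengableNMethods::register_nmethod(nm);
    }
    void GenCollectedHeap::prune_scavengable_nmethods() {
      // Drop entries that no longer refer into the young generation.
      ScavengableNMethods::prune_nmethods();
    }
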
 259   // Iteration functions.

