< prev index next >

src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp

Print this page




 189   void collect(GCCause::Cause cause);
 190 
 191   // These also should be called by the vm thread at a safepoint (e.g., from a
 192   // VM operation).
 193   //
 194   // The first collects the young generation only, unless the scavenge fails; it
 195   // will then attempt a full gc.  The second collects the entire heap; if
 196   // maximum_compaction is true, it will compact everything and clear all soft
 197   // references.
 198   inline void invoke_scavenge();
 199 
 200   // Perform a full collection
 201   virtual void do_full_collection(bool clear_all_soft_refs);
 202 
       // Compiled code may bump-allocate directly in eden only when NUMA is off
       // (presumably because NUMA-interleaved eden is not one contiguous region
       // — confirm against MutableNUMASpace).
 203   bool supports_inline_contig_alloc() const { return !UseNUMA; }
 204 
       // Address of eden's top pointer, for inline contiguous allocation.
       // Under UseNUMA returns the sentinel (HeapWord* volatile*)-1, consistent
       // with supports_inline_contig_alloc() returning false in that case.
 205   HeapWord* volatile* top_addr() const { return !UseNUMA ? young_gen()->top_addr() : (HeapWord* volatile*)-1; }
       // Address of eden's end pointer, paired with top_addr() above; the
       // (HeapWord**)-1 sentinel again marks "unavailable" under UseNUMA.
 206   HeapWord** end_addr() const { return !UseNUMA ? young_gen()->end_addr() : (HeapWord**)-1; }
 207 
 208   void ensure_parsability(bool retire_tlabs);
 209   void accumulate_statistics_all_tlabs();
 210   void resize_all_tlabs();
 211 
       // This heap always supports thread-local allocation buffers.
 212   bool supports_tlab_allocation() const { return true; }
 213 
 214   size_t tlab_capacity(Thread* thr) const;
 215   size_t tlab_used(Thread* thr) const;
 216   size_t unsafe_max_tlab_alloc(Thread* thr) const;
 217 
 218   void object_iterate(ObjectClosure* cl);
       // No distinct "safe" iteration here — simply delegates to object_iterate().
 219   void safe_object_iterate(ObjectClosure* cl) { object_iterate(cl); }
 220 
 221   HeapWord* block_start(const void* addr) const;
 222   size_t block_size(const HeapWord* addr) const;
 223   bool block_is_obj(const HeapWord* addr) const;
 224 
 225   jlong millis_since_last_gc();
 226 
 227   void prepare_for_verify();
 228   PSHeapSummary create_ps_heap_summary();
 229   virtual void print_on(outputStream* st) const;




 189   void collect(GCCause::Cause cause);
 190 
 191   // These also should be called by the vm thread at a safepoint (e.g., from a
 192   // VM operation).
 193   //
 194   // The first collects the young generation only, unless the scavenge fails; it
 195   // will then attempt a full gc.  The second collects the entire heap; if
 196   // maximum_compaction is true, it will compact everything and clear all soft
 197   // references.
 198   inline void invoke_scavenge();
 199 
 200   // Perform a full collection
 201   virtual void do_full_collection(bool clear_all_soft_refs);
 202 
       // Compiled code may bump-allocate directly in eden only when NUMA is off
       // (presumably because NUMA-interleaved eden is not one contiguous region
       // — confirm against MutableNUMASpace).
 203   bool supports_inline_contig_alloc() const { return !UseNUMA; }
 204 
       // Address of eden's top pointer, for inline contiguous allocation.
       // Under UseNUMA returns the sentinel (HeapWord* volatile*)-1, consistent
       // with supports_inline_contig_alloc() returning false in that case.
 205   HeapWord* volatile* top_addr() const { return !UseNUMA ? young_gen()->top_addr() : (HeapWord* volatile*)-1; }
       // Address of eden's end pointer, paired with top_addr() above; the
       // (HeapWord**)-1 sentinel again marks "unavailable" under UseNUMA.
 206   HeapWord** end_addr() const { return !UseNUMA ? young_gen()->end_addr() : (HeapWord**)-1; }
 207 
 208   void ensure_parsability(bool retire_tlabs);

 209   void resize_all_tlabs();
 210 
       // This heap always supports thread-local allocation buffers.
 211   bool supports_tlab_allocation() const { return true; }
 212 
 213   size_t tlab_capacity(Thread* thr) const;
 214   size_t tlab_used(Thread* thr) const;
 215   size_t unsafe_max_tlab_alloc(Thread* thr) const;
 216 
 217   void object_iterate(ObjectClosure* cl);
       // No distinct "safe" iteration here — simply delegates to object_iterate().
 218   void safe_object_iterate(ObjectClosure* cl) { object_iterate(cl); }
 219 
 220   HeapWord* block_start(const void* addr) const;
 221   size_t block_size(const HeapWord* addr) const;
 222   bool block_is_obj(const HeapWord* addr) const;
 223 
 224   jlong millis_since_last_gc();
 225 
 226   void prepare_for_verify();
 227   PSHeapSummary create_ps_heap_summary();
 228   virtual void print_on(outputStream* st) const;


< prev index next >