< prev index next >

src/share/vm/gc_interface/collectedHeap.hpp

Print this page




 273   }
 274 
 275   bool is_in_closed_subset_or_null(const void* p) const {
 276     return p == NULL || is_in_closed_subset(p);
 277   }
 278 
  // An object is scavengable if its location may move during a scavenge.
  // (A scavenge is a GC which is not a full GC.)
  // Pure virtual: each concrete heap decides, for the address p, whether
  // the object there may be relocated by a non-full collection.
  virtual bool is_scavengable(const void *p) = 0;
 282 
 283   void set_gc_cause(GCCause::Cause v) {
 284      if (UsePerfData) {
 285        _gc_lastcause = _gc_cause;
 286        _perf_gc_lastcause->set_value(GCCause::to_string(_gc_lastcause));
 287        _perf_gc_cause->set_value(GCCause::to_string(v));
 288      }
 289     _gc_cause = v;
 290   }
  // Accessor for the most recently recorded GC cause.
  GCCause::Cause gc_cause() { return _gc_cause; }
 292 
 293   // May be overridden to set additional parallelism.
 294   virtual void set_par_threads(uint t) { (void)t; };
 295 
  // General obj/array allocation facilities.
  inline static oop obj_allocate(KlassHandle klass, int size, TRAPS);
  inline static oop array_allocate(KlassHandle klass, int size, int length, TRAPS);
  // NOTE(review): "nozero" presumably skips zero-clearing of the array body —
  // confirm against the definition before relying on that.
  inline static oop array_allocate_nozero(KlassHandle klass, int size, int length, TRAPS);

  // NOTE(review): presumably installs klass into the freshly allocated obj as
  // a post-allocation step of the allocators above — confirm in the definition.
  inline static void post_allocation_install_obj_klass(KlassHandle klass,
                                                       oop obj);

  // Raw memory allocation facilities
  // The obj and array allocate methods are covers for these methods.
  // mem_allocate() should never be
  // called to allocate TLABs, only individual objects.
  virtual HeapWord* mem_allocate(size_t size,
                                 bool* gc_overhead_limit_was_exceeded) = 0;
 310 
 311   // Utilities for turning raw memory into filler objects.
 312   //
 313   // min_fill_size() is the smallest region that can be filled.
 314   // fill_with_objects() can fill arbitrary-sized regions of the heap using
 315   // multiple objects.  fill_with_object() is for regions known to be smaller




 273   }
 274 
 275   bool is_in_closed_subset_or_null(const void* p) const {
 276     return p == NULL || is_in_closed_subset(p);
 277   }
 278 
  // An object is scavengable if its location may move during a scavenge.
  // (A scavenge is a GC which is not a full GC.)
  // Pure virtual: the concrete heap decides whether the object at p may be
  // relocated by a non-full collection.
  virtual bool is_scavengable(const void *p) = 0;
 282 
 283   void set_gc_cause(GCCause::Cause v) {
 284      if (UsePerfData) {
 285        _gc_lastcause = _gc_cause;
 286        _perf_gc_lastcause->set_value(GCCause::to_string(_gc_lastcause));
 287        _perf_gc_cause->set_value(GCCause::to_string(v));
 288      }
 289     _gc_cause = v;
 290   }
  // Returns the most recently recorded GC cause.
  GCCause::Cause gc_cause() { return _gc_cause; }
 292 



  // General obj/array allocation facilities.
  inline static oop obj_allocate(KlassHandle klass, int size, TRAPS);
  inline static oop array_allocate(KlassHandle klass, int size, int length, TRAPS);
  // NOTE(review): the "nozero" variant presumably omits zero-clearing of the
  // array body — verify against the definition.
  inline static oop array_allocate_nozero(KlassHandle klass, int size, int length, TRAPS);

  // NOTE(review): presumably sets the klass of a newly allocated object as a
  // post-allocation step of the allocators above — verify in the definition.
  inline static void post_allocation_install_obj_klass(KlassHandle klass,
                                                       oop obj);

  // Raw memory allocation facilities
  // The obj and array allocate methods are covers for these methods.
  // mem_allocate() should never be
  // called to allocate TLABs, only individual objects.
  virtual HeapWord* mem_allocate(size_t size,
                                 bool* gc_overhead_limit_was_exceeded) = 0;
 307 
 308   // Utilities for turning raw memory into filler objects.
 309   //
 310   // min_fill_size() is the smallest region that can be filled.
 311   // fill_with_objects() can fill arbitrary-sized regions of the heap using
 312   // multiple objects.  fill_with_object() is for regions known to be smaller


< prev index next >