84 friend class IsGCActiveMark; // Block structured external access to _is_gc_active
85
86 private:
87 #ifdef ASSERT
// Debug-only counter; presumably used to inject artificial OutOfMemory
// failures in ASSERT builds -- confirm against its use sites elsewhere.
88 static int _fire_out_of_memory_count;
89 #endif
90
91 // Used for filler objects (static, but initialized in ctor).
92 static size_t _filler_array_max_size;
93
// Log of heap/GC events; ownership and lifetime are not visible here.
94 GCHeapLog* _gc_heap_log;
95
96 // Used in support of ReduceInitialCardMarks; only consulted if COMPILER2 is being used
97 bool _defer_initial_card_mark;
98
// Presumably the reserved address range of the heap -- TODO confirm where
// this is initialized relative to committed memory.
99 MemRegion _reserved;
100
101 protected:
102 BarrierSet* _barrier_set;
// True while a collection is in progress; external access is block-structured
// via the IsGCActiveMark friend declared above.
103 bool _is_gc_active;
// Number of threads currently working on GC tasks (see n_par_threads()).
104 uint _n_par_threads;
105
// Counters of collections *started* (per the trailing comments), not completed.
106 unsigned int _total_collections; // ... started
107 unsigned int _total_full_collections; // ... started
// Non-product counters; names suggest PromotionFailureALot-style fault
// injection -- semantics inferred from the names, verify at use sites.
108 NOT_PRODUCT(volatile size_t _promotion_failure_alot_count;)
109 NOT_PRODUCT(volatile size_t _promotion_failure_alot_gc_number;)
110
111 // Reason for current garbage collection. Should be set to
112 // a value reflecting no collection between collections.
113 GCCause::Cause _gc_cause;
114 GCCause::Cause _gc_lastcause;
// Performance counters mirroring the current/last cause; updated only when
// UsePerfData is set (see set_gc_cause()).
115 PerfStringVariable* _perf_gc_cause;
116 PerfStringVariable* _perf_gc_lastcause;
117
118 // Constructor
119 CollectedHeap();
120
121 // Do common initializations that must follow instance construction,
122 // for example, those needing virtual calls.
123 // This code could perhaps be moved into initialize() but would
124 // be slightly more awkward because we want the latter to be a
273 return is_in_reserved(p);
274 }
275
// Same as is_in_closed_subset(), except that NULL is always considered inside.
276 bool is_in_closed_subset_or_null(const void* p) const {
277 return p == NULL || is_in_closed_subset(p);
278 }
279
280 // An object is scavengable if its location may move during a scavenge.
281 // (A scavenge is a GC which is not a full GC.)
282 virtual bool is_scavengable(const void *p) = 0;
283
// Record the cause of the upcoming/current collection. When performance
// counters are enabled (UsePerfData), the previous cause is first shifted
// into _gc_lastcause and both causes are mirrored into the perf-data string
// variables so they are observable by external monitoring tools.
// Note: _gc_lastcause is only updated on the UsePerfData path.
284 void set_gc_cause(GCCause::Cause v) {
285 if (UsePerfData) {
286 _gc_lastcause = _gc_cause;
287 _perf_gc_lastcause->set_value(GCCause::to_string(_gc_lastcause));
288 _perf_gc_cause->set_value(GCCause::to_string(v));
289 }
290 _gc_cause = v;
291 }
// Accessor for the currently recorded collection cause.
292 GCCause::Cause gc_cause() { return _gc_cause; }
293
294 // Number of threads currently working on GC tasks.
295 uint n_par_threads() { return _n_par_threads; }
296
// May be overridden to set additional parallelism.
// Base implementation just records the requested thread count.
297 // May be overridden to set additional parallelism.
298 virtual void set_par_threads(uint t) { _n_par_threads = t; };
299
300 // General obj/array allocation facilities.
// The _nozero variant presumably skips zeroing the array body for callers
// that overwrite it immediately -- confirm in the inline definitions.
301 inline static oop obj_allocate(KlassHandle klass, int size, TRAPS);
302 inline static oop array_allocate(KlassHandle klass, int size, int length, TRAPS);
303 inline static oop array_allocate_nozero(KlassHandle klass, int size, int length, TRAPS);
304
// Installs the klass pointer into a freshly allocated object.
305 inline static void post_allocation_install_obj_klass(KlassHandle klass,
306 oop obj);
307
308 // Raw memory allocation facilities
309 // The obj and array allocate methods are covers for these methods.
310 // mem_allocate() should never be
311 // called to allocate TLABs, only individual objects.
// NOTE(review): size is presumably in HeapWords, and
// *gc_overhead_limit_was_exceeded is an out-parameter -- confirm in the
// concrete heap implementations.
312 virtual HeapWord* mem_allocate(size_t size,
313 bool* gc_overhead_limit_was_exceeded) = 0;
314
315 // Utilities for turning raw memory into filler objects.
316 //
317 // min_fill_size() is the smallest region that can be filled.
318 // fill_with_objects() can fill arbitrary-sized regions of the heap using
|
84 friend class IsGCActiveMark; // Block structured external access to _is_gc_active
85
86 private:
87 #ifdef ASSERT
// Debug-only counter; presumably used to inject artificial OutOfMemory
// failures in ASSERT builds -- confirm against its use sites elsewhere.
88 static int _fire_out_of_memory_count;
89 #endif
90
91 // Used for filler objects (static, but initialized in ctor).
92 static size_t _filler_array_max_size;
93
// Log of heap/GC events; ownership and lifetime are not visible here.
94 GCHeapLog* _gc_heap_log;
95
96 // Used in support of ReduceInitialCardMarks; only consulted if COMPILER2 is being used
97 bool _defer_initial_card_mark;
98
// Presumably the reserved address range of the heap -- TODO confirm where
// this is initialized relative to committed memory.
99 MemRegion _reserved;
100
101 protected:
102 BarrierSet* _barrier_set;
// True while a collection is in progress; external access is block-structured
// via the IsGCActiveMark friend declared above.
103 bool _is_gc_active;
104
// Counters of collections *started* (per the trailing comments), not completed.
105 unsigned int _total_collections; // ... started
106 unsigned int _total_full_collections; // ... started
// Non-product counters; names suggest PromotionFailureALot-style fault
// injection -- semantics inferred from the names, verify at use sites.
107 NOT_PRODUCT(volatile size_t _promotion_failure_alot_count;)
108 NOT_PRODUCT(volatile size_t _promotion_failure_alot_gc_number;)
109
110 // Reason for current garbage collection. Should be set to
111 // a value reflecting no collection between collections.
112 GCCause::Cause _gc_cause;
113 GCCause::Cause _gc_lastcause;
// Performance counters mirroring the current/last cause; updated only when
// UsePerfData is set (see set_gc_cause()).
114 PerfStringVariable* _perf_gc_cause;
115 PerfStringVariable* _perf_gc_lastcause;
116
117 // Constructor
118 CollectedHeap();
119
120 // Do common initializations that must follow instance construction,
121 // for example, those needing virtual calls.
122 // This code could perhaps be moved into initialize() but would
123 // be slightly more awkward because we want the latter to be a
272 return is_in_reserved(p);
273 }
274
// Same as is_in_closed_subset(), except that NULL is always considered inside.
275 bool is_in_closed_subset_or_null(const void* p) const {
276 return p == NULL || is_in_closed_subset(p);
277 }
278
279 // An object is scavengable if its location may move during a scavenge.
280 // (A scavenge is a GC which is not a full GC.)
281 virtual bool is_scavengable(const void *p) = 0;
282
// Record the cause of the upcoming/current collection. When performance
// counters are enabled (UsePerfData), the previous cause is first shifted
// into _gc_lastcause and both causes are mirrored into the perf-data string
// variables so they are observable by external monitoring tools.
// Note: _gc_lastcause is only updated on the UsePerfData path.
283 void set_gc_cause(GCCause::Cause v) {
284 if (UsePerfData) {
285 _gc_lastcause = _gc_cause;
286 _perf_gc_lastcause->set_value(GCCause::to_string(_gc_lastcause));
287 _perf_gc_cause->set_value(GCCause::to_string(v));
288 }
289 _gc_cause = v;
290 }
// Accessor for the currently recorded collection cause.
291 GCCause::Cause gc_cause() { return _gc_cause; }
292
293 // General obj/array allocation facilities.
// The _nozero variant presumably skips zeroing the array body for callers
// that overwrite it immediately -- confirm in the inline definitions.
294 inline static oop obj_allocate(KlassHandle klass, int size, TRAPS);
295 inline static oop array_allocate(KlassHandle klass, int size, int length, TRAPS);
296 inline static oop array_allocate_nozero(KlassHandle klass, int size, int length, TRAPS);
297
// Installs the klass pointer into a freshly allocated object.
298 inline static void post_allocation_install_obj_klass(KlassHandle klass,
299 oop obj);
300
301 // Raw memory allocation facilities
302 // The obj and array allocate methods are covers for these methods.
303 // mem_allocate() should never be
304 // called to allocate TLABs, only individual objects.
// NOTE(review): size is presumably in HeapWords, and
// *gc_overhead_limit_was_exceeded is an out-parameter -- confirm in the
// concrete heap implementations.
305 virtual HeapWord* mem_allocate(size_t size,
306 bool* gc_overhead_limit_was_exceeded) = 0;
307
308 // Utilities for turning raw memory into filler objects.
309 //
310 // min_fill_size() is the smallest region that can be filled.
311 // fill_with_objects() can fill arbitrary-sized regions of the heap using
|