--- a/collectedHeap.hpp
+++ b/collectedHeap.hpp
@@ -121,62 +121,62 @@
   // Constructor
   CollectedHeap();
 
   // Do common initializations that must follow instance construction,
   // for example, those needing virtual calls.
   // This code could perhaps be moved into initialize() but would
   // be slightly more awkward because we want the latter to be a
   // pure virtual.
   void pre_initialize();
 
   // Create a new tlab. All TLAB allocations must go through this.
   virtual HeapWord* allocate_new_tlab(size_t size);
 
   // Accumulate statistics on all tlabs.
   virtual void accumulate_statistics_all_tlabs();
 
   // Reinitialize tlabs before resuming mutators.
   virtual void resize_all_tlabs();
 
   // Allocate from the current thread's TLAB, with broken-out slow path.
-  inline static HeapWord* allocate_from_tlab(KlassHandle klass, Thread* thread, size_t size);
-  static HeapWord* allocate_from_tlab_slow(KlassHandle klass, Thread* thread, size_t size);
+  inline static HeapWord* allocate_from_tlab(Klass* klass, Thread* thread, size_t size);
+  static HeapWord* allocate_from_tlab_slow(Klass* klass, Thread* thread, size_t size);
 
   // Allocate an uninitialized block of the given size, or returns NULL if
   // this is impossible.
-  inline static HeapWord* common_mem_allocate_noinit(KlassHandle klass, size_t size, TRAPS);
+  inline static HeapWord* common_mem_allocate_noinit(Klass* klass, size_t size, TRAPS);
 
   // Like allocate_init, but the block returned by a successful allocation
   // is guaranteed initialized to zeros.
-  inline static HeapWord* common_mem_allocate_init(KlassHandle klass, size_t size, TRAPS);
+  inline static HeapWord* common_mem_allocate_init(Klass* klass, size_t size, TRAPS);
 
   // Helper functions for (VM) allocation.
-  inline static void post_allocation_setup_common(KlassHandle klass, HeapWord* obj);
-  inline static void post_allocation_setup_no_klass_install(KlassHandle klass,
+  inline static void post_allocation_setup_common(Klass* klass, HeapWord* obj);
+  inline static void post_allocation_setup_no_klass_install(Klass* klass,
                                                              HeapWord* objPtr);
 
-  inline static void post_allocation_setup_obj(KlassHandle klass, HeapWord* obj, int size);
+  inline static void post_allocation_setup_obj(Klass* klass, HeapWord* obj, int size);
 
-  inline static void post_allocation_setup_array(KlassHandle klass,
+  inline static void post_allocation_setup_array(Klass* klass,
                                                  HeapWord* obj, int length);
 
-  inline static void post_allocation_setup_class(KlassHandle klass, HeapWord* obj, int size);
+  inline static void post_allocation_setup_class(Klass* klass, HeapWord* obj, int size);
 
   // Clears an allocated object.
   inline static void init_obj(HeapWord* obj, size_t size);
 
   // Filler object utilities.
   static inline size_t filler_array_hdr_size();
   static inline size_t filler_array_min_size();
 
   DEBUG_ONLY(static void fill_args_check(HeapWord* start, size_t words);)
   DEBUG_ONLY(static void zap_filler_array(HeapWord* start, size_t words, bool zap = true);)
 
   // Fill with a single array; caller must ensure filler_array_min_size() <=
   // words <= filler_array_max_size().
   static inline void fill_with_array(HeapWord* start, size_t words, bool zap = true);
 
   // Fill with a single object (either an int array or a java.lang.Object).
   static inline void fill_with_object_impl(HeapWord* start, size_t words, bool zap = true);
 
   virtual void trace_heap(GCWhen::Type when, const GCTracer* tracer);
 
@@ -282,44 +282,44 @@
 
   bool is_in_closed_subset_or_null(const void* p) const {
     return p == NULL || is_in_closed_subset(p);
   }
 
   // An object is scavengable if its location may move during a scavenge.
   // (A scavenge is a GC which is not a full GC.)
   virtual bool is_scavengable(const void *p) = 0;
 
   void set_gc_cause(GCCause::Cause v) {
     if (UsePerfData) {
       _gc_lastcause = _gc_cause;
       _perf_gc_lastcause->set_value(GCCause::to_string(_gc_lastcause));
       _perf_gc_cause->set_value(GCCause::to_string(v));
     }
     _gc_cause = v;
   }
   GCCause::Cause gc_cause() { return _gc_cause; }
 
   // General obj/array allocation facilities.
-  inline static oop obj_allocate(KlassHandle klass, int size, TRAPS);
-  inline static oop array_allocate(KlassHandle klass, int size, int length, TRAPS);
-  inline static oop array_allocate_nozero(KlassHandle klass, int size, int length, TRAPS);
-  inline static oop class_allocate(KlassHandle klass, int size, TRAPS);
+  inline static oop obj_allocate(Klass* klass, int size, TRAPS);
+  inline static oop array_allocate(Klass* klass, int size, int length, TRAPS);
+  inline static oop array_allocate_nozero(Klass* klass, int size, int length, TRAPS);
+  inline static oop class_allocate(Klass* klass, int size, TRAPS);
 
   // Raw memory allocation facilities
   // The obj and array allocate methods are covers for these methods.
   // mem_allocate() should never be
   // called to allocate TLABs, only individual objects.
   virtual HeapWord* mem_allocate(size_t size,
                                  bool* gc_overhead_limit_was_exceeded) = 0;
 
   // Utilities for turning raw memory into filler objects.
   //
   // min_fill_size() is the smallest region that can be filled.
   // fill_with_objects() can fill arbitrary-sized regions of the heap using
   // multiple objects.  fill_with_object() is for regions known to be smaller
   // than the largest array of integers; it uses a single object to fill the
   // region and has slightly less overhead.
   static size_t min_fill_size() {
     return size_t(align_object_size(oopDesc::header_size()));
   }
 
   static void fill_with_objects(HeapWord* start, size_t words, bool zap = true);
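The only substantive change in both hunks is the type of the klass parameter: the KlassHandle wrapper is replaced by a raw Klass* throughout the allocation entry points. The declarations themselves describe a two-level allocation path: allocate_from_tlab() serves the common case from the current thread's TLAB, falling into allocate_from_tlab_slow() when the buffer is exhausted, while mem_allocate() is the shared-heap fallback that must never be used to allocate TLABs themselves. A condensed sketch of how these pieces compose, roughly mirroring what common_mem_allocate_noinit() does but omitting its GC retry and out-of-memory handling; the function name sketch_mem_allocate is hypothetical and access control is ignored for readability:

// Condensed sketch of the fast/slow allocation split implied by the
// declarations above; the real common_mem_allocate_noinit() also handles
// GC retries, the gc_overhead_limit flag, and OOM reporting.
HeapWord* sketch_mem_allocate(Klass* klass, size_t size, Thread* thread) {
  HeapWord* result = NULL;
  if (UseTLAB) {
    // Fast path: bump-pointer allocation in the thread-local buffer.
    // On failure this falls through to allocate_from_tlab_slow(), which
    // may retire the current TLAB and call allocate_new_tlab().
    result = CollectedHeap::allocate_from_tlab(klass, thread, size);
    if (result != NULL) {
      return result;
    }
  }
  // Slow path: allocate a single object directly from the shared heap.
  // Per the comment above, mem_allocate() is for individual objects only.
  bool gc_overhead_limit_was_exceeded = false;
  return Universe::heap()->mem_allocate(size, &gc_overhead_limit_was_exceeded);
}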
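The filler utilities exist to keep the heap parsable: when a region, such as the unused tail of a retired TLAB, would otherwise be left as raw memory, it is overwritten with dummy objects that heap walkers can skip over. A minimal sketch of the intended usage under that assumption; retire_tlab_remainder is a hypothetical helper, not part of the patch:

// Hypothetical helper showing the intended use of the filler API: turn
// the unused tail [top, end) of a retired buffer into dummy objects so
// the heap remains walkable.
void retire_tlab_remainder(HeapWord* top, HeapWord* end) {
  size_t words = pointer_delta(end, top);  // remaining space, in HeapWords
  if (words >= CollectedHeap::min_fill_size()) {
    // fill_with_objects() splits an arbitrarily large region into one or
    // more fillers (int arrays, or a plain java.lang.Object for a
    // header-sized tail, per fill_with_object_impl's comment).
    CollectedHeap::fill_with_objects(top, words);
  }
  // The real TLAB code reserves alignment space up front, so a nonzero
  // remainder smaller than min_fill_size() should not occur there.
}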