// NOTE(review): this is the OLD (left-hand) copy of a side-by-side diff hunk of
// CollectedHeap's allocation interface. The leading "1xx" tokens on each line are
// listing line numbers emitted by the diff tool, not part of the C++ source.
123
124 // Constructor
125 CollectedHeap();
126
127 // Create a new tlab. All TLAB allocations must go through this.
128 // To allow more flexible TLAB allocations min_size specifies
129 // the minimum size needed, while requested_size is the requested
130 // size based on ergonomics. The actually allocated size will be
131 // returned in actual_size.
132 virtual HeapWord* allocate_new_tlab(size_t min_size,
133 size_t requested_size,
134 size_t* actual_size);
135
136 // Accumulate statistics on all tlabs.
137 virtual void accumulate_statistics_all_tlabs();
138
139 // Reinitialize tlabs before resuming mutators.
140 virtual void resize_all_tlabs();
141
142 // Allocate from the current thread's TLAB, with broken-out slow path.
// NOTE(review): in this old copy the allocating thread is passed explicitly as
// Thread*; the new copy of this hunk replaces it with the TRAPS macro.
143 inline static HeapWord* allocate_from_tlab(Klass* klass, Thread* thread, size_t size);
144 static HeapWord* allocate_from_tlab_slow(Klass* klass, Thread* thread, size_t size);
145
146 // Allocate an uninitialized block of the given size, or returns NULL if
147 // this is impossible.
148 inline static HeapWord* common_mem_allocate_noinit(Klass* klass, size_t size, TRAPS);
149
150 // Like allocate_init, but the block returned by a successful allocation
151 // is guaranteed initialized to zeros.
152 inline static HeapWord* common_mem_allocate_init(Klass* klass, size_t size, TRAPS);
153
154 // Helper functions for (VM) allocation.
155 inline static void post_allocation_setup_common(Klass* klass, HeapWord* obj);
156 inline static void post_allocation_setup_no_klass_install(Klass* klass,
157 HeapWord* objPtr);
158
159 inline static void post_allocation_setup_obj(Klass* klass, HeapWord* obj, int size);
160
161 inline static void post_allocation_setup_array(Klass* klass,
162 HeapWord* obj, int length);
163
|
// NOTE(review): this is the NEW (right-hand) copy of the first side-by-side diff
// hunk of CollectedHeap's allocation interface. The leading "1xx" tokens are
// listing line numbers from the diff tool, not part of the C++ source.
123
124 // Constructor
125 CollectedHeap();
126
127 // Create a new tlab. All TLAB allocations must go through this.
128 // To allow more flexible TLAB allocations min_size specifies
129 // the minimum size needed, while requested_size is the requested
130 // size based on ergonomics. The actually allocated size will be
131 // returned in actual_size.
132 virtual HeapWord* allocate_new_tlab(size_t min_size,
133 size_t requested_size,
134 size_t* actual_size);
135
136 // Accumulate statistics on all tlabs.
137 virtual void accumulate_statistics_all_tlabs();
138
139 // Reinitialize tlabs before resuming mutators.
140 virtual void resize_all_tlabs();
141
142 // Allocate from the current thread's TLAB, with broken-out slow path.
// NOTE(review): changed relative to the old copy of this hunk -- the explicit
// Thread* parameter is replaced by the TRAPS macro (which supplies the current
// thread and exception-propagation context).
143 inline static HeapWord* allocate_from_tlab(Klass* klass, size_t size, TRAPS);
144 static HeapWord* allocate_from_tlab_slow(Klass* klass, size_t size, TRAPS);
145
146 // Allocate an uninitialized block of the given size, or returns NULL if
147 // this is impossible.
148 inline static HeapWord* common_mem_allocate_noinit(Klass* klass, size_t size, TRAPS);
149
150 // Like allocate_init, but the block returned by a successful allocation
151 // is guaranteed initialized to zeros.
152 inline static HeapWord* common_mem_allocate_init(Klass* klass, size_t size, TRAPS);
153
154 // Helper functions for (VM) allocation.
155 inline static void post_allocation_setup_common(Klass* klass, HeapWord* obj);
156 inline static void post_allocation_setup_no_klass_install(Klass* klass,
157 HeapWord* objPtr);
158
159 inline static void post_allocation_setup_obj(Klass* klass, HeapWord* obj, int size);
160
161 inline static void post_allocation_setup_array(Klass* klass,
162 HeapWord* obj, int length);
163
|
297 _gc_lastcause = _gc_cause;
298 _perf_gc_lastcause->set_value(GCCause::to_string(_gc_lastcause));
299 _perf_gc_cause->set_value(GCCause::to_string(v));
300 }
301 _gc_cause = v;
302 }
// NOTE(review): OLD (left-hand) copy of the second side-by-side diff hunk of
// CollectedHeap (public allocation facade and filler-object utilities). The
// leading "3xx" tokens are listing line numbers from the diff tool, not code.
// Accessor for the cause recorded by set_gc_cause() above.
303 GCCause::Cause gc_cause() { return _gc_cause; }
304
305 // General obj/array allocation facilities.
306 inline static oop obj_allocate(Klass* klass, int size, TRAPS);
307 inline static oop array_allocate(Klass* klass, int size, int length, TRAPS);
308 inline static oop array_allocate_nozero(Klass* klass, int size, int length, TRAPS);
309 inline static oop class_allocate(Klass* klass, int size, TRAPS);
310
311 // Raw memory allocation facilities
312 // The obj and array allocate methods are covers for these methods.
313 // mem_allocate() should never be
314 // called to allocate TLABs, only individual objects.
315 virtual HeapWord* mem_allocate(size_t size,
316 bool* gc_overhead_limit_was_exceeded) = 0;
317
318 // Utilities for turning raw memory into filler objects.
319 //
320 // min_fill_size() is the smallest region that can be filled.
321 // fill_with_objects() can fill arbitrary-sized regions of the heap using
322 // multiple objects. fill_with_object() is for regions known to be smaller
323 // than the largest array of integers; it uses a single object to fill the
324 // region and has slightly less overhead.
325 static size_t min_fill_size() {
326 return size_t(align_object_size(oopDesc::header_size()));
327 }
328
329 static void fill_with_objects(HeapWord* start, size_t words, bool zap = true);
330
331 static void fill_with_object(HeapWord* start, size_t words, bool zap = true);
// Convenience overload: forwards the region's bounds to the (start, words) form.
332 static void fill_with_object(MemRegion region, bool zap = true) {
333 fill_with_object(region.start(), region.word_size(), zap);
334 }
335 static void fill_with_object(HeapWord* start, HeapWord* end, bool zap = true) {
|
297 _gc_lastcause = _gc_cause;
298 _perf_gc_lastcause->set_value(GCCause::to_string(_gc_lastcause));
299 _perf_gc_cause->set_value(GCCause::to_string(v));
300 }
301 _gc_cause = v;
302 }
// NOTE(review): NEW (right-hand) copy of the second side-by-side diff hunk of
// CollectedHeap. Relative to the old copy it adds the virtual obj_allocate_raw()
// hook (listing lines 318-323 below); everything else is unchanged. The leading
// "3xx" tokens are listing line numbers from the diff tool, not code.
// Accessor for the cause recorded by set_gc_cause() above.
303 GCCause::Cause gc_cause() { return _gc_cause; }
304
305 // General obj/array allocation facilities.
306 inline static oop obj_allocate(Klass* klass, int size, TRAPS);
307 inline static oop array_allocate(Klass* klass, int size, int length, TRAPS);
308 inline static oop array_allocate_nozero(Klass* klass, int size, int length, TRAPS);
309 inline static oop class_allocate(Klass* klass, int size, TRAPS);
310
311 // Raw memory allocation facilities
312 // The obj and array allocate methods are covers for these methods.
313 // mem_allocate() should never be
314 // called to allocate TLABs, only individual objects.
315 virtual HeapWord* mem_allocate(size_t size,
316 bool* gc_overhead_limit_was_exceeded) = 0;
317
318 // Raw memory allocation. This may or may not use TLAB allocations to satisfy the
319 // allocation. A GC implementation may override this function to satisfy the allocation
320 // in any way. But the default is to try a TLAB allocation, and otherwise perform
321 // mem_allocate.
// NOTE(review): new in this copy of the hunk -- per-GC override point for raw
// object allocation.
322 virtual HeapWord* obj_allocate_raw(Klass* klass, size_t size,
323 bool* gc_overhead_limit_was_exceeded, TRAPS);
324
325 // Utilities for turning raw memory into filler objects.
326 //
327 // min_fill_size() is the smallest region that can be filled.
328 // fill_with_objects() can fill arbitrary-sized regions of the heap using
329 // multiple objects. fill_with_object() is for regions known to be smaller
330 // than the largest array of integers; it uses a single object to fill the
331 // region and has slightly less overhead.
332 static size_t min_fill_size() {
333 return size_t(align_object_size(oopDesc::header_size()));
334 }
335
336 static void fill_with_objects(HeapWord* start, size_t words, bool zap = true);
337
338 static void fill_with_object(HeapWord* start, size_t words, bool zap = true);
// Convenience overload: forwards the region's bounds to the (start, words) form.
339 static void fill_with_object(MemRegion region, bool zap = true) {
340 fill_with_object(region.start(), region.word_size(), zap);
341 }
342 static void fill_with_object(HeapWord* start, HeapWord* end, bool zap = true) {
|