
src/hotspot/share/gc/shared/collectedHeap.hpp

   // Reinitialize tlabs before resuming mutators.
   virtual void resize_all_tlabs();
 
   // Allocate from the current thread's TLAB, with broken-out slow path.
-  inline static HeapWord* allocate_from_tlab(Klass* klass, Thread* thread, size_t size);
-  static HeapWord* allocate_from_tlab_slow(Klass* klass, Thread* thread, size_t size);
+  inline static HeapWord* allocate_from_tlab(Klass* klass, size_t size, TRAPS);
+  static HeapWord* allocate_from_tlab_slow(Klass* klass, size_t size, TRAPS);
 
   // Allocate an uninitialized block of the given size, or returns NULL if
   // this is impossible.
   inline static HeapWord* common_mem_allocate_noinit(Klass* klass, size_t size, TRAPS);
 
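
The hunk above changes allocate_from_tlab and allocate_from_tlab_slow to take TRAPS instead of an explicit Thread* parameter. As a reading aid only, here is a minimal sketch, not part of this webrev, of the fast/slow split the two declarations imply; the body is illustrative and assumes the conventional TLAB bump-pointer fast path via THREAD->tlab().allocate():

  // Illustrative sketch only (not from this patch): a plausible fast path
  // matching the new declarations, under the usual TLAB conventions.
  inline HeapWord* CollectedHeap::allocate_from_tlab(Klass* klass, size_t size, TRAPS) {
    assert(UseTLAB, "should use UseTLAB");

    // Fast path: bump-pointer allocation in the current thread's TLAB.
    HeapWord* obj = THREAD->tlab().allocate(size);
    if (obj != NULL) {
      return obj;
    }
    // Slow path: may retire the current TLAB and allocate a fresh one.
    return allocate_from_tlab_slow(klass, size, THREAD);
  }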

@@ -314,10 +314,17 @@
   // mem_allocate() should never be
   // called to allocate TLABs, only individual objects.
   virtual HeapWord* mem_allocate(size_t size,
                                  bool* gc_overhead_limit_was_exceeded) = 0;
 
+  // Raw memory allocation. This may or may not use TLAB allocations to satisfy the
+  // allocation. A GC implementation may override this function to satisfy the allocation
+  // in any way. But the default is to try a TLAB allocation, and otherwise perform
+  // mem_allocate.
+  virtual HeapWord* obj_allocate_raw(Klass* klass, size_t size,
+                                     bool* gc_overhead_limit_was_exceeded, TRAPS);
+
   // Utilities for turning raw memory into filler objects.
   //
   // min_fill_size() is the smallest region that can be filled.
   // fill_with_objects() can fill arbitrary-sized regions of the heap using
   // multiple objects.  fill_with_object() is for regions known to be smaller
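
The new comment pins down the default behavior of obj_allocate_raw: try a TLAB allocation first, then fall back to mem_allocate(). The definition itself is not shown in this hunk; a minimal sketch of a default body consistent with that comment could look like:

  // Illustrative sketch only (not from this patch): a default consistent
  // with the comment above -- TLAB first, then the GC's mem_allocate().
  HeapWord* CollectedHeap::obj_allocate_raw(Klass* klass, size_t size,
                                            bool* gc_overhead_limit_was_exceeded,
                                            TRAPS) {
    if (UseTLAB) {
      HeapWord* result = allocate_from_tlab(klass, size, THREAD);
      if (result != NULL) {
        return result;
      }
    }
    return mem_allocate(size, gc_overhead_limit_was_exceeded);
  }

Because the function is virtual, a GC implementation remains free to override it and satisfy the allocation in any other way.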