1 #ifdef USE_PRAGMA_IDENT_HDR 2 #pragma ident "@(#)allocation.hpp 1.77 07/05/05 17:05:42 JVM" 3 #endif 4 /* 5 * Copyright 1997-2005 Sun Microsystems, Inc. All Rights Reserved. 6 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 7 * 8 * This code is free software; you can redistribute it and/or modify it 9 * under the terms of the GNU General Public License version 2 only, as 10 * published by the Free Software Foundation. 11 * 12 * This code is distributed in the hope that it will be useful, but WITHOUT 13 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 14 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 15 * version 2 for more details (a copy is included in the LICENSE file that 16 * accompanied this code). 17 * 18 * You should have received a copy of the GNU General Public License version 19 * 2 along with this work; if not, write to the Free Software Foundation, 20 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 21 * 22 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, 23 * CA 95054 USA or visit www.sun.com if you need additional information or 24 * have any questions. 25 * 26 */ 27 28 #define ARENA_ALIGN_M1 (((size_t)(ARENA_AMALLOC_ALIGNMENT)) - 1) 29 #define ARENA_ALIGN_MASK (~((size_t)ARENA_ALIGN_M1)) 30 #define ARENA_ALIGN(x) ((((size_t)(x)) + ARENA_ALIGN_M1) & ARENA_ALIGN_MASK) 31 32 // All classes in the virtual machine must be subclassed 33 // by one of the following allocation classes: 34 // 35 // For objects allocated in the resource area (see resourceArea.hpp). 36 // - ResourceObj 37 // 38 // For objects allocated in the C-heap (managed by: free & malloc). 39 // - CHeapObj 40 // 41 // For objects allocated on the stack. 42 // - StackObj 43 // 44 // For embedded objects. 45 // - ValueObj 46 // 47 // For classes used as name spaces. 
// - AllStatic
//
// The printable subclasses are used for debugging and define virtual
// member functions for printing. Classes that avoid allocating the
// vtbl entries in the objects should therefore not be the printable
// subclasses.
//
// The following macros and function should be used to allocate memory
// directly in the resource area or in the C-heap:
//
//   NEW_RESOURCE_ARRAY(type,size)
//   NEW_RESOURCE_OBJ(type)
//   NEW_C_HEAP_ARRAY(type,size)
//   NEW_C_HEAP_OBJ(type)
//   char* AllocateHeap(size_t size, const char* name);
//   void  FreeHeap(void* p);
//
// C-heap allocation can be traced using +PrintHeapAllocation.
// malloc and free should therefore never be called directly.

// Base class for objects allocated in the C-heap.

// In non product mode we introduce a super class for all allocation classes
// that supports printing.
// We avoid the superclass in product mode since some C++ compilers add
// a word overhead for empty super classes.

#ifdef PRODUCT
#define ALLOCATION_SUPER_CLASS_SPEC
#else
#define ALLOCATION_SUPER_CLASS_SPEC : public AllocatedObj
// Debug-build-only root class that gives every allocation class a
// printable interface; compiled out in PRODUCT builds to avoid the
// empty-base/vtable overhead some compilers would add per object.
class AllocatedObj {
 public:
  // Printing support
  void print() const;        // convenience wrapper around print_on
  void print_value() const;  // convenience wrapper around print_value_on

  virtual void print_on(outputStream* st) const;        // full print to 'st'
  virtual void print_value_on(outputStream* st) const;  // short-form print to 'st'
};
#endif

// Base class for objects allocated in the C-heap (malloc/free backed).
// operator new/delete are defined out of line so allocation can be
// traced (see +PrintHeapAllocation above).
class CHeapObj ALLOCATION_SUPER_CLASS_SPEC {
 public:
  void* operator new(size_t size);
  void  operator delete(void* p);
  void* new_array(size_t size);  // raw array storage from the C-heap
};

// Base class for objects allocated on the stack only.
// Calling new or delete will result in fatal error.

class StackObj ALLOCATION_SUPER_CLASS_SPEC {
 public:
  void* operator new(size_t size);  // results in fatal error (see comment above)
  void  operator delete(void* p);   // results in fatal error (see comment above)
};

// Base class for objects used as value objects.
// Calling new or delete will result in fatal error.
//
// Portability note: Certain compilers (e.g. gcc) will
// always make classes bigger if it has a superclass, even
// if the superclass does not have any virtual methods or
// instance fields. The HotSpot implementation relies on this
// not to happen. So never make a ValueObj class a direct subclass
// of this object, but use the VALUE_OBJ_CLASS_SPEC class instead, e.g.,
// like this:
//
//   class A VALUE_OBJ_CLASS_SPEC {
//     ...
//   }
//
// With gcc and possibly other compilers the VALUE_OBJ_CLASS_SPEC can
// be defined as an empty string "".
//
class _ValueObj {
 public:
  void* operator new(size_t size);  // results in fatal error (see comment above)
  void  operator delete(void* p);   // results in fatal error (see comment above)
};

// Base class for classes that constitute name spaces.
// Construction/destruction of a subclass instance is a programming
// error, reported at runtime via ShouldNotCallThis().

class AllStatic {
 public:
  AllStatic()  { ShouldNotCallThis(); }
  ~AllStatic() { ShouldNotCallThis(); }
};


//------------------------------Chunk------------------------------------------
// Linked list of raw memory chunks; the backing store for Arena (below).
// A Chunk's payload begins just past its alignment-padded header, see
// bottom()/top().
class Chunk: public CHeapObj {
 protected:
  Chunk*       _next;  // Next Chunk in list
  const size_t _len;   // Size of this Chunk
 public:
  void* operator new(size_t size, size_t length);  // 'length' = payload bytes
  void  operator delete(void* p);
  Chunk(size_t length);

  enum {
    // default sizes; make them slightly smaller than 2**k to guard against
    // buddy-system style malloc implementations
#ifdef _LP64
    slack      = 40,  // [RGV] Not sure if this is right, but make it
                      //       a multiple of 8.
156 #else 157 slack = 20, // suspected sizeof(Chunk) + internal malloc headers 158 #endif 159 160 init_size = 1*K - slack, // Size of first chunk 161 medium_size= 10*K - slack, // Size of medium-sized chunk 162 size = 32*K - slack, // Default size of an Arena chunk (following the first) 163 non_pool_size = init_size + 32 // An initial size which is not one of above 164 }; 165 166 void chop(); // Chop this chunk 167 void next_chop(); // Chop next chunk 168 static size_t aligned_overhead_size(void) { return ARENA_ALIGN(sizeof(Chunk)); } 169 170 size_t length() const { return _len; } 171 Chunk* next() const { return _next; } 172 void set_next(Chunk* n) { _next = n; } 173 // Boundaries of data area (possibly unused) 174 char* bottom() const { return ((char*) this) + aligned_overhead_size(); } 175 char* top() const { return bottom() + _len; } 176 bool contains(char* p) const { return bottom() <= p && p <= top(); } 177 178 // Start the chunk_pool cleaner task 179 static void start_chunk_pool_cleaner_task(); 180 }; 181 182 183 //------------------------------Arena------------------------------------------ 184 // Fast allocation of memory 185 class Arena: public CHeapObj { 186 protected: 187 friend class ResourceMark; 188 friend class HandleMark; 189 friend class NoHandleMark; 190 Chunk *_first; // First chunk 191 Chunk *_chunk; // current chunk 192 char *_hwm, *_max; // High water mark and max in current chunk 193 void* grow(size_t x); // Get a new Chunk of at least size x 194 NOT_PRODUCT(size_t _size_in_bytes;) // Size of arena (used for memory usage tracing) 195 NOT_PRODUCT(static size_t _bytes_allocated;) // total #bytes allocated since start 196 friend class AllocStats; 197 debug_only(void* malloc(size_t size);) 198 debug_only(void* internal_malloc_4(size_t x);) 199 public: 200 Arena(); 201 Arena(size_t init_size); 202 Arena(Arena *old); 203 ~Arena(); 204 void destruct_contents(); 205 char* hwm() const { return _hwm; } 206 207 // Fast allocate in the arena. 
Common case is: pointer test + increment. 208 void* Amalloc(size_t x) { 209 assert(is_power_of_2(ARENA_AMALLOC_ALIGNMENT) , "should be a power of 2"); 210 x = ARENA_ALIGN(x); 211 debug_only(if (UseMallocOnly) return malloc(x);) 212 NOT_PRODUCT(_bytes_allocated += x); 213 if (_hwm + x > _max) { 214 return grow(x); 215 } else { 216 char *old = _hwm; 217 _hwm += x; 218 return old; 219 } 220 } 221 // Further assume size is padded out to words 222 void *Amalloc_4(size_t x) { 223 assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" ); 224 debug_only(if (UseMallocOnly) return malloc(x);) 225 NOT_PRODUCT(_bytes_allocated += x); 226 if (_hwm + x > _max) { 227 return grow(x); 228 } else { 229 char *old = _hwm; 230 _hwm += x; 231 return old; 232 } 233 } 234 235 // Allocate with 'double' alignment. It is 8 bytes on sparc. 236 // In other cases Amalloc_D() should be the same as Amalloc_4(). 237 void* Amalloc_D(size_t x) { 238 assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" ); 239 debug_only(if (UseMallocOnly) return malloc(x);) 240 #if defined(SPARC) && !defined(_LP64) 241 #define DALIGN_M1 7 242 size_t delta = (((size_t)_hwm + DALIGN_M1) & ~DALIGN_M1) - (size_t)_hwm; 243 x += delta; 244 #endif 245 NOT_PRODUCT(_bytes_allocated += x); 246 if (_hwm + x > _max) { 247 return grow(x); // grow() returns a result aligned >= 8 bytes. 248 } else { 249 char *old = _hwm; 250 _hwm += x; 251 #if defined(SPARC) && !defined(_LP64) 252 old += delta; // align to 8-bytes 253 #endif 254 return old; 255 } 256 } 257 258 // Fast delete in area. 
Common case is: NOP (except for storage reclaimed) 259 void Afree(void *ptr, size_t size) { 260 #ifdef ASSERT 261 if (ZapResourceArea) memset(ptr, badResourceValue, size); // zap freed memory 262 if (UseMallocOnly) return; 263 #endif 264 if (((char*)ptr) + size == _hwm) _hwm = (char*)ptr; 265 } 266 267 void *Arealloc( void *old_ptr, size_t old_size, size_t new_size ); 268 269 // Move contents of this arena into an empty arena 270 Arena *move_contents(Arena *empty_arena); 271 272 // Determine if pointer belongs to this Arena or not. 273 bool contains( const void *ptr ) const; 274 275 // Total of all chunks in use (not thread-safe) 276 size_t used() const; 277 278 // Total # of bytes used 279 size_t size_in_bytes() const NOT_PRODUCT({ return _size_in_bytes; }) PRODUCT_RETURN0; 280 void set_size_in_bytes(size_t size) NOT_PRODUCT({ _size_in_bytes = size; }) PRODUCT_RETURN; 281 static void free_malloced_objects(Chunk* chunk, char* hwm, char* max, char* hwm2) PRODUCT_RETURN; 282 static void free_all(char** start, char** end) PRODUCT_RETURN; 283 284 private: 285 // Reset this Arena to empty, access will trigger grow if necessary 286 void reset(void) { 287 _first = _chunk = NULL; 288 _hwm = _max = NULL; 289 } 290 }; 291 292 // One of the following macros must be used when allocating 293 // an array or object from an arena 294 #define NEW_ARENA_ARRAY(arena, type, size)\ 295 (type*) arena->Amalloc((size) * sizeof(type)) 296 297 #define REALLOC_ARENA_ARRAY(arena, type, old, old_size, new_size)\ 298 (type*) arena->Arealloc((char*)(old), (old_size) * sizeof(type), (new_size) * sizeof(type) ) 299 300 #define FREE_ARENA_ARRAY(arena, type, old, size)\ 301 arena->Afree((char*)(old), (size) * sizeof(type)) 302 303 #define NEW_ARENA_OBJ(arena, type)\ 304 NEW_ARENA_ARRAY(arena, type, 1) 305 306 307 //%note allocation_1 308 extern char* resource_allocate_bytes(size_t size); 309 extern char* resource_allocate_bytes(Thread* thread, size_t size); 310 extern char* 
resource_reallocate_bytes( char *old, size_t old_size, size_t new_size);
extern void resource_free_bytes( char *old, size_t size );

//----------------------------------------------------------------------
// Base class for objects allocated in the resource area per default.
// Optionally, objects may be allocated on the C heap with
// new(ResourceObj::C_HEAP) Foo(...) or in an Arena with new (&arena)
// ResourceObj's can be allocated within other objects, but don't use
// new or delete (allocation_type is unknown). If new is used to allocate,
// use delete to deallocate.
class ResourceObj ALLOCATION_SUPER_CLASS_SPEC {
 public:
  // Where an instance's storage came from; tracked only in ASSERT builds.
  enum allocation_type { UNKNOWN = 0, C_HEAP, RESOURCE_AREA, ARENA };
#ifdef ASSERT
 private:
  // Set by the inline operator new overloads below; presumably also by the
  // C_HEAP placement variant (defined elsewhere) — TODO confirm.
  allocation_type _allocation;
 public:
  bool allocated_on_C_heap()    { return _allocation == C_HEAP; }
#endif // ASSERT

 public:
  // C-heap placement variant: new(ResourceObj::C_HEAP) Foo(...)
  void* operator new(size_t size, allocation_type type);
  // Arena placement variant: new(&arena) Foo(...)
  void* operator new(size_t size, Arena *arena) {
      address res = (address)arena->Amalloc(size);
      // Set allocation type in the resource object
      DEBUG_ONLY(((ResourceObj *)res)->_allocation = ARENA;)
      return res;
  }
  // Default: allocate in the current thread's resource area.
  void* operator new(size_t size) {
      address res = (address)resource_allocate_bytes(size);
      // Set allocation type in the resource object
      DEBUG_ONLY(((ResourceObj *)res)->_allocation = RESOURCE_AREA;)
      return res;
  }
  void  operator delete(void* p);
};

// One of the following macros must be used when allocating an array
// or object to determine whether it should reside in the C heap or in
// the resource area.

#define NEW_RESOURCE_ARRAY(type, size)\
  (type*) resource_allocate_bytes((size) * sizeof(type))

#define NEW_RESOURCE_ARRAY_IN_THREAD(thread, type, size)\
  (type*) resource_allocate_bytes(thread, (size) * sizeof(type))

#define REALLOC_RESOURCE_ARRAY(type, old, old_size, new_size)\
  (type*) resource_reallocate_bytes((char*)(old), (old_size) * sizeof(type), (new_size) * sizeof(type) )

#define FREE_RESOURCE_ARRAY(type, old, size)\
  resource_free_bytes((char*)(old), (size) * sizeof(type))

#define FREE_FAST(old)\
    /* nop */

#define NEW_RESOURCE_OBJ(type)\
  NEW_RESOURCE_ARRAY(type, 1)

// The XSTR(type) " in " __FILE__ string tags each C-heap allocation with
// its type and origin file for allocation tracing.
#define NEW_C_HEAP_ARRAY(type, size)\
  (type*) (AllocateHeap((size) * sizeof(type), XSTR(type) " in " __FILE__))

#define REALLOC_C_HEAP_ARRAY(type, old, size)\
  (type*) (ReallocateHeap((char*)old, (size) * sizeof(type), XSTR(type) " in " __FILE__))

#define FREE_C_HEAP_ARRAY(type,old) \
  FreeHeap((char*)(old))

#define NEW_C_HEAP_OBJ(type)\
  NEW_C_HEAP_ARRAY(type, 1)

extern bool warn_new_operator;

// for statistics
#ifndef PRODUCT
// Samples the global malloc/free and resource-area counters at construction
// so the accessors below can report deltas "since creation of receiver".
class AllocStats : StackObj {
  int start_mallocs, start_frees;              // counters sampled at construction
  size_t start_malloc_bytes, start_res_bytes;  // byte counters sampled at construction
 public:
  AllocStats();

  int    num_mallocs();     // since creation of receiver
  size_t alloc_bytes();     // since creation of receiver
  size_t resource_bytes();  // since creation of receiver
  int    num_frees();       // since creation of receiver
  void print();
};
#endif


//------------------------------ReallocMark---------------------------------
// Code which uses REALLOC_RESOURCE_ARRAY should check an associated
// ReallocMark, which is declared in the same scope as the reallocated
// pointer. Any operation that could __potentially__ cause a reallocation
// should check the ReallocMark.
class ReallocMark: public StackObj {
protected:
  NOT_PRODUCT(int _nesting;)  // nesting level tracked in debug builds only

public:
  ReallocMark()   PRODUCT_RETURN;
  void check()    PRODUCT_RETURN;
};