src/share/vm/memory/allocation.hpp
Index Unified diffs Context diffs Sdiffs Wdiffs Patch New Old Previous File Next File 7017124 Sdiff src/share/vm/memory

src/share/vm/memory/allocation.hpp

Print this page




  // True iff p points into this chunk's payload.  top() itself is included
  // because an arena's high-water mark may legitimately sit one past the
  // last allocated byte.
  bool contains(char* p) const  { return bottom() <= p && p <= top(); }

  // Start the chunk_pool cleaner task
  static void start_chunk_pool_cleaner_task();

  // Releases cached chunks from the chunk pool -- implementation elsewhere;
  // presumably returns pooled storage to the C heap (confirm in allocation.cpp).
  static void clean_chunk_pool();
};
 192 
//------------------------------Arena------------------------------------------
// Fast allocation of memory
// Bump-pointer allocator over a linked list of Chunks: allocation is a
// pointer test + increment in the common case (see Amalloc below); storage
// is reclaimed wholesale via destruct_contents()/~Arena rather than per
// allocation.
class Arena: public CHeapObj {
protected:
  friend class ResourceMark;
  friend class HandleMark;
  friend class NoHandleMark;
  Chunk *_first;                // First chunk
  Chunk *_chunk;                // current chunk
  char *_hwm, *_max;            // High water mark and max in current chunk
  void* grow(size_t x);         // Get a new Chunk of at least size x
  NOT_PRODUCT(size_t _size_in_bytes;) // Size of arena (used for memory usage tracing)
  NOT_PRODUCT(static size_t _bytes_allocated;) // total #bytes allocated since start
  friend class AllocStats;
  debug_only(void* malloc(size_t size);)      // debug-only path used by -XX:+UseMallocOnly
  debug_only(void* internal_malloc_4(size_t x);)
 public:
  Arena();
  Arena(size_t init_size);      // first chunk sized init_size
  Arena(Arena *old);            // NOTE(review): presumably takes over old's chunks -- confirm in allocation.cpp
  ~Arena();
  void  destruct_contents();    // frees all chunks; the Arena object itself survives
  char* hwm() const             { return _hwm; }
 216 
  // Fast allocate in the arena.  Common case is: pointer test + increment.
  void* Amalloc(size_t x) {
    assert(is_power_of_2(ARENA_AMALLOC_ALIGNMENT) , "should be a power of 2");
    // Round the request up to the arena's allocation alignment.
    x = ARENA_ALIGN(x);
    // Debug builds with -XX:+UseMallocOnly route every request through
    // malloc() so each allocation can be tracked and zapped individually.
    debug_only(if (UseMallocOnly) return malloc(x);)
    NOT_PRODUCT(_bytes_allocated += x);   // NB: plain += on a static -- not atomic
    if (_hwm + x > _max) {
      // Current chunk exhausted -- allocate from a fresh chunk.
      return grow(x);
    } else {
      // Bump-pointer fast path.
      char *old = _hwm;
      _hwm += x;
      return old;
    }
  }
  // Further assume size is padded out to words
  void *Amalloc_4(size_t x) {
    assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
    // Same fast path as Amalloc() minus the alignment round-up; the caller
    // guarantees x is already word-aligned (asserted above).
    debug_only(if (UseMallocOnly) return malloc(x);)
    NOT_PRODUCT(_bytes_allocated += x);   // NB: plain += on a static -- not atomic
    if (_hwm + x > _max) {
      return grow(x);
    } else {
      char *old = _hwm;
      _hwm += x;
      return old;
    }
  }
 244 
  // Allocate with 'double' alignment. It is 8 bytes on sparc.
  // In other cases Amalloc_D() should be the same as Amalloc_4().
  void* Amalloc_D(size_t x) {
    assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
    debug_only(if (UseMallocOnly) return malloc(x);)
#if defined(SPARC) && !defined(_LP64)
#define DALIGN_M1 7
    // Bytes needed to round _hwm up to the next 8-byte boundary; widen the
    // request by that padding so the aligned block still fits.
    size_t delta = (((size_t)_hwm + DALIGN_M1) & ~DALIGN_M1) - (size_t)_hwm;
    x += delta;
#endif
    NOT_PRODUCT(_bytes_allocated += x);
    if (_hwm + x > _max) {
      return grow(x); // grow() returns a result aligned >= 8 bytes.
    } else {
      char *old = _hwm;
      _hwm += x;
#if defined(SPARC) && !defined(_LP64)
      old += delta; // align to 8-bytes
#endif
      return old;
    }
  }
 267 
 268   // Fast delete in area.  Common case is: NOP (except for storage reclaimed)
 269   void Afree(void *ptr, size_t size) {
 270 #ifdef ASSERT
 271     if (ZapResourceArea) memset(ptr, badResourceValue, size); // zap freed memory
 272     if (UseMallocOnly) return;
 273 #endif
 274     if (((char*)ptr) + size == _hwm) _hwm = (char*)ptr;
 275   }


 389 #define NEW_RESOURCE_OBJ(type)\
 390   NEW_RESOURCE_ARRAY(type, 1)
 391 
 392 #define NEW_C_HEAP_ARRAY(type, size)\
 393   (type*) (AllocateHeap((size) * sizeof(type), XSTR(type) " in " __FILE__))
 394 
 395 #define REALLOC_C_HEAP_ARRAY(type, old, size)\
 396   (type*) (ReallocateHeap((char*)old, (size) * sizeof(type), XSTR(type) " in " __FILE__))
 397 
 398 #define FREE_C_HEAP_ARRAY(type,old) \
 399   FreeHeap((char*)(old))
 400 
 401 #define NEW_C_HEAP_OBJ(type)\
 402   NEW_C_HEAP_ARRAY(type, 1)
 403 
 404 extern bool warn_new_operator;
 405 
// for statistics
#ifndef PRODUCT
// Snapshots the global allocation counters at construction so that deltas
// ("since creation of receiver") can be queried and printed later.
// Stack-allocated only (StackObj).
class AllocStats : StackObj {
  int    start_mallocs, start_frees;            // counter snapshots -- source counters defined elsewhere
  size_t start_malloc_bytes, start_res_bytes;   // byte-counter snapshots (includes Arena::_bytes_allocated for resource bytes -- confirm)
 public:
  AllocStats();            // captures current counter values

  int    num_mallocs();    // since creation of receiver
  size_t alloc_bytes();
  size_t resource_bytes();
  int    num_frees();

  void   print();
};
#endif
 421 
 422 
//------------------------------ReallocMark---------------------------------
// Code which uses REALLOC_RESOURCE_ARRAY should check an associated
// ReallocMark, which is declared in the same scope as the reallocated
// pointer.  Any operation that could __potentially__ cause a reallocation
// should check the ReallocMark.
class ReallocMark: public StackObj {
protected:
  NOT_PRODUCT(int _nesting;)  // debug-only state captured at construction; used by check()

public:
  ReallocMark()   PRODUCT_RETURN;  // no-op in product builds
  void check()    PRODUCT_RETURN;  // asserts (per comment above) that no invalidating realloc occurred; no-op in product builds
};
 436 
 437 #endif // SHARE_VM_MEMORY_ALLOCATION_HPP


  // True iff p points into this chunk's payload.  top() itself is included
  // because an arena's high-water mark may legitimately sit one past the
  // last allocated byte.
  bool contains(char* p) const  { return bottom() <= p && p <= top(); }

  // Start the chunk_pool cleaner task
  static void start_chunk_pool_cleaner_task();

  // Releases cached chunks from the chunk pool -- implementation elsewhere;
  // presumably returns pooled storage to the C heap (confirm in allocation.cpp).
  static void clean_chunk_pool();
};
 192 
//------------------------------Arena------------------------------------------
// Fast allocation of memory
// Bump-pointer allocator over a linked list of Chunks: allocation is a
// pointer test + increment in the common case (see Amalloc below); storage
// is reclaimed wholesale via destruct_contents()/~Arena rather than per
// allocation.
class Arena: public CHeapObj {
protected:
  friend class ResourceMark;
  friend class HandleMark;
  friend class NoHandleMark;
  Chunk *_first;                // First chunk
  Chunk *_chunk;                // current chunk
  char *_hwm, *_max;            // High water mark and max in current chunk
  void* grow(size_t x);         // Get a new Chunk of at least size x
  NOT_PRODUCT(size_t _size_in_bytes;) // Size of arena (used for memory usage tracing)
  NOT_PRODUCT(static julong _bytes_allocated;) // total #bytes allocated since start
  friend class AllocStats;
  debug_only(void* malloc(size_t size);)      // debug-only path used by -XX:+UseMallocOnly
  debug_only(void* internal_malloc_4(size_t x);)
  NOT_PRODUCT(void inc_bytes_allocated(size_t x);)  // out-of-line bump of the static counter above
 public:
  Arena();
  Arena(size_t init_size);      // first chunk sized init_size
  Arena(Arena *old);            // NOTE(review): presumably takes over old's chunks -- confirm in allocation.cpp
  ~Arena();
  void  destruct_contents();    // frees all chunks; the Arena object itself survives
  char* hwm() const             { return _hwm; }
 217 
  // Fast allocate in the arena.  Common case is: pointer test + increment.
  void* Amalloc(size_t x) {
    assert(is_power_of_2(ARENA_AMALLOC_ALIGNMENT) , "should be a power of 2");
    // Round the request up to the arena's allocation alignment.
    x = ARENA_ALIGN(x);
    // Debug builds with -XX:+UseMallocOnly route every request through
    // malloc() so each allocation can be tracked and zapped individually.
    debug_only(if (UseMallocOnly) return malloc(x);)
    NOT_PRODUCT(inc_bytes_allocated(x);)  // non-product stats counter, updated out of line
    if (_hwm + x > _max) {
      // Current chunk exhausted -- allocate from a fresh chunk.
      return grow(x);
    } else {
      // Bump-pointer fast path.
      char *old = _hwm;
      _hwm += x;
      return old;
    }
  }
  // Further assume size is padded out to words
  void *Amalloc_4(size_t x) {
    assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
    // Same fast path as Amalloc() minus the alignment round-up; the caller
    // guarantees x is already word-aligned (asserted above).
    debug_only(if (UseMallocOnly) return malloc(x);)
    NOT_PRODUCT(inc_bytes_allocated(x);)  // non-product stats counter, updated out of line
    if (_hwm + x > _max) {
      return grow(x);
    } else {
      char *old = _hwm;
      _hwm += x;
      return old;
    }
  }
 245 
  // Allocate with 'double' alignment. It is 8 bytes on sparc.
  // In other cases Amalloc_D() should be the same as Amalloc_4().
  void* Amalloc_D(size_t x) {
    assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
    debug_only(if (UseMallocOnly) return malloc(x);)
#if defined(SPARC) && !defined(_LP64)
#define DALIGN_M1 7
    // Bytes needed to round _hwm up to the next 8-byte boundary; widen the
    // request by that padding so the aligned block still fits.
    size_t delta = (((size_t)_hwm + DALIGN_M1) & ~DALIGN_M1) - (size_t)_hwm;
    x += delta;
#endif
    NOT_PRODUCT(inc_bytes_allocated(x);)  // non-product stats counter, updated out of line
    if (_hwm + x > _max) {
      return grow(x); // grow() returns a result aligned >= 8 bytes.
    } else {
      char *old = _hwm;
      _hwm += x;
#if defined(SPARC) && !defined(_LP64)
      old += delta; // align to 8-bytes
#endif
      return old;
    }
  }
 268 
 269   // Fast delete in area.  Common case is: NOP (except for storage reclaimed)
 270   void Afree(void *ptr, size_t size) {
 271 #ifdef ASSERT
 272     if (ZapResourceArea) memset(ptr, badResourceValue, size); // zap freed memory
 273     if (UseMallocOnly) return;
 274 #endif
 275     if (((char*)ptr) + size == _hwm) _hwm = (char*)ptr;
 276   }


 390 #define NEW_RESOURCE_OBJ(type)\
 391   NEW_RESOURCE_ARRAY(type, 1)
 392 
 393 #define NEW_C_HEAP_ARRAY(type, size)\
 394   (type*) (AllocateHeap((size) * sizeof(type), XSTR(type) " in " __FILE__))
 395 
 396 #define REALLOC_C_HEAP_ARRAY(type, old, size)\
 397   (type*) (ReallocateHeap((char*)old, (size) * sizeof(type), XSTR(type) " in " __FILE__))
 398 
 399 #define FREE_C_HEAP_ARRAY(type,old) \
 400   FreeHeap((char*)(old))
 401 
 402 #define NEW_C_HEAP_OBJ(type)\
 403   NEW_C_HEAP_ARRAY(type, 1)
 404 
 405 extern bool warn_new_operator;
 406 
// for statistics
#ifndef PRODUCT
// Snapshots the global allocation counters at construction so that deltas
// ("since creation of receiver") can be queried and printed later.
// Stack-allocated only (StackObj).  julong counters avoid 32-bit overflow
// of byte totals.
class AllocStats : StackObj {
  julong start_mallocs, start_frees;                           // counter snapshots -- source counters defined elsewhere
  julong start_malloc_bytes, start_mfree_bytes, start_res_bytes; // byte-counter snapshots (includes Arena::_bytes_allocated for resource bytes -- confirm)
 public:
  AllocStats();            // captures current counter values

  julong num_mallocs();    // since creation of receiver
  julong alloc_bytes();
  julong num_frees();
  julong free_bytes();
  julong resource_bytes();
  void   print();
};
#endif
 423 
 424 
//------------------------------ReallocMark---------------------------------
// Code which uses REALLOC_RESOURCE_ARRAY should check an associated
// ReallocMark, which is declared in the same scope as the reallocated
// pointer.  Any operation that could __potentially__ cause a reallocation
// should check the ReallocMark.
class ReallocMark: public StackObj {
protected:
  NOT_PRODUCT(int _nesting;)  // debug-only state captured at construction; used by check()

public:
  ReallocMark()   PRODUCT_RETURN;  // no-op in product builds
  void check()    PRODUCT_RETURN;  // asserts (per comment above) that no invalidating realloc occurred; no-op in product builds
};
 438 
 439 #endif // SHARE_VM_MEMORY_ALLOCATION_HPP
src/share/vm/memory/allocation.hpp
Index Unified diffs Context diffs Sdiffs Wdiffs Patch New Old Previous File Next File