src/share/vm/memory/allocation.hpp (new version, after the change)

 389   friend class ResourceMark;
 390   friend class HandleMark;
 391   friend class NoHandleMark;
 392   friend class VMStructs;
 393 
 394   Chunk *_first;                // First chunk
 395   Chunk *_chunk;                // current chunk
 396   char *_hwm, *_max;            // High water mark and max in current chunk
 397   // Get a new Chunk of at least size x
 398   void* grow(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
 399   size_t _size_in_bytes;        // Size of arena (used for native memory tracking)
 400 
 401   NOT_PRODUCT(static julong _bytes_allocated;) // total #bytes allocated since start
 402   friend class AllocStats;
 403   debug_only(void* malloc(size_t size);)
 404   debug_only(void* internal_malloc_4(size_t x);)
 405   NOT_PRODUCT(void inc_bytes_allocated(size_t x);)
 406 
 407   void signal_out_of_memory(size_t request, const char* whence) const;
 408 
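       // The condition "UINTPTR_MAX - request < (uintptr_t)_hwm" below is a
       // wrap-around-safe way of asking whether "_hwm + request" would overflow the
       // address space. With RETURN_NULL the overflow is reported to the caller
       // (which then returns NULL); with EXIT_OOM it is reported via signal_out_of_memory().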
 409   bool check_for_overflow(size_t request, const char* whence,
 410       AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) const {
 411     if (UINTPTR_MAX - request < (uintptr_t)_hwm) {
 412       if (alloc_failmode == AllocFailStrategy::RETURN_NULL) {
 413         return false;
 414       }
 415       signal_out_of_memory(request, whence);
 416     }
 417     return true;
 418  }
 419 
 420  public:
 421   Arena();
 422   Arena(size_t init_size);
 423   ~Arena();
 424   void  destruct_contents();
 425   char* hwm() const             { return _hwm; }
 426 
 427   // new operators
 428   void* operator new (size_t size) throw();
 429   void* operator new (size_t size, const std::nothrow_t& nothrow_constant) throw();
 430 
 431   // dynamic memory type tagging
 432   void* operator new(size_t size, MEMFLAGS flags) throw();
 433   void* operator new(size_t size, const std::nothrow_t& nothrow_constant, MEMFLAGS flags) throw();
 434   void  operator delete(void* p);
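       // Usage sketch (not from this file): "Arena* a = new (mtInternal) Arena();"
       // routes the arena object's own allocation through the MEMFLAGS-tagged
       // operator new above, so native memory tracking attributes it to mtInternal.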
 435 
 436   // Fast allocate in the arena.  Common case is: pointer test + increment.
 437   void* Amalloc(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
 438     assert(is_power_of_2(ARENA_AMALLOC_ALIGNMENT) , "should be a power of 2");
 439     x = ARENA_ALIGN(x);
 440     debug_only(if (UseMallocOnly) return malloc(x);)
 441     if (!check_for_overflow(x, "Arena::Amalloc", alloc_failmode))
 442       return NULL;
 443     NOT_PRODUCT(inc_bytes_allocated(x);)
 444     if (_hwm + x > _max) {
 445       return grow(x, alloc_failmode);
 446     } else {
 447       char *old = _hwm;
 448       _hwm += x;
 449       return old;
 450     }
 451   }
 452   // Further assume size is padded out to words
 453   void *Amalloc_4(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
 454     assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
 455     debug_only(if (UseMallocOnly) return malloc(x);)
 456     if (!check_for_overflow(x, "Arena::Amalloc_4", alloc_failmode))
 457       return NULL;
 458     NOT_PRODUCT(inc_bytes_allocated(x);)
 459     if (_hwm + x > _max) {
 460       return grow(x, alloc_failmode);
 461     } else {
 462       char *old = _hwm;
 463       _hwm += x;
 464       return old;
 465     }
 466   }
 467 
 468   // Allocate with 'double' alignment. It is 8 bytes on sparc.
 469   // In other cases Amalloc_D() should be the same as Amalloc_4().
 470   void* Amalloc_D(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
 471     assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
 472     debug_only(if (UseMallocOnly) return malloc(x);)
 473 #if defined(SPARC) && !defined(_LP64)
 474 #define DALIGN_M1 7
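         // delta = number of pad bytes needed to round _hwm up to the next 8-byte
         // boundary (DALIGN_M1 is the alignment minus one, used as a mask).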
 475     size_t delta = (((size_t)_hwm + DALIGN_M1) & ~DALIGN_M1) - (size_t)_hwm;
 476     x += delta;
 477 #endif
 478     if (!check_for_overflow(x, "Arena::Amalloc_D", alloc_failmode))
 479       return NULL;
 480     NOT_PRODUCT(inc_bytes_allocated(x);)
 481     if (_hwm + x > _max) {
 482       return grow(x, alloc_failmode); // grow() returns a result aligned >= 8 bytes.
 483     } else {
 484       char *old = _hwm;
 485       _hwm += x;
 486 #if defined(SPARC) && !defined(_LP64)
 487       old += delta; // align to 8-bytes
 488 #endif
 489       return old;
 490     }
 491   }
 492 
 493   // Fast delete in area.  Common case is: NOP (except for storage reclaimed)
 494   void Afree(void *ptr, size_t size) {
 495 #ifdef ASSERT
 496     if (ZapResourceArea) memset(ptr, badResourceValue, size); // zap freed memory
 497     if (UseMallocOnly) return;
 498 #endif
 499     if (((char*)ptr) + size == _hwm) _hwm = (char*)ptr;
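
For illustration, a minimal caller-side sketch (not part of the patch) of what the
RETURN_NULL-aware check above enables: when the requested size would overflow the
high-water mark, or when a new chunk cannot be allocated, Amalloc() hands back NULL
instead of terminating the VM, so the caller can back out. The local names below are
hypothetical.

    Arena arena;
    void* p = arena.Amalloc(1024, AllocFailStrategy::RETURN_NULL);
    if (p == NULL) {
      // Either check_for_overflow() returned false for this request, or grow()
      // failed to allocate a new chunk; recover here rather than exiting the VM
      // through signal_out_of_memory().
    }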

src/share/vm/memory/allocation.hpp (old version, before the change)

 389   friend class ResourceMark;
 390   friend class HandleMark;
 391   friend class NoHandleMark;
 392   friend class VMStructs;
 393 
 394   Chunk *_first;                // First chunk
 395   Chunk *_chunk;                // current chunk
 396   char *_hwm, *_max;            // High water mark and max in current chunk
 397   // Get a new Chunk of at least size x
 398   void* grow(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
 399   size_t _size_in_bytes;        // Size of arena (used for native memory tracking)
 400 
 401   NOT_PRODUCT(static julong _bytes_allocated;) // total #bytes allocated since start
 402   friend class AllocStats;
 403   debug_only(void* malloc(size_t size);)
 404   debug_only(void* internal_malloc_4(size_t x);)
 405   NOT_PRODUCT(void inc_bytes_allocated(size_t x);)
 406 
 407   void signal_out_of_memory(size_t request, const char* whence) const;
 408 
 409   void check_for_overflow(size_t request, const char* whence) const {
 410     if (UINTPTR_MAX - request < (uintptr_t)_hwm) {
 411       signal_out_of_memory(request, whence);
 412     }
 413  }
 414 
 415  public:
 416   Arena();
 417   Arena(size_t init_size);
 418   ~Arena();
 419   void  destruct_contents();
 420   char* hwm() const             { return _hwm; }
 421 
 422   // new operators
 423   void* operator new (size_t size) throw();
 424   void* operator new (size_t size, const std::nothrow_t& nothrow_constant) throw();
 425 
 426   // dynamic memory type tagging
 427   void* operator new(size_t size, MEMFLAGS flags) throw();
 428   void* operator new(size_t size, const std::nothrow_t& nothrow_constant, MEMFLAGS flags) throw();
 429   void  operator delete(void* p);
 430 
 431   // Fast allocate in the arena.  Common case is: pointer test + increment.
 432   void* Amalloc(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
 433     assert(is_power_of_2(ARENA_AMALLOC_ALIGNMENT) , "should be a power of 2");
 434     x = ARENA_ALIGN(x);
 435     debug_only(if (UseMallocOnly) return malloc(x);)
 436     check_for_overflow(x, "Arena::Amalloc");
 437     NOT_PRODUCT(inc_bytes_allocated(x);)
 438     if (_hwm + x > _max) {
 439       return grow(x, alloc_failmode);
 440     } else {
 441       char *old = _hwm;
 442       _hwm += x;
 443       return old;
 444     }
 445   }
 446   // Further assume size is padded out to words
 447   void *Amalloc_4(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
 448     assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
 449     debug_only(if (UseMallocOnly) return malloc(x);)
 450     check_for_overflow(x, "Arena::Amalloc_4");
 451     NOT_PRODUCT(inc_bytes_allocated(x);)
 452     if (_hwm + x > _max) {
 453       return grow(x, alloc_failmode);
 454     } else {
 455       char *old = _hwm;
 456       _hwm += x;
 457       return old;
 458     }
 459   }
 460 
 461   // Allocate with 'double' alignment. It is 8 bytes on sparc.
 462   // In other cases Amalloc_D() should be the same as Amalloc_4().
 463   void* Amalloc_D(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
 464     assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
 465     debug_only(if (UseMallocOnly) return malloc(x);)
 466 #if defined(SPARC) && !defined(_LP64)
 467 #define DALIGN_M1 7
 468     size_t delta = (((size_t)_hwm + DALIGN_M1) & ~DALIGN_M1) - (size_t)_hwm;
 469     x += delta;
 470 #endif
 471     check_for_overflow(x, "Arena::Amalloc_D");
 472     NOT_PRODUCT(inc_bytes_allocated(x);)
 473     if (_hwm + x > _max) {
 474       return grow(x, alloc_failmode); // grow() returns a result aligned >= 8 bytes.
 475     } else {
 476       char *old = _hwm;
 477       _hwm += x;
 478 #if defined(SPARC) && !defined(_LP64)
 479       old += delta; // align to 8-bytes
 480 #endif
 481       return old;
 482     }
 483   }
 484 
 485   // Fast delete in area.  Common case is: NOP (except for storage reclaimed)
 486   void Afree(void *ptr, size_t size) {
 487 #ifdef ASSERT
 488     if (ZapResourceArea) memset(ptr, badResourceValue, size); // zap freed memory
 489     if (UseMallocOnly) return;
 490 #endif
 491     if (((char*)ptr) + size == _hwm) _hwm = (char*)ptr;
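
As an aside on Afree() (identical in both versions above): the high-water mark is only
rolled back when the freed block is the most recent allocation in the current chunk;
any other free is a no-op apart from the optional debug-mode zapping. A minimal sketch,
assuming both allocations land in the same chunk and the sizes are already
ARENA_ALIGN'ed:

    Arena arena;
    char* a = (char*) arena.Amalloc(64);
    char* b = (char*) arena.Amalloc(32);
    arena.Afree(a, 64);   // a + 64 == b, not _hwm: nothing is reclaimed
    arena.Afree(b, 32);   // b + 32 == _hwm: _hwm rolls back to b, reclaiming 32 bytes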