1 /*
   2  * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_VM_MEMORY_ALLOCATION_HPP
  26 #define SHARE_VM_MEMORY_ALLOCATION_HPP
  27 
  28 #include "runtime/globals.hpp"
  29 #include "utilities/globalDefinitions.hpp"
  30 #ifdef COMPILER1
  31 #include "c1/c1_globals.hpp"
  32 #endif
  33 #ifdef COMPILER2
  34 #include "opto/c2_globals.hpp"
  35 #endif
  36 
// Arena alignment helpers. ARENA_AMALLOC_ALIGNMENT is defined elsewhere and
// is assumed to be a power of two (asserted in Arena::Amalloc).
#define ARENA_ALIGN_M1 (((size_t)(ARENA_AMALLOC_ALIGNMENT)) - 1)
#define ARENA_ALIGN_MASK (~((size_t)ARENA_ALIGN_M1))
// Round x up to the next multiple of ARENA_AMALLOC_ALIGNMENT.
#define ARENA_ALIGN(x) ((((size_t)(x)) + ARENA_ALIGN_M1) & ARENA_ALIGN_MASK)
  40 
  41 // All classes in the virtual machine must be subclassed
  42 // by one of the following allocation classes:
  43 //
  44 // For objects allocated in the resource area (see resourceArea.hpp).
  45 // - ResourceObj
  46 //
  47 // For objects allocated in the C-heap (managed by: free & malloc).
  48 // - CHeapObj
  49 //
  50 // For objects allocated on the stack.
  51 // - StackObj
  52 //
  53 // For embedded objects.
  54 // - ValueObj
  55 //
  56 // For classes used as name spaces.
  57 // - AllStatic
  58 //
  59 // The printable subclasses are used for debugging and define virtual
  60 // member functions for printing. Classes that avoid allocating the
  61 // vtbl entries in the objects should therefore not be the printable
  62 // subclasses.
  63 //
  64 // The following macros and function should be used to allocate memory
  65 // directly in the resource area or in the C-heap:
  66 //
  67 //   NEW_RESOURCE_ARRAY(type,size)
  68 //   NEW_RESOURCE_OBJ(type)
  69 //   NEW_C_HEAP_ARRAY(type,size)
  70 //   NEW_C_HEAP_OBJ(type)
  71 //   char* AllocateHeap(size_t size, const char* name);
  72 //   void  FreeHeap(void* p);
  73 //
  74 // C-heap allocation can be traced using +PrintHeapAllocation.
   75 // malloc and free should therefore never be called directly.
  76 
  77 // Base class for objects allocated in the C-heap.
  78 
  79 // In non product mode we introduce a super class for all allocation classes
  80 // that supports printing.
  81 // We avoid the superclass in product mode since some C++ compilers add
  82 // a word overhead for empty super classes.
  83 
#ifdef PRODUCT
// In product builds the allocation classes have no super class, so empty-base
// overhead added by some compilers (see comment above) is avoided.
#define ALLOCATION_SUPER_CLASS_SPEC
#else
#define ALLOCATION_SUPER_CLASS_SPEC : public AllocatedObj
// Debug-only super class for all allocation classes; provides virtual
// printing hooks used for debugging (implementations are in the .cpp file).
class AllocatedObj {
 public:
  // Printing support
  void print() const;
  void print_value() const;

  virtual void print_on(outputStream* st) const;
  virtual void print_value_on(outputStream* st) const;
};
#endif
  98 
// Base class for objects allocated in the C-heap. Overrides new/delete so
// that allocation goes through the VM's heap allocator rather than raw
// malloc/free (definitions are in the .cpp file).
class CHeapObj ALLOCATION_SUPER_CLASS_SPEC {
 public:
  void* operator new(size_t size);
  void  operator delete(void* p);
  // Allocate a raw array of 'size' bytes on the C-heap.
  void* new_array(size_t size);
};
 105 
 106 // Base class for objects allocated on the stack only.
 107 // Calling new or delete will result in fatal error.
 108 
// Base class for objects allocated on the stack only.
// new/delete are declared (and, per the comment above, implemented to be
// fatal) so accidental heap allocation of a StackObj is caught.
class StackObj ALLOCATION_SUPER_CLASS_SPEC {
 public:
  void* operator new(size_t size);
  void  operator delete(void* p);
};
 114 
 115 // Base class for objects used as value objects.
 116 // Calling new or delete will result in fatal error.
 117 //
 118 // Portability note: Certain compilers (e.g. gcc) will
 119 // always make classes bigger if it has a superclass, even
 120 // if the superclass does not have any virtual methods or
 121 // instance fields. The HotSpot implementation relies on this
 122 // not to happen. So never make a ValueObj class a direct subclass
 123 // of this object, but use the VALUE_OBJ_CLASS_SPEC class instead, e.g.,
 124 // like this:
 125 //
 126 //   class A VALUE_OBJ_CLASS_SPEC {
 127 //     ...
 128 //   }
 129 //
 130 // With gcc and possible other compilers the VALUE_OBJ_CLASS_SPEC can
  131 // be defined as an empty string "".
 132 //
// Never subclass _ValueObj directly; use VALUE_OBJ_CLASS_SPEC instead (see
// the portability note above). new/delete are declared so that heap
// allocation of a value object is caught (fatal per the comment above).
class _ValueObj {
 public:
  void* operator new(size_t size);
  void operator delete(void* p);
};
 138 
 139 // Base class for classes that constitute name spaces.
 140 
// Base class for classes that constitute name spaces: only static members
// are expected, so constructing or destroying an instance is a programming
// error (guarded by ShouldNotCallThis()).
class AllStatic {
 public:
  AllStatic()  { ShouldNotCallThis(); }
  ~AllStatic() { ShouldNotCallThis(); }
};
 146 
 147 
 148 //------------------------------Chunk------------------------------------------
 149 // Linked list of raw memory chunks
// Linked list of raw memory chunks. A Chunk header is followed in memory by
// its data area of _len bytes (see bottom()/top()); Arena allocates out of a
// list of these.
class Chunk: public CHeapObj {
 protected:
  Chunk*       _next;     // Next Chunk in list
  const size_t _len;      // Size of this Chunk
 public:
  // Placement-style new: 'length' is the size of the data area to allocate
  // in addition to the Chunk header (definition in the .cpp file).
  void* operator new(size_t size, size_t length);
  void  operator delete(void* p);
  Chunk(size_t length);

  enum {
    // default sizes; make them slightly smaller than 2**k to guard against
    // buddy-system style malloc implementations
#ifdef _LP64
    slack      = 40,            // [RGV] Not sure if this is right, but make it
                                //       a multiple of 8.
#else
    slack      = 20,            // suspected sizeof(Chunk) + internal malloc headers
#endif

    init_size  =  1*K  - slack, // Size of first chunk
    medium_size= 10*K  - slack, // Size of medium-sized chunk
    size       = 32*K  - slack, // Default size of an Arena chunk (following the first)
    non_pool_size = init_size + 32 // An initial size which is not one of above
  };

  void chop();                  // Chop this chunk
  void next_chop();             // Chop next chunk
  // Size of the Chunk header rounded up to the arena alignment; the data
  // area starts this many bytes past 'this'.
  static size_t aligned_overhead_size(void) { return ARENA_ALIGN(sizeof(Chunk)); }

  size_t length() const         { return _len;  }
  Chunk* next() const           { return _next;  }
  void set_next(Chunk* n)       { _next = n;  }
  // Boundaries of data area (possibly unused)
  char* bottom() const          { return ((char*) this) + aligned_overhead_size();  }
  char* top()    const          { return bottom() + _len; }
  // Note: inclusive at both ends (p == top() counts as contained).
  bool contains(char* p) const  { return bottom() <= p && p <= top(); }

  // Start the chunk_pool cleaner task
  static void start_chunk_pool_cleaner_task();

  static void clean_chunk_pool();
};
 192 
 193 //------------------------------Arena------------------------------------------
 194 // Fast allocation of memory
 195 class Arena: public CHeapObj {
 196 protected:
 197   friend class ResourceMark;
 198   friend class HandleMark;
 199   friend class NoHandleMark;
 200   Chunk *_first;                // First chunk
 201   Chunk *_chunk;                // current chunk
 202   char *_hwm, *_max;            // High water mark and max in current chunk
 203   void* grow(size_t x);         // Get a new Chunk of at least size x
 204   NOT_PRODUCT(size_t _size_in_bytes;) // Size of arena (used for memory usage tracing)
 205   NOT_PRODUCT(static size_t _bytes_allocated;) // total #bytes allocated since start
 206   friend class AllocStats;
 207   debug_only(void* malloc(size_t size);)
 208   debug_only(void* internal_malloc_4(size_t x);)
 209  public:
 210   Arena();
 211   Arena(size_t init_size);
 212   Arena(Arena *old);
 213   ~Arena();
 214   void  destruct_contents();
 215   char* hwm() const             { return _hwm; }
 216 
 217   // Fast allocate in the arena.  Common case is: pointer test + increment.
 218   void* Amalloc(size_t x) {
 219     assert(is_power_of_2(ARENA_AMALLOC_ALIGNMENT) , "should be a power of 2");
 220     x = ARENA_ALIGN(x);
 221     debug_only(if (UseMallocOnly) return malloc(x);)
 222     NOT_PRODUCT(_bytes_allocated += x);
 223     if (_hwm + x > _max) {
 224       return grow(x);
 225     } else {
 226       char *old = _hwm;
 227       _hwm += x;
 228       return old;
 229     }
 230   }
 231   // Further assume size is padded out to words
 232   void *Amalloc_4(size_t x) {
 233     assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
 234     debug_only(if (UseMallocOnly) return malloc(x);)
 235     NOT_PRODUCT(_bytes_allocated += x);
 236     if (_hwm + x > _max) {
 237       return grow(x);
 238     } else {
 239       char *old = _hwm;
 240       _hwm += x;
 241       return old;
 242     }
 243   }
 244 
 245   // Allocate with 'double' alignment. It is 8 bytes on sparc.
 246   // In other cases Amalloc_D() should be the same as Amalloc_4().
 247   void* Amalloc_D(size_t x) {
 248     assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
 249     debug_only(if (UseMallocOnly) return malloc(x);)
 250 #if defined(SPARC) && !defined(_LP64)
 251 #define DALIGN_M1 7
 252     size_t delta = (((size_t)_hwm + DALIGN_M1) & ~DALIGN_M1) - (size_t)_hwm;
 253     x += delta;
 254 #endif
 255     NOT_PRODUCT(_bytes_allocated += x);
 256     if (_hwm + x > _max) {
 257       return grow(x); // grow() returns a result aligned >= 8 bytes.
 258     } else {
 259       char *old = _hwm;
 260       _hwm += x;
 261 #if defined(SPARC) && !defined(_LP64)
 262       old += delta; // align to 8-bytes
 263 #endif
 264       return old;
 265     }
 266   }
 267 
 268   // Fast delete in area.  Common case is: NOP (except for storage reclaimed)
 269   void Afree(void *ptr, size_t size) {
 270 #ifdef ASSERT
 271     if (ZapResourceArea) memset(ptr, badResourceValue, size); // zap freed memory
 272     if (UseMallocOnly) return;
 273 #endif
 274     if (((char*)ptr) + size == _hwm) _hwm = (char*)ptr;
 275   }
 276 
 277   void *Arealloc( void *old_ptr, size_t old_size, size_t new_size );
 278 
 279   // Move contents of this arena into an empty arena
 280   Arena *move_contents(Arena *empty_arena);
 281 
 282   // Determine if pointer belongs to this Arena or not.
 283   bool contains( const void *ptr ) const;
 284 
 285   // Total of all chunks in use (not thread-safe)
 286   size_t used() const;
 287 
 288   // Total # of bytes used
 289   size_t size_in_bytes() const         NOT_PRODUCT({  return _size_in_bytes; }) PRODUCT_RETURN0;
 290   void set_size_in_bytes(size_t size)  NOT_PRODUCT({ _size_in_bytes = size;  }) PRODUCT_RETURN;
 291   static void free_malloced_objects(Chunk* chunk, char* hwm, char* max, char* hwm2)  PRODUCT_RETURN;
 292   static void free_all(char** start, char** end)                                     PRODUCT_RETURN;
 293 
 294 private:
 295   // Reset this Arena to empty, access will trigger grow if necessary
 296   void   reset(void) {
 297     _first = _chunk = NULL;
 298     _hwm = _max = NULL;
 299   }
 300 };
 301 
 302 // One of the following macros must be used when allocating
 303 // an array or object from an arena
// Allocate an array of 'size' elements of 'type' from 'arena'.
// NOTE(review): the (size) * sizeof(type) multiplication is unchecked, so a
// huge 'size' can overflow size_t -- callers are presumed to pass sane sizes;
// confirm at call sites handling external input.
#define NEW_ARENA_ARRAY(arena, type, size) \
  (type*) (arena)->Amalloc((size) * sizeof(type))

// Grow an arena-allocated array from old_size to new_size elements.
#define REALLOC_ARENA_ARRAY(arena, type, old, old_size, new_size)    \
  (type*) (arena)->Arealloc((char*)(old), (old_size) * sizeof(type), \
                            (new_size) * sizeof(type) )

// Free an arena-allocated array (only reclaims if it was the most recent
// allocation -- see Arena::Afree).
#define FREE_ARENA_ARRAY(arena, type, old, size) \
  (arena)->Afree((char*)(old), (size) * sizeof(type))

// Allocate a single object of 'type' from 'arena'.
#define NEW_ARENA_OBJ(arena, type) \
  NEW_ARENA_ARRAY(arena, type, 1)
 316 
 317 
//%note allocation_1
// Raw resource-area allocation primitives (definitions elsewhere); the
// NEW_RESOURCE_* macros below are the intended interface. The Thread*
// overload allocates from that thread's resource area.
extern char* resource_allocate_bytes(size_t size);
extern char* resource_allocate_bytes(Thread* thread, size_t size);
extern char* resource_reallocate_bytes( char *old, size_t old_size, size_t new_size);
extern void resource_free_bytes( char *old, size_t size );
 323 
 324 //----------------------------------------------------------------------
 325 // Base class for objects allocated in the resource area per default.
 326 // Optionally, objects may be allocated on the C heap with
  327 // new(ResourceObj::C_HEAP) Foo(...) or in an Arena with new (&arena) Foo(...).
 328 // ResourceObj's can be allocated within other objects, but don't use
 329 // new or delete (allocation_type is unknown).  If new is used to allocate,
 330 // use delete to deallocate.
// Base class for objects allocated in the resource area per default (see the
// comment block above for the alternative placements: C heap, Arena,
// stack/embedded).
class ResourceObj ALLOCATION_SUPER_CLASS_SPEC {
 public:
  // Where this object was placed; allocation_mask covers the 2 low bits used
  // to encode the type (see get_allocation_type in the .cpp file).
  enum allocation_type { STACK_OR_EMBEDDED = 0, RESOURCE_AREA, C_HEAP, ARENA, allocation_mask = 0x3 };
  static void set_allocation_type(address res, allocation_type type) NOT_DEBUG_RETURN;
#ifdef ASSERT
 private:
  // When this object is allocated on stack the new() operator is not
  // called but garbage on stack may look like a valid allocation_type.
  // Store negated 'this' pointer when new() is called to distinguish cases.
  uintptr_t _allocation;
 public:
  allocation_type get_allocation_type() const;
  bool allocated_on_stack()    const { return get_allocation_type() == STACK_OR_EMBEDDED; }
  bool allocated_on_res_area() const { return get_allocation_type() == RESOURCE_AREA; }
  bool allocated_on_C_heap()   const { return get_allocation_type() == C_HEAP; }
  bool allocated_on_arena()    const { return get_allocation_type() == ARENA; }
  ResourceObj(); // default constructor
  ResourceObj(const ResourceObj& r); // default copy constructor
  ResourceObj& operator=(const ResourceObj& r); // default copy assignment
  ~ResourceObj();
#endif // ASSERT

 public:
  // Placement selected by the new-expression: new(C_HEAP), new(&arena), or
  // plain new for the resource area (the default).
  void* operator new(size_t size, allocation_type type);
  void* operator new(size_t size, Arena *arena) {
      address res = (address)arena->Amalloc(size);
      DEBUG_ONLY(set_allocation_type(res, ARENA);)
      return res;
  }
  void* operator new(size_t size) {
      address res = (address)resource_allocate_bytes(size);
      DEBUG_ONLY(set_allocation_type(res, RESOURCE_AREA);)
      return res;
  }
  void  operator delete(void* p);
};
 367 
 368 // One of the following macros must be used when allocating an array
 369 // or object to determine whether it should reside in the C heap on in
 370 // the resource area.
 371 
// Allocate an array of 'size' elements of 'type' in the resource area.
// NOTE(review): (size) * sizeof(type) is unchecked for size_t overflow in
// all of the array macros below -- confirm call sites that take external
// sizes.
#define NEW_RESOURCE_ARRAY(type, size)\
  (type*) resource_allocate_bytes((size) * sizeof(type))

// Same, but in the given thread's resource area.
#define NEW_RESOURCE_ARRAY_IN_THREAD(thread, type, size)\
  (type*) resource_allocate_bytes(thread, (size) * sizeof(type))

#define REALLOC_RESOURCE_ARRAY(type, old, old_size, new_size)\
  (type*) resource_reallocate_bytes((char*)(old), (old_size) * sizeof(type), (new_size) * sizeof(type) )

#define FREE_RESOURCE_ARRAY(type, old, size)\
  resource_free_bytes((char*)(old), (size) * sizeof(type))

// Resource-area memory is reclaimed in bulk; individual frees are no-ops.
#define FREE_FAST(old)\
    /* nop */

#define NEW_RESOURCE_OBJ(type)\
  NEW_RESOURCE_ARRAY(type, 1)

// C-heap variants; the string argument tags the allocation for tracing
// (see +PrintHeapAllocation in the comment block near the top of this file).
#define NEW_C_HEAP_ARRAY(type, size)\
  (type*) (AllocateHeap((size) * sizeof(type), XSTR(type) " in " __FILE__))

#define REALLOC_C_HEAP_ARRAY(type, old, size)\
  (type*) (ReallocateHeap((char*)old, (size) * sizeof(type), XSTR(type) " in " __FILE__))

#define FREE_C_HEAP_ARRAY(type,old) \
  FreeHeap((char*)(old))

#define NEW_C_HEAP_OBJ(type)\
  NEW_C_HEAP_ARRAY(type, 1)
 401 
// NOTE(review): presumably set to warn when the global operator new is used
// instead of the allocation classes above -- confirm against the definition
// in the .cpp file.
extern bool warn_new_operator;
 403 
 404 // for statistics
#ifndef PRODUCT
// Debug-only allocation statistics, measured relative to the construction of
// the receiver: snapshot counters at construction, report deltas since then.
class AllocStats : StackObj {
  int    start_mallocs, start_frees;            // counter snapshots at construction
  size_t start_malloc_bytes, start_res_bytes;   // byte-count snapshots at construction
 public:
  AllocStats();

  int    num_mallocs();    // since creation of receiver
  size_t alloc_bytes();
  size_t resource_bytes();
  int    num_frees();
  void   print();
};
#endif
 419 
 420 
 421 //------------------------------ReallocMark---------------------------------
 422 // Code which uses REALLOC_RESOURCE_ARRAY should check an associated
 423 // ReallocMark, which is declared in the same scope as the reallocated
 424 // pointer.  Any operation that could __potentially__ cause a reallocation
 425 // should check the ReallocMark.
// See the usage comment above: declare a ReallocMark next to a pointer
// obtained via REALLOC_RESOURCE_ARRAY and call check() before any operation
// that could reallocate. Both members compile away in product builds.
class ReallocMark: public StackObj {
protected:
  NOT_PRODUCT(int _nesting;)

public:
  ReallocMark()   PRODUCT_RETURN;
  void check()    PRODUCT_RETURN;
};
 434 
 435 #endif // SHARE_VM_MEMORY_ALLOCATION_HPP