/*
 * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
#define SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP

#include "runtime/atomic.hpp"
#include "runtime/os.hpp"
#include "services/memTracker.hpp"
#include "utilities/globalDefinitions.hpp"

// Explicit C-heap memory management

void trace_heap_malloc(size_t size, const char* name, void *p);
void trace_heap_free(void *p);

#ifndef PRODUCT
// Increments a 64-bit (julong) statistics counter (not atomic on MP).
inline void inc_stat_counter(volatile julong* dest, julong add_value) {
#if defined(SPARC) || defined(X86)
  // SPARC and X86 have atomic 8-byte (jlong) load/store instructions
  julong value = Atomic::load((volatile jlong*)dest);
  value += add_value;
  Atomic::store((jlong)value, (volatile jlong*)dest);
#else
  // possible word-tearing during load/store
  *dest += add_value;
#endif
}
#endif

// Allocate using os::malloc; by default (AllocFailStrategy::EXIT_OOM) the
// VM exits if no memory is available, otherwise NULL is returned.
inline char* AllocateHeap(size_t size, MEMFLAGS flags,
    const NativeCallStack& stack,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
  char* p = (char*) os::malloc(size, flags, stack);
  #ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "AllocateHeap", p);
  #endif
  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "AllocateHeap");
  }
  return p;
}

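// Convenience overload that records the current program counter
// (CURRENT_PC) as the allocation site for native memory tracking.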
ALWAYSINLINE char* AllocateHeap(size_t size, MEMFLAGS flags,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
  return AllocateHeap(size, flags, CURRENT_PC, alloc_failmode);
}

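// Reallocate with os::realloc; like AllocateHeap, either exits the VM or
// returns NULL on failure, depending on alloc_failmode.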
ALWAYSINLINE char* ReallocateHeap(char *old, size_t size, MEMFLAGS flag,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
  char* p = (char*) os::realloc(old, size, flag, CURRENT_PC);
  #ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "ReallocateHeap", p);
  #endif
  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "ReallocateHeap");
  }
  return p;
}

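// Release memory obtained from AllocateHeap or ReallocateHeap.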
inline void FreeHeap(void* p) {
  #ifdef ASSERT
  if (PrintMallocFree) trace_heap_free(p);
  #endif
  os::free(p);
}


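// The CHeapObj operator new family routes all allocation through
// AllocateHeap, so every C-heap object is tagged with a MEMFLAGS category
// and an allocation call stack for native memory tracking (NMT).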
template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size,
      const NativeCallStack& stack) throw() {
  void* p = (void*)AllocateHeap(size, F, stack);
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "CHeapObj-new", p);
#endif
  return p;
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size) throw() {
  return CHeapObj<F>::operator new(size, CALLER_PC);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new (size_t size,
  const std::nothrow_t& nothrow_constant, const NativeCallStack& stack) throw() {
  void* p = (void*)AllocateHeap(size, F, stack,
      AllocFailStrategy::RETURN_NULL);
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "CHeapObj-new", p);
#endif
  return p;
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new (size_t size,
  const std::nothrow_t& nothrow_constant) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
      const NativeCallStack& stack) throw() {
  return CHeapObj<F>::operator new(size, stack);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size)
  throw() {
  return CHeapObj<F>::operator new(size, CALLER_PC);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
  const std::nothrow_t& nothrow_constant, const NativeCallStack& stack) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, stack);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
  const std::nothrow_t& nothrow_constant) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
}

template <MEMFLAGS F> void CHeapObj<F>::operator delete(void* p) {
  FreeHeap(p);
}

template <MEMFLAGS F> void CHeapObj<F>::operator delete [](void* p) {
  FreeHeap(p);
}

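// MmapArrayAllocator backs an array with virtual memory obtained directly
// from the OS: the size is rounded up to the allocation granularity, then
// the range is reserved and committed. free() releases the whole mapping.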
template <class E, MEMFLAGS F>
size_t MmapArrayAllocator<E, F>::size_for(size_t length) {
  size_t size = length * sizeof(E);
  int alignment = os::vm_allocation_granularity();
  return align_size_up(size, alignment);
}

template <class E, MEMFLAGS F>
E* MmapArrayAllocator<E, F>::allocate_or_null(size_t length) {
  size_t size = size_for(length);
  int alignment = os::vm_allocation_granularity();

  char* addr = os::reserve_memory(size, NULL, alignment, F);
  if (addr == NULL) {
    return NULL;
  }

  if (os::commit_memory(addr, size, !ExecMem)) {
    return (E*)addr;
  } else {
    os::release_memory(addr, size);
    return NULL;
  }
}

template <class E, MEMFLAGS F>
E* MmapArrayAllocator<E, F>::allocate(size_t length) {
  size_t size = size_for(length);
  int alignment = os::vm_allocation_granularity();

  char* addr = os::reserve_memory(size, NULL, alignment, F);
  if (addr == NULL) {
    vm_exit_out_of_memory(size, OOM_MMAP_ERROR, "Allocator (reserve)");
  }

  os::commit_memory_or_exit(addr, size, !ExecMem, "Allocator (commit)");

  return (E*)addr;
}

template <class E, MEMFLAGS F>
void MmapArrayAllocator<E, F>::free(E* addr, size_t length) {
  bool result = os::release_memory((char*)addr, size_for(length));
  assert(result, "Failed to release memory");
}

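// MallocArrayAllocator backs an array with C-heap memory; the requested
// size is used as-is, with no rounding.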
template <class E, MEMFLAGS F>
size_t MallocArrayAllocator<E, F>::size_for(size_t length) {
  return length * sizeof(E);
}

template <class E, MEMFLAGS F>
E* MallocArrayAllocator<E, F>::allocate(size_t length) {
  return (E*)AllocateHeap(size_for(length), F);
}

template <class E, MEMFLAGS F>
void MallocArrayAllocator<E, F>::free(E* addr, size_t /*length*/) {
  FreeHeap(addr);
}

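// ArrayAllocator chooses a backing store per allocation: requests smaller
// than ArrayAllocatorMallocLimit go to malloc, larger ones to mmap.
//
// Example (hypothetical usage; mtGC is one of the MEMFLAGS categories):
//   jint* buf = ArrayAllocator<jint, mtGC>::allocate(1024);
//   ...
//   ArrayAllocator<jint, mtGC>::free(buf, 1024);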
template <class E, MEMFLAGS F>
bool ArrayAllocator<E, F>::should_use_malloc(size_t length) {
  return MallocArrayAllocator<E, F>::size_for(length) < ArrayAllocatorMallocLimit;
}

template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::allocate_malloc(size_t length) {
  return MallocArrayAllocator<E, F>::allocate(length);
}

template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::allocate_mmap(size_t length) {
  return MmapArrayAllocator<E, F>::allocate(length);
}

template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::allocate(size_t length) {
  if (should_use_malloc(length)) {
    return allocate_malloc(length);
  }

  return allocate_mmap(length);
}

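// Reallocation always allocates a new block, copies MIN2(old_length,
// new_length) elements, and frees the old block; the old and new arrays
// may use different backing stores if they fall on opposite sides of
// ArrayAllocatorMallocLimit.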
template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::reallocate(E* old_addr, size_t old_length, size_t new_length) {
  E* new_addr = (new_length > 0)
      ? allocate(new_length)
      : NULL;

  if (new_addr != NULL && old_addr != NULL) {
    memcpy(new_addr, old_addr, MIN2(old_length, new_length) * sizeof(E));
  }

  if (old_addr != NULL) {
    free(old_addr, old_length);
  }

  return new_addr;
}

template <class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free_malloc(E* addr, size_t length) {
  MallocArrayAllocator<E, F>::free(addr, length);
}

template <class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free_mmap(E* addr, size_t length) {
  MmapArrayAllocator<E, F>::free(addr, length);
}

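// free() must be passed the same length that was used for the allocation,
// so that the malloc/mmap decision made here matches the one made in
// allocate().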
template <class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free(E* addr, size_t length) {
  if (addr != NULL) {
    if (should_use_malloc(length)) {
      free_malloc(addr, length);
    } else {
      free_mmap(addr, length);
    }
  }
}

#endif // SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP