/*
 * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
#define SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP

#include "runtime/atomic.hpp"
#include "runtime/os.hpp"
#include "services/memTracker.hpp"
#include "utilities/globalDefinitions.hpp"

// Explicit C-heap memory management

void trace_heap_malloc(size_t size, const char* name, void *p);
void trace_heap_free(void *p);

#ifndef PRODUCT
// Increments unsigned long value for statistics (not atomic on MP).
// Adds add_value to the statistics counter at *dest. Statistics-only code
// (the #ifndef PRODUCT guard is opened above; the trailing #endif below
// closes it). Not atomic on MP: on SPARC/X86 the 64-bit load and store are
// each atomic but the read-modify-write sequence as a whole is not; on
// other platforms even the individual load/store may word-tear.
inline void inc_stat_counter(volatile julong* dest, julong add_value) {
#if defined(SPARC) || defined(X86)
  // Sparc and X86 have atomic jlong (8 bytes) instructions
  julong value = Atomic::load((volatile jlong*)dest);
  value += add_value;
  Atomic::store((jlong)value, (volatile jlong*)dest);
#else
  // possible word-tearing during load/store
  *dest += add_value;
#endif
}
#endif

// allocate using malloc; will fail if no memory available
//
// Allocates size bytes on the C heap via os::malloc, tagged with the NMT
// memory flag and the given allocation-site call stack. With the default
// failure mode (EXIT_OOM) a NULL result aborts the VM; with RETURN_NULL
// the NULL is handed back to the caller.
inline char* AllocateHeap(size_t size, MEMFLAGS flags,
    const NativeCallStack& stack,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
  char* p = (char*) os::malloc(size, flags, stack);
  #ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "AllocateHeap", p);
  #endif
  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "AllocateHeap");
  }
  return p;
}

// Convenience overload: records the immediate caller (CURRENT_PC) as the
// allocation site for NMT.
ALWAYSINLINE char* AllocateHeap(size_t size, MEMFLAGS flags,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
  return AllocateHeap(size, flags, CURRENT_PC, alloc_failmode);
}

// Resizes a C-heap block previously obtained through AllocateHeap.
// Failure handling mirrors AllocateHeap: NULL either aborts the VM
// (EXIT_OOM, the default) or is returned to the caller (RETURN_NULL).
// NOTE(review): presumably os::realloc follows C realloc semantics and
// leaves the old block intact on failure, so RETURN_NULL callers must not
// lose the old pointer — confirm against os::realloc.
ALWAYSINLINE char* ReallocateHeap(char *old, size_t size, MEMFLAGS flag,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
  char* p = (char*) os::realloc(old, size, flag, CURRENT_PC);
  #ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "ReallocateHeap", p);
  #endif
  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "ReallocateHeap");
  }
  return p;
}

// Returns a block allocated with AllocateHeap/ReallocateHeap to the C heap
// via os::free. Counterpart used by the CHeapObj operator delete below.
inline void FreeHeap(void* p) {
  #ifdef ASSERT
  if (PrintMallocFree) trace_heap_free(p);
  #endif
  os::free(p);
}


// operator new for CHeapObj: C-heap allocation tagged with memory flag F
// and an explicit allocation-site stack. Aborts the VM on OOM (default
// AllocateHeap failure mode), so the returned pointer is never NULL on a
// normal return despite the throw() specification.
template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size,
      const NativeCallStack& stack) throw() {
  void* p = (void*)AllocateHeap(size, F, stack);
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "CHeapObj-new", p);
#endif
  return p;
}

// Plain operator new: uses the caller's PC as the allocation site.
template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size) throw() {
  return CHeapObj<F>::operator new(size, CALLER_PC);
}

// nothrow operator new with explicit allocation-site stack: returns NULL
// on allocation failure instead of exiting the VM.
template <MEMFLAGS F> void* CHeapObj<F>::operator new (size_t size,
  const std::nothrow_t&  nothrow_constant, const NativeCallStack& stack) throw() {
  void* p = (void*)AllocateHeap(size, F, stack,
      AllocFailStrategy::RETURN_NULL);
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "CHeapObj-new", p);
#endif
  return p;
}

// nothrow operator new: caller's PC as allocation site, NULL on failure.
template <MEMFLAGS F> void* CHeapObj<F>::operator new (size_t size,
  const std::nothrow_t& nothrow_constant) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
}

// Array forms simply delegate to the corresponding scalar operator new;
// there is no per-element bookkeeping beyond the raw byte size.
template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
      const NativeCallStack& stack) throw() {
  return CHeapObj<F>::operator new(size, stack);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size)
  throw() {
  return CHeapObj<F>::operator new(size, CALLER_PC);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
  const std::nothrow_t&  nothrow_constant, const NativeCallStack& stack) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, stack);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
  const std::nothrow_t& nothrow_constant) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
}

// Both delete forms release through FreeHeap (os::free underneath).
template <MEMFLAGS F> void CHeapObj<F>::operator delete(void* p){
   FreeHeap(p);
}

template <MEMFLAGS F> void CHeapObj<F>::operator delete [](void* p){
   FreeHeap(p);
}

// Byte size for an mmap-backed array of `length` elements, rounded up to
// the VM allocation granularity (reserve/release operate in granularity
// units). NOTE(review): length * sizeof(E) is not checked for overflow —
// callers are trusted to pass sane lengths.
template <class E, MEMFLAGS F>
size_t MmapArrayAllocator<E, F>::size_for(size_t length) {
  size_t size = length * sizeof(E);
  int alignment = os::vm_allocation_granularity();
  return align_size_up(size, alignment);
}

// Reserves and commits a granularity-aligned, non-executable (!ExecMem)
// region big enough for `length` elements, tagged with memory flag F.
// Exits the VM if either the reservation or the commit fails; never
// returns NULL.
template <class E, MEMFLAGS F>
E* MmapArrayAllocator<E, F>::allocate(size_t length) {
  size_t size = size_for(length);
  int alignment = os::vm_allocation_granularity();

  char* addr = os::reserve_memory(size, NULL, alignment, F);
  if (addr == NULL) {
    vm_exit_out_of_memory(size, OOM_MMAP_ERROR, "Allocator (reserve)");
  }

  os::commit_memory_or_exit(addr, size, !ExecMem, "Allocator (commit)");

  return (E*)addr;
}

// Releases a region obtained from allocate(). `length` must be the length
// passed to allocate() so that size_for recomputes the same region size.
template <class E, MEMFLAGS F>
void MmapArrayAllocator<E, F>::free(E* addr, size_t length) {
  bool result = os::release_memory((char*)addr, size_for(length));
  assert(result, "Failed to release memory");
}

// Raw byte size for a malloc-backed array; no alignment rounding.
// NOTE(review): like the mmap variant, the multiplication is unchecked.
template <class E, MEMFLAGS F>
size_t MallocArrayAllocator<E, F>::size_for(size_t length) {
  return length * sizeof(E);
}

// C-heap allocation for `length` elements; exits the VM on OOM (default
// AllocateHeap failure mode).
template <class E, MEMFLAGS F>
E* MallocArrayAllocator<E, F>::allocate(size_t length) {
  return (E*)AllocateHeap(size_for(length), F);
}

// malloc-backed blocks need no size to free; length is ignored.
template<class E, MEMFLAGS F>
void MallocArrayAllocator<E, F>::free(E* addr, size_t /*length*/) {
  FreeHeap(addr);
}

// Strategy selector: small arrays (below ArrayAllocatorMallocLimit bytes)
// go to malloc, larger ones to mmap. Must be evaluated with the same
// length at allocate and free time so both sides agree on the backing.
template <class E, MEMFLAGS F>
bool ArrayAllocator<E, F>::should_use_malloc(size_t length) {
  return MallocArrayAllocator<E, F>::size_for(length) < ArrayAllocatorMallocLimit;
}

template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::allocate_malloc(size_t length) {
  return MallocArrayAllocator<E, F>::allocate(length);
}

template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::allocate_mmap(size_t length) {
  return MmapArrayAllocator<E, F>::allocate(length);
}

// Allocates storage for `length` elements using the backing chosen by
// should_use_malloc. Both paths exit the VM on failure.
template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::allocate(size_t length) {
  if (should_use_malloc(length)) {
    return allocate_malloc(length);
  }

  return allocate_mmap(length);
}

// Grow/shrink by allocate-copy-free (the backing may switch between
// malloc and mmap since the choice is keyed on length). new_length == 0
// frees the old storage and returns NULL. Copies min(old, new) elements
// with memcpy, so E is treated as trivially copyable here.
template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::reallocate(E* old_addr, size_t old_length, size_t new_length) {
  E* new_addr = (new_length > 0)
      ? allocate(new_length)
      : NULL;

  if (new_addr != NULL && old_addr != NULL) {
    memcpy(new_addr, old_addr, MIN2(old_length, new_length) * sizeof(E));
  }

  if (old_addr != NULL) {
    free(old_addr, old_length);
  }

  return new_addr;
}

template<class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free_malloc(E* addr, size_t length) {
  MallocArrayAllocator<E, F>::free(addr, length);
}

template<class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free_mmap(E* addr, size_t length) {
  MmapArrayAllocator<E, F>::free(addr, length);
}

// Frees storage from allocate(). `length` must match the allocation-time
// length so should_use_malloc reproduces the same backing decision;
// a mismatch would release via the wrong allocator. NULL is a no-op.
template<class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free(E* addr, size_t length) {
  if (addr != NULL) {
    if (should_use_malloc(length)) {
      free_malloc(addr, length);
    } else {
      free_mmap(addr, length);
    }
  }
}

#endif // SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP