128 }
129
130 template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
131 const std::nothrow_t& nothrow_constant, const NativeCallStack& stack) throw() {
132 return CHeapObj<F>::operator new(size, nothrow_constant, stack);
133 }
134
135 template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
136 const std::nothrow_t& nothrow_constant) throw() {
137 return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
138 }
139
// Matching deallocation for the scalar operator new overloads: FreeHeap
// releases the block and performs the NMT bookkeeping.
template <MEMFLAGS F> void CHeapObj<F>::operator delete(void* p){
  FreeHeap(p);
}
143
// Array counterpart of operator delete; both new[] overloads funnel into the
// scalar new, so the same FreeHeap release is correct here.
template <MEMFLAGS F> void CHeapObj<F>::operator delete [](void* p){
  FreeHeap(p);
}
147
148 template <class E, MEMFLAGS F>
149 size_t MmapArrayAllocator<E, F>::size_for(size_t length) {
150 size_t size = length * sizeof(E);
151 int alignment = os::vm_allocation_granularity();
152 return align_size_up(size, alignment);
153 }
154
155 template <class E, MEMFLAGS F>
156 E* MmapArrayAllocator<E, F>::allocate_or_null(size_t length) {
157 size_t size = size_for(length);
158 int alignment = os::vm_allocation_granularity();
159
160 char* addr = os::reserve_memory(size, NULL, alignment, F);
161 if (addr == NULL) {
162 return NULL;
163 }
164
165 if (os::commit_memory(addr, size, !ExecMem, "Allocator (commit)")) {
166 return (E*)addr;
167 } else {
168 os::release_memory(addr, size);
169 return NULL;
170 }
171 }
172
173 template <class E, MEMFLAGS F>
174 E* MmapArrayAllocator<E, F>::allocate(size_t length) {
175 size_t size = size_for(length);
176 int alignment = os::vm_allocation_granularity();
177
178 char* addr = os::reserve_memory(size, NULL, alignment, F);
179 if (addr == NULL) {
180 vm_exit_out_of_memory(size, OOM_MMAP_ERROR, "Allocator (reserve)");
181 }
182
183 os::commit_memory_or_exit(addr, size, !ExecMem, "Allocator (commit)");
184
185 return (E*)addr;
186 }
187
188 template <class E, MEMFLAGS F>
189 void MmapArrayAllocator<E, F>::free(E* addr, size_t length) {
190 bool result = os::release_memory((char*)addr, size_for(length));
191 assert(result, "Failed to release memory");
192 }
193
// Raw byte size for a malloc-backed array of 'length' elements (no
// granularity rounding, unlike the mmap allocator).
template <class E, MEMFLAGS F>
size_t MallocArrayAllocator<E, F>::size_for(size_t length) {
  return length * sizeof(E);
}
198
// Allocates the array from the C heap, tagged with memory flag F for NMT.
template <class E, MEMFLAGS F>
E* MallocArrayAllocator<E, F>::allocate(size_t length) {
  return (E*)AllocateHeap(size_for(length), F);
}
203
// Releases a malloc-backed array. The length is not needed by FreeHeap and
// is accepted only to mirror the mmap allocator's interface.
template<class E, MEMFLAGS F>
void MallocArrayAllocator<E, F>::free(E* addr, size_t /*length*/) {
  FreeHeap(addr);
}
208
// Backend selection policy: arrays whose byte size is below
// ArrayAllocatorMallocLimit come from the C heap; larger ones are mmap'ed.
template <class E, MEMFLAGS F>
bool ArrayAllocator<E, F>::should_use_malloc(size_t length) {
  return MallocArrayAllocator<E, F>::size_for(length) < ArrayAllocatorMallocLimit;
}
213
// Thin forwarder to the malloc backend.
template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::allocate_malloc(size_t length) {
  return MallocArrayAllocator<E, F>::allocate(length);
}
218
// Thin forwarder to the mmap backend (hard-fail variant).
template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::allocate_mmap(size_t length) {
  return MmapArrayAllocator<E, F>::allocate(length);
}
223
224 template <class E, MEMFLAGS F>
225 E* ArrayAllocator<E, F>::allocate(size_t length) {
226 if (should_use_malloc(length)) {
227 return allocate_malloc(length);
228 }
229
230 return allocate_mmap(length);
231 }
232
233 template <class E, MEMFLAGS F>
234 E* ArrayAllocator<E, F>::reallocate(E* old_addr, size_t old_length, size_t new_length) {
235 E* new_addr = (new_length > 0)
236 ? allocate(new_length)
237 : NULL;
238
239 if (new_addr != NULL && old_addr != NULL) {
240 memcpy(new_addr, old_addr, MIN2(old_length, new_length) * sizeof(E));
241 }
242
243 if (old_addr != NULL) {
244 free(old_addr, old_length);
245 }
246
247 return new_addr;
248 }
249
// Thin forwarder to the malloc backend's free.
template<class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free_malloc(E* addr, size_t length) {
  MallocArrayAllocator<E, F>::free(addr, length);
}
254
// Thin forwarder to the mmap backend's free.
template<class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free_mmap(E* addr, size_t length) {
  MmapArrayAllocator<E, F>::free(addr, length);
}
259
260 template<class E, MEMFLAGS F>
261 void ArrayAllocator<E, F>::free(E* addr, size_t length) {
262 if (addr != NULL) {
263 if (should_use_malloc(length)) {
264 free_malloc(addr, length);
265 } else {
266 free_mmap(addr, length);
267 }
268 }
269 }
270
271 #endif // SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
|
128 }
129
130 template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
131 const std::nothrow_t& nothrow_constant, const NativeCallStack& stack) throw() {
132 return CHeapObj<F>::operator new(size, nothrow_constant, stack);
133 }
134
135 template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
136 const std::nothrow_t& nothrow_constant) throw() {
137 return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
138 }
139
// Matching deallocation for the scalar operator new overloads: FreeHeap
// releases the block and performs the NMT bookkeeping.
template <MEMFLAGS F> void CHeapObj<F>::operator delete(void* p){
  FreeHeap(p);
}
143
// Array counterpart of operator delete; both new[] overloads funnel into the
// scalar new, so the same FreeHeap release is correct here.
template <MEMFLAGS F> void CHeapObj<F>::operator delete [](void* p){
  FreeHeap(p);
}
147
148 template <class E>
149 size_t MmapArrayAllocator<E>::size_for(size_t length) {
150 size_t size = length * sizeof(E);
151 int alignment = os::vm_allocation_granularity();
152 return align_size_up(size, alignment);
153 }
154
155 template <class E>
156 E* MmapArrayAllocator<E>::allocate_or_null(size_t length, MEMFLAGS flags) {
157 size_t size = size_for(length);
158 int alignment = os::vm_allocation_granularity();
159
160 char* addr = os::reserve_memory(size, NULL, alignment, flags);
161 if (addr == NULL) {
162 return NULL;
163 }
164
165 if (os::commit_memory(addr, size, !ExecMem, "Allocator (commit)")) {
166 return (E*)addr;
167 } else {
168 os::release_memory(addr, size);
169 return NULL;
170 }
171 }
172
173 template <class E>
174 E* MmapArrayAllocator<E>::allocate(size_t length, MEMFLAGS flags) {
175 size_t size = size_for(length);
176 int alignment = os::vm_allocation_granularity();
177
178 char* addr = os::reserve_memory(size, NULL, alignment, flags);
179 if (addr == NULL) {
180 vm_exit_out_of_memory(size, OOM_MMAP_ERROR, "Allocator (reserve)");
181 }
182
183 os::commit_memory_or_exit(addr, size, !ExecMem, "Allocator (commit)");
184
185 return (E*)addr;
186 }
187
188 template <class E>
189 void MmapArrayAllocator<E>::free(E* addr, size_t length) {
190 bool result = os::release_memory((char*)addr, size_for(length));
191 assert(result, "Failed to release memory");
192 }
193
// Raw byte size for a malloc-backed array of 'length' elements (no
// granularity rounding, unlike the mmap allocator).
template <class E>
size_t MallocArrayAllocator<E>::size_for(size_t length) {
  return length * sizeof(E);
}
198
// Allocates the array from the C heap, tagged with 'flags' for NMT.
template <class E>
E* MallocArrayAllocator<E>::allocate(size_t length, MEMFLAGS flags) {
  return (E*)AllocateHeap(size_for(length), flags);
}
203
// Releases a malloc-backed array. The length is not needed by FreeHeap and
// is accepted only to mirror the mmap allocator's interface.
template<class E>
void MallocArrayAllocator<E>::free(E* addr, size_t /*length*/) {
  FreeHeap(addr);
}
208
// Backend selection policy: arrays whose byte size is below
// ArrayAllocatorMallocLimit come from the C heap; larger ones are mmap'ed.
template <class E>
bool ArrayAllocator<E>::should_use_malloc(size_t length) {
  return MallocArrayAllocator<E>::size_for(length) < ArrayAllocatorMallocLimit;
}
213
// Thin forwarder to the malloc backend.
template <class E>
E* ArrayAllocator<E>::allocate_malloc(size_t length, MEMFLAGS flags) {
  return MallocArrayAllocator<E>::allocate(length, flags);
}
218
// Thin forwarder to the mmap backend (hard-fail variant).
template <class E>
E* ArrayAllocator<E>::allocate_mmap(size_t length, MEMFLAGS flags) {
  return MmapArrayAllocator<E>::allocate(length, flags);
}
223
224 template <class E>
225 E* ArrayAllocator<E>::allocate(size_t length, MEMFLAGS flags) {
226 if (should_use_malloc(length)) {
227 return allocate_malloc(length, flags);
228 }
229
230 return allocate_mmap(length, flags);
231 }
232
233 template <class E>
234 E* ArrayAllocator<E>::reallocate(E* old_addr, size_t old_length, size_t new_length, MEMFLAGS flags) {
235 E* new_addr = (new_length > 0)
236 ? allocate(new_length, flags)
237 : NULL;
238
239 if (new_addr != NULL && old_addr != NULL) {
240 memcpy(new_addr, old_addr, MIN2(old_length, new_length) * sizeof(E));
241 }
242
243 if (old_addr != NULL) {
244 free(old_addr, old_length);
245 }
246
247 return new_addr;
248 }
249
// Thin forwarder to the malloc backend's free.
template<class E>
void ArrayAllocator<E>::free_malloc(E* addr, size_t length) {
  MallocArrayAllocator<E>::free(addr, length);
}
254
// Thin forwarder to the mmap backend's free.
template<class E>
void ArrayAllocator<E>::free_mmap(E* addr, size_t length) {
  MmapArrayAllocator<E>::free(addr, length);
}
259
260 template<class E>
261 void ArrayAllocator<E>::free(E* addr, size_t length) {
262 if (addr != NULL) {
263 if (should_use_malloc(length)) {
264 free_malloc(addr, length);
265 } else {
266 free_mmap(addr, length);
267 }
268 }
269 }
270
271 #endif // SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
|