134
// Array form delegates to the scalar nothrow operator new, forwarding the
// caller-supplied NativeCallStack so the allocation site is recorded.
template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
  const std::nothrow_t& nothrow_constant, const NativeCallStack& stack) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, stack);
}
139
// Array form without an explicit stack: record the immediate caller's PC
// (CALLER_PC) as the allocation site and delegate to the scalar form.
template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
  const std::nothrow_t& nothrow_constant) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
}
144
// Matching delete: return the block to the C heap via FreeHeap.
template <MEMFLAGS F> void CHeapObj<F>::operator delete(void* p){
  FreeHeap(p);
}
148
// Array delete mirrors scalar delete: both release through FreeHeap.
template <MEMFLAGS F> void CHeapObj<F>::operator delete [](void* p){
  FreeHeap(p);
}
152
// Allocates 'size' bytes, trying malloc first when 'use_malloc' is set and
// falling back to mmap when malloc fails and the request is at least one
// allocation granule. Both parameters are in-out: on the mmap path 'size'
// is rounded up to the allocation granularity and 'use_malloc' is cleared,
// so the caller can later free with the matching mechanism.
template <class E, MEMFLAGS F>
char* ArrayAllocator<E, F>::allocate_inner(size_t &size, bool &use_malloc) {
  char* addr = NULL;

  if (use_malloc) {
    // NOTE(review): the NULL fallback below assumes this AllocateHeap
    // overload returns NULL on failure rather than exiting the VM --
    // confirm its alloc-fail strategy.
    addr = AllocateHeap(size, F);
    if (addr == NULL && size >= (size_t)os::vm_allocation_granularity()) {
      // malloc failed let's try with mmap instead
      use_malloc = false;
    } else {
      // Either malloc succeeded, or the request is smaller than one granule
      // and not worth an mmap fallback; NULL may be returned in that case.
      return addr;
    }
  }

  int alignment = os::vm_allocation_granularity();
  size = align_size_up(size, alignment);

  addr = os::reserve_memory(size, NULL, alignment, F);
  if (addr == NULL) {
    vm_exit_out_of_memory(size, OOM_MMAP_ERROR, "Allocator (reserve)");
  }

  // Commit the reserved range; exits the VM on commit failure.
  os::commit_memory_or_exit(addr, size, !ExecMem, "Allocator (commit)");
  return addr;
}
178
179 template <class E, MEMFLAGS F>
180 E* ArrayAllocator<E, F>::allocate(size_t length) {
181 assert(_addr == NULL, "Already in use");
182
183 _size = sizeof(E) * length;
184 _use_malloc = should_use_malloc(_size);
185 _addr = allocate_inner(_size, _use_malloc);
186
187 return (E*)_addr;
188 }
189
190 template <class E, MEMFLAGS F>
191 E* ArrayAllocator<E, F>::reallocate(size_t new_length) {
192 size_t new_size = sizeof(E) * new_length;
193 bool use_malloc = should_use_malloc(new_size);
194 char* new_addr = allocate_inner(new_size, use_malloc);
195
196 memcpy(new_addr, _addr, MIN2(new_size, _size));
197
198 free();
199 _size = new_size;
200 _use_malloc = use_malloc;
201 _addr = new_addr;
202 return (E*)new_addr;
203 }
204
205 template<class E, MEMFLAGS F>
206 void ArrayAllocator<E, F>::free() {
207 if (_addr != NULL) {
208 if (_use_malloc) {
209 FreeHeap(_addr);
210 } else {
211 os::release_memory(_addr, _size);
212 }
213 _addr = NULL;
214 }
215 }
216
217 #endif // SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
|
134
// Array form delegates to the scalar nothrow operator new, forwarding the
// caller-supplied NativeCallStack so the allocation site is recorded.
template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
  const std::nothrow_t& nothrow_constant, const NativeCallStack& stack) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, stack);
}
139
// Array form without an explicit stack: record the immediate caller's PC
// (CALLER_PC) as the allocation site and delegate to the scalar form.
template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
  const std::nothrow_t& nothrow_constant) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
}
144
// Matching delete: return the block to the C heap via FreeHeap.
template <MEMFLAGS F> void CHeapObj<F>::operator delete(void* p){
  FreeHeap(p);
}
148
// Array delete mirrors scalar delete: both release through FreeHeap.
template <MEMFLAGS F> void CHeapObj<F>::operator delete [](void* p){
  FreeHeap(p);
}
152
// Bytes needed for 'length' elements of E on the malloc path.
// NOTE(review): length * sizeof(E) can overflow size_t for huge lengths;
// presumably callers pass sane lengths -- verify at call sites.
template <class E, MEMFLAGS F>
size_t ArrayAllocator<E, F>::size_for_malloc(size_t length) {
  return length * sizeof(E);
}
157
158 template <class E, MEMFLAGS F>
159 size_t ArrayAllocator<E, F>::size_for_mmap(size_t length) {
160 size_t size = length * sizeof(E);
161 int alignment = os::vm_allocation_granularity();
162 return align_size_up(size, alignment);
163 }
164
// Small requests go through malloc; requests at or above
// ArrayAllocatorMallocLimit bytes use mmap instead.
template <class E, MEMFLAGS F>
bool ArrayAllocator<E, F>::should_use_malloc(size_t length) {
  return size_for_malloc(length) < ArrayAllocatorMallocLimit;
}
169
// Allocates 'length' elements of E on the C heap, tagged with MEMFLAGS F.
// NOTE(review): returns whatever AllocateHeap returns -- whether a failure
// here exits the VM or yields NULL depends on that overload's fail strategy.
template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::allocate_malloc(size_t length) {
  return (E*)AllocateHeap(size_for_malloc(length), F);
}
174
175 template <class E, MEMFLAGS F>
176 E* ArrayAllocator<E, F>::allocate_mmap(size_t length) {
177 size_t size = size_for_mmap(length);
178 int alignment = os::vm_allocation_granularity();
179
180 char* addr = os::reserve_memory(size, NULL, alignment, F);
181 if (addr == NULL) {
182 vm_exit_out_of_memory(size, OOM_MMAP_ERROR, "Allocator (reserve)");
183 }
184
185 os::commit_memory_or_exit(addr, size, !ExecMem, "Allocator (commit)");
186
187 return (E*)addr;
188 }
189
190 template <class E, MEMFLAGS F>
191 E* ArrayAllocator<E, F>::allocate(size_t length) {
192 if (should_use_malloc(length)) {
193 return allocate_malloc(length);
194 }
195
196 return allocate_mmap(length);
197 }
198
199 template <class E, MEMFLAGS F>
200 E* ArrayAllocator<E, F>::reallocate(E* old_addr, size_t old_length, size_t new_length) {
201 E* new_addr = (new_length > 0)
202 ? allocate(new_length)
203 : NULL;
204
205 if (new_addr != NULL && old_addr != NULL) {
206 memcpy(new_addr, old_addr, MIN2(old_length, new_length) * sizeof(E));
207 }
208
209 if (old_addr != NULL) {
210 free(old_addr, old_length);
211 }
212
213 return new_addr;
214 }
215
// Releases a malloc-backed array; FreeHeap does not need the length.
template<class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free_malloc(E* addr, size_t /*length*/) {
  FreeHeap(addr);
}
220
// Releases an mmap-backed array. The size is recomputed with size_for_mmap
// (the same rounding used at allocation) so the entire reserved range is
// released. 'result' is only consumed by the assert.
template<class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free_mmap(E* addr, size_t length) {
  bool result = os::release_memory((char*)addr, size_for_mmap(length));
  assert(result, "Failed to release memory");
}
226
227 template<class E, MEMFLAGS F>
228 void ArrayAllocator<E, F>::free(E* addr, size_t length) {
229 if (addr != NULL) {
230 if (should_use_malloc(length)) {
231 free_malloc(addr, length);
232 } else {
233 free_mmap(addr, length);
234 }
235 }
236 }
237
238 #endif // SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
|