    // The alignment caused the number of bytes to wrap. An expand_by(0) will
    // return true with the implication that an expansion was done when it
    // was not. A call to expand implies a best effort to expand by "bytes"
    // but not a guarantee. Align down to give a best effort. This is likely
    // the most that the generation can expand since it has some capacity to
    // start with.
    aligned_bytes = align_size_down(bytes, alignment);
  }

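  // Attempt the preferred (larger) expansion first; if that fails, retry
  // with the minimum aligned request, and as a last resort try expanding
  // to the remainder of the reserved space.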
  bool success = false;
  if (aligned_expand_bytes > aligned_bytes) {
    success = expand_by(aligned_expand_bytes);
  }
  if (!success) {
    success = expand_by(aligned_bytes);
  }
  if (!success) {
    success = expand_to_reserved();
  }

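  // A GC was requested while JNI critical sections held the GCLocker;
  // note in the gc log that the heap was expanded instead of collected.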
  if (success && GCLocker::is_active_and_needs_gc()) {
    log_debug(gc)("Garbage collection disabled, expanded heap instead");
  }
}

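// Expand the committed portion of the virtual space by "bytes". The caller
// must hold ExpandHeap_lock and either hold Heap_lock or be at a safepoint
// (asserted below). A zero-byte request trivially succeeds.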
bool PSOldGen::expand_by(size_t bytes) {
  assert_lock_strong(ExpandHeap_lock);
  assert_locked_or_safepoint(Heap_lock);
  if (bytes == 0) {
    return true; // That's what virtual_space()->expand_by(0) would return
  }
  bool result = virtual_space()->expand_by(bytes);
  if (result) {
    if (ZapUnusedHeapArea) {
      // We need to mangle the newly expanded area. The MemRegion spans
      // end -> new_end; we assume that top -> end is already mangled.
      // Do the mangling before post_resize() is called because
      // the space is available for allocation after post_resize().
      HeapWord* const virtual_space_high = (HeapWord*) virtual_space()->high();
      assert(object_space()->end() < virtual_space_high,
             "Should be true before post_resize()");