416 }
417
418 // Figure out how much to take from eden. Include the average amount promoted
419 // in the total; otherwise the next young gen GC will simply bail out to a
420 // full GC.
421 const size_t alignment = old_gen->virtual_space()->alignment();
422 const size_t eden_used = eden_space->used_in_bytes();
423 const size_t promoted = (size_t)size_policy->avg_promoted()->padded_average();
424 const size_t absorb_size = align_up(eden_used + promoted, alignment);
425 const size_t eden_capacity = eden_space->capacity_in_bytes();
426
427 if (absorb_size >= eden_capacity) {
428 return false; // Must leave some space in eden.
429 }
430
431 const size_t new_young_size = young_gen->capacity_in_bytes() - absorb_size;
432 if (new_young_size < young_gen->min_gen_size()) {
433 return false; // Respect young gen minimum size.
434 }
435
436 log_trace(gc, ergo, heap)(" absorbing " SIZE_FORMAT "K: "
437 "eden " SIZE_FORMAT "K->" SIZE_FORMAT "K "
438 "from " SIZE_FORMAT "K, to " SIZE_FORMAT "K "
439 "young_gen " SIZE_FORMAT "K->" SIZE_FORMAT "K ",
440 absorb_size / K,
441 eden_capacity / K, (eden_capacity - absorb_size) / K,
442 young_gen->from_space()->used_in_bytes() / K,
443 young_gen->to_space()->used_in_bytes() / K,
444 young_gen->capacity_in_bytes() / K, new_young_size / K);
445
446 // Fill the unused part of the old gen.
447 MutableSpace* const old_space = old_gen->object_space();
448 HeapWord* const unused_start = old_space->top();
449 size_t const unused_words = pointer_delta(old_space->end(), unused_start); // free words at the top of old gen
450
451 if (unused_words > 0) {
452 if (unused_words < CollectedHeap::min_fill_size()) {
453 return false; // If the old gen cannot be filled, must give up.
454 }
455 CollectedHeap::fill_with_objects(unused_start, unused_words); // plug the gap with filler objects
456 }
|
416 }
417
418 // Figure out how much to take from eden. Include the average amount promoted
419 // in the total; otherwise the next young gen GC will simply bail out to a
420 // full GC.
421 const size_t alignment = old_gen->virtual_space()->alignment();
422 const size_t eden_used = eden_space->used_in_bytes();
423 const size_t promoted = (size_t)size_policy->avg_promoted()->padded_average();
424 const size_t absorb_size = align_up(eden_used + promoted, alignment); // bytes to absorb, padded up to the old gen's alignment
425 const size_t eden_capacity = eden_space->capacity_in_bytes();
426
427 if (absorb_size >= eden_capacity) {
428 return false; // Must leave some space in eden.
429 }
430
431 const size_t new_young_size = young_gen->capacity_in_bytes() - absorb_size;
432 if (new_young_size < young_gen->min_gen_size()) {
433 return false; // Respect young gen minimum size.
434 }
435
436 log_trace(gc, ergo, heap)(" absorbing " SIZE_FORMAT "K: "
437 "eden " SIZE_FORMAT "K->" SIZE_FORMAT "K "
438 "from " SIZE_FORMAT "K, to " SIZE_FORMAT "K "
439 "young_gen " SIZE_FORMAT "K->" SIZE_FORMAT "K ",
440 absorb_size / K,
441 eden_capacity / K, (eden_capacity - absorb_size) / K,
442 young_gen->from_space()->used_in_bytes() / K,
443 young_gen->to_space()->used_in_bytes() / K,
444 young_gen->capacity_in_bytes() / K, new_young_size / K);
445
446 // Fill the unused part of the old gen.
447 MutableSpace* const old_space = old_gen->object_space();
448 HeapWord* const unused_start = old_space->top();
449 size_t const unused_words = pointer_delta(old_space->end(), unused_start); // free words remaining at the top of old gen
450
451 if (unused_words > 0) {
452 if (unused_words < CollectedHeap::min_fill_size()) { // gap is smaller than the smallest filler object
453 return false; // If the old gen cannot be filled, must give up.
454 }
455 CollectedHeap::fill_with_objects(unused_start, unused_words); // plug the gap with filler objects
456 }
|