
src/hotspot/share/gc/parallel/psYoungGen.cpp

 771   return 0;
 772 }
 773 
 774 size_t PSYoungGen::available_for_contraction() {
 775   ShouldNotReachHere();
 776   return 0;
 777 }
 778 
 779 size_t PSYoungGen::available_to_min_gen() {
 780   assert(virtual_space()->committed_size() >= min_gen_size(), "Invariant");
 781   return virtual_space()->committed_size() - min_gen_size();
 782 }
 783 
 784 // This method assumes that from-space has live data and that
 785 // any shrinkage of the young gen is limited by location of
 786 // from-space.
 787 size_t PSYoungGen::available_to_live() {
 788   size_t delta_in_survivor = 0;
 789   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
 790   const size_t space_alignment = heap->space_alignment();
 791   const size_t gen_alignment = heap->generation_alignment();
 792 
 793   MutableSpace* space_shrinking = NULL;
 794   if (from_space()->end() > to_space()->end()) {
 795     space_shrinking = from_space();
 796   } else {
 797     space_shrinking = to_space();
 798   }
 799 
 800   // Include any space that is committed but not included in
 801   // the survivor spaces.
 802   assert(((HeapWord*)virtual_space()->high()) >= space_shrinking->end(),
 803     "Survivor space beyond high end");
 804   size_t unused_committed = pointer_delta(virtual_space()->high(),
 805     space_shrinking->end(), sizeof(char));
 806 
 807   if (space_shrinking->is_empty()) {
 808     // Don't let the space shrink to 0
 809     assert(space_shrinking->capacity_in_bytes() >= space_alignment,
 810       "Space is too small");
 811     delta_in_survivor = space_shrinking->capacity_in_bytes() - space_alignment;




 771   return 0;
 772 }
 773 
 774 size_t PSYoungGen::available_for_contraction() {
 775   ShouldNotReachHere();
 776   return 0;
 777 }
 778 
 779 size_t PSYoungGen::available_to_min_gen() {
 780   assert(virtual_space()->committed_size() >= min_gen_size(), "Invariant");
 781   return virtual_space()->committed_size() - min_gen_size();
 782 }
 783 
 784 // This method assumes that from-space has live data and that
 785 // any shrinkage of the young gen is limited by location of
 786 // from-space.
 787 size_t PSYoungGen::available_to_live() {
 788   size_t delta_in_survivor = 0;
 789   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
 790   const size_t space_alignment = heap->space_alignment();
 791   const size_t gen_alignment = heap->gen_alignment();
 792 
 793   MutableSpace* space_shrinking = NULL;
 794   if (from_space()->end() > to_space()->end()) {
 795     space_shrinking = from_space();
 796   } else {
 797     space_shrinking = to_space();
 798   }
 799 
 800   // Include any space that is committed but not included in
 801   // the survivor spaces.
 802   assert(((HeapWord*)virtual_space()->high()) >= space_shrinking->end(),
 803     "Survivor space beyond high end");
 804   size_t unused_committed = pointer_delta(virtual_space()->high(),
 805     space_shrinking->end(), sizeof(char));
 806 
 807   if (space_shrinking->is_empty()) {
 808     // Don't let the space shrink to 0
 809     assert(space_shrinking->capacity_in_bytes() >= space_alignment,
 810       "Space is too small");
 811     delta_in_survivor = space_shrinking->capacity_in_bytes() - space_alignment;
