          --- old/src/share/vm/gc_implementation/parallelScavenge/parallelScavengeHeap.cpp
          +++ new/src/share/vm/gc_implementation/parallelScavenge/parallelScavengeHeap.cpp
[ 124 lines elided ]
 125  125  
 126  126    pg_min_size = align_size_up(pg_min_size, pg_align);
 127  127    pg_max_size = align_size_up(pg_max_size, pg_align);
 128  128    size_t pg_cur_size = pg_min_size;
 129  129  
 130  130    trace_gen_sizes("ps heap rnd",
 131  131                    pg_min_size, pg_max_size,
 132  132                    og_min_size, og_max_size,
 133  133                    yg_min_size, yg_max_size);
 134  134  
 135      -  const size_t total_reserved = pg_max_size + og_max_size + yg_max_size;
      135 +  size_t total_reserved = 0;
      136 +
      137 +  total_reserved = add_and_check_overflow(total_reserved, pg_max_size);
      138 +  total_reserved = add_and_check_overflow(total_reserved, og_max_size);
      139 +  total_reserved = add_and_check_overflow(total_reserved, yg_max_size);
      140 +
 136  141    char* addr = Universe::preferred_heap_base(total_reserved, Universe::UnscaledNarrowOop);
 137  142  
 138  143    // The main part of the heap (old gen + young gen) can often use a larger page
 139  144    // size than is needed or wanted for the perm gen.  Use the "compound
 140  145    // alignment" ReservedSpace ctor to avoid having to use the same page size for
 141  146    // all gens.
 142  147  
 143  148    ReservedHeapSpace heap_rs(pg_max_size, pg_align, og_max_size + yg_max_size,
 144  149                              og_align, addr);
 145  150  
[ 910 lines elided ]
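The change replaces the single unchecked sum of the maximum permanent, old, and young generation sizes with a chain of calls to add_and_check_overflow, so an overly large size request fails fast instead of silently wrapping around and reserving a too-small heap. The helper itself is defined elsewhere in this changeset; the following is only a minimal sketch of what such an overflow-checked size_t addition might look like, assuming it aborts VM initialization on wrap-around (the error path and wording are assumptions, not the actual implementation in the patch):

    // Sketch only: add two size_t values and detect unsigned wrap-around.
    // The real helper in this changeset may report the failure differently.
    static size_t add_and_check_overflow(size_t val, size_t addend) {
      size_t result = val + addend;
      if (result < val) {
        // The sum wrapped past SIZE_MAX; refuse to continue initialization.
        vm_exit_during_initialization("Requested heap sizes overflow size_t");
      }
      return result;
    }

With this shape, total_reserved is built up one generation at a time, and any intermediate sum that exceeds SIZE_MAX is caught before the value reaches Universe::preferred_heap_base and the ReservedHeapSpace constructor.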