24
25 #include "precompiled.hpp"
26 #include "gc/parallel/mutableNUMASpace.hpp"
27 #include "gc/parallel/parallelScavengeHeap.hpp"
28 #include "gc/parallel/psScavenge.hpp"
29 #include "gc/parallel/psYoungGen.hpp"
30 #include "gc/shared/gcUtil.hpp"
31 #include "gc/shared/genArguments.hpp"
32 #include "gc/shared/spaceDecorator.inline.hpp"
33 #include "logging/log.hpp"
34 #include "oops/oop.inline.hpp"
35 #include "runtime/java.hpp"
36 #include "utilities/align.hpp"
37
// Construct the parallel-scavenge young generation over the given reserved
// space. initial_size/min_size/max_size are committed-size bounds in bytes;
// all pointer members start NULL and are created in initialize() below.
PSYoungGen::PSYoungGen(ReservedSpace rs, size_t initial_size, size_t min_size, size_t max_size) :
  _reserved(),
  _virtual_space(NULL),
  _eden_space(NULL),
  _from_space(NULL),
  _to_space(NULL),
  _init_gen_size(initial_size),
  _min_gen_size(min_size),
  _max_gen_size(max_size),
  _gen_counters(NULL),
  _eden_counters(NULL),
  _from_counters(NULL),
  _to_counters(NULL)
{
  // GenAlignment: shared generation alignment from gc/shared/genArguments.hpp.
  initialize(rs, GenAlignment);
}
54
55 void PSYoungGen::initialize_virtual_space(ReservedSpace rs, size_t alignment) {
56 assert(_init_gen_size != 0, "Should have a finite size");
57 _virtual_space = new PSVirtualSpace(rs, alignment);
58 if (!virtual_space()->expand_by(_init_gen_size)) {
59 vm_exit_during_initialization("Could not reserve enough space for "
60 "object heap");
61 }
62 }
63
// Two-phase setup: first commit the initial portion of the reserved space,
// then carve it into eden/from/to spaces and create performance counters.
void PSYoungGen::initialize(ReservedSpace rs, size_t alignment) {
  initialize_virtual_space(rs, alignment);
  initialize_work();
}
68
69 void PSYoungGen::initialize_work() {
70
71 _reserved = MemRegion((HeapWord*)virtual_space()->low_boundary(),
72 (HeapWord*)virtual_space()->high_boundary());
73
74 MemRegion cmr((HeapWord*)virtual_space()->low(),
75 (HeapWord*)virtual_space()->high());
76 ParallelScavengeHeap::heap()->card_table()->resize_covered_region(cmr);
77
78 if (ZapUnusedHeapArea) {
79 // Mangle newly committed space immediately because it
80 // can be done here more simply that after the new
81 // spaces have been computed.
82 SpaceMangler::mangle_region(cmr);
83 }
84
85 if (UseNUMA) {
86 _eden_space = new MutableNUMASpace(virtual_space()->alignment());
87 } else {
88 _eden_space = new MutableSpace(virtual_space()->alignment());
89 }
90 _from_space = new MutableSpace(virtual_space()->alignment());
91 _to_space = new MutableSpace(virtual_space()->alignment());
92
93 // Generation Counters - generation 0, 3 subspaces
94 _gen_counters = new PSGenerationCounters("new", 0, 3, _min_gen_size,
95 _max_gen_size, _virtual_space);
96
97 // Compute maximum space sizes for performance counters
98 size_t alignment = SpaceAlignment;
99 size_t size = virtual_space()->reserved_size();
100
101 size_t max_survivor_size;
102 size_t max_eden_size;
103
104 if (UseAdaptiveSizePolicy) {
105 max_survivor_size = size / MinSurvivorRatio;
106
107 // round the survivor space size down to the nearest alignment
108 // and make sure its size is greater than 0.
109 max_survivor_size = align_down(max_survivor_size, alignment);
110 max_survivor_size = MAX2(max_survivor_size, alignment);
111
112 // set the maximum size of eden to be the size of the young gen
113 // less two times the minimum survivor size. The minimum survivor
114 // size for UseAdaptiveSizePolicy is one alignment.
115 max_eden_size = size - 2 * alignment;
241 assert(to_top <= virtual_space()->high(), "to top");
242
243 virtual_space()->verify();
244 }
245 #endif
246
247 void PSYoungGen::resize(size_t eden_size, size_t survivor_size) {
248 // Resize the generation if needed. If the generation resize
249 // reports false, do not attempt to resize the spaces.
250 if (resize_generation(eden_size, survivor_size)) {
251 // Then we lay out the spaces inside the generation
252 resize_spaces(eden_size, survivor_size);
253
254 space_invariants();
255
256 log_trace(gc, ergo)("Young generation size: "
257 "desired eden: " SIZE_FORMAT " survivor: " SIZE_FORMAT
258 " used: " SIZE_FORMAT " capacity: " SIZE_FORMAT
259 " gen limits: " SIZE_FORMAT " / " SIZE_FORMAT,
260 eden_size, survivor_size, used_in_bytes(), capacity_in_bytes(),
261 _max_gen_size, min_gen_size());
262 }
263 }
264
265
266 bool PSYoungGen::resize_generation(size_t eden_size, size_t survivor_size) {
267 const size_t alignment = virtual_space()->alignment();
268 size_t orig_size = virtual_space()->committed_size();
269 bool size_changed = false;
270
271 // There used to be this guarantee there.
272 // guarantee ((eden_size + 2*survivor_size) <= _max_gen_size, "incorrect input arguments");
273 // Code below forces this requirement. In addition the desired eden
274 // size and desired survivor sizes are desired goals and may
275 // exceed the total generation size.
276
277 assert(min_gen_size() <= orig_size && orig_size <= max_size(), "just checking");
278
279 // Adjust new generation size
280 const size_t eden_plus_survivors =
281 align_up(eden_size + 2 * survivor_size, alignment);
282 size_t desired_size = clamp(eden_plus_survivors, min_gen_size(), max_size());
283 assert(desired_size <= max_size(), "just checking");
284
285 if (desired_size > orig_size) {
286 // Grow the generation
287 size_t change = desired_size - orig_size;
288 assert(change % alignment == 0, "just checking");
289 HeapWord* prev_high = (HeapWord*) virtual_space()->high();
290 if (!virtual_space()->expand_by(change)) {
291 return false; // Error if we fail to resize!
292 }
293 if (ZapUnusedHeapArea) {
294 // Mangle newly committed space immediately because it
295 // can be done here more simply that after the new
296 // spaces have been computed.
297 HeapWord* new_high = (HeapWord*) virtual_space()->high();
298 MemRegion mangle_region(prev_high, new_high);
299 SpaceMangler::mangle_region(mangle_region);
300 }
301 size_changed = true;
302 } else if (desired_size < orig_size) {
303 size_t desired_change = orig_size - desired_size;
304 assert(desired_change % alignment == 0, "just checking");
305
306 desired_change = limit_gen_shrink(desired_change);
307
308 if (desired_change > 0) {
309 virtual_space()->shrink_by(desired_change);
310 reset_survivors_after_shrink();
311
312 size_changed = true;
313 }
314 } else {
315 if (orig_size == gen_size_limit()) {
316 log_trace(gc)("PSYoung generation size at maximum: " SIZE_FORMAT "K", orig_size/K);
317 } else if (orig_size == min_gen_size()) {
318 log_trace(gc)("PSYoung generation size at minium: " SIZE_FORMAT "K", orig_size/K);
319 }
320 }
321
322 if (size_changed) {
323 post_resize();
324 log_trace(gc)("PSYoung generation size changed: " SIZE_FORMAT "K->" SIZE_FORMAT "K",
325 orig_size/K, virtual_space()->committed_size()/K);
326 }
327
328 guarantee(eden_plus_survivors <= virtual_space()->committed_size() ||
329 virtual_space()->committed_size() == max_size(), "Sanity");
330
331 return true;
332 }
333
334 #ifndef PRODUCT
335 // In the numa case eden is not mangled so a survivor space
336 // moving into a region previously occupied by a survivor
337 // may find an unmangled region. Also in the PS case eden
338 // to-space and from-space may not touch (i.e., there may be
339 // gaps between them due to movement while resizing the
340 // spaces). Those gaps must be mangled.
341 void PSYoungGen::mangle_survivors(MutableSpace* s1,
342 MemRegion s1MR,
343 MutableSpace* s2,
344 MemRegion s2MR) {
345 // Check eden and gap between eden and from-space, in deciding
346 // what to mangle in from-space. Check the gap between from-space
347 // and to-space when deciding what to mangle.
348 //
349 // +--------+ +----+ +---+
|
24
25 #include "precompiled.hpp"
26 #include "gc/parallel/mutableNUMASpace.hpp"
27 #include "gc/parallel/parallelScavengeHeap.hpp"
28 #include "gc/parallel/psScavenge.hpp"
29 #include "gc/parallel/psYoungGen.hpp"
30 #include "gc/shared/gcUtil.hpp"
31 #include "gc/shared/genArguments.hpp"
32 #include "gc/shared/spaceDecorator.inline.hpp"
33 #include "logging/log.hpp"
34 #include "oops/oop.inline.hpp"
35 #include "runtime/java.hpp"
36 #include "utilities/align.hpp"
37
// Construct the parallel-scavenge young generation over the given reserved
// space. min_size/max_size are committed-size bounds in bytes; initial_size
// is forwarded to initialize() rather than stored, since it is only needed
// for the initial commit. All pointer members start NULL and are created in
// initialize() below.
PSYoungGen::PSYoungGen(ReservedSpace rs, size_t initial_size, size_t min_size, size_t max_size) :
  _reserved(),
  _virtual_space(NULL),
  _eden_space(NULL),
  _from_space(NULL),
  _to_space(NULL),
  _min_gen_size(min_size),
  _max_gen_size(max_size),
  _gen_counters(NULL),
  _eden_counters(NULL),
  _from_counters(NULL),
  _to_counters(NULL)
{
  // GenAlignment: shared generation alignment from gc/shared/genArguments.hpp.
  initialize(rs, initial_size, GenAlignment);
}
53
54 void PSYoungGen::initialize_virtual_space(ReservedSpace rs,
55 size_t initial_size,
56 size_t alignment) {
57 assert(initial_size != 0, "Should have a finite size");
58 _virtual_space = new PSVirtualSpace(rs, alignment);
59 if (!virtual_space()->expand_by(initial_size)) {
60 vm_exit_during_initialization("Could not reserve enough space for object heap");
61 }
62 }
63
// Two-phase setup: first commit initial_size bytes of the reserved space,
// then carve it into eden/from/to spaces and create performance counters.
void PSYoungGen::initialize(ReservedSpace rs, size_t initial_size, size_t alignment) {
  initialize_virtual_space(rs, initial_size, alignment);
  initialize_work();
}
68
69 void PSYoungGen::initialize_work() {
70
71 _reserved = MemRegion((HeapWord*)virtual_space()->low_boundary(),
72 (HeapWord*)virtual_space()->high_boundary());
73 assert(_reserved.byte_size() == max_gen_size(), "invariant");
74
75 MemRegion cmr((HeapWord*)virtual_space()->low(),
76 (HeapWord*)virtual_space()->high());
77 ParallelScavengeHeap::heap()->card_table()->resize_covered_region(cmr);
78
79 if (ZapUnusedHeapArea) {
80 // Mangle newly committed space immediately because it
81 // can be done here more simply that after the new
82 // spaces have been computed.
83 SpaceMangler::mangle_region(cmr);
84 }
85
86 if (UseNUMA) {
87 _eden_space = new MutableNUMASpace(virtual_space()->alignment());
88 } else {
89 _eden_space = new MutableSpace(virtual_space()->alignment());
90 }
91 _from_space = new MutableSpace(virtual_space()->alignment());
92 _to_space = new MutableSpace(virtual_space()->alignment());
93
94 // Generation Counters - generation 0, 3 subspaces
95 _gen_counters = new PSGenerationCounters("new", 0, 3, min_gen_size(),
96 max_gen_size(), virtual_space());
97
98 // Compute maximum space sizes for performance counters
99 size_t alignment = SpaceAlignment;
100 size_t size = virtual_space()->reserved_size();
101
102 size_t max_survivor_size;
103 size_t max_eden_size;
104
105 if (UseAdaptiveSizePolicy) {
106 max_survivor_size = size / MinSurvivorRatio;
107
108 // round the survivor space size down to the nearest alignment
109 // and make sure its size is greater than 0.
110 max_survivor_size = align_down(max_survivor_size, alignment);
111 max_survivor_size = MAX2(max_survivor_size, alignment);
112
113 // set the maximum size of eden to be the size of the young gen
114 // less two times the minimum survivor size. The minimum survivor
115 // size for UseAdaptiveSizePolicy is one alignment.
116 max_eden_size = size - 2 * alignment;
242 assert(to_top <= virtual_space()->high(), "to top");
243
244 virtual_space()->verify();
245 }
246 #endif
247
248 void PSYoungGen::resize(size_t eden_size, size_t survivor_size) {
249 // Resize the generation if needed. If the generation resize
250 // reports false, do not attempt to resize the spaces.
251 if (resize_generation(eden_size, survivor_size)) {
252 // Then we lay out the spaces inside the generation
253 resize_spaces(eden_size, survivor_size);
254
255 space_invariants();
256
257 log_trace(gc, ergo)("Young generation size: "
258 "desired eden: " SIZE_FORMAT " survivor: " SIZE_FORMAT
259 " used: " SIZE_FORMAT " capacity: " SIZE_FORMAT
260 " gen limits: " SIZE_FORMAT " / " SIZE_FORMAT,
261 eden_size, survivor_size, used_in_bytes(), capacity_in_bytes(),
262 max_gen_size(), min_gen_size());
263 }
264 }
265
266
267 bool PSYoungGen::resize_generation(size_t eden_size, size_t survivor_size) {
268 const size_t alignment = virtual_space()->alignment();
269 size_t orig_size = virtual_space()->committed_size();
270 bool size_changed = false;
271
272 // There used to be this guarantee there.
273 // guarantee ((eden_size + 2*survivor_size) <= max_gen_size(), "incorrect input arguments");
274 // Code below forces this requirement. In addition the desired eden
275 // size and desired survivor sizes are desired goals and may
276 // exceed the total generation size.
277
278 assert(min_gen_size() <= orig_size && orig_size <= max_gen_size(), "just checking");
279
280 // Adjust new generation size
281 const size_t eden_plus_survivors =
282 align_up(eden_size + 2 * survivor_size, alignment);
283 size_t desired_size = clamp(eden_plus_survivors, min_gen_size(), max_gen_size());
284 assert(desired_size <= max_gen_size(), "just checking");
285
286 if (desired_size > orig_size) {
287 // Grow the generation
288 size_t change = desired_size - orig_size;
289 assert(change % alignment == 0, "just checking");
290 HeapWord* prev_high = (HeapWord*) virtual_space()->high();
291 if (!virtual_space()->expand_by(change)) {
292 return false; // Error if we fail to resize!
293 }
294 if (ZapUnusedHeapArea) {
295 // Mangle newly committed space immediately because it
296 // can be done here more simply that after the new
297 // spaces have been computed.
298 HeapWord* new_high = (HeapWord*) virtual_space()->high();
299 MemRegion mangle_region(prev_high, new_high);
300 SpaceMangler::mangle_region(mangle_region);
301 }
302 size_changed = true;
303 } else if (desired_size < orig_size) {
304 size_t desired_change = orig_size - desired_size;
305 assert(desired_change % alignment == 0, "just checking");
306
307 desired_change = limit_gen_shrink(desired_change);
308
309 if (desired_change > 0) {
310 virtual_space()->shrink_by(desired_change);
311 reset_survivors_after_shrink();
312
313 size_changed = true;
314 }
315 } else {
316 if (orig_size == max_gen_size()) {
317 log_trace(gc)("PSYoung generation size at maximum: " SIZE_FORMAT "K", orig_size/K);
318 } else if (orig_size == min_gen_size()) {
319 log_trace(gc)("PSYoung generation size at minium: " SIZE_FORMAT "K", orig_size/K);
320 }
321 }
322
323 if (size_changed) {
324 post_resize();
325 log_trace(gc)("PSYoung generation size changed: " SIZE_FORMAT "K->" SIZE_FORMAT "K",
326 orig_size/K, virtual_space()->committed_size()/K);
327 }
328
329 guarantee(eden_plus_survivors <= virtual_space()->committed_size() ||
330 virtual_space()->committed_size() == max_gen_size(), "Sanity");
331
332 return true;
333 }
334
335 #ifndef PRODUCT
336 // In the numa case eden is not mangled so a survivor space
337 // moving into a region previously occupied by a survivor
338 // may find an unmangled region. Also in the PS case eden
339 // to-space and from-space may not touch (i.e., there may be
340 // gaps between them due to movement while resizing the
341 // spaces). Those gaps must be mangled.
342 void PSYoungGen::mangle_survivors(MutableSpace* s1,
343 MemRegion s1MR,
344 MutableSpace* s2,
345 MemRegion s2MR) {
346 // Check eden and gap between eden and from-space, in deciding
347 // what to mangle in from-space. Check the gap between from-space
348 // and to-space when deciding what to mangle.
349 //
350 // +--------+ +----+ +---+
|