2159 // resizing the young generation complicates the code because the
2160 // resizing of the old generation may have moved the boundary
2161 // between the young generation and the old generation. Let the
2162 // young generation resizing happen at the minor collections.
2163 }
2164 if (PrintAdaptiveSizePolicy) {
2165 gclog_or_tty->print_cr("AdaptiveSizeStop: collection: %d ",
2166 heap->total_collections());
2167 }
2168 }
2169
2170 if (UsePerfData) {
2171 PSGCAdaptivePolicyCounters* const counters = heap->gc_policy_counters();
2172 counters->update_counters();
2173 counters->update_old_capacity(old_gen->capacity_in_bytes());
2174 counters->update_young_capacity(young_gen->capacity_in_bytes());
2175 }
2176
2177 heap->resize_all_tlabs();
2178
2179 // Resize the metaspace capacity after a collection
2180 MetaspaceGC::compute_new_size();
2181
2182 if (TraceGen1Time) accumulated_time()->stop();
2183
2184 if (PrintGC) {
2185 if (PrintGCDetails) {
2186 // No GC timestamp here. This is after GC so it would be confusing.
2187 young_gen->print_used_change(pre_gc_values.young_gen_used());
2188 old_gen->print_used_change(pre_gc_values.old_gen_used());
2189 heap->print_heap_change(pre_gc_values.heap_used());
2190 MetaspaceAux::print_metaspace_change(pre_gc_values.metadata_used());
2191 } else {
2192 heap->print_heap_change(pre_gc_values.heap_used());
2193 }
2194 }
2195
2196 // Track memory usage and detect low memory
2197 MemoryService::track_memory_usage();
2198 heap->update_counters();
2199 gc_task_manager()->release_idle_workers();
3289 // The destination of the first live object that starts in the region is one
3290 // past the end of the partial object entering the region (if any).
3291 HeapWord* const dest_addr = sd.partial_obj_end(dp_region);
3292 HeapWord* const new_top = _space_info[space_id].new_top();
3293 assert(new_top >= dest_addr, "bad new_top value");
3294 const size_t words = pointer_delta(new_top, dest_addr);
3295
3296 if (words > 0) {
3297 ObjectStartArray* start_array = _space_info[space_id].start_array();
3298 MoveAndUpdateClosure closure(bitmap, cm, start_array, dest_addr, words);
3299
3300 ParMarkBitMap::IterationStatus status;
3301 status = bitmap->iterate(&closure, dest_addr, end_addr);
3302 assert(status == ParMarkBitMap::full, "iteration not complete");
3303 assert(bitmap->find_obj_beg(closure.source(), end_addr) == end_addr,
3304 "live objects skipped because closure is full");
3305 }
3306 }
3307
3308 jlong PSParallelCompact::millis_since_last_gc() {
3309 // We need a monotonically non-decreasing time in ms; os::javaTimeMillis()
3310 // does not guarantee monotonicity, so derive millis from os::javaTimeNanos().
3311 jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
3312 jlong ret_val = now - _time_of_last_gc;
3313 // XXX See note in genCollectedHeap::millis_since_last_gc().
3314 if (ret_val < 0) {
3315 NOT_PRODUCT(warning("time warp: "INT64_FORMAT, ret_val);)
3316 return 0;
3317 }
3318 return ret_val;
3319 }
3320
3321 void PSParallelCompact::reset_millis_since_last_gc() {
3322 // We need a monotonically non-decreasing time in ms; os::javaTimeMillis()
3323 // does not guarantee monotonicity, so record the time via os::javaTimeNanos().
3324 _time_of_last_gc = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
3325 }
3326
3327 ParMarkBitMap::IterationStatus MoveAndUpdateClosure::copy_until_full()
3328 {
3329 if (source() != destination()) {
3330 DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
3331 Copy::aligned_conjoint_words(source(), destination(), words_remaining()); // bulk-move every word still pending
3332 }
3333 update_state(words_remaining()); // consume the full remainder; closure becomes full
3334 assert(is_full(), "sanity");
3335 return ParMarkBitMap::full;
3336 }
3337
3338 void MoveAndUpdateClosure::copy_partial_obj()
3339 {
3340 size_t words = words_remaining();
3341
3342 HeapWord* const range_end = MIN2(source() + words, bitmap()->region_end());
|
2159 // resizing the young generation complicates the code because the
2160 // resizing of the old generation may have moved the boundary
2161 // between the young generation and the old generation. Let the
2162 // young generation resizing happen at the minor collections.
2163 }
2164 if (PrintAdaptiveSizePolicy) {
2165 gclog_or_tty->print_cr("AdaptiveSizeStop: collection: %d ",
2166 heap->total_collections());
2167 }
2168 }
2169
2170 if (UsePerfData) {
2171 PSGCAdaptivePolicyCounters* const counters = heap->gc_policy_counters();
2172 counters->update_counters();
2173 counters->update_old_capacity(old_gen->capacity_in_bytes());
2174 counters->update_young_capacity(young_gen->capacity_in_bytes());
2175 }
2176
2177 heap->resize_all_tlabs();
2178
2179 // Resize the metaspace capacity after a collection
2180 MetaspaceGC::compute_new_size();
2181
2182 if (TraceGen1Time) accumulated_time()->stop();
2183
2184 if (PrintGC) {
2185 if (PrintGCDetails) {
2186 // No GC timestamp here. This is after GC so it would be confusing.
2187 young_gen->print_used_change(pre_gc_values.young_gen_used());
2188 old_gen->print_used_change(pre_gc_values.old_gen_used());
2189 heap->print_heap_change(pre_gc_values.heap_used());
2190 MetaspaceAux::print_metaspace_change(pre_gc_values.metadata_used());
2191 } else {
2192 heap->print_heap_change(pre_gc_values.heap_used());
2193 }
2194 }
2195
2196 // Track memory usage and detect low memory
2197 MemoryService::track_memory_usage();
2198 heap->update_counters();
2199 gc_task_manager()->release_idle_workers();
3289 // The destination of the first live object that starts in the region is one
3290 // past the end of the partial object entering the region (if any).
3291 HeapWord* const dest_addr = sd.partial_obj_end(dp_region);
3292 HeapWord* const new_top = _space_info[space_id].new_top();
3293 assert(new_top >= dest_addr, "bad new_top value");
3294 const size_t words = pointer_delta(new_top, dest_addr);
3295
3296 if (words > 0) {
3297 ObjectStartArray* start_array = _space_info[space_id].start_array();
3298 MoveAndUpdateClosure closure(bitmap, cm, start_array, dest_addr, words);
3299
3300 ParMarkBitMap::IterationStatus status;
3301 status = bitmap->iterate(&closure, dest_addr, end_addr);
3302 assert(status == ParMarkBitMap::full, "iteration not complete");
3303 assert(bitmap->find_obj_beg(closure.source(), end_addr) == end_addr,
3304 "live objects skipped because closure is full");
3305 }
3306 }
3307
3308 jlong PSParallelCompact::millis_since_last_gc() {
3309 // We need a monotonically non-decreasing time in ms; os::javaTimeMillis()
3310 // does not guarantee monotonicity, so derive millis from os::javaTimeNanos().
3311 jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
3312 jlong ret_val = now - _time_of_last_gc;
3313 // XXX See note in genCollectedHeap::millis_since_last_gc().
3314 if (ret_val < 0) {
3315 NOT_PRODUCT(warning("time warp: "INT64_FORMAT, ret_val);)
3316 return 0;
3317 }
3318 return ret_val;
3319 }
3320
3321 void PSParallelCompact::reset_millis_since_last_gc() {
3322 // We need a monotonically non-decreasing time in ms; os::javaTimeMillis()
3323 // does not guarantee monotonicity, so record the time via os::javaTimeNanos().
3324 _time_of_last_gc = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
3325 }
3326
3327 ParMarkBitMap::IterationStatus MoveAndUpdateClosure::copy_until_full()
3328 {
3329 if (source() != destination()) {
3330 DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
3331 Copy::aligned_conjoint_words(source(), destination(), words_remaining()); // bulk-move every word still pending
3332 }
3333 update_state(words_remaining()); // consume the full remainder; closure becomes full
3334 assert(is_full(), "sanity");
3335 return ParMarkBitMap::full;
3336 }
3337
3338 void MoveAndUpdateClosure::copy_partial_obj()
3339 {
3340 size_t words = words_remaining();
3341
3342 HeapWord* const range_end = MIN2(source() + words, bitmap()->region_end());
|