
src/hotspot/share/gc/g1/g1Policy.cpp


*** 71,81 ****
    _young_gen_sizer(G1YoungGenSizer::create_gen_sizer()),
    _free_regions_at_end_of_collection(0),
    _rs_length(0),
    _rs_length_prediction(0),
    _pending_cards_at_gc_start(0),
!   _bytes_allocated_in_old_since_last_gc(0),
    _initial_mark_to_mixed(),
    _collection_set(NULL),
    _g1h(NULL),
    _phase_times(new G1GCPhaseTimes(gc_timer, ParallelGCThreads)),
    _mark_remark_start_sec(0),
--- 71,81 ----
    _young_gen_sizer(G1YoungGenSizer::create_gen_sizer()),
    _free_regions_at_end_of_collection(0),
    _rs_length(0),
    _rs_length_prediction(0),
    _pending_cards_at_gc_start(0),
!   _old_gen_alloc_tracker(),
    _initial_mark_to_mixed(),
    _collection_set(NULL),
    _g1h(NULL),
    _phase_times(new G1GCPhaseTimes(gc_timer, ParallelGCThreads)),
    _mark_remark_start_sec(0),
*** 458,468 ****
    _free_regions_at_end_of_collection = _g1h->num_free_regions();
    _survivor_surv_rate_group->reset();
    update_young_list_max_and_target_length();
    update_rs_length_prediction();
!   _bytes_allocated_in_old_since_last_gc = 0;

    record_pause(FullGC, _full_collection_start_sec, end_sec);
  }

  static void log_refinement_stats(const char* kind, const G1ConcurrentRefineStats& stats) {
--- 458,468 ----
    _free_regions_at_end_of_collection = _g1h->num_free_regions();
    _survivor_surv_rate_group->reset();
    update_young_list_max_and_target_length();
    update_rs_length_prediction();
!   _old_gen_alloc_tracker.reset_after_full_gc();

    record_pause(FullGC, _full_collection_start_sec, end_sec);
  }

  static void log_refinement_stats(const char* kind, const G1ConcurrentRefineStats& stats) {
*** 793,807 ****
    // restrained by the heap reserve. Using the actual length would make the
    // prediction too small and the limit the young gen every time we get to the
    // predicted target occupancy.
    size_t last_unrestrained_young_length = update_young_list_max_and_target_length();

!   update_ihop_prediction(app_time_ms / 1000.0,
!                          _bytes_allocated_in_old_since_last_gc,
                           last_unrestrained_young_length * HeapRegion::GrainBytes,
                           this_pause_was_young_only);
-   _bytes_allocated_in_old_since_last_gc = 0;

    _ihop_control->send_trace_event(_g1h->gc_tracer_stw());
  } else {
    // Any garbage collection triggered as periodic collection resets the time-to-mixed
    // measurement. Periodic collection typically means that the application is "inactive", i.e.
--- 793,807 ----
    // restrained by the heap reserve. Using the actual length would make the
    // prediction too small and the limit the young gen every time we get to the
    // predicted target occupancy.
    size_t last_unrestrained_young_length = update_young_list_max_and_target_length();

!   _old_gen_alloc_tracker.reset_after_young_gc(app_time_ms / 1000.0);
!   update_ihop_prediction(_old_gen_alloc_tracker.last_cycle_duration(),
!                          _old_gen_alloc_tracker.last_cycle_old_bytes(),
                           last_unrestrained_young_length * HeapRegion::GrainBytes,
                           this_pause_was_young_only);

    _ihop_control->send_trace_event(_g1h->gc_tracer_stw());
  } else {
    // Any garbage collection triggered as periodic collection resets the time-to-mixed
    // measurement. Periodic collection typically means that the application is "inactive", i.e.
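Note on the change: the three hunks above replace the raw _bytes_allocated_in_old_since_last_gc counter with an _old_gen_alloc_tracker object that owns the per-cycle old-generation allocation bookkeeping and hands the last cycle's duration and old-gen bytes to the IHOP prediction. The following is a minimal, standalone sketch of the interface implied by the calls visible in these hunks (reset_after_full_gc, reset_after_young_gc, last_cycle_duration, last_cycle_old_bytes); the member names and accounting details are assumptions for illustration only, not the actual G1OldGenAllocationTracker implementation.

  // Standalone sketch, not HotSpot code; inferred from the calls in the hunks above.
  #include <cstddef>

  class OldGenAllocationTrackerSketch {
    size_t _last_cycle_old_bytes;  // old-gen bytes allocated during the previous mutator cycle
    double _last_cycle_duration;   // length of that cycle in seconds
    size_t _cur_cycle_old_bytes;   // bytes accumulated since the last reset

  public:
    OldGenAllocationTrackerSketch()
      : _last_cycle_old_bytes(0), _last_cycle_duration(0.0), _cur_cycle_old_bytes(0) {}

    // Called from the allocation path whenever bytes are allocated directly in old gen
    // (hypothetical hook for this sketch).
    void add_allocated_bytes(size_t bytes) { _cur_cycle_old_bytes += bytes; }

    // After a full GC all history is discarded; the next cycle starts from zero.
    void reset_after_full_gc() {
      _last_cycle_old_bytes = 0;
      _last_cycle_duration  = 0.0;
      _cur_cycle_old_bytes  = 0;
    }

    // After a young GC the bytes gathered since the previous pause become the
    // "last cycle" sample, together with the elapsed mutator time passed in by the policy.
    void reset_after_young_gc(double allocation_duration_s) {
      _last_cycle_old_bytes = _cur_cycle_old_bytes;
      _last_cycle_duration  = allocation_duration_s;
      _cur_cycle_old_bytes  = 0;
    }

    size_t last_cycle_old_bytes() const { return _last_cycle_old_bytes; }
    double last_cycle_duration() const { return _last_cycle_duration; }
  };

Under these assumptions the policy would feed allocations into the tracker during the mutator phase and, at pause end, call reset_after_young_gc() before reading the two getters for update_ihop_prediction(), which is exactly the call order shown in the last hunk.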