
src/hotspot/share/gc/g1/g1Policy.cpp

rev 52448 : JDK-8212657: Implementation of JDK-8204089 Promptly Return Unused Committed Memory from G1
Summary: Issue optional, default enabled, concurrent cycles when the VM is idle to reclaim unused internal and Java heap memory.
Reviewed-by:
Contributed-by: Rodrigo Bruno <rbruno@gsd.inesc-id.pt>, Ruslan Synytsky <rs@jelastic.com>, Thomas Schatzl <thomas.schatzl@oracle.com>
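The change adds an idle-triggered concurrent cycle; the hunks below cover only the policy-side bookkeeping in g1Policy.cpp. As rough orientation, the trigger amounts to: if no collection has completed for a configured interval and the machine does not look busy, request a concurrent cycle so unused committed memory can be uncommitted. The sketch below is NOT code from this patch and all names in it are invented for illustration.

// Standalone sketch of the idle trigger described in the summary above.
// PeriodicGCConfig, should_trigger_periodic_gc and the fields are hypothetical.
#include <cstdint>

struct PeriodicGCConfig {
  uint64_t interval_ms;      // 0 disables the periodic trigger entirely
  double   load_threshold;   // skip the collection if system load is above this
};

static bool should_trigger_periodic_gc(const PeriodicGCConfig& cfg,
                                       uint64_t now_ms,
                                       uint64_t last_gc_end_ms,
                                       double   current_load_avg) {
  if (cfg.interval_ms == 0) {
    return false;                                    // feature turned off
  }
  if (now_ms - last_gc_end_ms < cfg.interval_ms) {
    return false;                                    // a collection ran recently
  }
  // Only collect when the host looks idle; a loaded system keeps its memory.
  return cfg.load_threshold == 0.0 || current_load_avg < cfg.load_threshold;
}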


 697 
 698     _analytics->report_constant_other_time_ms(constant_other_time_ms(pause_time_ms));
 699 
 700     // Do not update RS lengths and the number of pending cards with information from mixed gc:
 701     // these are wildly different from those during young only gc and mess up young gen sizing right
 702     // after the mixed gc phase.
 703     // During mixed gc we do not use them for young gen sizing.
 704     if (this_pause_was_young_only) {
 705       _analytics->report_pending_cards((double) _pending_cards);
 706       _analytics->report_rs_lengths((double) _max_rs_lengths);
 707     }
 708   }
 709 
 710   assert(!(this_pause_included_initial_mark && collector_state()->mark_or_rebuild_in_progress()),
 711          "If the last pause has been an initial mark, we should not have been in the marking window");
 712   if (this_pause_included_initial_mark) {
 713     collector_state()->set_mark_or_rebuild_in_progress(true);
 714   }
 715 
 716   _free_regions_at_end_of_collection = _g1h->num_free_regions();
 717   // IHOP control wants to know the expected young gen length if it were not
 718   // restrained by the heap reserve. Using the actual length would make the
 719   // prediction too small and limit the young gen every time we get to the
 720   // predicted target occupancy.
 721   size_t last_unrestrained_young_length = update_young_list_max_and_target_length();
 722   update_rs_lengths_prediction();
 723 
 724   update_ihop_prediction(app_time_ms / 1000.0,
 725                          _bytes_allocated_in_old_since_last_gc,
 726                          last_unrestrained_young_length * HeapRegion::GrainBytes,
 727                          this_pause_was_young_only);
 728   _bytes_allocated_in_old_since_last_gc = 0;
 729 
 730   _ihop_control->send_trace_event(_g1h->gc_tracer_stw());
 731 
 732   // Note that _mmu_tracker->max_gc_time() returns the time in seconds.
 733   double update_rs_time_goal_ms = _mmu_tracker->max_gc_time() * MILLIUNITS * G1RSetUpdatingPauseTimePercent / 100.0;
 734 
 735   if (update_rs_time_goal_ms < scan_hcc_time_ms) {
 736     log_debug(gc, ergo, refine)("Adjust concurrent refinement thresholds (scanning the HCC expected to take longer than Update RS time goal)."
 737                                 " Update RS time goal: %1.2fms Scan HCC time: %1.2fms",
 738                                 update_rs_time_goal_ms, scan_hcc_time_ms);
 739 
 740     update_rs_time_goal_ms = 0;
 741   } else {
 742     update_rs_time_goal_ms -= scan_hcc_time_ms;
 743   }
 744   _g1h->concurrent_refine()->adjust(average_time_ms(G1GCPhaseTimes::UpdateRS),
 745                                     phase_times()->sum_thread_work_items(G1GCPhaseTimes::UpdateRS),
 746                                     update_rs_time_goal_ms);
 747 
 748   cset_chooser()->verify();
 749 }
 750 
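For readers less familiar with adaptive IHOP: the comment at lines 717-720 and the update_ihop_prediction() call at lines 724-727 feed the controller the unrestrained young gen size so it can budget for a full-size young gen when picking the marking start threshold. The following is a deliberately simplified, self-contained illustration of that budgeting, not the actual G1AdaptiveIHOPControl arithmetic; every input value is invented for the example.

// Simplified model of why the *unrestrained* young length matters to IHOP:
// marking must start early enough that old-gen allocation during marking plus
// a full-size young gen still fit in the heap. Not the real HotSpot formula.
#include <cstdio>

int main() {
  double heap_bytes         = 4096.0 * 1024 * 1024;  // 4 GB heap (example value)
  double young_buffer_bytes = 512.0  * 1024 * 1024;  // last_unrestrained_young_length * GrainBytes
  double old_alloc_rate     = 50.0   * 1024 * 1024;  // bytes/s allocated in old gen (example)
  double marking_time_s     = 4.0;                   // predicted concurrent marking duration

  // Start marking roughly when old-gen occupancy reaches this threshold:
  double ihop_threshold = heap_bytes - young_buffer_bytes - old_alloc_rate * marking_time_s;
  printf("approx. adaptive IHOP threshold: %.0f MB\n", ihop_threshold / (1024 * 1024));
  return 0;
}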


1061   }
1062 }
1063 
1064 void G1Policy::record_pause(PauseKind kind, double start, double end) {
1065   // Manage the MMU tracker. For some reason it ignores Full GCs.
1066   if (kind != FullGC) {
1067     _mmu_tracker->add_pause(start, end);
1068   }
1069   // Manage the mutator time tracking from initial mark to first mixed gc.
1070   switch (kind) {
1071     case FullGC:
1072       abort_time_to_mixed_tracking();
1073       break;
1074     case Cleanup:
1075     case Remark:
1076     case YoungOnlyGC:
1077     case LastYoungGC:
1078       _initial_mark_to_mixed.add_pause(end - start);
1079       break;
1080     case InitialMarkGC:
1081       _initial_mark_to_mixed.record_initial_mark_end(end);
1082       break;
1083     case MixedGC:
1084       _initial_mark_to_mixed.record_mixed_gc_start(start);
1085       break;
1086     default:
1087       ShouldNotReachHere();
1088   }
1089 }
1090 
1091 void G1Policy::abort_time_to_mixed_tracking() {
1092   _initial_mark_to_mixed.reset();
1093 }
1094 
1095 bool G1Policy::next_gc_should_be_mixed(const char* true_action_str,
1096                                        const char* false_action_str) const {
1097   if (cset_chooser()->is_empty()) {
1098     log_debug(gc, ergo)("%s (candidate old regions not available)", false_action_str);
1099     return false;
1100   }
1101 
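Background for the _mmu_tracker->add_pause() call at line 1067: the tracker records every non-Full pause so G1 can keep the minimum mutator utilization (the share of wall time left to the application over a sliding window) above its pause-time goal. A toy, self-contained version of that computation follows; the real G1MMUTracker differs in detail and the numbers are invented.

// Toy minimum-mutator-utilization calculation over a sliding window.
// Not the HotSpot implementation; it only illustrates what add_pause() feeds.
#include <vector>
#include <cstdio>

struct Pause { double start_s, end_s; };

static double mmu(const std::vector<Pause>& pauses, double window_end_s, double window_s) {
  double window_start_s = window_end_s - window_s;
  double gc_s = 0.0;
  for (const Pause& p : pauses) {
    double s = p.start_s > window_start_s ? p.start_s : window_start_s;
    double e = p.end_s   < window_end_s   ? p.end_s   : window_end_s;
    if (e > s) gc_s += e - s;                 // overlap of this pause with the window
  }
  return (window_s - gc_s) / window_s;        // fraction of the window the mutator kept
}

int main() {
  std::vector<Pause> pauses = { {1.00, 1.05}, {1.60, 1.70} };       // 50 ms and 100 ms pauses
  printf("MMU over the last second: %.2f\n", mmu(pauses, 2.0, 1.0)); // prints 0.85
  return 0;
}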

 697 
 698     _analytics->report_constant_other_time_ms(constant_other_time_ms(pause_time_ms));
 699 
 700     // Do not update RS lengths and the number of pending cards with information from mixed gc:
 701     // these are wildly different from those during young only gc and mess up young gen sizing right
 702     // after the mixed gc phase.
 703     // During mixed gc we do not use them for young gen sizing.
 704     if (this_pause_was_young_only) {
 705       _analytics->report_pending_cards((double) _pending_cards);
 706       _analytics->report_rs_lengths((double) _max_rs_lengths);
 707     }
 708   }
 709 
 710   assert(!(this_pause_included_initial_mark && collector_state()->mark_or_rebuild_in_progress()),
 711          "If the last pause has been an initial mark, we should not have been in the marking window");
 712   if (this_pause_included_initial_mark) {
 713     collector_state()->set_mark_or_rebuild_in_progress(true);
 714   }
 715 
 716   _free_regions_at_end_of_collection = _g1h->num_free_regions();
 717 
 718   update_rs_lengths_prediction();
 719 
 720   // Do not update dynamic IHOP due to G1 periodic collection as it is highly likely
 721   // that in this case we are not running in a "normal" operating mode.
 722   if (_g1h->gc_cause() != GCCause::_g1_periodic_collection) {
 723     // IHOP control wants to know the expected young gen length if it were not
 724     // restrained by the heap reserve. Using the actual length would make the
 725     // prediction too small and limit the young gen every time we get to the
 726     // predicted target occupancy.
 727     size_t last_unrestrained_young_length = update_young_list_max_and_target_length();
 728 
 729     update_ihop_prediction(app_time_ms / 1000.0,
 730                            _bytes_allocated_in_old_since_last_gc,
 731                            last_unrestrained_young_length * HeapRegion::GrainBytes,
 732                            this_pause_was_young_only);
 733     _bytes_allocated_in_old_since_last_gc = 0;
 734 
 735     _ihop_control->send_trace_event(_g1h->gc_tracer_stw());
 736   } else {
 737     // Any garbage collection triggered as periodic collection resets the time-to-mixed
 738     // measurement. Periodic collection typically means that the application is "inactive", i.e.
 739     // the marking threads may have received an uncharacteristic amount of cpu time
 740     // for completing the marking, i.e. are faster than expected.
 741     // This skews the predicted marking length towards smaller values which might cause
 742     // the mark start being too late.
 743     _initial_mark_to_mixed.reset();
 744   }
 745 
 746   // Note that _mmu_tracker->max_gc_time() returns the time in seconds.
 747   double update_rs_time_goal_ms = _mmu_tracker->max_gc_time() * MILLIUNITS * G1RSetUpdatingPauseTimePercent / 100.0;
 748 
 749   if (update_rs_time_goal_ms < scan_hcc_time_ms) {
 750     log_debug(gc, ergo, refine)("Adjust concurrent refinement thresholds (scanning the HCC expected to take longer than Update RS time goal)."
 751                                 " Update RS time goal: %1.2fms Scan HCC time: %1.2fms",
 752                                 update_rs_time_goal_ms, scan_hcc_time_ms);
 753 
 754     update_rs_time_goal_ms = 0;
 755   } else {
 756     update_rs_time_goal_ms -= scan_hcc_time_ms;
 757   }
 758   _g1h->concurrent_refine()->adjust(average_time_ms(G1GCPhaseTimes::UpdateRS),
 759                                     phase_times()->sum_thread_work_items(G1GCPhaseTimes::UpdateRS),
 760                                     update_rs_time_goal_ms);
 761 
 762   cset_chooser()->verify();
 763 }
 764 
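To make the refinement-goal computation at lines 746-760 concrete: the goal is the pause-time budget (max_gc_time() in seconds) scaled to milliseconds and reduced to the G1RSetUpdatingPauseTimePercent share, with the measured HCC scan time charged against it. The numbers below are examples only (a 200 ms pause goal, a 10 percent share, a 3 ms HCC scan); they are not taken from this patch.

// Worked example of the Update RS time goal from lines 747-757, with made-up inputs.
#include <cstdio>

int main() {
  double max_gc_time_s = 0.200;   // _mmu_tracker->max_gc_time(), in seconds (example)
  double milliunits    = 1000.0;  // MILLIUNITS
  double rs_percent    = 10.0;    // G1RSetUpdatingPauseTimePercent (example)
  double scan_hcc_ms   = 3.0;     // measured Scan HCC time (example)

  double goal_ms = max_gc_time_s * milliunits * rs_percent / 100.0;   // 20.0 ms
  // Same shape as the patch: the HCC scan eats into the goal; if it already
  // exceeds the goal, concurrent refinement gets a zero budget.
  goal_ms = (goal_ms < scan_hcc_ms) ? 0.0 : goal_ms - scan_hcc_ms;    // 17.0 ms
  printf("Update RS time goal: %1.2fms\n", goal_ms);
  return 0;
}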


1075   }
1076 }
1077 
1078 void G1Policy::record_pause(PauseKind kind, double start, double end) {
1079   // Manage the MMU tracker. For some reason it ignores Full GCs.
1080   if (kind != FullGC) {
1081     _mmu_tracker->add_pause(start, end);
1082   }
1083   // Manage the mutator time tracking from initial mark to first mixed gc.
1084   switch (kind) {
1085     case FullGC:
1086       abort_time_to_mixed_tracking();
1087       break;
1088     case Cleanup:
1089     case Remark:
1090     case YoungOnlyGC:
1091     case LastYoungGC:
1092       _initial_mark_to_mixed.add_pause(end - start);
1093       break;
1094     case InitialMarkGC:
1095       if (_g1h->gc_cause() != GCCause::_g1_periodic_collection) {
1096         _initial_mark_to_mixed.record_initial_mark_end(end);
1097       }
1098       break;
1099     case MixedGC:
1100       _initial_mark_to_mixed.record_mixed_gc_start(start);
1101       break;
1102     default:
1103       ShouldNotReachHere();
1104   }
1105 }
1106 
1107 void G1Policy::abort_time_to_mixed_tracking() {
1108   _initial_mark_to_mixed.reset();
1109 }
1110 
1111 bool G1Policy::next_gc_should_be_mixed(const char* true_action_str,
1112                                        const char* false_action_str) const {
1113   if (cset_chooser()->is_empty()) {
1114     log_debug(gc, ergo)("%s (candidate old regions not available)", false_action_str);
1115     return false;
1116   }
1117 
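The new guard at lines 1095-1097 keeps a periodic-collection initial mark from starting an initial-mark-to-first-mixed measurement, for the same reason the reset at lines 737-743 gives: an idle VM lets marking finish unrealistically fast, which would skew the prediction low. A toy standalone model of that gating follows; the tracker and names are simplified stand-ins, not HotSpot code.

// Toy model of the gating added at lines 1094-1098: an initial mark caused by
// the periodic collector must not start a new initial-mark-to-mixed sample.
#include <cstdio>

enum class Cause { Normal, PeriodicCollection };

struct TimeToMixedTracker {
  double mark_end_s = -1.0;                       // -1 means no measurement in flight
  void record_initial_mark_end(double t) { mark_end_s = t; }
  void record_mixed_gc_start(double t) {
    if (mark_end_s >= 0.0) {
      printf("initial mark -> first mixed gc: %.1f s\n", t - mark_end_s);
      mark_end_s = -1.0;
    }
  }
  void reset() { mark_end_s = -1.0; }
};

int main() {
  TimeToMixedTracker tracker;
  Cause cause = Cause::PeriodicCollection;        // pretend this pause was periodic
  if (cause != Cause::PeriodicCollection) {       // mirrors the new gc_cause() check
    tracker.record_initial_mark_end(10.0);
  }
  tracker.record_mixed_gc_start(25.0);            // prints nothing: no sample was started
  return 0;
}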