    verify_type = G1HeapVerifier::G1VerifyConcurrentStart;
  } else if (collector_state()->in_young_only_phase()) {
    if (collector_state()->in_young_gc_before_mixed()) {
      gc_string.append("(Prepare Mixed)");
    } else {
      gc_string.append("(Normal)");
    }
    verify_type = G1HeapVerifier::G1VerifyYoungNormal;
  } else {
    gc_string.append("(Mixed)");
    verify_type = G1HeapVerifier::G1VerifyMixed;
  }
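  // Scoped timer that logs the pause under the "gc" tag at info level; the
  // final 'true' argument additionally requests heap usage logging.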
  GCTraceTime(Info, gc) tm(gc_string, NULL, gc_cause(), true);

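  // Decide how many of the gang's workers to use for this pause, adapting to
  // the number of non-daemon Java threads, then activate that many.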
  uint active_workers = AdaptiveSizePolicy::calc_active_workers(workers()->total_workers(),
                                                                workers()->active_workers(),
                                                                Threads::number_of_non_daemon_threads());
  active_workers = workers()->update_active_workers(active_workers);
  log_info(gc, task)("Using %u workers of %u for evacuation", active_workers, workers()->total_workers());

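  // Scope that updates GC monitoring state (memory pools and counters) around
  // this pause. A young pause is never a full GC, and only mixed collections
  // affect all memory pools.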
  G1MonitoringScope ms(g1mm(),
                       false /* full_gc */,
                       collector_state()->yc_type() == Mixed /* all_memory_pools_affected */);

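  // Capture the heap state before the pause so the before/after transition
  // can be reported afterwards.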
  G1HeapTransition heap_transition(this);
  size_t heap_used_bytes_before_gc = used();

  // Don't dynamically change the number of GC threads this early. A value of
  // 0 is used to indicate serial work. When parallel work is done,
  // it will be set.

  { // Call to jvmpi::post_class_unload_events must occur outside of active GC
    IsGCActiveMark x;

    gc_prologue(false);

    if (VerifyRememberedSets) {
      log_info(gc, verify)("[Verifying RemSets before GC]");
      VerifyRegionRemSetClosure v_cl;
      heap_region_iterate(&v_cl);
    }

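    // Pre-GC heap verification, using the checks selected for this pause
    // type above (concurrent start, young normal, or mixed).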
    _verifier->verify_before_gc(verify_type);

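  // Variant of the G1MonitoringScope setup above (the surrounding code is
  // unchanged): a pause triggered by System.gc() is reported to monitoring as
  // a full GC. Constructing the scope as a block-local inside an if/else
  // would destroy it immediately, before the pause runs, so the condition is
  // hoisted and a single scope covers the whole pause.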
  const bool treat_as_full_gc = (gc_cause() == GCCause::_java_lang_system_gc);
  G1MonitoringScope ms(g1mm(),
                       treat_as_full_gc /* full_gc */,
                       collector_state()->yc_type() == Mixed /* all_memory_pools_affected */);