61 /* implementation specific */ 62 63 _tenured_generation_full, 64 _metadata_GC_threshold, 65 _metadata_GC_clear_soft_refs, 66 67 _cms_generation_full, 68 _cms_initial_mark, 69 _cms_final_remark, 70 _cms_concurrent_mark, 71 72 _old_generation_expanded_on_last_scavenge, 73 _old_generation_too_full_to_scavenge, 74 _adaptive_size_policy, 75 76 _g1_inc_collection_pause, 77 _g1_humongous_allocation, 78 79 _dcmd_gc_run, 80 81 _last_gc_cause 82 }; 83 84 inline static bool is_user_requested_gc(GCCause::Cause cause) { 85 return (cause == GCCause::_java_lang_system_gc || 86 cause == GCCause::_dcmd_gc_run); 87 } 88 89 inline static bool is_serviceability_requested_gc(GCCause::Cause 90 cause) { 91 return (cause == GCCause::_jvmti_force_gc || 92 cause == GCCause::_heap_inspection || 93 cause == GCCause::_heap_dump); 94 } 95 96 // Causes for collection of the tenured generation 97 inline static bool is_tenured_allocation_failure_gc(GCCause::Cause cause) { 98 assert(cause != GCCause::_old_generation_too_full_to_scavenge && 99 cause != GCCause::_old_generation_expanded_on_last_scavenge, 100 "This GCCause may be correct but is not expected yet: %s", | 61 /* implementation specific */ 62 63 _tenured_generation_full, 64 _metadata_GC_threshold, 65 _metadata_GC_clear_soft_refs, 66 67 _cms_generation_full, 68 _cms_initial_mark, 69 _cms_final_remark, 70 _cms_concurrent_mark, 71 72 _old_generation_expanded_on_last_scavenge, 73 _old_generation_too_full_to_scavenge, 74 _adaptive_size_policy, 75 76 _g1_inc_collection_pause, 77 _g1_humongous_allocation, 78 79 _dcmd_gc_run, 80 81 _z_timer, 82 _z_warmup, 83 _z_allocation_rate, 84 _z_allocation_stall, 85 _z_proactive, 86 87 _last_gc_cause 88 }; 89 90 inline static bool is_user_requested_gc(GCCause::Cause cause) { 91 return (cause == GCCause::_java_lang_system_gc || 92 cause == GCCause::_dcmd_gc_run); 93 } 94 95 inline static bool is_serviceability_requested_gc(GCCause::Cause 96 cause) { 97 return (cause == GCCause::_jvmti_force_gc || 98 cause == 
GCCause::_heap_inspection || 99 cause == GCCause::_heap_dump); 100 } 101 102 // Causes for collection of the tenured generation 103 inline static bool is_tenured_allocation_failure_gc(GCCause::Cause cause) { 104 assert(cause != GCCause::_old_generation_too_full_to_scavenge && 105 cause != GCCause::_old_generation_expanded_on_last_scavenge, 106 "This GCCause may be correct but is not expected yet: %s", |