// NOTE(review): numbered listing ("before" copy) of a GC-epilogue fragment.
// The "DefNewEpilogue" log tag suggests HotSpot's DefNewGeneration::gc_epilogue;
// the enclosing function header is outside this view — confirm against the repo.
// Debug-only sanity check: a failed incremental (young-gen) collection should be
// followed by a full collection that clears the failure flag, so we must never
// observe incremental_collection_failed() == true on two consecutive epilogues.
936 #ifdef ASSERT
937 // It is possible that incremental_collection_failed() == true
938 // here, because an attempted scavenge did not succeed. The policy
939 // is normally expected to cause a full collection which should
940 // clear that condition, so we should not be here twice in a row
941 // with incremental_collection_failed() == true without having done
942 // a full collection in between.
// First sighting of a failure: remember it in the debug-only flag
// seen_incremental_collection_failed (declared outside this view).
943 if (!seen_incremental_collection_failed &&
944 gch->incremental_collection_failed()) {
945 if (Verbose && PrintGCDetails) {
946 gclog_or_tty->print("DefNewEpilogue: cause(%s), not full, not_seen_failed, failed, set_seen_failed",
947 GCCause::to_string(gch->gc_cause()));
948 }
949 seen_incremental_collection_failed = true;
// We saw a failure last time: by now a full collection should have cleared
// the condition, except for the tolerated causes asserted below.
950 } else if (seen_incremental_collection_failed) {
951 if (Verbose && PrintGCDetails) {
952 gclog_or_tty->print("DefNewEpilogue: cause(%s), not full, seen_failed, will_clear_seen_failed",
953 GCCause::to_string(gch->gc_cause()));
954 }
// Tolerated: ScavengeALot stress collections, or an explicit System.gc()
// turned into a concurrent CMS cycle (which does not clear the flag via a
// stop-the-world full collection). Otherwise the flag must be clear.
// NOTE(review): this "before" copy exempts only _java_lang_system_gc; the
// revised copy later in this listing widens the exemption to any
// user-requested cause via GCCause::is_user_requested_gc().
955 assert(gch->gc_cause() == GCCause::_scavenge_alot ||
956 (gch->gc_cause() == GCCause::_java_lang_system_gc && UseConcMarkSweepGC && ExplicitGCInvokesConcurrent) ||
957 !gch->incremental_collection_failed(),
958 "Twice in a row");
959 seen_incremental_collection_failed = false;
960 }
961 #endif // ASSERT
// Closes a scope opened before this fragment (header not visible here).
962 }
963
// When heap mangling is enabled, verify all young-gen spaces finished
// mangling their unused areas.
964 if (ZapUnusedHeapArea) {
965 eden()->check_mangled_unused_area_complete();
966 from()->check_mangled_unused_area_complete();
967 to()->check_mangled_unused_area_complete();
968 }
969
// Synchronously release cached arena chunks unless that work is deferred
// to a background task.
970 if (!CleanChunkPoolAsync) {
971 Chunk::clean_chunk_pool();
972 }
973
974 // update the generation and space performance counters
975 update_counters();
976 gch->collector_policy()->counters()->update_counters();
|
// NOTE(review): numbered listing ("after" copy) of the same GC-epilogue fragment.
// The "DefNewEpilogue" log tag suggests HotSpot's DefNewGeneration::gc_epilogue;
// the enclosing function header is outside this view — confirm against the repo.
// Debug-only sanity check: a failed incremental (young-gen) collection should be
// followed by a full collection that clears the failure flag, so we must never
// observe incremental_collection_failed() == true on two consecutive epilogues.
936 #ifdef ASSERT
937 // It is possible that incremental_collection_failed() == true
938 // here, because an attempted scavenge did not succeed. The policy
939 // is normally expected to cause a full collection which should
940 // clear that condition, so we should not be here twice in a row
941 // with incremental_collection_failed() == true without having done
942 // a full collection in between.
// First sighting of a failure: remember it in the debug-only flag
// seen_incremental_collection_failed (declared outside this view).
943 if (!seen_incremental_collection_failed &&
944 gch->incremental_collection_failed()) {
945 if (Verbose && PrintGCDetails) {
946 gclog_or_tty->print("DefNewEpilogue: cause(%s), not full, not_seen_failed, failed, set_seen_failed",
947 GCCause::to_string(gch->gc_cause()));
948 }
949 seen_incremental_collection_failed = true;
// We saw a failure last time: by now a full collection should have cleared
// the condition, except for the tolerated causes asserted below.
950 } else if (seen_incremental_collection_failed) {
951 if (Verbose && PrintGCDetails) {
952 gclog_or_tty->print("DefNewEpilogue: cause(%s), not full, seen_failed, will_clear_seen_failed",
953 GCCause::to_string(gch->gc_cause()));
954 }
// Tolerated: ScavengeALot stress collections, or a user-requested GC
// turned into a concurrent CMS cycle (which does not clear the flag via a
// stop-the-world full collection). Otherwise the flag must be clear.
// NOTE(review): this copy replaces the exact _java_lang_system_gc test of
// the earlier copy with GCCause::is_user_requested_gc(), which presumably
// also covers causes such as a JVMTI-forced GC — verify against gcCause.hpp.
955 assert(gch->gc_cause() == GCCause::_scavenge_alot ||
956 (GCCause::is_user_requested_gc(gch->gc_cause()) && UseConcMarkSweepGC && ExplicitGCInvokesConcurrent) ||
957 !gch->incremental_collection_failed(),
958 "Twice in a row");
959 seen_incremental_collection_failed = false;
960 }
961 #endif // ASSERT
// Closes a scope opened before this fragment (header not visible here).
962 }
963
// When heap mangling is enabled, verify all young-gen spaces finished
// mangling their unused areas.
964 if (ZapUnusedHeapArea) {
965 eden()->check_mangled_unused_area_complete();
966 from()->check_mangled_unused_area_complete();
967 to()->check_mangled_unused_area_complete();
968 }
969
// Synchronously release cached arena chunks unless that work is deferred
// to a background task.
970 if (!CleanChunkPoolAsync) {
971 Chunk::clean_chunk_pool();
972 }
973
974 // update the generation and space performance counters
975 update_counters();
976 gch->collector_policy()->counters()->update_counters();
|