src/share/vm/gc/g1/g1CollectedHeap.cpp

Print this page

        

*** 3533,3542 **** --- 3533,3552 ---- // Finally flush all remembered set entries to re-check into the global DCQS. cl.flush_rem_set_entries(); } /* NEW in this change: a HeapRegionClosure that verifies the remembered set of each heap region via HeapRegion::verify_rem_set(). Archive and continues-humongous regions are skipped — presumably their remsets need not be independently accurate (continues-humongous regions are covered by their humongous-start region); TODO(review): confirm against HeapRegion::verify_rem_set(). doHeapRegion() returns false so heap_region_iterate() visits every region rather than stopping early. */ + class VerifyRegionRemSetClosure : public HeapRegionClosure { + public: + bool doHeapRegion(HeapRegion* hr) { + if (!hr->is_archive() && !hr->is_continues_humongous()) { + hr->verify_rem_set(); + } + return false; + } + }; + /* Unchanged context below: VerifyCSetClosure (debug-only, ASSERT builds) begins here; its body is truncated in this hunk. */ #ifdef ASSERT class VerifyCSetClosure: public HeapRegionClosure { public: bool doHeapRegion(HeapRegion* hr) { // Here we check that the CSet region's RSet is ready for parallel
*** 3722,3731 **** --- 3732,3747 ---- gc_prologue(false); increment_total_collections(false /* full gc */); increment_gc_time_stamp(); /* NEW in this change: when the VerifyRememberedSets flag is enabled, iterate over all heap regions and verify each region's remembered set just before the existing pre-GC verification (verify_before_gc). Logged under the gc+verify tags per unified GC logging. NOTE(review): v_cl is stack-allocated and single-threaded via heap_region_iterate — this runs inside the pause, so it can lengthen pause times when the flag is on; confirm that is acceptable for a verification-only option. */ + if (VerifyRememberedSets) { + log_info(gc, verify)("[Verifying RemSets before GC]"); + VerifyRegionRemSetClosure v_cl; + heap_region_iterate(&v_cl); + } + verify_before_gc(); check_bitmaps("GC Start"); #if defined(COMPILER2) || INCLUDE_JVMCI
*** 3926,3935 **** --- 3942,3957 ---- // during concurrent refinement. So we don't need the // is_gc_active() check to decided which top to use when // scanning cards (see CR 7039627). increment_gc_time_stamp(); /* NEW in this change: mirror of the pre-GC check — with VerifyRememberedSets enabled, verify every region's remembered set again after the collection, before verify_after_gc(). The paired before/after verification brackets the pause so a remset inconsistency can be attributed to the GC itself rather than to mutator activity — presumably the motivation for this change; TODO(review): confirm against the associated bug report. */ + if (VerifyRememberedSets) { + log_info(gc, verify)("[Verifying RemSets after GC]"); + VerifyRegionRemSetClosure v_cl; + heap_region_iterate(&v_cl); + } + verify_after_gc(); check_bitmaps("GC End"); assert(!ref_processor_stw()->discovery_enabled(), "Postcondition"); ref_processor_stw()->verify_no_references_recorded();