src/hotspot/share/gc/cms/concurrentMarkSweepGeneration.cpp

Old version (before the change):
2279 
2280 
2281   // Allocate marking bit map if not already allocated
2282   if (!init) { // first time
2283     if (!verification_mark_bm()->allocate(_span)) {
2284       return false;
2285     }
2286     init = true;
2287   }
2288 
2289   assert(verification_mark_stack()->isEmpty(), "Should be empty");
2290 
2291   // Turn off refs discovery -- so we will be tracing through refs.
2292   // This is as intended, because by this time
2293   // GC must already have cleared any refs that need to be cleared,
2294   // and traced those that need to be marked; moreover,
2295   // the marking done here is not going to interfere in any
2296   // way with the marking information used by GC.
2297   NoRefDiscovery no_discovery(ref_processor());
2298 
2299 #if defined(COMPILER2) || INCLUDE_JVMCI
2300   DerivedPointerTableDeactivate dpt_deact;
2301 #endif
2302 
2303   // Clear any marks from a previous round
2304   verification_mark_bm()->clear_all();
2305   assert(verification_mark_stack()->isEmpty(), "markStack should be empty");
2306   verify_work_stacks_empty();
2307 
2308   CMSHeap* heap = CMSHeap::heap();
2309   heap->ensure_parsability(false);  // fill TLABs, but no need to retire them
2310   // Update the saved marks which may affect the root scans.
2311   heap->save_marks();
2312 
2313   if (CMSRemarkVerifyVariant == 1) {
2314     // In this first variant of verification, we complete
2315     // all marking, then check if the new marks-vector is
2316     // a subset of the CMS marks-vector.
2317     verify_after_remark_work_1();
2318   } else {
2319     guarantee(CMSRemarkVerifyVariant == 2, "Range checking for CMSRemarkVerifyVariant should guarantee 1 or 2");
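
The variant-1 verification described above completes a fresh marking pass and then checks that the new marks-vector is a subset of the CMS marks-vector. As a rough, hypothetical illustration of what that subset property means (illustrative bitmap layout and names, not HotSpot's CMSBitMap API):

// Sketch only: two word-aligned mark bitmaps covering the same span.
// Variant 1 passes when every bit set by the verification marking is also
// set in the CMS bitmap, i.e. (verification & ~cms) is zero in every word.
#include <cstddef>
#include <cstdint>

bool verification_is_subset_of_cms(const uint64_t* verification_bm,
                                   const uint64_t* cms_bm,
                                   size_t num_words) {
  for (size_t i = 0; i < num_words; i++) {
    if ((verification_bm[i] & ~cms_bm[i]) != 0) {
      return false;  // an object the verification marking reached but CMS missed
    }
  }
  return true;
}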


2852   verify_overflow_empty();
2853 
2854   heap->ensure_parsability(false);  // fill TLABs, but no need to retire them
2855   // Update the saved marks which may affect the root scans.
2856   heap->save_marks();
2857 
2858   // weak reference processing has not started yet.
2859   ref_processor()->set_enqueuing_is_done(false);
2860 
2861   // Need to remember all newly created CLDs,
2862   // so that we can guarantee that the remark finds them.
2863   ClassLoaderDataGraph::remember_new_clds(true);
2864 
2865   // Whenever a CLD is found, it will be claimed before proceeding to mark
2866   // the klasses. The claimed marks need to be cleared before marking starts.
2867   ClassLoaderDataGraph::clear_claimed_marks();
2868 
2869   print_eden_and_survivor_chunk_arrays();
2870 
2871   {
2872 #if defined(COMPILER2) || INCLUDE_JVMCI
2873     DerivedPointerTableDeactivate dpt_deact;
2874 #endif
2875     if (CMSParallelInitialMarkEnabled) {
2876       // The parallel version.
2877       WorkGang* workers = heap->workers();
2878       assert(workers != NULL, "Need parallel worker threads.");
2879       uint n_workers = workers->active_workers();
2880 
2881       StrongRootsScope srs(n_workers);
2882 
2883       CMSParInitialMarkTask tsk(this, &srs, n_workers);
2884       initialize_sequential_subtasks_for_young_gen_rescan(n_workers);
2885       // If the total workers is greater than 1, then multiple workers
2886       // may be used at some time and the initialization has been set
2887       // such that the single threaded path cannot be used.
2888       if (workers->total_workers() > 1) {
2889         workers->run_task(&tsk);
2890       } else {
2891         tsk.work(0);
2892       }


4154 
4155   // We might assume that we need not fill TLAB's when
4156   // CMSScavengeBeforeRemark is set, because we may have just done
4157   // a scavenge which would have filled all TLAB's -- and besides
4158   // Eden would be empty. This however may not always be the case --
4159   // for instance although we asked for a scavenge, it may not have
4160   // happened because of a JNI critical section. We probably need
4161   // a policy for deciding whether we can in that case wait until
4162   // the critical section releases and then do the remark following
4163   // the scavenge, and skip it here. In the absence of that policy,
4164   // or of an indication of whether the scavenge did indeed occur,
4165   // we cannot rely on TLAB's having been filled and must do
4166   // so here just in case a scavenge did not happen.
4167   heap->ensure_parsability(false);  // fill TLAB's, but no need to retire them
4168   // Update the saved marks which may affect the root scans.
4169   heap->save_marks();
4170 
4171   print_eden_and_survivor_chunk_arrays();
4172 
4173   {
4174 #if defined(COMPILER2) || INCLUDE_JVMCI
4175     DerivedPointerTableDeactivate dpt_deact;
4176 #endif
4177 
4178     // Note on the role of the mod union table:
4179     // Since the marker in "markFromRoots" marks concurrently with
4180     // mutators, it is possible for some reachable objects not to have been
4181     // scanned. For instance, an only reference to an object A was
4182     // placed in object B after the marker scanned B. Unless B is rescanned,
4183     // A would be collected. Such updates to references in marked objects
4184     // are detected via the mod union table which is the set of all cards
4185     // dirtied since the first checkpoint in this GC cycle and prior to
4186     // the most recent young generation GC, minus those cleaned up by the
4187     // concurrent precleaning.
4188     if (CMSParallelRemarkEnabled) {
4189       GCTraceTime(Debug, gc, phases) t("Rescan (parallel)", _gc_timer_cm);
4190       do_remark_parallel();
4191     } else {
4192       GCTraceTime(Debug, gc, phases) t("Rescan (non-parallel)", _gc_timer_cm);
4193       do_remark_non_parallel();
4194     }




New version (after the change: #if defined(COMPILER2) || INCLUDE_JVMCI becomes #if COMPILER2_OR_JVMCI):

2279 
2280 
2281   // Allocate marking bit map if not already allocated
2282   if (!init) { // first time
2283     if (!verification_mark_bm()->allocate(_span)) {
2284       return false;
2285     }
2286     init = true;
2287   }
2288 
2289   assert(verification_mark_stack()->isEmpty(), "Should be empty");
2290 
2291   // Turn off refs discovery -- so we will be tracing through refs.
2292   // This is as intended, because by this time
2293   // GC must already have cleared any refs that need to be cleared,
2294   // and traced those that need to be marked; moreover,
2295   // the marking done here is not going to interfere in any
2296   // way with the marking information used by GC.
2297   NoRefDiscovery no_discovery(ref_processor());
2298 
2299 #if COMPILER2_OR_JVMCI
2300   DerivedPointerTableDeactivate dpt_deact;
2301 #endif
2302 
2303   // Clear any marks from a previous round
2304   verification_mark_bm()->clear_all();
2305   assert(verification_mark_stack()->isEmpty(), "markStack should be empty");
2306   verify_work_stacks_empty();
2307 
2308   CMSHeap* heap = CMSHeap::heap();
2309   heap->ensure_parsability(false);  // fill TLABs, but no need to retire them
2310   // Update the saved marks which may affect the root scans.
2311   heap->save_marks();
2312 
2313   if (CMSRemarkVerifyVariant == 1) {
2314     // In this first variant of verification, we complete
2315     // all marking, then check if the new marks-vector is
2316     // a subset of the CMS marks-vector.
2317     verify_after_remark_work_1();
2318   } else {
2319     guarantee(CMSRemarkVerifyVariant == 2, "Range checking for CMSRemarkVerifyVariant should guarantee 1 or 2");
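
The only change in this hunk is the preprocessor condition guarding DerivedPointerTableDeactivate: the explicit defined(COMPILER2) || INCLUDE_JVMCI test becomes COMPILER2_OR_JVMCI. For the #if to keep working, that macro is presumably defined centrally (in utilities/macros.hpp) so that it always expands to 0 or 1. A sketch of the assumed definition, not part of this diff:

// Assumed shape of the shared definition: COMPILER2_OR_JVMCI expands to 0 or 1,
// so "#if COMPILER2_OR_JVMCI" is valid whether or not COMPILER2 is defined.
#if defined(COMPILER2) || INCLUDE_JVMCI
#define COMPILER2_OR_JVMCI 1
#else
#define COMPILER2_OR_JVMCI 0
#endif

Under that assumption the behavior of the guarded block is unchanged; only the spelling of the condition differs, which is what makes the three hunks below mechanical.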


2852   verify_overflow_empty();
2853 
2854   heap->ensure_parsability(false);  // fill TLABs, but no need to retire them
2855   // Update the saved marks which may affect the root scans.
2856   heap->save_marks();
2857 
2858   // weak reference processing has not started yet.
2859   ref_processor()->set_enqueuing_is_done(false);
2860 
2861   // Need to remember all newly created CLDs,
2862   // so that we can guarantee that the remark finds them.
2863   ClassLoaderDataGraph::remember_new_clds(true);
2864 
2865   // Whenever a CLD is found, it will be claimed before proceeding to mark
2866   // the klasses. The claimed marks need to be cleared before marking starts.
2867   ClassLoaderDataGraph::clear_claimed_marks();
2868 
2869   print_eden_and_survivor_chunk_arrays();
2870 
2871   {
2872 #if COMPILER2_OR_JVMCI
2873     DerivedPointerTableDeactivate dpt_deact;
2874 #endif
2875     if (CMSParallelInitialMarkEnabled) {
2876       // The parallel version.
2877       WorkGang* workers = heap->workers();
2878       assert(workers != NULL, "Need parallel worker threads.");
2879       uint n_workers = workers->active_workers();
2880 
2881       StrongRootsScope srs(n_workers);
2882 
2883       CMSParInitialMarkTask tsk(this, &srs, n_workers);
2884       initialize_sequential_subtasks_for_young_gen_rescan(n_workers);
2885       // If the total workers is greater than 1, then multiple workers
2886       // may be used at some time and the initialization has been set
2887       // such that the single threaded path cannot be used.
2888       if (workers->total_workers() > 1) {
2889         workers->run_task(&tsk);
2890       } else {
2891         tsk.work(0);
2892       }


4154 
4155   // We might assume that we need not fill TLAB's when
4156   // CMSScavengeBeforeRemark is set, because we may have just done
4157   // a scavenge which would have filled all TLAB's -- and besides
4158   // Eden would be empty. This however may not always be the case --
4159   // for instance although we asked for a scavenge, it may not have
4160   // happened because of a JNI critical section. We probably need
4161   // a policy for deciding whether we can in that case wait until
4162   // the critical section releases and then do the remark following
4163   // the scavenge, and skip it here. In the absence of that policy,
4164   // or of an indication of whether the scavenge did indeed occur,
4165   // we cannot rely on TLAB's having been filled and must do
4166   // so here just in case a scavenge did not happen.
4167   heap->ensure_parsability(false);  // fill TLAB's, but no need to retire them
4168   // Update the saved marks which may affect the root scans.
4169   heap->save_marks();
4170 
4171   print_eden_and_survivor_chunk_arrays();
4172 
4173   {
4174 #if COMPILER2_OR_JVMCI
4175     DerivedPointerTableDeactivate dpt_deact;
4176 #endif
4177 
4178     // Note on the role of the mod union table:
4179     // Since the marker in "markFromRoots" marks concurrently with
4180     // mutators, it is possible for some reachable objects not to have been
4181     // scanned. For instance, an only reference to an object A was
4182     // placed in object B after the marker scanned B. Unless B is rescanned,
4183     // A would be collected. Such updates to references in marked objects
4184     // are detected via the mod union table which is the set of all cards
4185     // dirtied since the first checkpoint in this GC cycle and prior to
4186     // the most recent young generation GC, minus those cleaned up by the
4187     // concurrent precleaning.
4188     if (CMSParallelRemarkEnabled) {
4189       GCTraceTime(Debug, gc, phases) t("Rescan (parallel)", _gc_timer_cm);
4190       do_remark_parallel();
4191     } else {
4192       GCTraceTime(Debug, gc, phases) t("Rescan (non-parallel)", _gc_timer_cm);
4193       do_remark_non_parallel();
4194     }

