
src/share/vm/gc/cms/concurrentMarkSweepGeneration.cpp (old version)





2399   }
2400   if (!silent) gclog_or_tty->print(" done] ");
2401   return true;
2402 }
2403 
2404 void CMSCollector::verify_after_remark_work_1() {
2405   ResourceMark rm;
2406   HandleMark  hm;
2407   GenCollectedHeap* gch = GenCollectedHeap::heap();
2408 
2409   // Get a clear set of claim bits for the roots processing to work with.
2410   ClassLoaderDataGraph::clear_claimed_marks();
2411 
2412   // Mark from roots one level into CMS
2413   MarkRefsIntoClosure notOlder(_span, verification_mark_bm());
2414   gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
2415 
2416   {
2417     StrongRootsScope srs(1);
2418 
2419     gch->gen_process_roots(&srs,
2420                            GenCollectedHeap::OldGen,
2421                            true,   // young gen as roots
2422                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
2423                            should_unload_classes(),
2424                            &notOlder,
2425                            NULL,
2426                            NULL);
2427   }
2428 
2429   // Now mark from the roots
2430   MarkFromRootsClosure markFromRootsClosure(this, _span,
2431     verification_mark_bm(), verification_mark_stack(),
2432     false /* don't yield */, true /* verifying */);
2433   assert(_restart_addr == NULL, "Expected pre-condition");
2434   verification_mark_bm()->iterate(&markFromRootsClosure);
2435   while (_restart_addr != NULL) {
2436     // Deal with stack overflow: by restarting at the indicated
2437     // address.
2438     HeapWord* ra = _restart_addr;
2439     markFromRootsClosure.reset(ra);
2440     _restart_addr = NULL;
2441     verification_mark_bm()->iterate(&markFromRootsClosure, ra, _span.end());
2442   }
2443   assert(verification_mark_stack()->isEmpty(), "Should have been drained");
2444   verify_work_stacks_empty();
2445 


2471 };
2472 
2473 void CMSCollector::verify_after_remark_work_2() {
2474   ResourceMark rm;
2475   HandleMark  hm;
2476   GenCollectedHeap* gch = GenCollectedHeap::heap();
2477 
2478   // Get a clear set of claim bits for the roots processing to work with.
2479   ClassLoaderDataGraph::clear_claimed_marks();
2480 
2481   // Mark from roots one level into CMS
2482   MarkRefsIntoVerifyClosure notOlder(_span, verification_mark_bm(),
2483                                      markBitMap());
2484   CLDToOopClosure cld_closure(&notOlder, true);
2485 
2486   gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
2487 
2488   {
2489     StrongRootsScope srs(1);
2490 
2491     gch->gen_process_roots(&srs,
2492                            GenCollectedHeap::OldGen,
2493                            true,   // young gen as roots
2494                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
2495                            should_unload_classes(),
2496                            &notOlder,
2497                            NULL,
2498                            &cld_closure);
2499   }
2500 
2501   // Now mark from the roots
2502   MarkFromRootsVerifyClosure markFromRootsClosure(this, _span,
2503     verification_mark_bm(), markBitMap(), verification_mark_stack());
2504   assert(_restart_addr == NULL, "Expected pre-condition");
2505   verification_mark_bm()->iterate(&markFromRootsClosure);
2506   while (_restart_addr != NULL) {
2507     // Deal with stack overflow: by restarting at the indicated
2508     // address.
2509     HeapWord* ra = _restart_addr;
2510     markFromRootsClosure.reset(ra);
2511     _restart_addr = NULL;
2512     verification_mark_bm()->iterate(&markFromRootsClosure, ra, _span.end());
2513   }
2514   assert(verification_mark_stack()->isEmpty(), "Should have been drained");
2515   verify_work_stacks_empty();
2516 
2517   VerifyKlassOopsKlassClosure verify_klass_oops(verification_mark_bm());


2524   verification_mark_bm()->iterate(&vcl);
2525   assert(!vcl.failed(), "Else verification above should not have succeeded");
2526 }
2527 
2528 void ConcurrentMarkSweepGeneration::save_marks() {
2529   // delegate to CMS space
2530   cmsSpace()->save_marks();
2531   for (uint i = 0; i < ParallelGCThreads; i++) {
2532     _par_gc_thread_states[i]->promo.startTrackingPromotions();
2533   }
2534 }
2535 
2536 bool ConcurrentMarkSweepGeneration::no_allocs_since_save_marks() {
2537   return cmsSpace()->no_allocs_since_save_marks();
2538 }
2539 
2540 #define CMS_SINCE_SAVE_MARKS_DEFN(OopClosureType, nv_suffix)    \
2541                                                                 \
2542 void ConcurrentMarkSweepGeneration::                            \
2543 oop_since_save_marks_iterate##nv_suffix(OopClosureType* cl) {   \
2544   cl->set_generation(this);                                     \
2545   cmsSpace()->oop_since_save_marks_iterate##nv_suffix(cl);      \
2546   cl->reset_generation();                                       \
2547   save_marks();                                                 \
2548 }
2549 
2550 ALL_SINCE_SAVE_MARKS_CLOSURES(CMS_SINCE_SAVE_MARKS_DEFN)
2551 
2552 void
2553 ConcurrentMarkSweepGeneration::oop_iterate(ExtendedOopClosure* cl) {
2554   if (freelistLock()->owned_by_self()) {
2555     Generation::oop_iterate(cl);
2556   } else {
2557     MutexLockerEx x(freelistLock(), Mutex::_no_safepoint_check_flag);
2558     Generation::oop_iterate(cl);
2559   }
2560 }
2561 
2562 void
2563 ConcurrentMarkSweepGeneration::object_iterate(ObjectClosure* cl) {
2564   if (freelistLock()->owned_by_self()) {
2565     Generation::object_iterate(cl);
2566   } else {


2994       WorkGang* workers = gch->workers();
2995       assert(workers != NULL, "Need parallel worker threads.");
2996       uint n_workers = workers->active_workers();
2997 
2998       StrongRootsScope srs(n_workers);
2999 
3000       CMSParInitialMarkTask tsk(this, &srs, n_workers);
3001       initialize_sequential_subtasks_for_young_gen_rescan(n_workers);
3002       if (n_workers > 1) {
3003         workers->run_task(&tsk);
3004       } else {
3005         tsk.work(0);
3006       }
3007     } else {
3008       // The serial version.
3009       CLDToOopClosure cld_closure(&notOlder, true);
3010       gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
3011 
3012       StrongRootsScope srs(1);
3013 
3014       gch->gen_process_roots(&srs,
3015                              GenCollectedHeap::OldGen,
3016                              true,   // young gen as roots
3017                              GenCollectedHeap::ScanningOption(roots_scanning_options()),
3018                              should_unload_classes(),
3019                              &notOlder,
3020                              NULL,
3021                              &cld_closure);
3022     }
3023   }
3024 
3025   // Clear mod-union table; it will be dirtied in the prologue of
3026   // CMS generation per each young generation collection.
3027 
3028   assert(_modUnionTable.isAllClear(),
3029        "Was cleared in most recent final checkpoint phase"
3030        " or no bits are set in the gc_prologue before the start of the next "
3031        "subsequent marking phase.");
3032 
3033   assert(_ct->klass_rem_set()->mod_union_is_clear(), "Must be");
3034 
3035   // Save the end of the used_region of the constituent generations
3036   // to be used to limit the extent of sweep in each generation.
3037   save_sweep_limits();
3038   verify_overflow_empty();
3039 }
3040 


4425   GenCollectedHeap* gch = GenCollectedHeap::heap();
4426   Par_MarkRefsIntoClosure par_mri_cl(_collector->_span, &(_collector->_markBitMap));
4427 
4428   // ---------- young gen roots --------------
4429   {
4430     work_on_young_gen_roots(worker_id, &par_mri_cl);
4431     _timer.stop();
4432     if (PrintCMSStatistics != 0) {
4433       gclog_or_tty->print_cr(
4434         "Finished young gen initial mark scan work in %dth thread: %3.3f sec",
4435         worker_id, _timer.seconds());
4436     }
4437   }
4438 
4439   // ---------- remaining roots --------------
4440   _timer.reset();
4441   _timer.start();
4442 
4443   CLDToOopClosure cld_closure(&par_mri_cl, true);
4444 
4445   gch->gen_process_roots(_strong_roots_scope,
4446                          GenCollectedHeap::OldGen,
4447                          false,     // yg was scanned above
4448                          GenCollectedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()),
4449                          _collector->should_unload_classes(),
4450                          &par_mri_cl,
4451                          NULL,
4452                          &cld_closure);
4453   assert(_collector->should_unload_classes()
4454          || (_collector->CMSCollector::roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
4455          "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
4456   _timer.stop();
4457   if (PrintCMSStatistics != 0) {
4458     gclog_or_tty->print_cr(
4459       "Finished remaining root initial mark scan work in %dth thread: %3.3f sec",
4460       worker_id, _timer.seconds());
4461   }
4462 }
4463 
4464 // Parallel remark task
4465 class CMSParRemarkTask: public CMSParMarkTask {
4466   CompactibleFreeListSpace* _cms_space;
4467 
4468   // The per-thread work queues, available here for stealing.
4469   OopTaskQueueSet*       _task_queues;
4470   ParallelTaskTerminator _term;
4471   StrongRootsScope*      _strong_roots_scope;


4564     work_queue(worker_id));
4565 
4566   // Rescan young gen roots first since these are likely
4567   // coarsely partitioned and may, on that account, constitute
4568   // the critical path; thus, it's best to start off that
4569   // work first.
4570   // ---------- young gen roots --------------
4571   {
4572     work_on_young_gen_roots(worker_id, &par_mrias_cl);
4573     _timer.stop();
4574     if (PrintCMSStatistics != 0) {
4575       gclog_or_tty->print_cr(
4576         "Finished young gen rescan work in %dth thread: %3.3f sec",
4577         worker_id, _timer.seconds());
4578     }
4579   }
4580 
4581   // ---------- remaining roots --------------
4582   _timer.reset();
4583   _timer.start();
4584   gch->gen_process_roots(_strong_roots_scope,
4585                          GenCollectedHeap::OldGen,
4586                          false,     // yg was scanned above
4587                          GenCollectedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()),
4588                          _collector->should_unload_classes(),
4589                          &par_mrias_cl,
4590                          NULL,
4591                          NULL);     // The dirty klasses will be handled below
4592 
4593   assert(_collector->should_unload_classes()
4594          || (_collector->CMSCollector::roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
4595          "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
4596   _timer.stop();
4597   if (PrintCMSStatistics != 0) {
4598     gclog_or_tty->print_cr(
4599       "Finished remaining root rescan work in %dth thread: %3.3f sec",
4600       worker_id, _timer.seconds());
4601   }
4602 
4603   // ---------- unhandled CLD scanning ----------
4604   if (worker_id == 0) { // Single threaded at the moment.
4605     _timer.reset();
4606     _timer.start();
4607 
4608     // Scan all new class loader data objects and new dependencies that were
4609     // introduced during concurrent marking.
4610     ResourceMark rm;


5145       verify_work_stacks_empty();
5146       if (PrintCMSStatistics != 0) {
5147         gclog_or_tty->print(" (re-scanned " SIZE_FORMAT " dirty cards in cms gen) ",
5148           markFromDirtyCardsClosure.num_dirty_cards());
5149       }
5150     }
5151   }
5152   if (VerifyDuringGC &&
5153       GenCollectedHeap::heap()->total_collections() >= VerifyGCStartAt) {
5154     HandleMark hm;  // Discard invalid handles created during verification
5155     Universe::verify();
5156   }
5157   {
5158     GCTraceTime t("root rescan", PrintGCDetails, false, _gc_timer_cm);
5159 
5160     verify_work_stacks_empty();
5161 
5162     gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
5163     StrongRootsScope srs(1);
5164 
5165     gch->gen_process_roots(&srs,
5166                            GenCollectedHeap::OldGen,
5167                            true,  // young gen as roots
5168                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
5169                            should_unload_classes(),
5170                            &mrias_cl,
5171                            NULL,
5172                            NULL); // The dirty klasses will be handled below
5173 
5174     assert(should_unload_classes()
5175            || (roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
5176            "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
5177   }
5178 
5179   {
5180     GCTraceTime t("visit unhandled CLDs", PrintGCDetails, false, _gc_timer_cm);
5181 
5182     verify_work_stacks_empty();
5183 
5184     // Scan all class loader data objects that might have been introduced
5185     // during concurrent marking.
5186     ResourceMark rm;
5187     GrowableArray<ClassLoaderData*>* array = ClassLoaderDataGraph::new_clds();
5188     for (int i = 0; i < array->length(); i++) {
5189       mrias_cl.do_cld_nv(array->at(i));
5190     }
5191 




src/share/vm/gc/cms/concurrentMarkSweepGeneration.cpp (new version)

2399   }
2400   if (!silent) gclog_or_tty->print(" done] ");
2401   return true;
2402 }
2403 
2404 void CMSCollector::verify_after_remark_work_1() {
2405   ResourceMark rm;
2406   HandleMark  hm;
2407   GenCollectedHeap* gch = GenCollectedHeap::heap();
2408 
2409   // Get a clear set of claim bits for the roots processing to work with.
2410   ClassLoaderDataGraph::clear_claimed_marks();
2411 
2412   // Mark from roots one level into CMS
2413   MarkRefsIntoClosure notOlder(_span, verification_mark_bm());
2414   gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
2415 
2416   {
2417     StrongRootsScope srs(1);
2418 
2419     gch->old_process_roots(&srs,

2420                            true,   // young gen as roots
2421                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
2422                            should_unload_classes(),
2423                            &notOlder,

2424                            NULL);
2425   }
2426 
2427   // Now mark from the roots
2428   MarkFromRootsClosure markFromRootsClosure(this, _span,
2429     verification_mark_bm(), verification_mark_stack(),
2430     false /* don't yield */, true /* verifying */);
2431   assert(_restart_addr == NULL, "Expected pre-condition");
2432   verification_mark_bm()->iterate(&markFromRootsClosure);
2433   while (_restart_addr != NULL) {
2434     // Deal with stack overflow: by restarting at the indicated
2435     // address.
2436     HeapWord* ra = _restart_addr;
2437     markFromRootsClosure.reset(ra);
2438     _restart_addr = NULL;
2439     verification_mark_bm()->iterate(&markFromRootsClosure, ra, _span.end());
2440   }
2441   assert(verification_mark_stack()->isEmpty(), "Should have been drained");
2442   verify_work_stacks_empty();
2443 


2469 };
2470 
2471 void CMSCollector::verify_after_remark_work_2() {
2472   ResourceMark rm;
2473   HandleMark  hm;
2474   GenCollectedHeap* gch = GenCollectedHeap::heap();
2475 
2476   // Get a clear set of claim bits for the roots processing to work with.
2477   ClassLoaderDataGraph::clear_claimed_marks();
2478 
2479   // Mark from roots one level into CMS
2480   MarkRefsIntoVerifyClosure notOlder(_span, verification_mark_bm(),
2481                                      markBitMap());
2482   CLDToOopClosure cld_closure(&notOlder, true);
2483 
2484   gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
2485 
2486   {
2487     StrongRootsScope srs(1);
2488 
2489     gch->old_process_roots(&srs,

2490                            true,   // young gen as roots
2491                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
2492                            should_unload_classes(),
2493                            &notOlder,

2494                            &cld_closure);
2495   }
2496 
2497   // Now mark from the roots
2498   MarkFromRootsVerifyClosure markFromRootsClosure(this, _span,
2499     verification_mark_bm(), markBitMap(), verification_mark_stack());
2500   assert(_restart_addr == NULL, "Expected pre-condition");
2501   verification_mark_bm()->iterate(&markFromRootsClosure);
2502   while (_restart_addr != NULL) {
2503     // Deal with stack overflow: by restarting at the indicated
2504     // address.
2505     HeapWord* ra = _restart_addr;
2506     markFromRootsClosure.reset(ra);
2507     _restart_addr = NULL;
2508     verification_mark_bm()->iterate(&markFromRootsClosure, ra, _span.end());
2509   }
2510   assert(verification_mark_stack()->isEmpty(), "Should have been drained");
2511   verify_work_stacks_empty();
2512 
2513   VerifyKlassOopsKlassClosure verify_klass_oops(verification_mark_bm());


2520   verification_mark_bm()->iterate(&vcl);
2521   assert(!vcl.failed(), "Else verification above should not have succeeded");
2522 }
2523 
2524 void ConcurrentMarkSweepGeneration::save_marks() {
2525   // delegate to CMS space
2526   cmsSpace()->save_marks();
2527   for (uint i = 0; i < ParallelGCThreads; i++) {
2528     _par_gc_thread_states[i]->promo.startTrackingPromotions();
2529   }
2530 }
2531 
2532 bool ConcurrentMarkSweepGeneration::no_allocs_since_save_marks() {
2533   return cmsSpace()->no_allocs_since_save_marks();
2534 }
2535 
2536 #define CMS_SINCE_SAVE_MARKS_DEFN(OopClosureType, nv_suffix)    \
2537                                                                 \
2538 void ConcurrentMarkSweepGeneration::                            \
2539 oop_since_save_marks_iterate##nv_suffix(OopClosureType* cl) {   \
2540   cl->assert_generation(this);                                  \
2541   cmsSpace()->oop_since_save_marks_iterate##nv_suffix(cl);      \

2542   save_marks();                                                 \
2543 }
2544 
2545 ALL_SINCE_SAVE_MARKS_CLOSURES(CMS_SINCE_SAVE_MARKS_DEFN)
2546 
2547 void
2548 ConcurrentMarkSweepGeneration::oop_iterate(ExtendedOopClosure* cl) {
2549   if (freelistLock()->owned_by_self()) {
2550     Generation::oop_iterate(cl);
2551   } else {
2552     MutexLockerEx x(freelistLock(), Mutex::_no_safepoint_check_flag);
2553     Generation::oop_iterate(cl);
2554   }
2555 }
2556 
2557 void
2558 ConcurrentMarkSweepGeneration::object_iterate(ObjectClosure* cl) {
2559   if (freelistLock()->owned_by_self()) {
2560     Generation::object_iterate(cl);
2561   } else {


2989       WorkGang* workers = gch->workers();
2990       assert(workers != NULL, "Need parallel worker threads.");
2991       uint n_workers = workers->active_workers();
2992 
2993       StrongRootsScope srs(n_workers);
2994 
2995       CMSParInitialMarkTask tsk(this, &srs, n_workers);
2996       initialize_sequential_subtasks_for_young_gen_rescan(n_workers);
2997       if (n_workers > 1) {
2998         workers->run_task(&tsk);
2999       } else {
3000         tsk.work(0);
3001       }
3002     } else {
3003       // The serial version.
3004       CLDToOopClosure cld_closure(&notOlder, true);
3005       gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
3006 
3007       StrongRootsScope srs(1);
3008 
3009       gch->old_process_roots(&srs,

3010                              true,   // young gen as roots
3011                              GenCollectedHeap::ScanningOption(roots_scanning_options()),
3012                              should_unload_classes(),
3013                              &notOlder,

3014                              &cld_closure);
3015     }
3016   }
3017 
3018   // Clear mod-union table; it will be dirtied in the prologue of
3019   // CMS generation per each young generation collection.
3020 
3021   assert(_modUnionTable.isAllClear(),
3022        "Was cleared in most recent final checkpoint phase"
3023        " or no bits are set in the gc_prologue before the start of the next "
3024        "subsequent marking phase.");
3025 
3026   assert(_ct->klass_rem_set()->mod_union_is_clear(), "Must be");
3027 
3028   // Save the end of the used_region of the constituent generations
3029   // to be used to limit the extent of sweep in each generation.
3030   save_sweep_limits();
3031   verify_overflow_empty();
3032 }
3033 


4418   GenCollectedHeap* gch = GenCollectedHeap::heap();
4419   Par_MarkRefsIntoClosure par_mri_cl(_collector->_span, &(_collector->_markBitMap));
4420 
4421   // ---------- young gen roots --------------
4422   {
4423     work_on_young_gen_roots(worker_id, &par_mri_cl);
4424     _timer.stop();
4425     if (PrintCMSStatistics != 0) {
4426       gclog_or_tty->print_cr(
4427         "Finished young gen initial mark scan work in %dth thread: %3.3f sec",
4428         worker_id, _timer.seconds());
4429     }
4430   }
4431 
4432   // ---------- remaining roots --------------
4433   _timer.reset();
4434   _timer.start();
4435 
4436   CLDToOopClosure cld_closure(&par_mri_cl, true);
4437 
4438   gch->old_process_roots(_strong_roots_scope,

4439                          false,     // yg was scanned above
4440                          GenCollectedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()),
4441                          _collector->should_unload_classes(),
4442                          &par_mri_cl,

4443                          &cld_closure);
4444   assert(_collector->should_unload_classes()
4445          || (_collector->CMSCollector::roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
4446          "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
4447   _timer.stop();
4448   if (PrintCMSStatistics != 0) {
4449     gclog_or_tty->print_cr(
4450       "Finished remaining root initial mark scan work in %dth thread: %3.3f sec",
4451       worker_id, _timer.seconds());
4452   }
4453 }
4454 
4455 // Parallel remark task
4456 class CMSParRemarkTask: public CMSParMarkTask {
4457   CompactibleFreeListSpace* _cms_space;
4458 
4459   // The per-thread work queues, available here for stealing.
4460   OopTaskQueueSet*       _task_queues;
4461   ParallelTaskTerminator _term;
4462   StrongRootsScope*      _strong_roots_scope;


4555     work_queue(worker_id));
4556 
4557   // Rescan young gen roots first since these are likely
4558   // coarsely partitioned and may, on that account, constitute
4559   // the critical path; thus, it's best to start off that
4560   // work first.
4561   // ---------- young gen roots --------------
4562   {
4563     work_on_young_gen_roots(worker_id, &par_mrias_cl);
4564     _timer.stop();
4565     if (PrintCMSStatistics != 0) {
4566       gclog_or_tty->print_cr(
4567         "Finished young gen rescan work in %dth thread: %3.3f sec",
4568         worker_id, _timer.seconds());
4569     }
4570   }
4571 
4572   // ---------- remaining roots --------------
4573   _timer.reset();
4574   _timer.start();
4575   gch->old_process_roots(_strong_roots_scope,

4576                          false,     // yg was scanned above
4577                          GenCollectedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()),
4578                          _collector->should_unload_classes(),
4579                          &par_mrias_cl,

4580                          NULL);     // The dirty klasses will be handled below
4581 
4582   assert(_collector->should_unload_classes()
4583          || (_collector->CMSCollector::roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
4584          "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
4585   _timer.stop();
4586   if (PrintCMSStatistics != 0) {
4587     gclog_or_tty->print_cr(
4588       "Finished remaining root rescan work in %dth thread: %3.3f sec",
4589       worker_id, _timer.seconds());
4590   }
4591 
4592   // ---------- unhandled CLD scanning ----------
4593   if (worker_id == 0) { // Single threaded at the moment.
4594     _timer.reset();
4595     _timer.start();
4596 
4597     // Scan all new class loader data objects and new dependencies that were
4598     // introduced during concurrent marking.
4599     ResourceMark rm;


5134       verify_work_stacks_empty();
5135       if (PrintCMSStatistics != 0) {
5136         gclog_or_tty->print(" (re-scanned " SIZE_FORMAT " dirty cards in cms gen) ",
5137           markFromDirtyCardsClosure.num_dirty_cards());
5138       }
5139     }
5140   }
5141   if (VerifyDuringGC &&
5142       GenCollectedHeap::heap()->total_collections() >= VerifyGCStartAt) {
5143     HandleMark hm;  // Discard invalid handles created during verification
5144     Universe::verify();
5145   }
5146   {
5147     GCTraceTime t("root rescan", PrintGCDetails, false, _gc_timer_cm);
5148 
5149     verify_work_stacks_empty();
5150 
5151     gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
5152     StrongRootsScope srs(1);
5153 
5154     gch->old_process_roots(&srs,

5155                            true,  // young gen as roots
5156                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
5157                            should_unload_classes(),
5158                            &mrias_cl,

5159                            NULL); // The dirty klasses will be handled below
5160 
5161     assert(should_unload_classes()
5162            || (roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
5163            "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
5164   }
5165 
5166   {
5167     GCTraceTime t("visit unhandled CLDs", PrintGCDetails, false, _gc_timer_cm);
5168 
5169     verify_work_stacks_empty();
5170 
5171     // Scan all class loader data objects that might have been introduced
5172     // during concurrent marking.
5173     ResourceMark rm;
5174     GrowableArray<ClassLoaderData*>* array = ClassLoaderDataGraph::new_clds();
5175     for (int i = 0; i < array->length(); i++) {
5176       mrias_cl.do_cld_nv(array->at(i));
5177     }
5178 
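
Every gen_process_roots() call site touched in this file is rewritten the same way, so the pattern is restated once below using the verify_after_remark_work_1() site as the example. The annotations only mark which arguments disappear at these call sites; the roles of the dropped parameters are not asserted here, since the GenCollectedHeap declarations are outside this webrev.

    // Old frame: the generation is named explicitly and an extra closure
    // argument is passed (NULL at every call site in this patch).
    gch->gen_process_roots(&srs,
                           GenCollectedHeap::OldGen,   // dropped in the new API
                           true,                       // young gen as roots
                           GenCollectedHeap::ScanningOption(roots_scanning_options()),
                           should_unload_classes(),
                           &notOlder,
                           NULL,                       // dropped in the new API; always NULL here
                           NULL);

    // New frame: the old generation is implied by the method name and the
    // remaining arguments are passed through unchanged.
    gch->old_process_roots(&srs,
                           true,                       // young gen as roots
                           GenCollectedHeap::ScanningOption(roots_scanning_options()),
                           should_unload_classes(),
                           &notOlder,
                           NULL);

The parallel sites (CMSParInitialMarkTask, CMSParRemarkTask) and the serial initial-mark and remark root-rescan paths apply the identical transformation with their own closures.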

