src/share/vm/gc/cms/concurrentMarkSweepGeneration.cpp (old version; the new version follows further below)

2397   }
2398   if (!silent) gclog_or_tty->print(" done] ");
2399   return true;
2400 }
2401 
2402 void CMSCollector::verify_after_remark_work_1() {
2403   ResourceMark rm;
2404   HandleMark  hm;
2405   GenCollectedHeap* gch = GenCollectedHeap::heap();
2406 
2407   // Get a clear set of claim bits for the roots processing to work with.
2408   ClassLoaderDataGraph::clear_claimed_marks();
2409 
2410   // Mark from roots one level into CMS
2411   MarkRefsIntoClosure notOlder(_span, verification_mark_bm());
2412   gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
2413 
2414   {
2415     StrongRootsScope srs(1);
2416 
2417     gch->gen_process_roots(&srs,
2418                            GenCollectedHeap::OldGen,
2419                            true,   // young gen as roots
2420                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
2421                            should_unload_classes(),
2422                            &notOlder,
2423                            NULL,
2424                            NULL);
2425   }
2426 
2427   // Now mark from the roots
2428   MarkFromRootsClosure markFromRootsClosure(this, _span,
2429     verification_mark_bm(), verification_mark_stack(),
2430     false /* don't yield */, true /* verifying */);
2431   assert(_restart_addr == NULL, "Expected pre-condition");
2432   verification_mark_bm()->iterate(&markFromRootsClosure);
2433   while (_restart_addr != NULL) {
2434     // Deal with stack overflow: by restarting at the indicated
2435     // address.
2436     HeapWord* ra = _restart_addr;
2437     markFromRootsClosure.reset(ra);
2438     _restart_addr = NULL;
2439     verification_mark_bm()->iterate(&markFromRootsClosure, ra, _span.end());
2440   }
2441   assert(verification_mark_stack()->isEmpty(), "Should have been drained");
2442   verify_work_stacks_empty();
2443 
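
Reviewer note: the while loop above is the usual mark-stack overflow recovery idiom in this file: iterate over the bitmap, and if the closure runs out of mark stack it records the address where it stopped in _restart_addr, so the driver resets the closure and resumes from that address. A minimal self-contained model of that control flow (BoundedStack, scan() and the address range are invented for illustration and are not HotSpot code):

// Toy model of the restart-on-overflow idiom; not HotSpot code.
#include <cassert>
#include <cstddef>
#include <vector>

typedef size_t Addr;                         // stand-in for HeapWord*

struct BoundedStack {
  std::vector<Addr> data;
  size_t capacity;
  explicit BoundedStack(size_t cap) : capacity(cap) {}
  bool push(Addr a) {
    if (data.size() == capacity) return false;   // overflow
    data.push_back(a);
    return true;
  }
};

// Scan [from, end); on overflow return the address we stopped at so the
// caller can drain the stack and restart there (mirrors _restart_addr).
Addr scan(Addr from, Addr end, BoundedStack* stack) {
  for (Addr a = from; a < end; ++a) {
    if (!stack->push(a)) return a;
  }
  return end;
}

int main() {
  BoundedStack stack(8);
  const Addr span_end = 100;
  Addr restart = scan(0, span_end, &stack);
  while (restart != span_end) {              // deal with overflow by restarting
    stack.data.clear();                      // stands in for draining the mark stack
    restart = scan(restart, span_end, &stack);
  }
  assert(restart == span_end);
  return 0;
}
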


2469 };
2470 
2471 void CMSCollector::verify_after_remark_work_2() {
2472   ResourceMark rm;
2473   HandleMark  hm;
2474   GenCollectedHeap* gch = GenCollectedHeap::heap();
2475 
2476   // Get a clear set of claim bits for the roots processing to work with.
2477   ClassLoaderDataGraph::clear_claimed_marks();
2478 
2479   // Mark from roots one level into CMS
2480   MarkRefsIntoVerifyClosure notOlder(_span, verification_mark_bm(),
2481                                      markBitMap());
2482   CLDToOopClosure cld_closure(&notOlder, true);
2483 
2484   gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
2485 
2486   {
2487     StrongRootsScope srs(1);
2488 
2489     gch->gen_process_roots(&srs,
2490                            GenCollectedHeap::OldGen,
2491                            true,   // young gen as roots
2492                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
2493                            should_unload_classes(),
2494                            &notOlder,
2495                            NULL,
2496                            &cld_closure);
2497   }
2498 
2499   // Now mark from the roots
2500   MarkFromRootsVerifyClosure markFromRootsClosure(this, _span,
2501     verification_mark_bm(), markBitMap(), verification_mark_stack());
2502   assert(_restart_addr == NULL, "Expected pre-condition");
2503   verification_mark_bm()->iterate(&markFromRootsClosure);
2504   while (_restart_addr != NULL) {
2505     // Deal with stack overflow: by restarting at the indicated
2506     // address.
2507     HeapWord* ra = _restart_addr;
2508     markFromRootsClosure.reset(ra);
2509     _restart_addr = NULL;
2510     verification_mark_bm()->iterate(&markFromRootsClosure, ra, _span.end());
2511   }
2512   assert(verification_mark_stack()->isEmpty(), "Should have been drained");
2513   verify_work_stacks_empty();
2514 
2515   VerifyKlassOopsKlassClosure verify_klass_oops(verification_mark_bm());


2522   verification_mark_bm()->iterate(&vcl);
2523   assert(!vcl.failed(), "Else verification above should not have succeeded");
2524 }
2525 
2526 void ConcurrentMarkSweepGeneration::save_marks() {
2527   // delegate to CMS space
2528   cmsSpace()->save_marks();
2529   for (uint i = 0; i < ParallelGCThreads; i++) {
2530     _par_gc_thread_states[i]->promo.startTrackingPromotions();
2531   }
2532 }
2533 
2534 bool ConcurrentMarkSweepGeneration::no_allocs_since_save_marks() {
2535   return cmsSpace()->no_allocs_since_save_marks();
2536 }
2537 
2538 #define CMS_SINCE_SAVE_MARKS_DEFN(OopClosureType, nv_suffix)    \
2539                                                                 \
2540 void ConcurrentMarkSweepGeneration::                            \
2541 oop_since_save_marks_iterate##nv_suffix(OopClosureType* cl) {   \
2542   cl->set_generation(this);                                     \
2543   cmsSpace()->oop_since_save_marks_iterate##nv_suffix(cl);      \
2544   cl->reset_generation();                                       \
2545   save_marks();                                                 \
2546 }
2547 
2548 ALL_SINCE_SAVE_MARKS_CLOSURES(CMS_SINCE_SAVE_MARKS_DEFN)
2549 
2550 void
2551 ConcurrentMarkSweepGeneration::oop_iterate(ExtendedOopClosure* cl) {
2552   if (freelistLock()->owned_by_self()) {
2553     Generation::oop_iterate(cl);
2554   } else {
2555     MutexLockerEx x(freelistLock(), Mutex::_no_safepoint_check_flag);
2556     Generation::oop_iterate(cl);
2557   }
2558 }
2559 
2560 void
2561 ConcurrentMarkSweepGeneration::object_iterate(ObjectClosure* cl) {
2562   if (freelistLock()->owned_by_self()) {
2563     Generation::object_iterate(cl);
2564   } else {

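
The oop_iterate()/object_iterate() pair above takes the freelist lock only when the calling thread does not already own it, presumably so that callers that already hold the lock do not try to re-acquire it. A self-contained sketch of that pattern (OwnedLock, freelist_lock and do_iterate() are made-up stand-ins; HotSpot uses its own Mutex/MutexLockerEx classes):

// Illustrative only; not the HotSpot Mutex API.
#include <atomic>
#include <mutex>
#include <thread>

class OwnedLock {
  std::mutex _m;
  std::atomic<std::thread::id> _owner;
 public:
  void lock()   { _m.lock(); _owner = std::this_thread::get_id(); }
  void unlock() { _owner = std::thread::id(); _m.unlock(); }
  bool owned_by_self() const { return _owner == std::this_thread::get_id(); }
};

static OwnedLock freelist_lock;

static void do_iterate() { /* walk the space; caller must hold the lock */ }

void iterate_with_lock() {
  if (freelist_lock.owned_by_self()) {
    do_iterate();                              // lock already held by this thread
  } else {
    std::lock_guard<OwnedLock> g(freelist_lock);
    do_iterate();
  }
}

int main() {
  iterate_with_lock();
  return 0;
}
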

2990       WorkGang* workers = gch->workers();
2991       assert(workers != NULL, "Need parallel worker threads.");
2992       uint n_workers = workers->active_workers();
2993 
2994       StrongRootsScope srs(n_workers);
2995 
2996       CMSParInitialMarkTask tsk(this, &srs, n_workers);
2997       initialize_sequential_subtasks_for_young_gen_rescan(n_workers);
2998       if (n_workers > 1) {
2999         workers->run_task(&tsk);
3000       } else {
3001         tsk.work(0);
3002       }
3003     } else {
3004       // The serial version.
3005       CLDToOopClosure cld_closure(&notOlder, true);
3006       gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
3007 
3008       StrongRootsScope srs(1);
3009 
3010       gch->gen_process_roots(&srs,
3011                              GenCollectedHeap::OldGen,
3012                              true,   // young gen as roots
3013                              GenCollectedHeap::ScanningOption(roots_scanning_options()),
3014                              should_unload_classes(),
3015                              &notOlder,
3016                              NULL,
3017                              &cld_closure);
3018     }
3019   }
3020 
3021   // Clear mod-union table; it will be dirtied in the prologue of
3022   // CMS generation per each young generation collection.
3023 
3024   assert(_modUnionTable.isAllClear(),
3025        "Was cleared in most recent final checkpoint phase"
3026        " or no bits are set in the gc_prologue before the start of the next "
3027        "subsequent marking phase.");
3028 
3029   assert(_ct->klass_rem_set()->mod_union_is_clear(), "Must be");
3030 
3031   // Save the end of the used_region of the constituent generations
3032   // to be used to limit the extent of sweep in each generation.
3033   save_sweep_limits();
3034   verify_overflow_empty();
3035 }
3036 


4419   GenCollectedHeap* gch = GenCollectedHeap::heap();
4420   Par_MarkRefsIntoClosure par_mri_cl(_collector->_span, &(_collector->_markBitMap));
4421 
4422   // ---------- young gen roots --------------
4423   {
4424     work_on_young_gen_roots(worker_id, &par_mri_cl);
4425     _timer.stop();
4426     if (PrintCMSStatistics != 0) {
4427       gclog_or_tty->print_cr(
4428         "Finished young gen initial mark scan work in %dth thread: %3.3f sec",
4429         worker_id, _timer.seconds());
4430     }
4431   }
4432 
4433   // ---------- remaining roots --------------
4434   _timer.reset();
4435   _timer.start();
4436 
4437   CLDToOopClosure cld_closure(&par_mri_cl, true);
4438 
4439   gch->gen_process_roots(_strong_roots_scope,
4440                          GenCollectedHeap::OldGen,
4441                          false,     // yg was scanned above
4442                          GenCollectedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()),
4443                          _collector->should_unload_classes(),
4444                          &par_mri_cl,
4445                          NULL,
4446                          &cld_closure);
4447   assert(_collector->should_unload_classes()
4448          || (_collector->CMSCollector::roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
4449          "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
4450   _timer.stop();
4451   if (PrintCMSStatistics != 0) {
4452     gclog_or_tty->print_cr(
4453       "Finished remaining root initial mark scan work in %dth thread: %3.3f sec",
4454       worker_id, _timer.seconds());
4455   }
4456 }
4457 
4458 // Parallel remark task
4459 class CMSParRemarkTask: public CMSParMarkTask {
4460   CompactibleFreeListSpace* _cms_space;
4461 
4462   // The per-thread work queues, available here for stealing.
4463   OopTaskQueueSet*       _task_queues;
4464   ParallelTaskTerminator _term;
4465   StrongRootsScope*      _strong_roots_scope;


4558     work_queue(worker_id));
4559 
4560   // Rescan young gen roots first since these are likely
4561   // coarsely partitioned and may, on that account, constitute
4562   // the critical path; thus, it's best to start off that
4563   // work first.
4564   // ---------- young gen roots --------------
4565   {
4566     work_on_young_gen_roots(worker_id, &par_mrias_cl);
4567     _timer.stop();
4568     if (PrintCMSStatistics != 0) {
4569       gclog_or_tty->print_cr(
4570         "Finished young gen rescan work in %dth thread: %3.3f sec",
4571         worker_id, _timer.seconds());
4572     }
4573   }
4574 
4575   // ---------- remaining roots --------------
4576   _timer.reset();
4577   _timer.start();
4578   gch->gen_process_roots(_strong_roots_scope,
4579                          GenCollectedHeap::OldGen,
4580                          false,     // yg was scanned above
4581                          GenCollectedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()),
4582                          _collector->should_unload_classes(),
4583                          &par_mrias_cl,
4584                          NULL,
4585                          NULL);     // The dirty klasses will be handled below
4586 
4587   assert(_collector->should_unload_classes()
4588          || (_collector->CMSCollector::roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
4589          "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
4590   _timer.stop();
4591   if (PrintCMSStatistics != 0) {
4592     gclog_or_tty->print_cr(
4593       "Finished remaining root rescan work in %dth thread: %3.3f sec",
4594       worker_id, _timer.seconds());
4595   }
4596 
4597   // ---------- unhandled CLD scanning ----------
4598   if (worker_id == 0) { // Single threaded at the moment.
4599     _timer.reset();
4600     _timer.start();
4601 
4602     // Scan all new class loader data objects and new dependencies that were
4603     // introduced during concurrent marking.
4604     ResourceMark rm;


5139       verify_work_stacks_empty();
5140       if (PrintCMSStatistics != 0) {
5141         gclog_or_tty->print(" (re-scanned " SIZE_FORMAT " dirty cards in cms gen) ",
5142           markFromDirtyCardsClosure.num_dirty_cards());
5143       }
5144     }
5145   }
5146   if (VerifyDuringGC &&
5147       GenCollectedHeap::heap()->total_collections() >= VerifyGCStartAt) {
5148     HandleMark hm;  // Discard invalid handles created during verification
5149     Universe::verify();
5150   }
5151   {
5152     GCTraceTime t("root rescan", PrintGCDetails, false, _gc_timer_cm);
5153 
5154     verify_work_stacks_empty();
5155 
5156     gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
5157     StrongRootsScope srs(1);
5158 
5159     gch->gen_process_roots(&srs,
5160                            GenCollectedHeap::OldGen,
5161                            true,  // young gen as roots
5162                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
5163                            should_unload_classes(),
5164                            &mrias_cl,
5165                            NULL,
5166                            NULL); // The dirty klasses will be handled below
5167 
5168     assert(should_unload_classes()
5169            || (roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
5170            "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
5171   }
5172 
5173   {
5174     GCTraceTime t("visit unhandled CLDs", PrintGCDetails, false, _gc_timer_cm);
5175 
5176     verify_work_stacks_empty();
5177 
5178     // Scan all class loader data objects that might have been introduced
5179     // during concurrent marking.
5180     ResourceMark rm;
5181     GrowableArray<ClassLoaderData*>* array = ClassLoaderDataGraph::new_clds();
5182     for (int i = 0; i < array->length(); i++) {
5183       mrias_cl.do_cld_nv(array->at(i));
5184     }
5185 


src/share/vm/gc/cms/concurrentMarkSweepGeneration.cpp (new version; the old version is shown above)

2397   }
2398   if (!silent) gclog_or_tty->print(" done] ");
2399   return true;
2400 }
2401 
2402 void CMSCollector::verify_after_remark_work_1() {
2403   ResourceMark rm;
2404   HandleMark  hm;
2405   GenCollectedHeap* gch = GenCollectedHeap::heap();
2406 
2407   // Get a clear set of claim bits for the roots processing to work with.
2408   ClassLoaderDataGraph::clear_claimed_marks();
2409 
2410   // Mark from roots one level into CMS
2411   MarkRefsIntoClosure notOlder(_span, verification_mark_bm());
2412   gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
2413 
2414   {
2415     StrongRootsScope srs(1);
2416 
2417     gch->old_process_roots(&srs,
2418                            true,   // young gen as roots
2419                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
2420                            should_unload_classes(),
2421                            &notOlder,
2422                            NULL);
2423   }
2424 
2425   // Now mark from the roots
2426   MarkFromRootsClosure markFromRootsClosure(this, _span,
2427     verification_mark_bm(), verification_mark_stack(),
2428     false /* don't yield */, true /* verifying */);
2429   assert(_restart_addr == NULL, "Expected pre-condition");
2430   verification_mark_bm()->iterate(&markFromRootsClosure);
2431   while (_restart_addr != NULL) {
2432     // Deal with stack overflow: by restarting at the indicated
2433     // address.
2434     HeapWord* ra = _restart_addr;
2435     markFromRootsClosure.reset(ra);
2436     _restart_addr = NULL;
2437     verification_mark_bm()->iterate(&markFromRootsClosure, ra, _span.end());
2438   }
2439   assert(verification_mark_stack()->isEmpty(), "Should have been drained");
2440   verify_work_stacks_empty();
2441 


2467 };
2468 
2469 void CMSCollector::verify_after_remark_work_2() {
2470   ResourceMark rm;
2471   HandleMark  hm;
2472   GenCollectedHeap* gch = GenCollectedHeap::heap();
2473 
2474   // Get a clear set of claim bits for the roots processing to work with.
2475   ClassLoaderDataGraph::clear_claimed_marks();
2476 
2477   // Mark from roots one level into CMS
2478   MarkRefsIntoVerifyClosure notOlder(_span, verification_mark_bm(),
2479                                      markBitMap());
2480   CLDToOopClosure cld_closure(&notOlder, true);
2481 
2482   gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
2483 
2484   {
2485     StrongRootsScope srs(1);
2486 
2487     gch->old_process_roots(&srs,
2488                            true,   // young gen as roots
2489                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
2490                            should_unload_classes(),
2491                            &notOlder,
2492                            &cld_closure);
2493   }
2494 
2495   // Now mark from the roots
2496   MarkFromRootsVerifyClosure markFromRootsClosure(this, _span,
2497     verification_mark_bm(), markBitMap(), verification_mark_stack());
2498   assert(_restart_addr == NULL, "Expected pre-condition");
2499   verification_mark_bm()->iterate(&markFromRootsClosure);
2500   while (_restart_addr != NULL) {
2501     // Deal with stack overflow: by restarting at the indicated
2502     // address.
2503     HeapWord* ra = _restart_addr;
2504     markFromRootsClosure.reset(ra);
2505     _restart_addr = NULL;
2506     verification_mark_bm()->iterate(&markFromRootsClosure, ra, _span.end());
2507   }
2508   assert(verification_mark_stack()->isEmpty(), "Should have been drained");
2509   verify_work_stacks_empty();
2510 
2511   VerifyKlassOopsKlassClosure verify_klass_oops(verification_mark_bm());


2518   verification_mark_bm()->iterate(&vcl);
2519   assert(!vcl.failed(), "Else verification above should not have succeeded");
2520 }
2521 
2522 void ConcurrentMarkSweepGeneration::save_marks() {
2523   // delegate to CMS space
2524   cmsSpace()->save_marks();
2525   for (uint i = 0; i < ParallelGCThreads; i++) {
2526     _par_gc_thread_states[i]->promo.startTrackingPromotions();
2527   }
2528 }
2529 
2530 bool ConcurrentMarkSweepGeneration::no_allocs_since_save_marks() {
2531   return cmsSpace()->no_allocs_since_save_marks();
2532 }
2533 
2534 #define CMS_SINCE_SAVE_MARKS_DEFN(OopClosureType, nv_suffix)    \
2535                                                                 \
2536 void ConcurrentMarkSweepGeneration::                            \
2537 oop_since_save_marks_iterate##nv_suffix(OopClosureType* cl) {   \
2538   cl->assert_generation(this);                                  \
2539   cmsSpace()->oop_since_save_marks_iterate##nv_suffix(cl);      \
2540   save_marks();                                                 \
2541 }
2542 
2543 ALL_SINCE_SAVE_MARKS_CLOSURES(CMS_SINCE_SAVE_MARKS_DEFN)
2544 
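
CMS_SINCE_SAVE_MARKS_DEFN above is instantiated once per closure type by the ALL_SINCE_SAVE_MARKS_CLOSURES list macro, stamping out one oop_since_save_marks_iterate variant per closure. A small self-contained example of that X-macro technique (the closure types and macro names below are invented for illustration, not the HotSpot macros):

// Illustration of the X-macro pattern; not HotSpot code.
#include <cstdio>

struct ClosureA { void apply() { std::puts("A"); } };
struct ClosureB { void apply() { std::puts("B"); } };

// The list of (type, suffix) pairs, analogous to ALL_SINCE_SAVE_MARKS_CLOSURES.
#define ALL_CLOSURES(f) \
  f(ClosureA, _a)       \
  f(ClosureB, _b)

// The per-entry definition, analogous to CMS_SINCE_SAVE_MARKS_DEFN.
#define DEFINE_ITERATE(ClosureType, suffix)       \
  void iterate##suffix(ClosureType* cl) {         \
    cl->apply();                                  \
  }

ALL_CLOSURES(DEFINE_ITERATE)   // expands to iterate_a(ClosureA*) and iterate_b(ClosureB*)

int main() {
  ClosureA a; ClosureB b;
  iterate_a(&a);
  iterate_b(&b);
  return 0;
}
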
2545 void
2546 ConcurrentMarkSweepGeneration::oop_iterate(ExtendedOopClosure* cl) {
2547   if (freelistLock()->owned_by_self()) {
2548     Generation::oop_iterate(cl);
2549   } else {
2550     MutexLockerEx x(freelistLock(), Mutex::_no_safepoint_check_flag);
2551     Generation::oop_iterate(cl);
2552   }
2553 }
2554 
2555 void
2556 ConcurrentMarkSweepGeneration::object_iterate(ObjectClosure* cl) {
2557   if (freelistLock()->owned_by_self()) {
2558     Generation::object_iterate(cl);
2559   } else {


2985       WorkGang* workers = gch->workers();
2986       assert(workers != NULL, "Need parallel worker threads.");
2987       uint n_workers = workers->active_workers();
2988 
2989       StrongRootsScope srs(n_workers);
2990 
2991       CMSParInitialMarkTask tsk(this, &srs, n_workers);
2992       initialize_sequential_subtasks_for_young_gen_rescan(n_workers);
2993       if (n_workers > 1) {
2994         workers->run_task(&tsk);
2995       } else {
2996         tsk.work(0);
2997       }
2998     } else {
2999       // The serial version.
3000       CLDToOopClosure cld_closure(&notOlder, true);
3001       gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
3002 
3003       StrongRootsScope srs(1);
3004 
3005       gch->old_process_roots(&srs,
3006                              true,   // young gen as roots
3007                              GenCollectedHeap::ScanningOption(roots_scanning_options()),
3008                              should_unload_classes(),
3009                              &notOlder,
3010                              &cld_closure);
3011     }
3012   }
3013 
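
The dispatch above hands the CMSParInitialMarkTask to the work gang only when more than one worker is active, and otherwise runs it inline as worker 0, so the serial and parallel cases share the same work(worker_id) code path. A toy model of that pattern (std::thread in place of HotSpot's WorkGang; all names below are invented):

// Toy model of "run on the gang if parallel, else run inline"; not HotSpot code.
#include <cstdio>
#include <thread>
#include <vector>

struct Task {
  virtual void work(unsigned worker_id) = 0;
  virtual ~Task() {}
};

struct InitialMarkTask : Task {
  void work(unsigned worker_id) override {
    std::printf("initial-mark work on worker %u\n", worker_id);
  }
};

void run_task(Task* task, unsigned n_workers) {
  if (n_workers > 1) {
    std::vector<std::thread> gang;
    for (unsigned i = 0; i < n_workers; ++i) {
      gang.emplace_back([task, i] { task->work(i); });
    }
    for (auto& t : gang) t.join();
  } else {
    task->work(0);                 // serial fallback, same code path
  }
}

int main() {
  InitialMarkTask tsk;
  run_task(&tsk, std::thread::hardware_concurrency() > 1 ? 4 : 1);
  return 0;
}
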
3014   // Clear mod-union table; it will be dirtied in the prologue of
3015   // CMS generation per each young generation collection.
3016 
3017   assert(_modUnionTable.isAllClear(),
3018        "Was cleared in most recent final checkpoint phase"
3019        " or no bits are set in the gc_prologue before the start of the next "
3020        "subsequent marking phase.");
3021 
3022   assert(_ct->klass_rem_set()->mod_union_is_clear(), "Must be");
3023 
3024   // Save the end of the used_region of the constituent generations
3025   // to be used to limit the extent of sweep in each generation.
3026   save_sweep_limits();
3027   verify_overflow_empty();
3028 }
3029 


4412   GenCollectedHeap* gch = GenCollectedHeap::heap();
4413   Par_MarkRefsIntoClosure par_mri_cl(_collector->_span, &(_collector->_markBitMap));
4414 
4415   // ---------- young gen roots --------------
4416   {
4417     work_on_young_gen_roots(worker_id, &par_mri_cl);
4418     _timer.stop();
4419     if (PrintCMSStatistics != 0) {
4420       gclog_or_tty->print_cr(
4421         "Finished young gen initial mark scan work in %dth thread: %3.3f sec",
4422         worker_id, _timer.seconds());
4423     }
4424   }
4425 
4426   // ---------- remaining roots --------------
4427   _timer.reset();
4428   _timer.start();
4429 
4430   CLDToOopClosure cld_closure(&par_mri_cl, true);
4431 
4432   gch->old_process_roots(_strong_roots_scope,
4433                          false,     // yg was scanned above
4434                          GenCollectedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()),
4435                          _collector->should_unload_classes(),
4436                          &par_mri_cl,
4437                          &cld_closure);
4438   assert(_collector->should_unload_classes()
4439          || (_collector->CMSCollector::roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
4440          "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
4441   _timer.stop();
4442   if (PrintCMSStatistics != 0) {
4443     gclog_or_tty->print_cr(
4444       "Finished remaining root initial mark scan work in %dth thread: %3.3f sec",
4445       worker_id, _timer.seconds());
4446   }
4447 }
4448 
4449 // Parallel remark task
4450 class CMSParRemarkTask: public CMSParMarkTask {
4451   CompactibleFreeListSpace* _cms_space;
4452 
4453   // The per-thread work queues, available here for stealing.
4454   OopTaskQueueSet*       _task_queues;
4455   ParallelTaskTerminator _term;
4456   StrongRootsScope*      _strong_roots_scope;


4549     work_queue(worker_id));
4550 
4551   // Rescan young gen roots first since these are likely
4552   // coarsely partitioned and may, on that account, constitute
4553   // the critical path; thus, it's best to start off that
4554   // work first.
4555   // ---------- young gen roots --------------
4556   {
4557     work_on_young_gen_roots(worker_id, &par_mrias_cl);
4558     _timer.stop();
4559     if (PrintCMSStatistics != 0) {
4560       gclog_or_tty->print_cr(
4561         "Finished young gen rescan work in %dth thread: %3.3f sec",
4562         worker_id, _timer.seconds());
4563     }
4564   }
4565 
4566   // ---------- remaining roots --------------
4567   _timer.reset();
4568   _timer.start();
4569   gch->old_process_roots(_strong_roots_scope,
4570                          false,     // yg was scanned above
4571                          GenCollectedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()),
4572                          _collector->should_unload_classes(),
4573                          &par_mrias_cl,
4574                          NULL);     // The dirty klasses will be handled below
4575 
4576   assert(_collector->should_unload_classes()
4577          || (_collector->CMSCollector::roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
4578          "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
4579   _timer.stop();
4580   if (PrintCMSStatistics != 0) {
4581     gclog_or_tty->print_cr(
4582       "Finished remaining root rescan work in %dth thread: %3.3f sec",
4583       worker_id, _timer.seconds());
4584   }
4585 
4586   // ---------- unhandled CLD scanning ----------
4587   if (worker_id == 0) { // Single threaded at the moment.
4588     _timer.reset();
4589     _timer.start();
4590 
4591     // Scan all new class loader data objects and new dependencies that were
4592     // introduced during concurrent marking.
4593     ResourceMark rm;


5128       verify_work_stacks_empty();
5129       if (PrintCMSStatistics != 0) {
5130         gclog_or_tty->print(" (re-scanned " SIZE_FORMAT " dirty cards in cms gen) ",
5131           markFromDirtyCardsClosure.num_dirty_cards());
5132       }
5133     }
5134   }
5135   if (VerifyDuringGC &&
5136       GenCollectedHeap::heap()->total_collections() >= VerifyGCStartAt) {
5137     HandleMark hm;  // Discard invalid handles created during verification
5138     Universe::verify();
5139   }
5140   {
5141     GCTraceTime t("root rescan", PrintGCDetails, false, _gc_timer_cm);
5142 
5143     verify_work_stacks_empty();
5144 
5145     gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
5146     StrongRootsScope srs(1);
5147 
5148     gch->old_process_roots(&srs,
5149                            true,  // young gen as roots
5150                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
5151                            should_unload_classes(),
5152                            &mrias_cl,
5153                            NULL); // The dirty klasses will be handled below
5154 
5155     assert(should_unload_classes()
5156            || (roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
5157            "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
5158   }
5159 
5160   {
5161     GCTraceTime t("visit unhandled CLDs", PrintGCDetails, false, _gc_timer_cm);
5162 
5163     verify_work_stacks_empty();
5164 
5165     // Scan all class loader data objects that might have been introduced
5166     // during concurrent marking.
5167     ResourceMark rm;
5168     GrowableArray<ClassLoaderData*>* array = ClassLoaderDataGraph::new_clds();
5169     for (int i = 0; i < array->length(); i++) {
5170       mrias_cl.do_cld_nv(array->at(i));
5171     }
5172 

