src/share/vm/gc/cms/concurrentMarkSweepGeneration.cpp

--- old/src/share/vm/gc/cms/concurrentMarkSweepGeneration.cpp




1501 void CMSCollector::compute_new_size() {
1502   assert_locked_or_safepoint(Heap_lock);
1503   FreelistLocker z(this);
1504   MetaspaceGC::compute_new_size();
1505   _cmsGen->compute_new_size_free_list();
1506 }
1507 
1508 // A work method used by the foreground collector to do
1509 // a mark-sweep-compact.
1510 void CMSCollector::do_compaction_work(bool clear_all_soft_refs) {
1511   GenCollectedHeap* gch = GenCollectedHeap::heap();
1512 
1513   STWGCTimer* gc_timer = GenMarkSweep::gc_timer();
1514   gc_timer->register_gc_start();
1515 
1516   SerialOldTracer* gc_tracer = GenMarkSweep::gc_tracer();
1517   gc_tracer->report_gc_start(gch->gc_cause(), gc_timer->gc_start());
1518 
1519   gch->pre_full_gc_dump(gc_timer);
1520 
1521   GCTraceTime(Trace, gc) t("CMS:MSC");
1522 
1523   // Temporarily widen the span of the weak reference processing to
1524   // the entire heap.
1525   MemRegion new_span(GenCollectedHeap::heap()->reserved_region());
1526   ReferenceProcessorSpanMutator rp_mut_span(ref_processor(), new_span);
1527   // Temporarily clear the "is_alive_non_header" field of the
1528   // reference processor.
1529   ReferenceProcessorIsAliveMutator rp_mut_closure(ref_processor(), NULL);
1530   // Temporarily make reference _processing_ single threaded (non-MT).
1531   ReferenceProcessorMTProcMutator rp_mut_mt_processing(ref_processor(), false);
1532   // Temporarily make refs discovery atomic
1533   ReferenceProcessorAtomicMutator rp_mut_atomic(ref_processor(), true);
1534   // Temporarily make reference _discovery_ single threaded (non-MT)
1535   ReferenceProcessorMTDiscoveryMutator rp_mut_discovery(ref_processor(), false);
1536 
1537   ref_processor()->set_enqueuing_is_done(false);
1538   ref_processor()->enable_discovery();
1539   ref_processor()->setup_policy(clear_all_soft_refs);
1540   // If an asynchronous collection finishes, the _modUnionTable is
1541   // all clear.  If we are assuming the collection from an asynchronous
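
The five ReferenceProcessor*Mutator locals above are RAII guards: each constructor saves the reference processor's current setting and installs the temporary one, and each destructor restores the saved value when do_compaction_work unwinds, so the mark-sweep-compact cannot leak non-default reference-processing state even on an early return. A minimal standalone sketch of the pattern, with illustrative names rather than the HotSpot types:

#include <cassert>

// Illustrative stand-in for one ReferenceProcessor setting.
struct RefProcessor {
  bool mt_processing;
};

// RAII guard in the style of ReferenceProcessorMTProcMutator: save the
// old value, install the new one, restore on scope exit.
class MTProcMutator {
  RefProcessor* _rp;
  bool          _saved;
 public:
  MTProcMutator(RefProcessor* rp, bool value)
    : _rp(rp), _saved(rp->mt_processing) {
    _rp->mt_processing = value;
  }
  ~MTProcMutator() { _rp->mt_processing = _saved; }
};

int main() {
  RefProcessor rp = { true };
  {
    MTProcMutator m(&rp, false);  // single-threaded within this scope only
    assert(!rp.mt_processing);
  }                               // destructor restores the saved setting
  assert(rp.mt_processing);
  return 0;
}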


2217 
2218  public:
2219   VerifyMarkedClosure(CMSBitMap* bm): _marks(bm), _failed(false) {}
2220 
2221   bool do_bit(size_t offset) {
2222     HeapWord* addr = _marks->offsetToHeapWord(offset);
2223     if (!_marks->isMarked(addr)) {
2224       LogHandle(gc, verify) log;
2225       ResourceMark rm;
2226       oop(addr)->print_on(log.error_stream());
2227       log.error(" (" INTPTR_FORMAT " should have been marked)", p2i(addr));
2228       _failed = true;
2229     }
2230     return true;
2231   }
2232 
2233   bool failed() { return _failed; }
2234 };
2235 
2236 bool CMSCollector::verify_after_remark() {
2237   GCTraceTime(Info, gc, verify) tm("Verifying CMS Marking.");
2238   MutexLockerEx ml(verification_mark_bm()->lock(), Mutex::_no_safepoint_check_flag);
2239   static bool init = false;
2240 
2241   assert(SafepointSynchronize::is_at_safepoint(),
2242          "Else mutations in object graph will make answer suspect");
2243   assert(have_cms_token(),
2244          "Else there may be mutual interference in use of "
2245          "verification data structures");
2246   assert(_collectorState > Marking && _collectorState <= Sweeping,
2247          "Else marking info checked here may be obsolete");
2248   assert(haveFreelistLocks(), "must hold free list locks");
2249   assert_lock_strong(bitMapLock());
2250 
2251 
2252   // Allocate marking bit map if not already allocated
2253   if (!init) { // first time
2254     if (!verification_mark_bm()->allocate(_span)) {
2255       return false;
2256     }
2257     init = true;
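
VerifyMarkedClosure::do_bit above runs once per set bit of the verification bitmap: it maps the bit offset back to a heap address, checks that the bitmap under test has the same address marked, logs any miss, and returns true so iteration continues past failures and reports them all. The same iterate-and-cross-check shape over plain bitsets (hypothetical stand-ins for CMSBitMap):

#include <bitset>
#include <cstdio>

// Every bit set in 'expected' must also be set in 'actual'; report all
// misses and keep going, as VerifyMarkedClosure::do_bit does.
template <size_t N>
bool verify_marks(const std::bitset<N>& expected,
                  const std::bitset<N>& actual) {
  bool failed = false;
  for (size_t offset = 0; offset < N; ++offset) {
    if (expected.test(offset) && !actual.test(offset)) {
      std::printf("bit %zu should have been marked\n", offset);
      failed = true;              // record, but continue scanning
    }
  }
  return !failed;
}

int main() {
  std::bitset<8> expected("01010010"), actual("01010110");
  return verify_marks(expected, actual) ? 0 : 1;   // subset holds: exit 0
}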


2801                     Mutex::_no_safepoint_check_flag);
2802     checkpointRootsInitialWork();
2803     // enable ("weak") refs discovery
2804     rp->enable_discovery();
2805     _collectorState = Marking;
2806   }
2807 }
2808 
2809 void CMSCollector::checkpointRootsInitialWork() {
2810   assert(SafepointSynchronize::is_at_safepoint(), "world should be stopped");
2811   assert(_collectorState == InitialMarking, "just checking");
2812 
2813   // Already have locks.
2814   assert_lock_strong(bitMapLock());
2815   assert(_markBitMap.isAllClear(), "was reset at end of previous cycle");
2816 
2817   // Set up the verification and class unloading state for this
2818   // CMS collection cycle.
2819   setup_cms_unloading_and_verification_state();
2820 
2821   GCTraceTime(Trace, gc) ts("checkpointRootsInitialWork", _gc_timer_cm);
2822 
2823   // Reset all the PLAB chunk arrays if necessary.
2824   if (_survivor_plab_array != NULL && !CMSPLABRecordAlways) {
2825     reset_survivor_plab_arrays();
2826   }
2827 
2828   ResourceMark rm;
2829   HandleMark  hm;
2830 
2831   MarkRefsIntoClosure notOlder(_span, &_markBitMap);
2832   GenCollectedHeap* gch = GenCollectedHeap::heap();
2833 
2834   verify_work_stacks_empty();
2835   verify_overflow_empty();
2836 
2837   gch->ensure_parsability(false);  // fill TLABs, but no need to retire them
2838   // Update the saved marks which may affect the root scans.
2839   gch->save_marks();
2840 
2841   // weak reference processing has not started yet.
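
The MarkRefsIntoClosure notOlder(_span, &_markBitMap) constructed above is the initial-mark root closure: each root oop that falls inside the CMS generation's span gets its mark bit set, while references outside the span (e.g. into the young gen, which is handled separately) are ignored. A simplified standalone sketch of span-filtered marking; the span bounds and word size below are made-up constants:

#include <bitset>
#include <cassert>
#include <cstdint>

const uintptr_t kSpanStart = 0x1000;   // assumed span, for illustration
const uintptr_t kSpanEnd   = 0x2000;
const uintptr_t kWordSize  = 8;        // one mark bit per heap word

// In the style of MarkRefsIntoClosure: mark only addresses in the span.
class MarkRefsInto {
  std::bitset<512>* _bm;   // covers [kSpanStart, kSpanEnd) word by word
 public:
  explicit MarkRefsInto(std::bitset<512>* bm) : _bm(bm) {}
  void do_oop(uintptr_t addr) {
    if (addr >= kSpanStart && addr < kSpanEnd) {    // span filter
      _bm->set((addr - kSpanStart) / kWordSize);    // address -> bit offset
    }
  }
};

int main() {
  std::bitset<512> bitmap;
  MarkRefsInto cl(&bitmap);
  cl.do_oop(0x1010);   // inside the span: marked
  cl.do_oop(0x3000);   // outside the span: ignored
  assert(bitmap.count() == 1 && bitmap.test(2));
  return 0;
}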


4085 void CMSCollector::checkpointRootsFinal() {
4086   assert(_collectorState == FinalMarking, "incorrect state transition?");
4087   check_correct_thread_executing();
4088   // world is stopped at this checkpoint
4089   assert(SafepointSynchronize::is_at_safepoint(),
4090          "world should be stopped");
4091   TraceCMSMemoryManagerStats tms(_collectorState, GenCollectedHeap::heap()->gc_cause());
4092 
4093   verify_work_stacks_empty();
4094   verify_overflow_empty();
4095 
4096   log_debug(gc)("YG occupancy: " SIZE_FORMAT " K (" SIZE_FORMAT " K)",
4097                 _young_gen->used() / K, _young_gen->capacity() / K);
4098   {
4099     if (CMSScavengeBeforeRemark) {
4100       GenCollectedHeap* gch = GenCollectedHeap::heap();
4101       // Temporarily set the flag to false; GCH->do_collection expects
4102       // it to be false and will set it to true.
4103       FlagSetting fl(gch->_is_gc_active, false);
4104 
4105       GCTraceTime(Trace, gc) tm("Pause Scavenge Before Remark", _gc_timer_cm);
4106 
4107       gch->do_collection(true,                      // full (i.e. force, see below)
4108                          false,                     // !clear_all_soft_refs
4109                          0,                         // size
4110                          false,                     // is_tlab
4111                          GenCollectedHeap::YoungGen // type
4112         );
4113     }
4114     FreelistLocker x(this);
4115     MutexLockerEx y(bitMapLock(),
4116                     Mutex::_no_safepoint_check_flag);
4117     checkpointRootsFinalWork();
4118   }
4119   verify_work_stacks_empty();
4120   verify_overflow_empty();
4121 }
4122 
4123 void CMSCollector::checkpointRootsFinalWork() {
4124   GCTraceTime(Trace, gc) tm("checkpointRootsFinalWork", _gc_timer_cm);
4125 
4126   assert(haveFreelistLocks(), "must have free list locks");
4127   assert_lock_strong(bitMapLock());
4128 
4129   ResourceMark rm;
4130   HandleMark   hm;
4131 
4132   GenCollectedHeap* gch = GenCollectedHeap::heap();
4133 
4134   if (should_unload_classes()) {
4135     CodeCache::gc_prologue();
4136   }
4137   assert(haveFreelistLocks(), "must have free list locks");
4138   assert_lock_strong(bitMapLock());
4139 
4140   // We might assume that we need not fill TLAB's when
4141   // CMSScavengeBeforeRemark is set, because we may have just done
4142   // a scavenge which would have filled all TLAB's -- and besides
4143   // Eden would be empty. This however may not always be the case --
4144   // for instance although we asked for a scavenge, it may not have


4154   gch->save_marks();
4155 
4156   print_eden_and_survivor_chunk_arrays();
4157 
4158   {
4159 #if defined(COMPILER2) || INCLUDE_JVMCI
4160     DerivedPointerTableDeactivate dpt_deact;
4161 #endif
4162 
4163     // Note on the role of the mod union table:
4164     // Since the marker in "markFromRoots" marks concurrently with
4165     // mutators, it is possible for some reachable objects not to have been
4166     // scanned. For instance, the only reference to an object A was
4167     // placed in object B after the marker scanned B. Unless B is rescanned,
4168     // A would be collected. Such updates to references in marked objects
4169     // are detected via the mod union table which is the set of all cards
4170     // dirtied since the first checkpoint in this GC cycle and prior to
4171     // the most recent young generation GC, minus those cleaned up by the
4172     // concurrent precleaning.
4173     if (CMSParallelRemarkEnabled) {
4174       GCTraceTime(Debug, gc) t("Rescan (parallel)", _gc_timer_cm);
4175       do_remark_parallel();
4176     } else {
4177       GCTraceTime(Debug, gc) t("Rescan (non-parallel)", _gc_timer_cm);
4178       do_remark_non_parallel();
4179     }
4180   }
4181   verify_work_stacks_empty();
4182   verify_overflow_empty();
4183 
4184   {
4185     GCTraceTime(Trace, gc) ts("refProcessingWork", _gc_timer_cm);
4186     refProcessingWork();
4187   }
4188   verify_work_stacks_empty();
4189   verify_overflow_empty();
4190 
4191   if (should_unload_classes()) {
4192     CodeCache::gc_epilogue();
4193   }
4194   JvmtiExport::gc_epilogue();
4195 
4196   // If we encountered any (marking stack / work queue) overflow
4197   // events during the current CMS cycle, take appropriate
4198   // remedial measures, where possible, so as to try to avoid
4199   // recurrence of that condition.
4200   assert(_markStack.isEmpty(), "No grey objects");
4201   size_t ser_ovflw = _ser_pmc_remark_ovflw + _ser_pmc_preclean_ovflw +
4202                      _ser_kac_ovflw        + _ser_kac_preclean_ovflw;
4203   if (ser_ovflw > 0) {
4204     log_trace(gc)("Marking stack overflow (benign) (pmc_pc=" SIZE_FORMAT ", pmc_rm=" SIZE_FORMAT ", kac=" SIZE_FORMAT ", kac_preclean=" SIZE_FORMAT ")",
4205                          _ser_pmc_preclean_ovflw, _ser_pmc_remark_ovflw, _ser_kac_ovflw, _ser_kac_preclean_ovflw);
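
The mod union table note a few lines up is the invariant that keeps this remark pause short: every card dirtied since the initial checkpoint (minus what concurrent precleaning already handled) is ORed into the table, so remark rescans only objects on those cards instead of the whole old generation. A toy accumulate-then-drain version of that bookkeeping, using plain bitsets rather than the HotSpot card table:

#include <bitset>
#include <cstdio>

int main() {
  const size_t kCards = 16;
  std::bitset<16> card_table;   // dirtied by mutator writes
  std::bitset<16> mod_union;    // union of cards dirtied this cycle

  // Concurrent phase: before the card table is processed and cleared,
  // OR it into the mod union table (cf. ModUnionClosure).
  card_table.set(3); card_table.set(7);
  mod_union |= card_table;
  card_table.reset();

  card_table.set(7); card_table.set(12);
  mod_union |= card_table;
  card_table.reset();

  // Remark: rescan exactly the accumulated cards, then clear them.
  for (size_t c = 0; c < kCards; ++c) {
    if (mod_union.test(c)) {
      std::printf("rescan card %zu\n", c);  // prints cards 3, 7, 12
    }
  }
  mod_union.reset();
  return 0;
}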


4888   // as a result of work_q overflow
4889   restore_preserved_marks_if_any();
4890 }
4891 
4892 // Non-parallel version of remark
4893 void CMSCollector::do_remark_non_parallel() {
4894   ResourceMark rm;
4895   HandleMark   hm;
4896   GenCollectedHeap* gch = GenCollectedHeap::heap();
4897   ReferenceProcessorMTDiscoveryMutator mt(ref_processor(), false);
4898 
4899   MarkRefsIntoAndScanClosure
4900     mrias_cl(_span, ref_processor(), &_markBitMap, NULL /* not precleaning */,
4901              &_markStack, this,
4902              false /* should_yield */, false /* not precleaning */);
4903   MarkFromDirtyCardsClosure
4904     markFromDirtyCardsClosure(this, _span,
4905                               NULL,  // space is set further below
4906                               &_markBitMap, &_markStack, &mrias_cl);
4907   {
4908     GCTraceTime(Trace, gc) t("Grey Object Rescan", _gc_timer_cm);
4909     // Iterate over the dirty cards, setting the corresponding bits in the
4910     // mod union table.
4911     {
4912       ModUnionClosure modUnionClosure(&_modUnionTable);
4913       _ct->ct_bs()->dirty_card_iterate(
4914                       _cmsGen->used_region(),
4915                       &modUnionClosure);
4916     }
4917     // Having transferred these marks into the modUnionTable, we just need
4918     // to rescan the marked objects on the dirty cards in the modUnionTable.
4919     // The initial marking may have been done during an asynchronous
4920     // collection so there may be dirty bits in the mod-union table.
4921     const int alignment =
4922       CardTableModRefBS::card_size * BitsPerWord;
4923     {
4924       // ... First handle dirty cards in CMS gen
4925       markFromDirtyCardsClosure.set_space(_cmsGen->cmsSpace());
4926       MemRegion ur = _cmsGen->used_region();
4927       HeapWord* lb = ur.start();
4928       HeapWord* ub = (HeapWord*)round_to((intptr_t)ur.end(), alignment);
4929       MemRegion cms_span(lb, ub);
4930       _modUnionTable.dirty_range_iterate_clear(cms_span,
4931                                                &markFromDirtyCardsClosure);
4932       verify_work_stacks_empty();
4933       log_trace(gc)(" (re-scanned " SIZE_FORMAT " dirty cards in cms gen) ", markFromDirtyCardsClosure.num_dirty_cards());
4934     }
4935   }
4936   if (VerifyDuringGC &&
4937       GenCollectedHeap::heap()->total_collections() >= VerifyGCStartAt) {
4938     HandleMark hm;  // Discard invalid handles created during verification
4939     Universe::verify();
4940   }
4941   {
4942     GCTraceTime(Trace, gc) t("Root Rescan", _gc_timer_cm);
4943 
4944     verify_work_stacks_empty();
4945 
4946     gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
4947     StrongRootsScope srs(1);
4948 
4949     gch->gen_process_roots(&srs,
4950                            GenCollectedHeap::OldGen,
4951                            true,  // young gen as roots
4952                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
4953                            should_unload_classes(),
4954                            &mrias_cl,
4955                            NULL,
4956                            NULL); // The dirty klasses will be handled below
4957 
4958     assert(should_unload_classes()
4959            || (roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
4960            "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
4961   }
4962 
4963   {
4964     GCTraceTime(Trace, gc) t("Visit Unhandled CLDs", _gc_timer_cm);
4965 
4966     verify_work_stacks_empty();
4967 
4968     // Scan all class loader data objects that might have been introduced
4969     // during concurrent marking.
4970     ResourceMark rm;
4971     GrowableArray<ClassLoaderData*>* array = ClassLoaderDataGraph::new_clds();
4972     for (int i = 0; i < array->length(); i++) {
4973       mrias_cl.do_cld_nv(array->at(i));
4974     }
4975 
4976     // We don't need to keep track of new CLDs anymore.
4977     ClassLoaderDataGraph::remember_new_clds(false);
4978 
4979     verify_work_stacks_empty();
4980   }
4981 
4982   {
4983     GCTraceTime(Trace, gc) t("Dirty Klass Scan", _gc_timer_cm);
4984 
4985     verify_work_stacks_empty();
4986 
4987     RemarkKlassClosure remark_klass_closure(&mrias_cl);
4988     ClassLoaderDataGraph::classes_do(&remark_klass_closure);
4989 
4990     verify_work_stacks_empty();
4991   }
4992 
4993   // We might have added oops to ClassLoaderData::_handles during the
4994   // concurrent marking phase. These oops point to newly allocated objects
4995   // that are guaranteed to be kept alive. Either by the direct allocation
4996   // code, or when the young collector processes the roots. Hence,
4997   // we don't have to revisit the _handles block during the remark phase.
4998 
4999   verify_work_stacks_empty();
5000   // Restore evacuated mark words, if any, used for overflow list links
5001   restore_preserved_marks_if_any();
5002 
5003   verify_overflow_empty();
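
The round_to call in do_remark_non_parallel above widens the used region's upper bound to a card_size * BitsPerWord boundary so that dirty_range_iterate_clear always operates on whole bitmap words; with the typical 512-byte cards and 64-bit words that is a 32 KB alignment. A self-contained sketch of the arithmetic (the constants are assumed typical values, not read from a HotSpot build):

#include <cassert>
#include <cstdint>

const uintptr_t kCardSize    = 512;  // bytes per card (typical)
const uintptr_t kBitsPerWord = 64;
const uintptr_t kAlignment   = kCardSize * kBitsPerWord;  // 32768 bytes

// round_to: round x up to the next multiple of a power-of-two alignment.
static uintptr_t round_to(uintptr_t x, uintptr_t align) {
  return (x + align - 1) & ~(align - 1);
}

int main() {
  assert(round_to(0x10000, kAlignment) == 0x10000);  // already aligned
  assert(round_to(0x10001, kAlignment) == 0x18000);  // rounded up to 96 KB
  return 0;
}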


5167   workers->run_task(&enq_task);
5168 }
5169 
5170 void CMSCollector::refProcessingWork() {
5171   ResourceMark rm;
5172   HandleMark   hm;
5173 
5174   ReferenceProcessor* rp = ref_processor();
5175   assert(rp->span().equals(_span), "Spans should be equal");
5176   assert(!rp->enqueuing_is_done(), "Enqueuing should not be complete");
5177   // Process weak references.
5178   rp->setup_policy(false);
5179   verify_work_stacks_empty();
5180 
5181   CMSKeepAliveClosure cmsKeepAliveClosure(this, _span, &_markBitMap,
5182                                           &_markStack, false /* !preclean */);
5183   CMSDrainMarkingStackClosure cmsDrainMarkingStackClosure(this,
5184                                 _span, &_markBitMap, &_markStack,
5185                                 &cmsKeepAliveClosure, false /* !preclean */);
5186   {
5187     GCTraceTime(Debug, gc) t("Weak Refs Processing", _gc_timer_cm);
5188 
5189     ReferenceProcessorStats stats;
5190     if (rp->processing_is_mt()) {
5191       // Set the degree of MT here.  If the discovery is done MT, there
5192       // may have been a different number of threads doing the discovery
5193       // and a different number of discovered lists may have Ref objects.
5194       // That is OK as long as the Reference lists are balanced (see
5195       // balance_all_queues() and balance_queues()).
5196       GenCollectedHeap* gch = GenCollectedHeap::heap();
5197       uint active_workers = ParallelGCThreads;
5198       WorkGang* workers = gch->workers();
5199       if (workers != NULL) {
5200         active_workers = workers->active_workers();
5201         // The expectation is that active_workers will have already
5202         // been set to a reasonable value.  If it has not been set,
5203         // investigate.
5204         assert(active_workers > 0, "Should have been set during scavenge");
5205       }
5206       rp->set_active_mt_degree(active_workers);
5207       CMSRefProcTaskExecutor task_executor(*this);


5209                                         &cmsKeepAliveClosure,
5210                                         &cmsDrainMarkingStackClosure,
5211                                         &task_executor,
5212                                         _gc_timer_cm);
5213     } else {
5214       stats = rp->process_discovered_references(&_is_alive_closure,
5215                                         &cmsKeepAliveClosure,
5216                                         &cmsDrainMarkingStackClosure,
5217                                         NULL,
5218                                         _gc_timer_cm);
5219     }
5220     _gc_tracer_cm->report_gc_reference_stats(stats);
5221 
5222   }
5223 
5224   // This is the point where the entire marking should have completed.
5225   verify_work_stacks_empty();
5226 
5227   if (should_unload_classes()) {
5228     {
5229       GCTraceTime(Debug, gc) t("Class Unloading", _gc_timer_cm);
5230 
5231       // Unload classes and purge the SystemDictionary.
5232       bool purged_class = SystemDictionary::do_unloading(&_is_alive_closure);
5233 
5234       // Unload nmethods.
5235       CodeCache::do_unloading(&_is_alive_closure, purged_class);
5236 
5237       // Prune dead klasses from subklass/sibling/implementor lists.
5238       Klass::clean_weak_klass_links(&_is_alive_closure);
5239     }
5240 
5241     {
5242       GCTraceTime(Debug, gc) t("Scrub Symbol Table", _gc_timer_cm);
5243       // Clean up unreferenced symbols in symbol table.
5244       SymbolTable::unlink();
5245     }
5246 
5247     {
5248       GCTraceTime(Debug, gc) t("Scrub String Table", _gc_timer_cm);
5249       // Delete entries for dead interned strings.
5250       StringTable::unlink(&_is_alive_closure);
5251     }
5252   }
5253 
5254 
5255   // Restore any preserved marks as a result of mark stack or
5256   // work queue overflow
5257   restore_preserved_marks_if_any();  // done single-threaded for now
5258 
5259   rp->set_enqueuing_is_done(true);
5260   if (rp->processing_is_mt()) {
5261     rp->balance_all_queues();
5262     CMSRefProcTaskExecutor task_executor(*this);
5263     rp->enqueue_discovered_references(&task_executor);
5264   } else {
5265     rp->enqueue_discovered_references(NULL);
5266   }
5267   rp->verify_no_references_recorded();
5268   assert(!rp->discovery_enabled(), "should have been disabled");
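
refProcessingWork above sizes reference processing to the work gang (rp->set_active_mt_degree(active_workers)) and hands process_discovered_references a CMSRefProcTaskExecutor so each worker drains its own discovered list; the serial path passes NULL instead. Correctness of the MT path relies on the per-worker lists being balanced, which is why enqueuing calls balance_all_queues() first. A rough standalone sketch of that MT/serial split, with plain threads and ints standing in for workers and discovered References:

#include <cstdio>
#include <functional>
#include <thread>
#include <vector>

// One discovered list per worker; each ref is just an int here.
static void process_list(const std::vector<int>& refs, unsigned worker) {
  for (int r : refs) {
    std::printf("worker %u processed ref %d\n", worker, r);
  }
}

int main() {
  // Assume discovery left one (balanced) list per worker.
  std::vector<std::vector<int>> discovered = { {1, 2}, {3, 4}, {5, 6} };
  const unsigned active_workers = (unsigned)discovered.size();

  bool processing_is_mt = true;  // analogue of rp->processing_is_mt()
  if (processing_is_mt) {
    std::vector<std::thread> gang;          // analogue of the task executor
    for (unsigned w = 0; w < active_workers; ++w) {
      gang.emplace_back(process_list, std::cref(discovered[w]), w);
    }
    for (std::thread& t : gang) t.join();
  } else {
    for (unsigned w = 0; w < active_workers; ++w) {
      process_list(discovered[w], 0);       // NULL-executor analogue: serial
    }
  }
  return 0;
}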




+++ new/src/share/vm/gc/cms/concurrentMarkSweepGeneration.cpp

1501 void CMSCollector::compute_new_size() {
1502   assert_locked_or_safepoint(Heap_lock);
1503   FreelistLocker z(this);
1504   MetaspaceGC::compute_new_size();
1505   _cmsGen->compute_new_size_free_list();
1506 }
1507 
1508 // A work method used by the foreground collector to do
1509 // a mark-sweep-compact.
1510 void CMSCollector::do_compaction_work(bool clear_all_soft_refs) {
1511   GenCollectedHeap* gch = GenCollectedHeap::heap();
1512 
1513   STWGCTimer* gc_timer = GenMarkSweep::gc_timer();
1514   gc_timer->register_gc_start();
1515 
1516   SerialOldTracer* gc_tracer = GenMarkSweep::gc_tracer();
1517   gc_tracer->report_gc_start(gch->gc_cause(), gc_timer->gc_start());
1518 
1519   gch->pre_full_gc_dump(gc_timer);
1520 
1521   GCTraceTime(Trace, gc, phases) t("CMS:MSC");
1522 
1523   // Temporarily widen the span of the weak reference processing to
1524   // the entire heap.
1525   MemRegion new_span(GenCollectedHeap::heap()->reserved_region());
1526   ReferenceProcessorSpanMutator rp_mut_span(ref_processor(), new_span);
1527   // Temporarily clear the "is_alive_non_header" field of the
1528   // reference processor.
1529   ReferenceProcessorIsAliveMutator rp_mut_closure(ref_processor(), NULL);
1530   // Temporarily make reference _processing_ single threaded (non-MT).
1531   ReferenceProcessorMTProcMutator rp_mut_mt_processing(ref_processor(), false);
1532   // Temporarily make refs discovery atomic
1533   ReferenceProcessorAtomicMutator rp_mut_atomic(ref_processor(), true);
1534   // Temporarily make reference _discovery_ single threaded (non-MT)
1535   ReferenceProcessorMTDiscoveryMutator rp_mut_discovery(ref_processor(), false);
1536 
1537   ref_processor()->set_enqueuing_is_done(false);
1538   ref_processor()->enable_discovery();
1539   ref_processor()->setup_policy(clear_all_soft_refs);
1540   // If an asynchronous collection finishes, the _modUnionTable is
1541   // all clear.  If we are assuming the collection from an asynchronous


2217 
2218  public:
2219   VerifyMarkedClosure(CMSBitMap* bm): _marks(bm), _failed(false) {}
2220 
2221   bool do_bit(size_t offset) {
2222     HeapWord* addr = _marks->offsetToHeapWord(offset);
2223     if (!_marks->isMarked(addr)) {
2224       LogHandle(gc, verify) log;
2225       ResourceMark rm;
2226       oop(addr)->print_on(log.error_stream());
2227       log.error(" (" INTPTR_FORMAT " should have been marked)", p2i(addr));
2228       _failed = true;
2229     }
2230     return true;
2231   }
2232 
2233   bool failed() { return _failed; }
2234 };
2235 
2236 bool CMSCollector::verify_after_remark() {
2237   GCTraceTime(Info, gc, phases, verify) tm("Verifying CMS Marking.");
2238   MutexLockerEx ml(verification_mark_bm()->lock(), Mutex::_no_safepoint_check_flag);
2239   static bool init = false;
2240 
2241   assert(SafepointSynchronize::is_at_safepoint(),
2242          "Else mutations in object graph will make answer suspect");
2243   assert(have_cms_token(),
2244          "Else there may be mutual interference in use of "
2245          "verification data structures");
2246   assert(_collectorState > Marking && _collectorState <= Sweeping,
2247          "Else marking info checked here may be obsolete");
2248   assert(haveFreelistLocks(), "must hold free list locks");
2249   assert_lock_strong(bitMapLock());
2250 
2251 
2252   // Allocate marking bit map if not already allocated
2253   if (!init) { // first time
2254     if (!verification_mark_bm()->allocate(_span)) {
2255       return false;
2256     }
2257     init = true;


2801                     Mutex::_no_safepoint_check_flag);
2802     checkpointRootsInitialWork();
2803     // enable ("weak") refs discovery
2804     rp->enable_discovery();
2805     _collectorState = Marking;
2806   }
2807 }
2808 
2809 void CMSCollector::checkpointRootsInitialWork() {
2810   assert(SafepointSynchronize::is_at_safepoint(), "world should be stopped");
2811   assert(_collectorState == InitialMarking, "just checking");
2812 
2813   // Already have locks.
2814   assert_lock_strong(bitMapLock());
2815   assert(_markBitMap.isAllClear(), "was reset at end of previous cycle");
2816 
2817   // Set up the verification and class unloading state for this
2818   // CMS collection cycle.
2819   setup_cms_unloading_and_verification_state();
2820 
2821   GCTraceTime(Trace, gc, phases) ts("checkpointRootsInitialWork", _gc_timer_cm);
2822 
2823   // Reset all the PLAB chunk arrays if necessary.
2824   if (_survivor_plab_array != NULL && !CMSPLABRecordAlways) {
2825     reset_survivor_plab_arrays();
2826   }
2827 
2828   ResourceMark rm;
2829   HandleMark  hm;
2830 
2831   MarkRefsIntoClosure notOlder(_span, &_markBitMap);
2832   GenCollectedHeap* gch = GenCollectedHeap::heap();
2833 
2834   verify_work_stacks_empty();
2835   verify_overflow_empty();
2836 
2837   gch->ensure_parsability(false);  // fill TLABs, but no need to retire them
2838   // Update the saved marks which may affect the root scans.
2839   gch->save_marks();
2840 
2841   // weak reference processing has not started yet.


4085 void CMSCollector::checkpointRootsFinal() {
4086   assert(_collectorState == FinalMarking, "incorrect state transition?");
4087   check_correct_thread_executing();
4088   // world is stopped at this checkpoint
4089   assert(SafepointSynchronize::is_at_safepoint(),
4090          "world should be stopped");
4091   TraceCMSMemoryManagerStats tms(_collectorState, GenCollectedHeap::heap()->gc_cause());
4092 
4093   verify_work_stacks_empty();
4094   verify_overflow_empty();
4095 
4096   log_debug(gc)("YG occupancy: " SIZE_FORMAT " K (" SIZE_FORMAT " K)",
4097                 _young_gen->used() / K, _young_gen->capacity() / K);
4098   {
4099     if (CMSScavengeBeforeRemark) {
4100       GenCollectedHeap* gch = GenCollectedHeap::heap();
4101       // Temporarily set the flag to false; GCH->do_collection expects
4102       // it to be false and will set it to true.
4103       FlagSetting fl(gch->_is_gc_active, false);
4104 


4105       gch->do_collection(true,                      // full (i.e. force, see below)
4106                          false,                     // !clear_all_soft_refs
4107                          0,                         // size
4108                          false,                     // is_tlab
4109                          GenCollectedHeap::YoungGen // type
4110         );
4111     }
4112     FreelistLocker x(this);
4113     MutexLockerEx y(bitMapLock(),
4114                     Mutex::_no_safepoint_check_flag);
4115     checkpointRootsFinalWork();
4116   }
4117   verify_work_stacks_empty();
4118   verify_overflow_empty();
4119 }
4120 
4121 void CMSCollector::checkpointRootsFinalWork() {
4122   GCTraceTime(Trace, gc, phases) tm("checkpointRootsFinalWork", _gc_timer_cm);
4123 
4124   assert(haveFreelistLocks(), "must have free list locks");
4125   assert_lock_strong(bitMapLock());
4126 
4127   ResourceMark rm;
4128   HandleMark   hm;
4129 
4130   GenCollectedHeap* gch = GenCollectedHeap::heap();
4131 
4132   if (should_unload_classes()) {
4133     CodeCache::gc_prologue();
4134   }
4135   assert(haveFreelistLocks(), "must have free list locks");
4136   assert_lock_strong(bitMapLock());
4137 
4138   // We might assume that we need not fill TLAB's when
4139   // CMSScavengeBeforeRemark is set, because we may have just done
4140   // a scavenge which would have filled all TLAB's -- and besides
4141   // Eden would be empty. This however may not always be the case --
4142   // for instance although we asked for a scavenge, it may not have


4152   gch->save_marks();
4153 
4154   print_eden_and_survivor_chunk_arrays();
4155 
4156   {
4157 #if defined(COMPILER2) || INCLUDE_JVMCI
4158     DerivedPointerTableDeactivate dpt_deact;
4159 #endif
4160 
4161     // Note on the role of the mod union table:
4162     // Since the marker in "markFromRoots" marks concurrently with
4163     // mutators, it is possible for some reachable objects not to have been
4164     // scanned. For instance, an only reference to an object A was
4165     // placed in object B after the marker scanned B. Unless B is rescanned,
4166     // A would be collected. Such updates to references in marked objects
4167     // are detected via the mod union table which is the set of all cards
4168     // dirtied since the first checkpoint in this GC cycle and prior to
4169     // the most recent young generation GC, minus those cleaned up by the
4170     // concurrent precleaning.
4171     if (CMSParallelRemarkEnabled) {
4172       GCTraceTime(Debug, gc, phases) t("Rescan (parallel)", _gc_timer_cm);
4173       do_remark_parallel();
4174     } else {
4175       GCTraceTime(Debug, gc, phases) t("Rescan (non-parallel)", _gc_timer_cm);
4176       do_remark_non_parallel();
4177     }
4178   }
4179   verify_work_stacks_empty();
4180   verify_overflow_empty();
4181 
4182   {
4183     GCTraceTime(Trace, gc, phases) ts("refProcessingWork", _gc_timer_cm);
4184     refProcessingWork();
4185   }
4186   verify_work_stacks_empty();
4187   verify_overflow_empty();
4188 
4189   if (should_unload_classes()) {
4190     CodeCache::gc_epilogue();
4191   }
4192   JvmtiExport::gc_epilogue();
4193 
4194   // If we encountered any (marking stack / work queue) overflow
4195   // events during the current CMS cycle, take appropriate
4196   // remedial measures, where possible, so as to try and avoid
4197   // recurrence of that condition.
4198   assert(_markStack.isEmpty(), "No grey objects");
4199   size_t ser_ovflw = _ser_pmc_remark_ovflw + _ser_pmc_preclean_ovflw +
4200                      _ser_kac_ovflw        + _ser_kac_preclean_ovflw;
4201   if (ser_ovflw > 0) {
4202     log_trace(gc)("Marking stack overflow (benign) (pmc_pc=" SIZE_FORMAT ", pmc_rm=" SIZE_FORMAT ", kac=" SIZE_FORMAT ", kac_preclean=" SIZE_FORMAT ")",
4203                          _ser_pmc_preclean_ovflw, _ser_pmc_remark_ovflw, _ser_kac_ovflw, _ser_kac_preclean_ovflw);


4886   // as a result of work_q overflow
4887   restore_preserved_marks_if_any();
4888 }
4889 
4890 // Non-parallel version of remark
4891 void CMSCollector::do_remark_non_parallel() {
4892   ResourceMark rm;
4893   HandleMark   hm;
4894   GenCollectedHeap* gch = GenCollectedHeap::heap();
4895   ReferenceProcessorMTDiscoveryMutator mt(ref_processor(), false);
4896 
4897   MarkRefsIntoAndScanClosure
4898     mrias_cl(_span, ref_processor(), &_markBitMap, NULL /* not precleaning */,
4899              &_markStack, this,
4900              false /* should_yield */, false /* not precleaning */);
4901   MarkFromDirtyCardsClosure
4902     markFromDirtyCardsClosure(this, _span,
4903                               NULL,  // space is set further below
4904                               &_markBitMap, &_markStack, &mrias_cl);
4905   {
4906     GCTraceTime(Trace, gc, phases) t("Grey Object Rescan", _gc_timer_cm);
4907     // Iterate over the dirty cards, setting the corresponding bits in the
4908     // mod union table.
4909     {
4910       ModUnionClosure modUnionClosure(&_modUnionTable);
4911       _ct->ct_bs()->dirty_card_iterate(
4912                       _cmsGen->used_region(),
4913                       &modUnionClosure);
4914     }
4915     // Having transferred these marks into the modUnionTable, we just need
4916     // to rescan the marked objects on the dirty cards in the modUnionTable.
4917     // The initial marking may have been done during an asynchronous
4918     // collection so there may be dirty bits in the mod-union table.
4919     const int alignment =
4920       CardTableModRefBS::card_size * BitsPerWord;
4921     {
4922       // ... First handle dirty cards in CMS gen
4923       markFromDirtyCardsClosure.set_space(_cmsGen->cmsSpace());
4924       MemRegion ur = _cmsGen->used_region();
4925       HeapWord* lb = ur.start();
4926       HeapWord* ub = (HeapWord*)round_to((intptr_t)ur.end(), alignment);
4927       MemRegion cms_span(lb, ub);
4928       _modUnionTable.dirty_range_iterate_clear(cms_span,
4929                                                &markFromDirtyCardsClosure);
4930       verify_work_stacks_empty();
4931       log_trace(gc)(" (re-scanned " SIZE_FORMAT " dirty cards in cms gen) ", markFromDirtyCardsClosure.num_dirty_cards());
4932     }
4933   }
4934   if (VerifyDuringGC &&
4935       GenCollectedHeap::heap()->total_collections() >= VerifyGCStartAt) {
4936     HandleMark hm;  // Discard invalid handles created during verification
4937     Universe::verify();
4938   }
4939   {
4940     GCTraceTime(Trace, gc, phases) t("Root Rescan", _gc_timer_cm);
4941 
4942     verify_work_stacks_empty();
4943 
4944     gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
4945     StrongRootsScope srs(1);
4946 
4947     gch->gen_process_roots(&srs,
4948                            GenCollectedHeap::OldGen,
4949                            true,  // young gen as roots
4950                            GenCollectedHeap::ScanningOption(roots_scanning_options()),
4951                            should_unload_classes(),
4952                            &mrias_cl,
4953                            NULL,
4954                            NULL); // The dirty klasses will be handled below
4955 
4956     assert(should_unload_classes()
4957            || (roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
4958            "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
4959   }
4960 
4961   {
4962     GCTraceTime(Trace, gc, phases) t("Visit Unhandled CLDs", _gc_timer_cm);
4963 
4964     verify_work_stacks_empty();
4965 
4966     // Scan all class loader data objects that might have been introduced
4967     // during concurrent marking.
4968     ResourceMark rm;
4969     GrowableArray<ClassLoaderData*>* array = ClassLoaderDataGraph::new_clds();
4970     for (int i = 0; i < array->length(); i++) {
4971       mrias_cl.do_cld_nv(array->at(i));
4972     }
4973 
4974     // We don't need to keep track of new CLDs anymore.
4975     ClassLoaderDataGraph::remember_new_clds(false);
4976 
4977     verify_work_stacks_empty();
4978   }
4979 
4980   {
4981     GCTraceTime(Trace, gc, phases) t("Dirty Klass Scan", _gc_timer_cm);
4982 
4983     verify_work_stacks_empty();
4984 
4985     RemarkKlassClosure remark_klass_closure(&mrias_cl);
4986     ClassLoaderDataGraph::classes_do(&remark_klass_closure);
4987 
4988     verify_work_stacks_empty();
4989   }
4990 
4991   // We might have added oops to ClassLoaderData::_handles during the
4992   // concurrent marking phase. These oops point to newly allocated objects
4993   // that are guaranteed to be kept alive. Either by the direct allocation
4994   // code, or when the young collector processes the roots. Hence,
4995   // we don't have to revisit the _handles block during the remark phase.
4996 
4997   verify_work_stacks_empty();
4998   // Restore evacuated mark words, if any, used for overflow list links
4999   restore_preserved_marks_if_any();
5000 
5001   verify_overflow_empty();


5165   workers->run_task(&enq_task);
5166 }
5167 
5168 void CMSCollector::refProcessingWork() {
5169   ResourceMark rm;
5170   HandleMark   hm;
5171 
5172   ReferenceProcessor* rp = ref_processor();
5173   assert(rp->span().equals(_span), "Spans should be equal");
5174   assert(!rp->enqueuing_is_done(), "Enqueuing should not be complete");
5175   // Process weak references.
5176   rp->setup_policy(false);
5177   verify_work_stacks_empty();
5178 
5179   CMSKeepAliveClosure cmsKeepAliveClosure(this, _span, &_markBitMap,
5180                                           &_markStack, false /* !preclean */);
5181   CMSDrainMarkingStackClosure cmsDrainMarkingStackClosure(this,
5182                                 _span, &_markBitMap, &_markStack,
5183                                 &cmsKeepAliveClosure, false /* !preclean */);
5184   {
5185     GCTraceTime(Debug, gc, phases) t("Reference Processing", _gc_timer_cm);
5186 
5187     ReferenceProcessorStats stats;
5188     if (rp->processing_is_mt()) {
5189       // Set the degree of MT here.  If the discovery is done MT, there
5190       // may have been a different number of threads doing the discovery
5191       // and a different number of discovered lists may have Ref objects.
5192       // That is OK as long as the Reference lists are balanced (see
5193       // balance_all_queues() and balance_queues()).
5194       GenCollectedHeap* gch = GenCollectedHeap::heap();
5195       uint active_workers = ParallelGCThreads;
5196       WorkGang* workers = gch->workers();
5197       if (workers != NULL) {
5198         active_workers = workers->active_workers();
5199         // The expectation is that active_workers will have already
5200         // been set to a reasonable value.  If it has not been set,
5201         // investigate.
5202         assert(active_workers > 0, "Should have been set during scavenge");
5203       }
5204       rp->set_active_mt_degree(active_workers);
5205       CMSRefProcTaskExecutor task_executor(*this);


5207                                         &cmsKeepAliveClosure,
5208                                         &cmsDrainMarkingStackClosure,
5209                                         &task_executor,
5210                                         _gc_timer_cm);
5211     } else {
5212       stats = rp->process_discovered_references(&_is_alive_closure,
5213                                         &cmsKeepAliveClosure,
5214                                         &cmsDrainMarkingStackClosure,
5215                                         NULL,
5216                                         _gc_timer_cm);
5217     }
5218     _gc_tracer_cm->report_gc_reference_stats(stats);
5219 
5220   }
5221 
5222   // This is the point where the entire marking should have completed.
5223   verify_work_stacks_empty();
5224 
5225   if (should_unload_classes()) {
5226     {
5227       GCTraceTime(Debug, gc, phases) t("Class Unloading", _gc_timer_cm);
5228 
5229       // Unload classes and purge the SystemDictionary.
5230       bool purged_class = SystemDictionary::do_unloading(&_is_alive_closure);
5231 
5232       // Unload nmethods.
5233       CodeCache::do_unloading(&_is_alive_closure, purged_class);
5234 
5235       // Prune dead klasses from subklass/sibling/implementor lists.
5236       Klass::clean_weak_klass_links(&_is_alive_closure);
5237     }
5238 
5239     {
5240       GCTraceTime(Debug, gc, phases) t("Scrub Symbol Table", _gc_timer_cm);
5241       // Clean up unreferenced symbols in symbol table.
5242       SymbolTable::unlink();
5243     }
5244 
5245     {
5246       GCTraceTime(Debug, gc, phases) t("Scrub String Table", _gc_timer_cm);
5247       // Delete entries for dead interned strings.
5248       StringTable::unlink(&_is_alive_closure);
5249     }
5250   }
5251 
5252 
5253   // Restore any preserved marks as a result of mark stack or
5254   // work queue overflow
5255   restore_preserved_marks_if_any();  // done single-threaded for now
5256 
5257   rp->set_enqueuing_is_done(true);
5258   if (rp->processing_is_mt()) {
5259     rp->balance_all_queues();
5260     CMSRefProcTaskExecutor task_executor(*this);
5261     rp->enqueue_discovered_references(&task_executor);
5262   } else {
5263     rp->enqueue_discovered_references(NULL);
5264   }
5265   rp->verify_no_references_recorded();
5266   assert(!rp->discovery_enabled(), "should have been disabled");
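
The substance of this diff is visible only by comparing the two listings: every GCTraceTime sub-phase timer gains the phases tag (e.g. GCTraceTime(Debug, gc) becomes GCTraceTime(Debug, gc, phases)), so the timing lines are attributed to the gc+phases tag set of the unified logging framework and can be selected on their own, e.g. with -Xlog:gc+phases=debug; the "Weak Refs Processing" label is also renamed to "Reference Processing". A toy scoped timer showing why the tag set matters for filtering (a hand-rolled logger, not the HotSpot GCTraceTime macro):

#include <chrono>
#include <cstdio>
#include <string>
#include <utility>

// Scoped timer in the spirit of GCTraceTime: logs on scope exit. The tag
// check stands in for -Xlog selecting only the gc+phases tag set.
class ScopedPhaseTimer {
  std::string _tags, _name;
  std::chrono::steady_clock::time_point _start;
 public:
  ScopedPhaseTimer(std::string tags, std::string name)
    : _tags(std::move(tags)), _name(std::move(name)),
      _start(std::chrono::steady_clock::now()) {}
  ~ScopedPhaseTimer() {
    long long ms = std::chrono::duration_cast<std::chrono::milliseconds>(
        std::chrono::steady_clock::now() - _start).count();
    if (_tags == "gc,phases") {          // only the selected tags print
      std::printf("[%s] %s %lldms\n", _tags.c_str(), _name.c_str(), ms);
    }
  }
};

int main() {
  { ScopedPhaseTimer t("gc,phases", "Rescan (non-parallel)"); }  // printed
  { ScopedPhaseTimer t("gc",        "unrelated event"); }        // filtered
  return 0;
}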

