5168 CMSRefEnqueueTaskProxy enq_task(task);
5169 workers->run_task(&enq_task);
5170 }
5171
// Reference-processing phase of the CMS final remark (stop-the-world).
// Steps, in order:
//   1. Process the references discovered during marking, keeping
//      reachable referents alive (CMSKeepAliveClosure) and draining the
//      marking stack as needed (CMSDrainMarkingStackClosure).
//   2. If class unloading is in effect, unload classes/nmethods and
//      scrub the symbol and string tables.
//   3. Restore any marks preserved on mark-stack/work-queue overflow.
//   4. Enqueue the surviving discovered references.
// When the reference processor is MT, parallel sub-work is farmed out
// through a CMSRefProcTaskExecutor backed by the heap's WorkGang.
5172 void CMSCollector::refProcessingWork() {
5173 ResourceMark rm;
5174 HandleMark hm;
5175
5176 ReferenceProcessor* rp = ref_processor();
5177 assert(rp->span().equals(_span), "Spans should be equal");
5178 assert(!rp->enqueuing_is_done(), "Enqueuing should not be complete");
5179 // Process weak references.
// NOTE(review): the 'false' argument presumably means "do not force
// clearing of soft references" — confirm against
// ReferenceProcessor::setup_policy.
5180 rp->setup_policy(false);
5181 verify_work_stacks_empty();
5182
// Closures used while processing references. 'false /* !preclean */'
// marks this as the final-remark pass rather than concurrent
// precleaning.
5183 CMSKeepAliveClosure cmsKeepAliveClosure(this, _span, &_markBitMap,
5184 &_markStack, false /* !preclean */);
5185 CMSDrainMarkingStackClosure cmsDrainMarkingStackClosure(this,
5186 _span, &_markBitMap, &_markStack,
5187 &cmsKeepAliveClosure, false /* !preclean */);
5188 {
5189 GCTraceTime(Debug, gc, phases) t("Reference Processing", _gc_timer_cm);
5190
5191 ReferenceProcessorStats stats;
5192 if (rp->processing_is_mt()) {
5193 // Set the degree of MT here. If the discovery is done MT, there
5194 // may have been a different number of threads doing the discovery
5195 // and a different number of discovered lists may have Ref objects.
5196 // That is OK as long as the Reference lists are balanced (see
5197 // balance_all_queues() and balance_queues()).
5198 GenCollectedHeap* gch = GenCollectedHeap::heap();
5199 uint active_workers = ParallelGCThreads;
5200 WorkGang* workers = gch->workers();
5201 if (workers != NULL) {
5202 active_workers = workers->active_workers();
5203 // The expectation is that active_workers will have already
5204 // been set to a reasonable value. If it has not been set,
5205 // investigate.
5206 assert(active_workers > 0, "Should have been set during scavenge");
5207 }
5208 rp->set_active_mt_degree(active_workers);
5209 CMSRefProcTaskExecutor task_executor(*this);
5210 stats = rp->process_discovered_references(&_is_alive_closure,
5211 &cmsKeepAliveClosure,
5212 &cmsDrainMarkingStackClosure,
5213 &task_executor,
5214 _gc_timer_cm);
5215 } else {
// Single-threaded processing: no task executor.
5216 stats = rp->process_discovered_references(&_is_alive_closure,
5217 &cmsKeepAliveClosure,
5218 &cmsDrainMarkingStackClosure,
5219 NULL,
5220 _gc_timer_cm);
5221 }
5222 _gc_tracer_cm->report_gc_reference_stats(stats);
5223
5224 }
5225
5226 // This is the point where the entire marking should have completed.
5227 verify_work_stacks_empty();
5228
5229 if (should_unload_classes()) {
5230 {
5231 GCTraceTime(Debug, gc, phases) t("Class Unloading", _gc_timer_cm);
5232
5233 // Unload classes and purge the SystemDictionary.
5234 bool purged_class = SystemDictionary::do_unloading(&_is_alive_closure, _gc_timer_cm);
5235
// 'purged_class' tells the code cache whether any class was actually
// unloaded, i.e. whether nmethod dependencies may now be stale.
5236 // Unload nmethods.
5237 CodeCache::do_unloading(&_is_alive_closure, purged_class);
5238
5239 // Prune dead klasses from subklass/sibling/implementor lists.
5240 Klass::clean_weak_klass_links(&_is_alive_closure);
5241 }
5242
5243 {
5244 GCTraceTime(Debug, gc, phases) t("Scrub Symbol Table", _gc_timer_cm);
5245 // Clean up unreferenced symbols in symbol table.
5246 SymbolTable::unlink();
5247 }
5248
5249 {
5250 GCTraceTime(Debug, gc, phases) t("Scrub String Table", _gc_timer_cm);
5251 // Delete entries for dead interned strings.
5252 StringTable::unlink(&_is_alive_closure);
5253 }
5254 }
5255
5256 // Restore any preserved marks as a result of mark stack or
5257 // work queue overflow
5258 restore_preserved_marks_if_any(); // done single-threaded for now
5259
// Enqueuing happens only after processing is complete; in the MT case
// the discovered lists are balanced across queues first.
5260 rp->set_enqueuing_is_done(true);
5261 if (rp->processing_is_mt()) {
5262 rp->balance_all_queues();
5263 CMSRefProcTaskExecutor task_executor(*this);
5264 rp->enqueue_discovered_references(&task_executor);
5265 } else {
5266 rp->enqueue_discovered_references(NULL);
5267 }
5268 rp->verify_no_references_recorded();
5269 assert(!rp->discovery_enabled(), "should have been disabled");
5270 }
5271
5272 #ifndef PRODUCT
5273 void CMSCollector::check_correct_thread_executing() {
5274 Thread* t = Thread::current();
5275 // Only the VM thread or the CMS thread should be here.
5276 assert(t->is_ConcurrentGC_thread() || t->is_VM_thread(),
5277 "Unexpected thread type");
5278 // If this is the vm thread, the foreground process
5279 // should not be waiting. Note that _foregroundGCIsActive is
5280 // true while the foreground collector is waiting.
5281 if (_foregroundGCShouldWait) {
5282 // We cannot be the VM thread
5283 assert(t->is_ConcurrentGC_thread(),
5284 "Should be CMS thread");
5285 } else {
5286 // We can be the CMS thread only if we are in a stop-world
5287 // phase of CMS collection.
5288 if (t->is_ConcurrentGC_thread()) {
|
5168 CMSRefEnqueueTaskProxy enq_task(task);
5169 workers->run_task(&enq_task);
5170 }
5171
// Reference-processing phase of the CMS final remark (stop-the-world).
// Steps, in order:
//   1. Process the references discovered during marking, keeping
//      reachable referents alive (CMSKeepAliveClosure) and draining the
//      marking stack as needed (CMSDrainMarkingStackClosure).
//   2. If class unloading is in effect, unload classes/nmethods and
//      scrub the symbol and string tables.
//   3. Restore any marks preserved on mark-stack/work-queue overflow.
//   4. Enqueue the surviving discovered references.
// Per-phase timings are gathered in a ReferenceProcessorPhaseTimes and
// logged after the processing and enqueue phases. When the reference
// processor is MT, parallel sub-work is farmed out through a
// CMSRefProcTaskExecutor backed by the heap's WorkGang.
5172 void CMSCollector::refProcessingWork() {
5173 ResourceMark rm;
5174 HandleMark hm;
5175
5176 ReferenceProcessor* rp = ref_processor();
5177 assert(rp->span().equals(_span), "Spans should be equal");
5178 assert(!rp->enqueuing_is_done(), "Enqueuing should not be complete");
5179 // Process weak references.
// NOTE(review): the 'false' argument presumably means "do not force
// clearing of soft references" — confirm against
// ReferenceProcessor::setup_policy.
5180 rp->setup_policy(false);
5181 verify_work_stacks_empty();
5182
// Closures used while processing references. 'false /* !preclean */'
// marks this as the final-remark pass rather than concurrent
// precleaning.
5183 CMSKeepAliveClosure cmsKeepAliveClosure(this, _span, &_markBitMap,
5184 &_markStack, false /* !preclean */);
5185 CMSDrainMarkingStackClosure cmsDrainMarkingStackClosure(this,
5186 _span, &_markBitMap, &_markStack,
5187 &cmsKeepAliveClosure, false /* !preclean */);
// Timing collector for the reference phases, sized by the number of
// reference queues; reused for both processing and enqueueing below.
5188 ReferenceProcessorPhaseTimes pt(_gc_timer_cm, rp->num_q());
5189 {
5190 GCTraceTime(Debug, gc, phases) t("Reference Processing", _gc_timer_cm);
5191
5192 ReferenceProcessorStats stats;
5193 if (rp->processing_is_mt()) {
5194 // Set the degree of MT here. If the discovery is done MT, there
5195 // may have been a different number of threads doing the discovery
5196 // and a different number of discovered lists may have Ref objects.
5197 // That is OK as long as the Reference lists are balanced (see
5198 // balance_all_queues() and balance_queues()).
5199 GenCollectedHeap* gch = GenCollectedHeap::heap();
5200 uint active_workers = ParallelGCThreads;
5201 WorkGang* workers = gch->workers();
5202 if (workers != NULL) {
5203 active_workers = workers->active_workers();
5204 // The expectation is that active_workers will have already
5205 // been set to a reasonable value. If it has not been set,
5206 // investigate.
5207 assert(active_workers > 0, "Should have been set during scavenge");
5208 }
5209 rp->set_active_mt_degree(active_workers);
5210 CMSRefProcTaskExecutor task_executor(*this);
5211 stats = rp->process_discovered_references(&_is_alive_closure,
5212 &cmsKeepAliveClosure,
5213 &cmsDrainMarkingStackClosure,
5214 &task_executor,
5215 &pt);
5216 } else {
// Single-threaded processing: no task executor.
5217 stats = rp->process_discovered_references(&_is_alive_closure,
5218 &cmsKeepAliveClosure,
5219 &cmsDrainMarkingStackClosure,
5220 NULL,
5221 &pt);
5222 }
5223 _gc_tracer_cm->report_gc_reference_stats(stats);
// Log the per-phase reference-processing times gathered above.
5224 pt.print_all_references();
5225 }
5226
5227 // This is the point where the entire marking should have completed.
5228 verify_work_stacks_empty();
5229
5230 if (should_unload_classes()) {
5231 {
5232 GCTraceTime(Debug, gc, phases) t("Class Unloading", _gc_timer_cm);
5233
5234 // Unload classes and purge the SystemDictionary.
5235 bool purged_class = SystemDictionary::do_unloading(&_is_alive_closure, _gc_timer_cm);
5236
// 'purged_class' tells the code cache whether any class was actually
// unloaded, i.e. whether nmethod dependencies may now be stale.
5237 // Unload nmethods.
5238 CodeCache::do_unloading(&_is_alive_closure, purged_class);
5239
5240 // Prune dead klasses from subklass/sibling/implementor lists.
5241 Klass::clean_weak_klass_links(&_is_alive_closure);
5242 }
5243
5244 {
5245 GCTraceTime(Debug, gc, phases) t("Scrub Symbol Table", _gc_timer_cm);
5246 // Clean up unreferenced symbols in symbol table.
5247 SymbolTable::unlink();
5248 }
5249
5250 {
5251 GCTraceTime(Debug, gc, phases) t("Scrub String Table", _gc_timer_cm);
5252 // Delete entries for dead interned strings.
5253 StringTable::unlink(&_is_alive_closure);
5254 }
5255 }
5256
5257 // Restore any preserved marks as a result of mark stack or
5258 // work queue overflow
5259 restore_preserved_marks_if_any(); // done single-threaded for now
5260
// Enqueuing happens only after processing is complete; in the MT case
// the discovered lists are balanced across queues first. The same
// phase-times object 'pt' also records the enqueue timings.
5261 rp->set_enqueuing_is_done(true);
5262 if (rp->processing_is_mt()) {
5263 rp->balance_all_queues();
5264 CMSRefProcTaskExecutor task_executor(*this);
5265 rp->enqueue_discovered_references(&task_executor, &pt);
5266 } else {
5267 rp->enqueue_discovered_references(NULL, &pt);
5268 }
5269 rp->verify_no_references_recorded();
// Log the enqueue-phase timings gathered above.
5270 pt.print_enqueue_phase();
5271 assert(!rp->discovery_enabled(), "should have been disabled");
5272 }
5273
5274 #ifndef PRODUCT
5275 void CMSCollector::check_correct_thread_executing() {
5276 Thread* t = Thread::current();
5277 // Only the VM thread or the CMS thread should be here.
5278 assert(t->is_ConcurrentGC_thread() || t->is_VM_thread(),
5279 "Unexpected thread type");
5280 // If this is the vm thread, the foreground process
5281 // should not be waiting. Note that _foregroundGCIsActive is
5282 // true while the foreground collector is waiting.
5283 if (_foregroundGCShouldWait) {
5284 // We cannot be the VM thread
5285 assert(t->is_ConcurrentGC_thread(),
5286 "Should be CMS thread");
5287 } else {
5288 // We can be the CMS thread only if we are in a stop-world
5289 // phase of CMS collection.
5290 if (t->is_ConcurrentGC_thread()) {
|