
src/share/vm/gc/g1/g1CollectedHeap.cpp

rev 13070 : [mq]: webrev.0a
rev 13071 : [mq]: webrev.1


1266       // Temporarily make discovery by the STW ref processor single threaded (non-MT).
1267       ReferenceProcessorMTDiscoveryMutator stw_rp_disc_ser(ref_processor_stw(), false);
1268 
1269       // Temporarily clear the STW ref processor's _is_alive_non_header field.
1270       ReferenceProcessorIsAliveMutator stw_rp_is_alive_null(ref_processor_stw(), NULL);
1271 
1272       ref_processor_stw()->enable_discovery();
1273       ref_processor_stw()->setup_policy(do_clear_all_soft_refs);
1274 
1275       // Do collection work
1276       {
1277         HandleMark hm;  // Discard invalid handles created during gc
1278         G1MarkSweep::invoke_at_safepoint(ref_processor_stw(), do_clear_all_soft_refs);
1279       }
1280 
1281       assert(num_free_regions() == 0, "we should not have added any free regions");
1282       rebuild_region_sets(false /* free_list_only */);
1283 
1284       // Enqueue any discovered reference objects that have
1285       // not been removed from the discovered lists.
1286       ref_processor_stw()->enqueue_discovered_references();
1287 
1288 #if defined(COMPILER2) || INCLUDE_JVMCI
1289       DerivedPointerTable::update_pointers();
1290 #endif
1291 
1292       MemoryService::track_memory_usage();
1293 
1294       assert(!ref_processor_stw()->discovery_enabled(), "Postcondition");
1295       ref_processor_stw()->verify_no_references_recorded();
1296 
1297       // Delete metaspaces for unloaded class loaders and clean up loader_data graph
1298       ClassLoaderDataGraph::purge();
1299       MetaspaceAux::verify_metrics();
1300 
1301       // Note: since we've just done a full GC, concurrent
1302       // marking is no longer active. Therefore we need not
1303       // re-enable reference discovery for the CM ref processor.
1304       // That will be done at the start of the next marking cycle.
1305       assert(!ref_processor_cm()->discovery_enabled(), "Postcondition");
1306       ref_processor_cm()->verify_no_references_recorded();
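
The hunk above (full-GC path, lines 1266-1306) is from the base revision; the patched revision repeats both hunks further down, and the only lines that change are the two enqueue_discovered_references() calls. The two stack objects at lines 1267 and 1270 read like RAII mutators: they install a temporary setting on the STW reference processor (non-MT discovery, no extra is-alive closure) and put the original value back when the scope ends, so the full GC does not have to undo anything by hand. Below is a minimal standalone sketch of that save/restore idiom; RefProcStub and ScopedMTDiscovery are made-up stand-ins, not the actual HotSpot declarations.

    #include <cassert>

    // Stand-in for the real ReferenceProcessor; only the one flag the helper mutates.
    struct RefProcStub {
      bool mt_discovery = true;
    };

    // Save/restore idiom in the spirit of ReferenceProcessorMTDiscoveryMutator:
    // remember the current setting, install the temporary one, restore on scope exit.
    class ScopedMTDiscovery {
      RefProcStub* _rp;
      bool         _saved;
    public:
      ScopedMTDiscovery(RefProcStub* rp, bool mt)
        : _rp(rp), _saved(rp->mt_discovery) {
        _rp->mt_discovery = mt;
      }
      ~ScopedMTDiscovery() {
        _rp->mt_discovery = _saved;
      }
    };

    int main() {
      RefProcStub rp;
      {
        ScopedMTDiscovery serial(&rp, false);   // discovery is non-MT inside this scope
        assert(!rp.mt_discovery);
      }
      assert(rp.mt_discovery);                  // restored automatically on scope exit
      return 0;
    }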


4404   _gc_tracer_stw->report_gc_reference_stats(stats);
4405 
4406   // We have completed copying any necessary live referent objects.
4407   assert(pss->queue_is_empty(), "both queue and overflow should be empty");
4408 
4409   double ref_proc_time = os::elapsedTime() - ref_proc_start;
4410   g1_policy()->phase_times()->record_ref_proc_time(ref_proc_time * 1000.0);
4411 }
4412 
4413 // Weak Reference processing during an evacuation pause (part 2).
4414 void G1CollectedHeap::enqueue_discovered_references(G1ParScanThreadStateSet* per_thread_states) {
4415   double ref_enq_start = os::elapsedTime();
4416 
4417   ReferenceProcessor* rp = _ref_processor_stw;
4418   assert(!rp->discovery_enabled(), "should have been disabled as part of processing");
4419 
4420   // Now enqueue any remaining on the discovered lists on to
4421   // the pending list.
4422   if (!rp->processing_is_mt()) {
4423     // Serial reference processing...
4424     rp->enqueue_discovered_references();
4425   } else {
4426     // Parallel reference enqueueing
4427 
4428     uint n_workers = workers()->active_workers();
4429 
4430     assert(n_workers <= rp->max_num_q(),
4431            "Mismatch between the number of GC workers %u and the maximum number of Reference process queues %u",
4432            n_workers,  rp->max_num_q());
4433 
4434     G1STWRefProcTaskExecutor par_task_executor(this, per_thread_states, workers(), _task_queues, n_workers);
4435     rp->enqueue_discovered_references(&par_task_executor, _gc_timer_stw);
4436   }
4437 
4438   rp->verify_no_references_recorded();
4439   assert(!rp->discovery_enabled(), "should have been disabled");
4440 
4441   // FIXME
4442   // CM's reference processing also cleans up the string and symbol tables.
4443   // Should we do that here also? We could, but it is a serial operation
4444   // and could significantly increase the pause time.
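
The second hunk above (part 2 of the evacuation-pause reference work) picks the enqueue strategy from the processor's configuration: with MT processing off it enqueues serially, otherwise it builds a G1STWRefProcTaskExecutor over the active workers and asserts that the worker count does not exceed the processor's queue count. A toy model of that caller-side dispatch, with RefProcModel as a made-up stand-in for the real ReferenceProcessor:

    #include <cassert>
    #include <cstdio>

    // Made-up stand-in for the bits of ReferenceProcessor the branch looks at.
    struct RefProcModel {
      bool     processing_is_mt;   // was MT reference processing configured?
      unsigned max_num_q;          // number of discovered-reference queues
    };

    static void enqueue_discovered(const RefProcModel& rp, unsigned active_workers) {
      if (!rp.processing_is_mt) {
        // Serial path: the calling (VM) thread drains the discovered lists itself.
        std::puts("serial enqueue");
        return;
      }
      // Parallel path: at most one worker per queue, mirroring the assert above.
      assert(active_workers <= rp.max_num_q);
      std::printf("parallel enqueue with %u workers\n", active_workers);
    }

    int main() {
      RefProcModel serial_rp   = { false, 1 };
      RefProcModel parallel_rp = { true,  8 };
      enqueue_discovered(serial_rp, 8);     // serial reference processing
      enqueue_discovered(parallel_rp, 8);   // parallel reference enqueueing
      return 0;
    }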




1266       // Temporarily make discovery by the STW ref processor single threaded (non-MT).
1267       ReferenceProcessorMTDiscoveryMutator stw_rp_disc_ser(ref_processor_stw(), false);
1268 
1269       // Temporarily clear the STW ref processor's _is_alive_non_header field.
1270       ReferenceProcessorIsAliveMutator stw_rp_is_alive_null(ref_processor_stw(), NULL);
1271 
1272       ref_processor_stw()->enable_discovery();
1273       ref_processor_stw()->setup_policy(do_clear_all_soft_refs);
1274 
1275       // Do collection work
1276       {
1277         HandleMark hm;  // Discard invalid handles created during gc
1278         G1MarkSweep::invoke_at_safepoint(ref_processor_stw(), do_clear_all_soft_refs);
1279       }
1280 
1281       assert(num_free_regions() == 0, "we should not have added any free regions");
1282       rebuild_region_sets(false /* free_list_only */);
1283 
1284       // Enqueue any discovered reference objects that have
1285       // not been removed from the discovered lists.
1286       ref_processor_stw()->enqueue_discovered_references(NULL, gc_timer);
1287 
1288 #if defined(COMPILER2) || INCLUDE_JVMCI
1289       DerivedPointerTable::update_pointers();
1290 #endif
1291 
1292       MemoryService::track_memory_usage();
1293 
1294       assert(!ref_processor_stw()->discovery_enabled(), "Postcondition");
1295       ref_processor_stw()->verify_no_references_recorded();
1296 
1297       // Delete metaspaces for unloaded class loaders and clean up loader_data graph
1298       ClassLoaderDataGraph::purge();
1299       MetaspaceAux::verify_metrics();
1300 
1301       // Note: since we've just done a full GC, concurrent
1302       // marking is no longer active. Therefore we need not
1303       // re-enable reference discovery for the CM ref processor.
1304       // That will be done at the start of the next marking cycle.
1305       assert(!ref_processor_cm()->discovery_enabled(), "Postcondition");
1306       ref_processor_cm()->verify_no_references_recorded();
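
In the patched revision, the only change to the full-GC hunk is at line 1286: the argument-less enqueue call becomes enqueue_discovered_references(NULL, gc_timer). Judging from the new call sites alone, the entry point now takes a parallel task executor (NULL meaning serial enqueueing on the calling thread) plus a GC timer, presumably so the enqueue step can be timed and reported like the other reference-processing phases. A hedged sketch of that shape follows; TimerStub and ExecutorStub are made-up types, not the real GCTimer or refproc task-executor signatures.

    #include <chrono>
    #include <cstdio>

    // Illustrative stand-ins only; not the HotSpot GCTimer or task-executor types.
    struct TimerStub {
      void report(const char* phase, double ms) { std::printf("%s %.3fms\n", phase, ms); }
    };
    struct ExecutorStub {
      void run_enqueue_tasks() { std::puts("(workers enqueue in parallel)"); }
    };

    // Shape suggested by the new call sites: a NULL executor means the work is done
    // serially on the calling thread, and the timer lets the callee report the
    // enqueue step as its own timed phase.
    static void enqueue_discovered_references(ExecutorStub* executor, TimerStub* timer) {
      auto start = std::chrono::steady_clock::now();
      if (executor == nullptr) {
        std::puts("(VM thread enqueues serially)");
      } else {
        executor->run_enqueue_tasks();
      }
      double ms = std::chrono::duration<double, std::milli>(
                    std::chrono::steady_clock::now() - start).count();
      if (timer != nullptr) {
        timer->report("enqueue discovered references", ms);
      }
    }

    int main() {
      TimerStub timer;
      ExecutorStub executor;
      enqueue_discovered_references(nullptr, &timer);     // full GC: serial, timed
      enqueue_discovered_references(&executor, &timer);   // evacuation pause: parallel, timed
      return 0;
    }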


4404   _gc_tracer_stw->report_gc_reference_stats(stats);
4405 
4406   // We have completed copying any necessary live referent objects.
4407   assert(pss->queue_is_empty(), "both queue and overflow should be empty");
4408 
4409   double ref_proc_time = os::elapsedTime() - ref_proc_start;
4410   g1_policy()->phase_times()->record_ref_proc_time(ref_proc_time * 1000.0);
4411 }
4412 
4413 // Weak Reference processing during an evacuation pause (part 2).
4414 void G1CollectedHeap::enqueue_discovered_references(G1ParScanThreadStateSet* per_thread_states) {
4415   double ref_enq_start = os::elapsedTime();
4416 
4417   ReferenceProcessor* rp = _ref_processor_stw;
4418   assert(!rp->discovery_enabled(), "should have been disabled as part of processing");
4419 
4420   // Now enqueue any remaining on the discovered lists on to
4421   // the pending list.
4422   if (!rp->processing_is_mt()) {
4423     // Serial reference processing...
4424     rp->enqueue_discovered_references(NULL, _gc_timer_stw);
4425   } else {
4426     // Parallel reference enqueueing
4427 
4428     uint n_workers = workers()->active_workers();
4429 
4430     assert(n_workers <= rp->max_num_q(),
4431            "Mismatch between the number of GC workers %u and the maximum number of Reference process queues %u",
4432            n_workers,  rp->max_num_q());
4433 
4434     G1STWRefProcTaskExecutor par_task_executor(this, per_thread_states, workers(), _task_queues, n_workers);
4435     rp->enqueue_discovered_references(&par_task_executor, _gc_timer_stw);
4436   }
4437 
4438   rp->verify_no_references_recorded();
4439   assert(!rp->discovery_enabled(), "should have been disabled");
4440 
4441   // FIXME
4442   // CM's reference processing also cleans up the string and symbol tables.
4443   // Should we do that here also? We could, but it is a serial operation
4444   // and could significantly increase the pause time.
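
The evacuation-pause hunk changes in the same way: the serial branch at line 4424 now also passes (NULL, _gc_timer_stw), so both the serial and the parallel enqueue paths hand the same STW GC timer to the reference processor, matching the parallel call at line 4435 that already took it. The sketch after the full-GC hunk above covers both call shapes.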

