4567
4568 ParallelTaskTerminator* terminator() { return &_terminator; }
4569
4570 // Helps out with CLD processing.
4571 //
4572 // During InitialMark we need to:
4573 // 1) Scavenge all CLDs for the young GC.
4574 // 2) Mark all objects directly reachable from strong CLDs.
4575 template <G1Mark do_mark_object>
4576 class G1CLDClosure : public CLDClosure {
4577 G1ParCopyClosure<G1BarrierNone, do_mark_object>* _oop_closure; // Applied to the CLD's own oops; not owned by this closure.
4578 G1ParCopyClosure<G1BarrierKlass, do_mark_object> _oop_in_klass_closure; // Applied to oops reached via klasses (G1BarrierKlass variant).
4579 G1KlassScanClosure _klass_in_cld_closure; // Scans the CLD's klasses, delegating their oops to _oop_in_klass_closure.
4580 bool _claim; // Forwarded as the claim flag to ClassLoaderData::oops_do.
4581 
4582 public:
// Builds the klass-scanning machinery from the supplied oop closure:
// the in-klass copy closure shares the same heap, per-thread state and
// reference processor, differing only in the barrier template argument.
4583 G1CLDClosure(G1ParCopyClosure<G1BarrierNone, do_mark_object>* oop_closure,
4584 bool only_young, bool claim)
4585 : _oop_closure(oop_closure),
4586 _oop_in_klass_closure(oop_closure->g1(),
4587 oop_closure->pss(),
4588 oop_closure->rp()),
// NOTE: relies on _oop_in_klass_closure being declared before
// _klass_in_cld_closure, which captures its address here.
4589 _klass_in_cld_closure(&_oop_in_klass_closure, only_young),
4590 _claim(claim) {
4591 
4592 }
4593 
// Visit one CLD: apply the oop and klass closures to its roots.
// When _claim is true, presumably each CLD is claimed so only one
// worker processes it — TODO confirm against ClassLoaderData::oops_do.
4594 void do_cld(ClassLoaderData* cld) {
4595 cld->oops_do(_oop_closure, &_klass_in_cld_closure, _claim);
4596 }
4597 };
4598
4599 void work(uint worker_id) {
4600 if (worker_id >= _n_workers) return; // no work needed this round
4601
4602 double start_sec = os::elapsedTime();
4603 _g1h->g1_policy()->phase_times()->record_time_secs(G1GCPhaseTimes::GCWorkerStart, worker_id, start_sec);
4604
4605 {
4606 ResourceMark rm;
4607 HandleMark hm;
4608
4609 ReferenceProcessor* rp = _g1h->ref_processor_stw();
4610
4611 G1ParScanThreadState* pss = _pss[worker_id];
4612 pss->set_ref_processor(rp);
4613
4614 bool only_young = _g1h->collector_state()->gcs_are_young();
4615
4616 // Non-IM young GC.
4617 G1ParCopyClosure<G1BarrierNone, G1MarkNone> scan_only_root_cl(_g1h, pss, rp);
4618 G1CLDClosure<G1MarkNone> scan_only_cld_cl(&scan_only_root_cl,
4619 only_young, // Only process dirty klasses.
4620 false); // No need to claim CLDs.
4621 // IM young GC.
4622 // Strong roots closures.
4623 G1ParCopyClosure<G1BarrierNone, G1MarkFromRoot> scan_mark_root_cl(_g1h, pss, rp);
4624 G1CLDClosure<G1MarkFromRoot> scan_mark_cld_cl(&scan_mark_root_cl,
4625 false, // Process all klasses.
4626 true); // Need to claim CLDs.
4627 // Weak roots closures.
4628 G1ParCopyClosure<G1BarrierNone, G1MarkPromotedFromRoot> scan_mark_weak_root_cl(_g1h, pss, rp);
4629 G1CLDClosure<G1MarkPromotedFromRoot> scan_mark_weak_cld_cl(&scan_mark_weak_root_cl,
4630 false, // Process all klasses.
4631 true); // Need to claim CLDs.
4632
4633 OopClosure* strong_root_cl;
4634 OopClosure* weak_root_cl;
4635 CLDClosure* strong_cld_cl;
4636 CLDClosure* weak_cld_cl;
4637
4638 bool trace_metadata = false;
4639
4640 if (_g1h->collector_state()->during_initial_mark_pause()) {
4641 // We also need to mark copied objects.
4642 strong_root_cl = &scan_mark_root_cl;
4643 strong_cld_cl = &scan_mark_cld_cl;
4644 if (ClassUnloadingWithConcurrentMark) {
4645 weak_root_cl = &scan_mark_weak_root_cl;
4646 weak_cld_cl = &scan_mark_weak_cld_cl;
4647 trace_metadata = true;
4648 } else {
5307 RefToScanQueueSet *task_queues,
5308 ParallelTaskTerminator* terminator) :
5309 AbstractGangTask("Process reference objects in parallel"),
5310 _proc_task(proc_task),
5311 _g1h(g1h),
5312 _pss(per_thread_states),
5313 _task_queues(task_queues),
5314 _terminator(terminator)
5315 {}
5316
5317 virtual void work(uint worker_id) {
5318 // The reference processing task executed by a single worker.
5319 ResourceMark rm;
5320 HandleMark hm;
5321
5322 G1STWIsAliveClosure is_alive(_g1h);
5323
5324 G1ParScanThreadState* pss = _pss[worker_id];
5325 pss->set_ref_processor(NULL);
5326
5327 G1ParScanExtRootClosure only_copy_non_heap_cl(_g1h, pss, NULL);
5328
5329 G1ParScanAndMarkExtRootClosure copy_mark_non_heap_cl(_g1h, pss, NULL);
5330
5331 OopClosure* copy_non_heap_cl = &only_copy_non_heap_cl;
5332
5333 if (_g1h->collector_state()->during_initial_mark_pause()) {
5334 // We also need to mark copied objects.
5335 copy_non_heap_cl = &copy_mark_non_heap_cl;
5336 }
5337
5338 // Keep alive closure.
5339 G1CopyingKeepAliveClosure keep_alive(_g1h, copy_non_heap_cl, pss);
5340
5341 // Complete GC closure
5342 G1ParEvacuateFollowersClosure drain_queue(_g1h, pss, _task_queues, _terminator);
5343
5344 // Call the reference processing task's work routine.
5345 _proc_task.work(worker_id, is_alive, keep_alive, drain_queue);
5346
5347 // Note we cannot assert that the refs array is empty here as not all
5348 // of the processing tasks (specifically phase2 - pp2_work) execute
5349 // the complete_gc closure (which ordinarily would drain the queue) so
5407 uint _n_workers;
5408
5409 public:
// Constructs the gang task that preserves concurrent-marking referents
// (task name "ParPreserveCMReferents") across 'workers' threads.
// Stores non-owning references to the heap, the per-worker scan states and
// the shared queue set; the terminator is sized for 'workers' participants
// draining _queues.
5410 G1ParPreserveCMReferentsTask(G1CollectedHeap* g1h, G1ParScanThreadState** per_thread_states, int workers, RefToScanQueueSet *task_queues) :
5411 AbstractGangTask("ParPreserveCMReferents"),
5412 _g1h(g1h),
5413 _pss(per_thread_states),
5414 _queues(task_queues),
5415 _terminator(workers, _queues),
5416 _n_workers(workers)
5417 { }
5418
5419 void work(uint worker_id) {
5420 ResourceMark rm;
5421 HandleMark hm;
5422
5423 G1ParScanThreadState* pss = _pss[worker_id];
5424 pss->set_ref_processor(NULL);
5425 assert(pss->queue_is_empty(), "both queue and overflow should be empty");
5426
5427 G1ParScanExtRootClosure only_copy_non_heap_cl(_g1h, pss, NULL);
5428
5429 G1ParScanAndMarkExtRootClosure copy_mark_non_heap_cl(_g1h, pss, NULL);
5430
5431 OopClosure* copy_non_heap_cl = &only_copy_non_heap_cl;
5432
5433 if (_g1h->collector_state()->during_initial_mark_pause()) {
5434 // We also need to mark copied objects.
5435 copy_non_heap_cl = &copy_mark_non_heap_cl;
5436 }
5437
5438 // Is alive closure
5439 G1AlwaysAliveClosure always_alive(_g1h);
5440
5441 // Copying keep alive closure. Applied to referent objects that need
5442 // to be copied.
5443 G1CopyingKeepAliveClosure keep_alive(_g1h, copy_non_heap_cl, pss);
5444
5445 ReferenceProcessor* rp = _g1h->ref_processor_cm();
5446
5447 uint limit = ReferenceProcessor::number_of_subclasses_of_ref() * rp->max_num_q();
5448 uint stride = MIN2(MAX2(_n_workers, 1U), limit);
5449
5517
5518 workers()->run_task(&keep_cm_referents);
5519
5520 // Closure to test whether a referent is alive.
5521 G1STWIsAliveClosure is_alive(this);
5522
5523 // Even when parallel reference processing is enabled, the processing
5524 // of JNI refs is serial and performed serially by the current thread
5525 // rather than by a worker. The following PSS will be used for processing
5526 // JNI refs.
5527
5528 // Use only a single queue for this PSS.
5529 G1ParScanThreadState* pss = per_thread_states[0];
5530 pss->set_ref_processor(NULL);
5531 assert(pss->queue_is_empty(), "pre-condition");
5532
5533 // We do not embed a reference processor in the copying/scanning
5534 // closures while we're actually processing the discovered
5535 // reference objects.
5536
5537 G1ParScanExtRootClosure only_copy_non_heap_cl(this, pss, NULL);
5538
5539 G1ParScanAndMarkExtRootClosure copy_mark_non_heap_cl(this, pss, NULL);
5540
5541 OopClosure* copy_non_heap_cl = &only_copy_non_heap_cl;
5542
5543 if (collector_state()->during_initial_mark_pause()) {
5544 // We also need to mark copied objects.
5545 copy_non_heap_cl = &copy_mark_non_heap_cl;
5546 }
5547
5548 // Keep alive closure.
5549 G1CopyingKeepAliveClosure keep_alive(this, copy_non_heap_cl, pss);
5550
5551 // Serial Complete GC closure
5552 G1STWDrainQueueClosure drain_queue(this, pss);
5553
5554 // Setup the soft refs policy...
5555 rp->setup_policy(false);
5556
5557 ReferenceProcessorStats stats;
5558 if (!rp->processing_is_mt()) {
5559 // Serial reference processing...
|
4567
4568 ParallelTaskTerminator* terminator() { return &_terminator; }
4569
4570 // Helps out with CLD processing.
4571 //
4572 // During InitialMark we need to:
4573 // 1) Scavenge all CLDs for the young GC.
4574 // 2) Mark all objects directly reachable from strong CLDs.
4575 template <G1Mark do_mark_object>
4576 class G1CLDClosure : public CLDClosure {
4577 G1ParCopyClosure<G1BarrierNone, do_mark_object>* _oop_closure; // Applied to the CLD's own oops; not owned by this closure.
4578 G1ParCopyClosure<G1BarrierKlass, do_mark_object> _oop_in_klass_closure; // Applied to oops reached via klasses (G1BarrierKlass variant).
4579 G1KlassScanClosure _klass_in_cld_closure; // Scans the CLD's klasses, delegating their oops to _oop_in_klass_closure.
4580 bool _claim; // Forwarded as the claim flag to ClassLoaderData::oops_do.
4581 
4582 public:
// Builds the klass-scanning machinery from the supplied oop closure:
// the in-klass copy closure shares the same heap and per-thread state,
// differing only in the barrier template argument.
4583 G1CLDClosure(G1ParCopyClosure<G1BarrierNone, do_mark_object>* oop_closure,
4584 bool only_young, bool claim)
4585 : _oop_closure(oop_closure),
4586 _oop_in_klass_closure(oop_closure->g1(),
4587 oop_closure->pss()),
// NOTE: relies on _oop_in_klass_closure being declared before
// _klass_in_cld_closure, which captures its address here.
4588 _klass_in_cld_closure(&_oop_in_klass_closure, only_young),
4589 _claim(claim) {
4590 
4591 }
4592 
// Visit one CLD: apply the oop and klass closures to its roots.
// When _claim is true, presumably each CLD is claimed so only one
// worker processes it — TODO confirm against ClassLoaderData::oops_do.
4593 void do_cld(ClassLoaderData* cld) {
4594 cld->oops_do(_oop_closure, &_klass_in_cld_closure, _claim);
4595 }
4596 };
4597
4598 void work(uint worker_id) {
4599 if (worker_id >= _n_workers) return; // no work needed this round
4600
4601 double start_sec = os::elapsedTime();
4602 _g1h->g1_policy()->phase_times()->record_time_secs(G1GCPhaseTimes::GCWorkerStart, worker_id, start_sec);
4603
4604 {
4605 ResourceMark rm;
4606 HandleMark hm;
4607
4608 ReferenceProcessor* rp = _g1h->ref_processor_stw();
4609
4610 G1ParScanThreadState* pss = _pss[worker_id];
4611 pss->set_ref_processor(rp);
4612
4613 bool only_young = _g1h->collector_state()->gcs_are_young();
4614
4615 // Non-IM young GC.
4616 G1ParCopyClosure<G1BarrierNone, G1MarkNone> scan_only_root_cl(_g1h, pss);
4617 G1CLDClosure<G1MarkNone> scan_only_cld_cl(&scan_only_root_cl,
4618 only_young, // Only process dirty klasses.
4619 false); // No need to claim CLDs.
4620 // IM young GC.
4621 // Strong roots closures.
4622 G1ParCopyClosure<G1BarrierNone, G1MarkFromRoot> scan_mark_root_cl(_g1h, pss);
4623 G1CLDClosure<G1MarkFromRoot> scan_mark_cld_cl(&scan_mark_root_cl,
4624 false, // Process all klasses.
4625 true); // Need to claim CLDs.
4626 // Weak roots closures.
4627 G1ParCopyClosure<G1BarrierNone, G1MarkPromotedFromRoot> scan_mark_weak_root_cl(_g1h, pss);
4628 G1CLDClosure<G1MarkPromotedFromRoot> scan_mark_weak_cld_cl(&scan_mark_weak_root_cl,
4629 false, // Process all klasses.
4630 true); // Need to claim CLDs.
4631
4632 OopClosure* strong_root_cl;
4633 OopClosure* weak_root_cl;
4634 CLDClosure* strong_cld_cl;
4635 CLDClosure* weak_cld_cl;
4636
4637 bool trace_metadata = false;
4638
4639 if (_g1h->collector_state()->during_initial_mark_pause()) {
4640 // We also need to mark copied objects.
4641 strong_root_cl = &scan_mark_root_cl;
4642 strong_cld_cl = &scan_mark_cld_cl;
4643 if (ClassUnloadingWithConcurrentMark) {
4644 weak_root_cl = &scan_mark_weak_root_cl;
4645 weak_cld_cl = &scan_mark_weak_cld_cl;
4646 trace_metadata = true;
4647 } else {
5306 RefToScanQueueSet *task_queues,
5307 ParallelTaskTerminator* terminator) :
5308 AbstractGangTask("Process reference objects in parallel"),
5309 _proc_task(proc_task),
5310 _g1h(g1h),
5311 _pss(per_thread_states),
5312 _task_queues(task_queues),
5313 _terminator(terminator)
5314 {}
5315
5316 virtual void work(uint worker_id) {
5317 // The reference processing task executed by a single worker.
5318 ResourceMark rm;
5319 HandleMark hm;
5320
5321 G1STWIsAliveClosure is_alive(_g1h);
5322
5323 G1ParScanThreadState* pss = _pss[worker_id];
5324 pss->set_ref_processor(NULL);
5325
5326 G1ParScanExtRootClosure only_copy_non_heap_cl(_g1h, pss);
5327
5328 G1ParScanAndMarkExtRootClosure copy_mark_non_heap_cl(_g1h, pss);
5329
5330 OopClosure* copy_non_heap_cl = &only_copy_non_heap_cl;
5331
5332 if (_g1h->collector_state()->during_initial_mark_pause()) {
5333 // We also need to mark copied objects.
5334 copy_non_heap_cl = &copy_mark_non_heap_cl;
5335 }
5336
5337 // Keep alive closure.
5338 G1CopyingKeepAliveClosure keep_alive(_g1h, copy_non_heap_cl, pss);
5339
5340 // Complete GC closure
5341 G1ParEvacuateFollowersClosure drain_queue(_g1h, pss, _task_queues, _terminator);
5342
5343 // Call the reference processing task's work routine.
5344 _proc_task.work(worker_id, is_alive, keep_alive, drain_queue);
5345
5346 // Note we cannot assert that the refs array is empty here as not all
5347 // of the processing tasks (specifically phase2 - pp2_work) execute
5348 // the complete_gc closure (which ordinarily would drain the queue) so
5406 uint _n_workers;
5407
5408 public:
// Constructs the gang task that preserves concurrent-marking referents
// (task name "ParPreserveCMReferents") across 'workers' threads.
// Stores non-owning references to the heap, the per-worker scan states and
// the shared queue set; the terminator is sized for 'workers' participants
// draining _queues.
5409 G1ParPreserveCMReferentsTask(G1CollectedHeap* g1h, G1ParScanThreadState** per_thread_states, int workers, RefToScanQueueSet *task_queues) :
5410 AbstractGangTask("ParPreserveCMReferents"),
5411 _g1h(g1h),
5412 _pss(per_thread_states),
5413 _queues(task_queues),
5414 _terminator(workers, _queues),
5415 _n_workers(workers)
5416 { }
5417
5418 void work(uint worker_id) {
5419 ResourceMark rm;
5420 HandleMark hm;
5421
5422 G1ParScanThreadState* pss = _pss[worker_id];
5423 pss->set_ref_processor(NULL);
5424 assert(pss->queue_is_empty(), "both queue and overflow should be empty");
5425
5426 G1ParScanExtRootClosure only_copy_non_heap_cl(_g1h, pss);
5427
5428 G1ParScanAndMarkExtRootClosure copy_mark_non_heap_cl(_g1h, pss);
5429
5430 OopClosure* copy_non_heap_cl = &only_copy_non_heap_cl;
5431
5432 if (_g1h->collector_state()->during_initial_mark_pause()) {
5433 // We also need to mark copied objects.
5434 copy_non_heap_cl = &copy_mark_non_heap_cl;
5435 }
5436
5437 // Is alive closure
5438 G1AlwaysAliveClosure always_alive(_g1h);
5439
5440 // Copying keep alive closure. Applied to referent objects that need
5441 // to be copied.
5442 G1CopyingKeepAliveClosure keep_alive(_g1h, copy_non_heap_cl, pss);
5443
5444 ReferenceProcessor* rp = _g1h->ref_processor_cm();
5445
5446 uint limit = ReferenceProcessor::number_of_subclasses_of_ref() * rp->max_num_q();
5447 uint stride = MIN2(MAX2(_n_workers, 1U), limit);
5448
5516
5517 workers()->run_task(&keep_cm_referents);
5518
5519 // Closure to test whether a referent is alive.
5520 G1STWIsAliveClosure is_alive(this);
5521
5522 // Even when parallel reference processing is enabled, the processing
5523 // of JNI refs is serial and performed serially by the current thread
5524 // rather than by a worker. The following PSS will be used for processing
5525 // JNI refs.
5526
5527 // Use only a single queue for this PSS.
5528 G1ParScanThreadState* pss = per_thread_states[0];
5529 pss->set_ref_processor(NULL);
5530 assert(pss->queue_is_empty(), "pre-condition");
5531
5532 // We do not embed a reference processor in the copying/scanning
5533 // closures while we're actually processing the discovered
5534 // reference objects.
5535
5536 G1ParScanExtRootClosure only_copy_non_heap_cl(this, pss);
5537
5538 G1ParScanAndMarkExtRootClosure copy_mark_non_heap_cl(this, pss);
5539
5540 OopClosure* copy_non_heap_cl = &only_copy_non_heap_cl;
5541
5542 if (collector_state()->during_initial_mark_pause()) {
5543 // We also need to mark copied objects.
5544 copy_non_heap_cl = &copy_mark_non_heap_cl;
5545 }
5546
5547 // Keep alive closure.
5548 G1CopyingKeepAliveClosure keep_alive(this, copy_non_heap_cl, pss);
5549
5550 // Serial Complete GC closure
5551 G1STWDrainQueueClosure drain_queue(this, pss);
5552
5553 // Setup the soft refs policy...
5554 rp->setup_policy(false);
5555
5556 ReferenceProcessorStats stats;
5557 if (!rp->processing_is_mt()) {
5558 // Serial reference processing...
|