3585 if (pll_head != NULL) {
3586 // Any valid worker id is fine here as we are in the VM thread and single-threaded.
3587 _cm->mark_in_next_bitmap(0 /* worker_id */, pll_head);
3588 }
3589 }
3590 }
3591
// Flushes all per-worker G1ParScanThreadState data back into shared heap
// state via per_thread_states->flush(), and records the elapsed wall time
// in the collection phase times.
3592 void G1CollectedHeap::merge_per_thread_state_info(G1ParScanThreadStateSet* per_thread_states) {
3593 double merge_pss_time_start = os::elapsedTime();
3594 per_thread_states->flush();
// os::elapsedTime() is in seconds; the recorded metric is milliseconds.
3595 phase_times()->record_merge_pss_time_ms((os::elapsedTime() - merge_pss_time_start) * 1000.0);
3596 }
3597
3598 class G1PrepareEvacuationTask : public AbstractGangTask {
3599 class G1PrepareRegionsClosure : public HeapRegionClosure {
3600 G1CollectedHeap* _g1h;
3601 G1PrepareEvacuationTask* _parent_task;
3602 size_t _worker_humongous_total;
3603 size_t _worker_humongous_candidates;
3604
3605
// Decides whether the humongous object starting in `region` is a candidate
// for eager reclaim. Returns false for dead objects (class unloading makes
// querying their classes unsafe) and for regions whose remembered set is
// incomplete; otherwise the object must be a primitive array (typeArray,
// which holds no references) and pass the heap's eager-reclaim heuristic.
3606 bool humongous_region_is_candidate(HeapRegion* region) const {
3607 assert(region->is_starts_humongous(), "Must start a humongous object");
3608
3609 oop obj = oop(region->bottom());
3610
3611 // Dead objects cannot be eager reclaim candidates. Due to class
3612 // unloading it is unsafe to query their classes so we return early.
3613 if (_g1h->is_obj_dead(obj, region)) {
3614 return false;
3615 }
3616
3617 // If we do not have a complete remembered set for the region, then we can
3618 // not be sure that we have all references to it.
3619 if (!region->rem_set()->is_complete()) {
3620 return false;
3621 }
3622 // Candidate selection must satisfy the following constraints
3623 // while concurrent marking is in progress:
3624 //
3625 // * In order to maintain SATB invariants, an object must not be
// NOTE(review): original comment lines 3626-3655 (the rest of the rationale)
// are not visible in this chunk; the explanatory text is truncated here.
3656 // may reduce needed headroom.
3657
3658 return obj->is_typeArray() &&
3659 _g1h->is_potential_eager_reclaim_candidate(region);
3660 }
3661
3662 public:
// Stores the heap and parent task; the per-worker counters start at zero
// and are flushed to the parent task in the destructor.
3663 G1PrepareRegionsClosure(G1CollectedHeap* g1h, G1PrepareEvacuationTask* parent_task) :
3664 _g1h(g1h),
3665 _parent_task(parent_task),
3666 _worker_humongous_total(0),
3667 _worker_humongous_candidates(0) { }
3668
// Publishes this worker's counts to the parent task. The parent's add_*
// methods use Atomic::add, so concurrent destruction of several worker
// closures is safe.
3669 ~G1PrepareRegionsClosure() {
3670 _parent_task->add_humongous_candidates(_worker_humongous_candidates);
3671 _parent_task->add_humongous_total(_worker_humongous_total);
3672 }
3673
// Per-region prepare step: readies the region's remembered set for scanning
// and, for humongous start regions, classifies the region as an eager-reclaim
// candidate (updating the worker-local counters). Always returns false so
// the iteration visits every region.
3674 virtual bool do_heap_region(HeapRegion* hr) {
3675 // First prepare the region for scanning
// NOTE(review): the second copy of this hunk calls
// _g1h->rem_set()->prepare_region_for_scan(hr) — confirm which
// G1RemSet method name is current before relying on this version.
3676 _g1h->rem_set()->prepare_region_for_scanning(hr);
3677
3678 // Now check if region is a humongous candidate
3679 if (!hr->is_starts_humongous()) {
3680 _g1h->register_region_with_region_attr(hr);
3681 return false;
3682 }
3683
3684 uint index = hr->hrm_index();
3685 if (humongous_region_is_candidate(hr)) {
3686 _g1h->set_humongous_reclaim_candidate(index, true);
3687 _g1h->register_humongous_region_with_region_attr(index);
3688 _worker_humongous_candidates++;
3689 // We will later handle the remembered sets of these regions.
3690 } else {
3691 _g1h->set_humongous_reclaim_candidate(index, false);
3692 _g1h->register_region_with_region_attr(hr);
3693 }
3694 _worker_humongous_total++;
3695
3696 return false;
3697 }
3698
3699 };
3700
3701 G1CollectedHeap* _g1h;
3702 HeapRegionClaimer _claimer;
3703 volatile size_t _humongous_total;
3704 volatile size_t _humongous_candidates;
3705 public:
// Sizes the region claimer for the currently active worker threads; the
// humongous counters are accumulated atomically by the worker closures.
3706 G1PrepareEvacuationTask(G1CollectedHeap* g1h) :
3707 AbstractGangTask("Prepare Evacuation"),
3708 _g1h(g1h),
3709 _claimer(_g1h->workers()->active_workers()),
3710 _humongous_total(0),
3711 _humongous_candidates(0) {
3712
3713 }
3714
// Records on the heap whether any humongous region was selected as an
// eager-reclaim candidate during this task.
3715 ~G1PrepareEvacuationTask() {
3716 _g1h->set_has_humongous_reclaim_candidate(_humongous_candidates > 0);
3717 }
3718
// Worker entry point: iterates heap regions claimed through the shared
// HeapRegionClaimer, starting at an offset derived from worker_id.
3719 void work(uint worker_id) {
3720 G1PrepareRegionsClosure cl(_g1h, this);
3721 _g1h->heap_region_par_iterate_from_worker_offset(&cl, &_claimer, worker_id);
3722 }
3723
// Atomically accumulates a worker's candidate count; called from the
// closure destructors, potentially by multiple workers concurrently.
3724 void add_humongous_candidates(size_t candidates) {
3725 Atomic::add(candidates, &_humongous_candidates);
3726 }
3727
// Atomically accumulates a worker's total humongous-start-region count.
3728 void add_humongous_total(size_t total) {
3729 Atomic::add(total, &_humongous_total);
3730 }
3731
3732 size_t humongous_candidates() {
3733 return _humongous_candidates;
|
3585 if (pll_head != NULL) {
3586 // Any valid worker id is fine here as we are in the VM thread and single-threaded.
3587 _cm->mark_in_next_bitmap(0 /* worker_id */, pll_head);
3588 }
3589 }
3590 }
3591
// Flushes all per-worker G1ParScanThreadState data back into shared heap
// state via per_thread_states->flush(), and records the elapsed wall time
// in the collection phase times.
3592 void G1CollectedHeap::merge_per_thread_state_info(G1ParScanThreadStateSet* per_thread_states) {
3593 double merge_pss_time_start = os::elapsedTime();
3594 per_thread_states->flush();
// os::elapsedTime() is in seconds; the recorded metric is milliseconds.
3595 phase_times()->record_merge_pss_time_ms((os::elapsedTime() - merge_pss_time_start) * 1000.0);
3596 }
3597
3598 class G1PrepareEvacuationTask : public AbstractGangTask {
3599 class G1PrepareRegionsClosure : public HeapRegionClosure {
3600 G1CollectedHeap* _g1h;
3601 G1PrepareEvacuationTask* _parent_task;
3602 size_t _worker_humongous_total;
3603 size_t _worker_humongous_candidates;
3604
// Decides whether the humongous object starting in `region` is a candidate
// for eager reclaim. Returns false for dead objects (class unloading makes
// querying their classes unsafe) and for regions whose remembered set is
// incomplete; otherwise the object must be a primitive array (typeArray,
// which holds no references) and pass the heap's eager-reclaim heuristic.
3605 bool humongous_region_is_candidate(HeapRegion* region) const {
3606 assert(region->is_starts_humongous(), "Must start a humongous object");
3607
3608 oop obj = oop(region->bottom());
3609
3610 // Dead objects cannot be eager reclaim candidates. Due to class
3611 // unloading it is unsafe to query their classes so we return early.
3612 if (_g1h->is_obj_dead(obj, region)) {
3613 return false;
3614 }
3615
3616 // If we do not have a complete remembered set for the region, then we can
3617 // not be sure that we have all references to it.
3618 if (!region->rem_set()->is_complete()) {
3619 return false;
3620 }
3621 // Candidate selection must satisfy the following constraints
3622 // while concurrent marking is in progress:
3623 //
3624 // * In order to maintain SATB invariants, an object must not be
// NOTE(review): original comment lines 3625-3654 (the rest of the rationale)
// are not visible in this chunk; the explanatory text is truncated here.
3655 // may reduce needed headroom.
3656
3657 return obj->is_typeArray() &&
3658 _g1h->is_potential_eager_reclaim_candidate(region);
3659 }
3660
3661 public:
// Stores the heap and parent task; the per-worker counters start at zero
// and are flushed to the parent task in the destructor.
3662 G1PrepareRegionsClosure(G1CollectedHeap* g1h, G1PrepareEvacuationTask* parent_task) :
3663 _g1h(g1h),
3664 _parent_task(parent_task),
3665 _worker_humongous_total(0),
3666 _worker_humongous_candidates(0) { }
3667
// Publishes this worker's counts to the parent task. The parent's add_*
// methods use Atomic::add, so concurrent destruction of several worker
// closures is safe.
3668 ~G1PrepareRegionsClosure() {
3669 _parent_task->add_humongous_candidates(_worker_humongous_candidates);
3670 _parent_task->add_humongous_total(_worker_humongous_total);
3671 }
3672
// Per-region prepare step: readies the region's remembered set for scanning
// and, for humongous start regions, classifies the region as an eager-reclaim
// candidate (updating the worker-local counters). Always returns false so
// the iteration visits every region.
3673 virtual bool do_heap_region(HeapRegion* hr) {
3674 // First prepare the region for scanning
3675 _g1h->rem_set()->prepare_region_for_scan(hr);
3676
3677 // Now check if region is a humongous candidate
3678 if (!hr->is_starts_humongous()) {
3679 _g1h->register_region_with_region_attr(hr);
3680 return false;
3681 }
3682
3683 uint index = hr->hrm_index();
3684 if (humongous_region_is_candidate(hr)) {
3685 _g1h->set_humongous_reclaim_candidate(index, true);
3686 _g1h->register_humongous_region_with_region_attr(index);
3687 _worker_humongous_candidates++;
3688 // We will later handle the remembered sets of these regions.
3689 } else {
3690 _g1h->set_humongous_reclaim_candidate(index, false);
3691 _g1h->register_region_with_region_attr(hr);
3692 }
3693 _worker_humongous_total++;
3694
3695 return false;
3696 }
3697 };
3698
3699 G1CollectedHeap* _g1h;
3700 HeapRegionClaimer _claimer;
3701 volatile size_t _humongous_total;
3702 volatile size_t _humongous_candidates;
3703 public:
// Sizes the region claimer for the currently active worker threads; the
// humongous counters are accumulated atomically by the worker closures.
3704 G1PrepareEvacuationTask(G1CollectedHeap* g1h) :
3705 AbstractGangTask("Prepare Evacuation"),
3706 _g1h(g1h),
3707 _claimer(_g1h->workers()->active_workers()),
3708 _humongous_total(0),
3709 _humongous_candidates(0) { }
3710
// Records on the heap whether any humongous region was selected as an
// eager-reclaim candidate during this task.
3711 ~G1PrepareEvacuationTask() {
3712 _g1h->set_has_humongous_reclaim_candidate(_humongous_candidates > 0);
3713 }
3714
// Worker entry point: iterates heap regions claimed through the shared
// HeapRegionClaimer, starting at an offset derived from worker_id.
3715 void work(uint worker_id) {
3716 G1PrepareRegionsClosure cl(_g1h, this);
3717 _g1h->heap_region_par_iterate_from_worker_offset(&cl, &_claimer, worker_id);
3718 }
3719
// Atomically accumulates a worker's candidate count; called from the
// closure destructors, potentially by multiple workers concurrently.
3720 void add_humongous_candidates(size_t candidates) {
3721 Atomic::add(candidates, &_humongous_candidates);
3722 }
3723
// Atomically accumulates a worker's total humongous-start-region count.
3724 void add_humongous_total(size_t total) {
3725 Atomic::add(total, &_humongous_total);
3726 }
3727
3728 size_t humongous_candidates() {
3729 return _humongous_candidates;
|