597 r = _root_regions_iterator.claim_next();
598 }
599
600 if (check_and_handle_cancelled_gc(terminator)) return;
601
602 // Normal loop.
603 q = queues->queue(worker_id);
604
605 ShenandoahTraversalSATBBufferClosure drain_satb(q);
606 SATBMarkQueueSet& satb_mq_set = ShenandoahBarrierSet::satb_mark_queue_set();
607
608 int seed = 17;
609
610 while (true) {
611 if (check_and_handle_cancelled_gc(terminator)) return;
612
613 while (satb_mq_set.completed_buffers_num() > 0) {
614 satb_mq_set.apply_closure_to_completed_buffer(&drain_satb);
615 }
616
617 if (_arraycopy_task_queue.length() > 0) {
618 process_arraycopy_task(cl);
619 }
620
621 uint work = 0;
622 for (uint i = 0; i < stride; i++) {
623 if (q->pop_buffer(task) ||
624 q->pop_local(task) ||
625 q->pop_overflow(task) ||
626 queues->steal(worker_id, &seed, task)) {
627 conc_mark->do_task<T>(q, cl, live_data, &task);
628 work++;
629 } else {
630 break;
631 }
632 }
633
634 if (work == 0 &&
635 _arraycopy_task_queue.length() == 0) {
636 // No more work, try to terminate
637 ShenandoahEvacOOMScopeLeaver oom_scope_leaver;
638 ShenandoahTerminationTimingsTracker term_tracker(worker_id);
639 if (terminator->offer_termination()) return;
640 }
641 }
642 }
643
644 bool ShenandoahTraversalGC::check_and_handle_cancelled_gc(ParallelTaskTerminator* terminator) {
645 if (_heap->cancelled_gc()) {
646 ShenandoahCancelledTerminatorTerminator tt;
647 ShenandoahEvacOOMScopeLeaver oom_scope_leaver;
648 while (! terminator->offer_termination(&tt));
649 return true;
650 }
651 return false;
652 }
653
654 void ShenandoahTraversalGC::concurrent_traversal_collection() {
655 ClassLoaderDataGraph::clear_claimed_marks();
709
710 if (!_heap->cancelled_gc() && _heap->process_references()) {
711 weak_refs_work();
712 }
713
714 if (!_heap->cancelled_gc() && _heap->unload_classes()) {
715 _heap->unload_classes_and_cleanup_tables(false);
716 fixup_roots();
717 }
718
719 if (!_heap->cancelled_gc()) {
720 assert(_task_queues->is_empty(), "queues must be empty after traversal GC");
721 TASKQUEUE_STATS_ONLY(_task_queues->print_taskqueue_stats());
722 TASKQUEUE_STATS_ONLY(_task_queues->reset_taskqueue_stats());
723
724 // Still good? We can now trash the cset, and make final verification
725 {
726 ShenandoahGCPhase phase_cleanup(ShenandoahPhaseTimings::traversal_gc_cleanup);
727 ShenandoahHeapLocker lock(_heap->lock());
728
729 assert(_arraycopy_task_queue.length() == 0, "arraycopy tasks must be done");
730
731 // Trash everything
732 // Clear immediate garbage regions.
733 size_t num_regions = _heap->num_regions();
734
735 ShenandoahHeapRegionSet* traversal_regions = traversal_set();
736 ShenandoahFreeSet* free_regions = _heap->free_set();
737 ShenandoahMarkingContext* const ctx = _heap->next_marking_context();
738 free_regions->clear();
739 for (size_t i = 0; i < num_regions; i++) {
740 ShenandoahHeapRegion* r = _heap->get_region(i);
741 bool not_allocated = ctx->top_at_mark_start(r->region_number()) == r->top();
742
743 bool candidate = traversal_regions->is_in(r) && !r->has_live() && not_allocated;
744 if (r->is_humongous_start() && candidate) {
745 // Trash humongous.
746 HeapWord* humongous_obj = r->bottom() + BrooksPointer::word_size();
747 assert(!ctx->is_marked(oop(humongous_obj)), "must not be marked");
748 r->make_trash();
749 while (i + 1 < num_regions && _heap->get_region(i + 1)->is_humongous_continuation()) {
750 i++;
810 CLDToOopClosure cldCl(&cl);
811 _rp->process_all_roots(&cl, &cl, &cldCl, &blobsCl, NULL, worker_id);
812 }
813 };
814
815 void ShenandoahTraversalGC::fixup_roots() {
816 #if defined(COMPILER2) || INCLUDE_JVMCI
817 DerivedPointerTable::clear();
818 #endif
819 ShenandoahHeap* heap = ShenandoahHeap::heap();
820 ShenandoahRootProcessor rp(heap, heap->workers()->active_workers(), ShenandoahPhaseTimings::final_traversal_update_roots);
821 ShenandoahTraversalFixRootsTask update_roots_task(&rp);
822 heap->workers()->run_task(&update_roots_task);
823 #if defined(COMPILER2) || INCLUDE_JVMCI
824 DerivedPointerTable::update_pointers();
825 #endif
826 }
827
828 void ShenandoahTraversalGC::reset() {
829 _task_queues->clear();
830 _arraycopy_task_queue.clear();
831 }
832
// Accessor for the per-worker traversal task queue set.
ShenandoahObjToScanQueueSet* ShenandoahTraversalGC::task_queues() {
  return _task_queues;
}
836
// Returns the given worker's thread-local liveness accumulation array.
jushort* ShenandoahTraversalGC::get_liveness(uint worker_id) {
  return _liveness_local[worker_id];
}
840
841 class ShenandoahTraversalCancelledGCYieldClosure : public YieldClosure {
842 private:
843 ShenandoahHeap* const _heap;
844 public:
845 ShenandoahTraversalCancelledGCYieldClosure() : _heap(ShenandoahHeap::heap()) {};
846 virtual bool should_return() { return _heap->cancelled_gc(); }
847 };
848
849 class ShenandoahTraversalPrecleanCompleteGCClosure : public VoidClosure {
850 public:
1183 } else {
1184 if (!_heap->is_degenerated_gc_in_progress()) {
1185 ShenandoahTraversalKeepAliveUpdateClosure keep_alive(task_queues()->queue(serial_worker_id));
1186 rp->process_discovered_references(&is_alive, &keep_alive,
1187 &complete_gc, &executor,
1188 &pt);
1189 pt.print_all_references();
1190 WeakProcessor::weak_oops_do(&is_alive, &keep_alive);
1191 } else {
1192 ShenandoahTraversalKeepAliveUpdateDegenClosure keep_alive(task_queues()->queue(serial_worker_id));
1193 rp->process_discovered_references(&is_alive, &keep_alive,
1194 &complete_gc, &executor,
1195 &pt);
1196 pt.print_all_references();
1197 WeakProcessor::weak_oops_do(&is_alive, &keep_alive);
1198 }
1199 }
1200
1201 assert(!_heap->cancelled_gc() || task_queues()->is_empty(), "Should be empty");
1202 }
1203 }
1204
// Enqueue an arraycopy chunk [start, start + count) for later scanning by
// process_arraycopy_task(). A count of 0 denotes a cloned object rather than
// an array slice (see the pop side, which oop-iterates the whole object).
void ShenandoahTraversalGC::push_arraycopy(HeapWord* start, size_t count) {
  _arraycopy_task_queue.push(start, count);
}
1208
1209 template <class T>
1210 bool ShenandoahTraversalGC::process_arraycopy_task(T* cl) {
1211 ShenandoahArrayCopyTask task = _arraycopy_task_queue.pop();
1212 if (task.start() == NULL) {
1213 return false;
1214 }
1215 if (task.count() == 0) {
1216 // Handle clone.
1217 oop obj = oop(task.start());
1218 obj->oop_iterate(cl);
1219 } else {
1220 HeapWord* array = task.start();
1221 size_t count = task.count();
1222 if (UseCompressedOops) {
1223 narrowOop* p = reinterpret_cast<narrowOop*>(array);
1224 for (size_t i = 0; i < count; i++) {
1225 cl->do_oop(p++);
1226 }
1227 } else {
1228 oop* p = reinterpret_cast<oop*>(array);
1229 for (size_t i = 0; i < count; i++) {
1230 cl->do_oop(p++);
1231 }
1232 }
1233 }
1234 return true;
1235 }
|
597 r = _root_regions_iterator.claim_next();
598 }
599
600 if (check_and_handle_cancelled_gc(terminator)) return;
601
602 // Normal loop.
603 q = queues->queue(worker_id);
604
605 ShenandoahTraversalSATBBufferClosure drain_satb(q);
606 SATBMarkQueueSet& satb_mq_set = ShenandoahBarrierSet::satb_mark_queue_set();
607
608 int seed = 17;
609
610 while (true) {
611 if (check_and_handle_cancelled_gc(terminator)) return;
612
613 while (satb_mq_set.completed_buffers_num() > 0) {
614 satb_mq_set.apply_closure_to_completed_buffer(&drain_satb);
615 }
616
617 uint work = 0;
618 for (uint i = 0; i < stride; i++) {
619 if (q->pop_buffer(task) ||
620 q->pop_local(task) ||
621 q->pop_overflow(task) ||
622 queues->steal(worker_id, &seed, task)) {
623 conc_mark->do_task<T>(q, cl, live_data, &task);
624 work++;
625 } else {
626 break;
627 }
628 }
629
630 if (work == 0) {
631 // No more work, try to terminate
632 ShenandoahEvacOOMScopeLeaver oom_scope_leaver;
633 ShenandoahTerminationTimingsTracker term_tracker(worker_id);
634 if (terminator->offer_termination()) return;
635 }
636 }
637 }
638
639 bool ShenandoahTraversalGC::check_and_handle_cancelled_gc(ParallelTaskTerminator* terminator) {
640 if (_heap->cancelled_gc()) {
641 ShenandoahCancelledTerminatorTerminator tt;
642 ShenandoahEvacOOMScopeLeaver oom_scope_leaver;
643 while (! terminator->offer_termination(&tt));
644 return true;
645 }
646 return false;
647 }
648
649 void ShenandoahTraversalGC::concurrent_traversal_collection() {
650 ClassLoaderDataGraph::clear_claimed_marks();
704
705 if (!_heap->cancelled_gc() && _heap->process_references()) {
706 weak_refs_work();
707 }
708
709 if (!_heap->cancelled_gc() && _heap->unload_classes()) {
710 _heap->unload_classes_and_cleanup_tables(false);
711 fixup_roots();
712 }
713
714 if (!_heap->cancelled_gc()) {
715 assert(_task_queues->is_empty(), "queues must be empty after traversal GC");
716 TASKQUEUE_STATS_ONLY(_task_queues->print_taskqueue_stats());
717 TASKQUEUE_STATS_ONLY(_task_queues->reset_taskqueue_stats());
718
719 // Still good? We can now trash the cset, and make final verification
720 {
721 ShenandoahGCPhase phase_cleanup(ShenandoahPhaseTimings::traversal_gc_cleanup);
722 ShenandoahHeapLocker lock(_heap->lock());
723
724 // Trash everything
725 // Clear immediate garbage regions.
726 size_t num_regions = _heap->num_regions();
727
728 ShenandoahHeapRegionSet* traversal_regions = traversal_set();
729 ShenandoahFreeSet* free_regions = _heap->free_set();
730 ShenandoahMarkingContext* const ctx = _heap->next_marking_context();
731 free_regions->clear();
732 for (size_t i = 0; i < num_regions; i++) {
733 ShenandoahHeapRegion* r = _heap->get_region(i);
734 bool not_allocated = ctx->top_at_mark_start(r->region_number()) == r->top();
735
736 bool candidate = traversal_regions->is_in(r) && !r->has_live() && not_allocated;
737 if (r->is_humongous_start() && candidate) {
738 // Trash humongous.
739 HeapWord* humongous_obj = r->bottom() + BrooksPointer::word_size();
740 assert(!ctx->is_marked(oop(humongous_obj)), "must not be marked");
741 r->make_trash();
742 while (i + 1 < num_regions && _heap->get_region(i + 1)->is_humongous_continuation()) {
743 i++;
803 CLDToOopClosure cldCl(&cl);
804 _rp->process_all_roots(&cl, &cl, &cldCl, &blobsCl, NULL, worker_id);
805 }
806 };
807
808 void ShenandoahTraversalGC::fixup_roots() {
809 #if defined(COMPILER2) || INCLUDE_JVMCI
810 DerivedPointerTable::clear();
811 #endif
812 ShenandoahHeap* heap = ShenandoahHeap::heap();
813 ShenandoahRootProcessor rp(heap, heap->workers()->active_workers(), ShenandoahPhaseTimings::final_traversal_update_roots);
814 ShenandoahTraversalFixRootsTask update_roots_task(&rp);
815 heap->workers()->run_task(&update_roots_task);
816 #if defined(COMPILER2) || INCLUDE_JVMCI
817 DerivedPointerTable::update_pointers();
818 #endif
819 }
820
// Drops any leftover traversal work so the next cycle starts from a clean slate.
void ShenandoahTraversalGC::reset() {
  _task_queues->clear();
}
824
// Accessor for the per-worker traversal task queue set.
ShenandoahObjToScanQueueSet* ShenandoahTraversalGC::task_queues() {
  return _task_queues;
}
828
// Returns the given worker's thread-local liveness accumulation array.
jushort* ShenandoahTraversalGC::get_liveness(uint worker_id) {
  return _liveness_local[worker_id];
}
832
833 class ShenandoahTraversalCancelledGCYieldClosure : public YieldClosure {
834 private:
835 ShenandoahHeap* const _heap;
836 public:
837 ShenandoahTraversalCancelledGCYieldClosure() : _heap(ShenandoahHeap::heap()) {};
838 virtual bool should_return() { return _heap->cancelled_gc(); }
839 };
840
841 class ShenandoahTraversalPrecleanCompleteGCClosure : public VoidClosure {
842 public:
1175 } else {
1176 if (!_heap->is_degenerated_gc_in_progress()) {
1177 ShenandoahTraversalKeepAliveUpdateClosure keep_alive(task_queues()->queue(serial_worker_id));
1178 rp->process_discovered_references(&is_alive, &keep_alive,
1179 &complete_gc, &executor,
1180 &pt);
1181 pt.print_all_references();
1182 WeakProcessor::weak_oops_do(&is_alive, &keep_alive);
1183 } else {
1184 ShenandoahTraversalKeepAliveUpdateDegenClosure keep_alive(task_queues()->queue(serial_worker_id));
1185 rp->process_discovered_references(&is_alive, &keep_alive,
1186 &complete_gc, &executor,
1187 &pt);
1188 pt.print_all_references();
1189 WeakProcessor::weak_oops_do(&is_alive, &keep_alive);
1190 }
1191 }
1192
1193 assert(!_heap->cancelled_gc() || task_queues()->is_empty(), "Should be empty");
1194 }
1195 }
|