1689 assert(_global_mark_stack.is_empty(), "Marking should have completed");
1690
1691 {
1692 GCTraceTime(Debug, gc, phases) debug("Weak Processing", _gc_timer_cm);
1693 WeakProcessor::weak_oops_do(&g1_is_alive, &do_nothing_cl);
1694 }
1695
1696 // Unload Klasses, String, Symbols, Code Cache, etc.
1697 if (ClassUnloadingWithConcurrentMark) {
1698 GCTraceTime(Debug, gc, phases) debug("Class Unloading", _gc_timer_cm);
1699 bool purged_classes = SystemDictionary::do_unloading(&g1_is_alive, _gc_timer_cm, false /* Defer cleaning */);
1700 _g1h->complete_cleaning(&g1_is_alive, purged_classes);
1701 } else {
1702 GCTraceTime(Debug, gc, phases) debug("Cleanup", _gc_timer_cm);
1703 // No need to clean string table and symbol table as they are treated as strong roots when
1704 // class unloading is disabled.
1705 _g1h->partial_cleaning(&g1_is_alive, false, false, G1StringDedup::is_enabled());
1706 }
1707 }
1708
1709 // When sampling object counts, we already swapped the mark bitmaps, so we need to use
1710 // the prev bitmap determining liveness.
1711 class G1ObjectCountIsAliveClosure: public BoolObjectClosure {
1712 G1CollectedHeap* _g1h;
1713 public:
1714 G1ObjectCountIsAliveClosure(G1CollectedHeap* g1h) : _g1h(g1h) { }
1715
1716 bool do_object_b(oop obj) {
1717 HeapWord* addr = (HeapWord*)obj;
1718 return addr != NULL &&
1719 (!_g1h->is_in_g1_reserved(addr) || !_g1h->is_obj_dead(obj));
1720 }
1721 };
1722
1723 void G1ConcurrentMark::report_object_count(bool mark_completed) {
1724 // Depending on the completion of the marking liveness needs to be determined
1725 // using either the next or prev bitmap.
1726 if (mark_completed) {
1727 G1ObjectCountIsAliveClosure is_alive(_g1h);
1728 _gc_tracer_cm->report_object_count_after_gc(&is_alive);
|
1689 assert(_global_mark_stack.is_empty(), "Marking should have completed");
1690
1691 {
1692 GCTraceTime(Debug, gc, phases) debug("Weak Processing", _gc_timer_cm);
1693 WeakProcessor::weak_oops_do(&g1_is_alive, &do_nothing_cl);
1694 }
1695
1696 // Unload Klasses, String, Symbols, Code Cache, etc.
1697 if (ClassUnloadingWithConcurrentMark) {
1698 GCTraceTime(Debug, gc, phases) debug("Class Unloading", _gc_timer_cm);
1699 bool purged_classes = SystemDictionary::do_unloading(&g1_is_alive, _gc_timer_cm, false /* Defer cleaning */);
1700 _g1h->complete_cleaning(&g1_is_alive, purged_classes);
1701 } else {
1702 GCTraceTime(Debug, gc, phases) debug("Cleanup", _gc_timer_cm);
1703 // No need to clean string table and symbol table as they are treated as strong roots when
1704 // class unloading is disabled.
1705 _g1h->partial_cleaning(&g1_is_alive, false, false, G1StringDedup::is_enabled());
1706 }
1707 }
1708
1709 class G1PrecleanYieldClosure : public YieldClosure {
1710 G1ConcurrentMark* _cm;
1711
1712 public:
1713 G1PrecleanYieldClosure(G1ConcurrentMark* cm) : _cm(cm) { }
1714
1715 virtual bool should_return() {
1716 return _cm->has_aborted();
1717 }
1718
1719 virtual bool should_return_fine_grain() {
1720 _cm->do_yield_check();
1721 return _cm->has_aborted();
1722 }
1723 };
1724
// Preclean discovered references: process references whose referents are
// already known dead so the remark pause has less reference work to do.
void G1ConcurrentMark::preclean() {
  assert(G1UseReferencePrecleaning, "Precleaning must be enabled.");

  // Scoped joiner — presumably makes this thread participate in
  // suspendible-thread-set synchronization for the duration of the
  // preclean (NOTE(review): confirm against SuspendibleThreadSetJoiner).
  SuspendibleThreadSetJoiner joiner;

  // Precleaning runs serially on marking task 0 only.
  G1CMKeepAliveAndDrainClosure keep_alive(this, task(0), true /* is_serial */);
  G1CMDrainMarkingStackClosure drain_mark_stack(this, task(0), true /* is_serial */);

  // One active worker for this phase.
  set_concurrency_and_phase(1, true);

  // Allows the reference processor to bail out / yield if marking aborts.
  G1PrecleanYieldClosure yield_cl(this);

  ReferenceProcessor* rp = _g1h->ref_processor_cm();
  // Precleaning is single threaded. Temporarily disable MT discovery.
  ReferenceProcessorMTDiscoveryMutator rp_mut_discovery(rp, false);
  rp->preclean_discovered_references(rp->is_alive_non_header(),
                                     &keep_alive,
                                     &drain_mark_stack,
                                     &yield_cl,
                                     _gc_timer_cm);
}
1746
// When sampling object counts, we already swapped the mark bitmaps, so we need to use
// the prev bitmap determining liveness.
class G1ObjectCountIsAliveClosure: public BoolObjectClosure {
  G1CollectedHeap* _g1h;  // Heap used for liveness queries; not owned.
public:
  G1ObjectCountIsAliveClosure(G1CollectedHeap* g1h) : _g1h(g1h) { }

  // Returns true iff obj counts as live: non-NULL and either outside the
  // G1 reserved space or not dead according to the heap.
  bool do_object_b(oop obj) {
    HeapWord* addr = (HeapWord*)obj;
    return addr != NULL &&
           (!_g1h->is_in_g1_reserved(addr) || !_g1h->is_obj_dead(obj));
  }
};
1760
1761 void G1ConcurrentMark::report_object_count(bool mark_completed) {
1762 // Depending on the completion of the marking liveness needs to be determined
1763 // using either the next or prev bitmap.
1764 if (mark_completed) {
1765 G1ObjectCountIsAliveClosure is_alive(_g1h);
1766 _gc_tracer_cm->report_object_count_after_gc(&is_alive);
|