1538
1539 heuristics()->choose_collection_set(_collection_set);
1540
1541 _free_set->rebuild();
1542 }
1543
1544 if (!is_degenerated_gc_in_progress()) {
1545 prepare_concurrent_roots();
1546 prepare_concurrent_unloading();
1547 }
1548
1549 // If collection set has candidates, start evacuation.
1550 // Otherwise, bypass the rest of the cycle.
1551 if (!collection_set()->is_empty()) {
1552 ShenandoahGCPhase init_evac(ShenandoahPhaseTimings::init_evac);
1553
1554 if (ShenandoahVerify) {
1555 verifier()->verify_before_evacuation();
1556 }
1557
1558 set_evacuation_in_progress(true);
1559 // From here on, we need to update references.
1560 set_has_forwarded_objects(true);
1561
1562 if (!is_degenerated_gc_in_progress()) {
1563 evacuate_and_update_roots();
1564 }
1565
1566 if (ShenandoahPacing) {
1567 pacer()->setup_for_evac();
1568 }
1569
1570 if (ShenandoahVerify) {
1571 ShenandoahRootVerifier::RootTypes types = ShenandoahRootVerifier::None;
1572 if (ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
1573 types = ShenandoahRootVerifier::combine(ShenandoahRootVerifier::JNIHandleRoots, ShenandoahRootVerifier::WeakRoots);
1574 types = ShenandoahRootVerifier::combine(types, ShenandoahRootVerifier::CLDGRoots);
1575 }
1576
1577 if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
1639 update_heap_references(true);
1640 }
1641
// Concurrent cleanup phase operation: return regions reclaimed by the cycle
// (collection-set trash) to the free set so mutators can allocate from them.
void ShenandoahHeap::op_cleanup() {
  free_set()->recycle_trash();
}
1645
// Gang task that evacuates/updates concurrent roots while mutators are running:
// OopStorage-backed VM roots and weak roots, plus class loader data roots.
// All root sets are instantiated in concurrent mode (template arg true).
class ShenandoahConcurrentRootsEvacUpdateTask : public AbstractGangTask {
private:
  ShenandoahVMRoots<true /*concurrent*/> _vm_roots;
  ShenandoahWeakRoots<true /*concurrent*/> _weak_roots;
  ShenandoahClassLoaderDataRoots<true /*concurrent*/, false /*single threaded*/> _cld_roots;

public:
  ShenandoahConcurrentRootsEvacUpdateTask() :
    AbstractGangTask("Shenandoah Evacuate/Update Concurrent Roots Task") {
  }

  void work(uint worker_id) {
    // Evacuation can fail under memory pressure; the OOM scope enters the
    // protocol that lets workers coordinate on evacuation OOM.
    ShenandoahEvacOOMScope oom;
    {
      // jni_roots and weak_roots are OopStorage backed roots, concurrent iteration
      // may race against OopStorage::release() calls.
      ShenandoahEvacUpdateOopStorageRootsClosure cl;
      _vm_roots.oops_do<ShenandoahEvacUpdateOopStorageRootsClosure>(&cl);
      _weak_roots.oops_do<ShenandoahEvacUpdateOopStorageRootsClosure>(&cl);
    }

    {
      // CLD roots are walked with a strong-claim CLD closure wrapping the
      // evacuate/update oop closure.
      ShenandoahEvacuateUpdateRootsClosure cl;
      CLDToOopClosure clds(&cl, ClassLoaderData::_claim_strong);
      _cld_roots.cld_do(&clds);
    }
  }
};
1674
1675 void ShenandoahHeap::op_roots() {
1676 if (is_concurrent_root_in_progress()) {
1677 if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
1678 _unloader.unload();
1679 }
1680
1681 if (ShenandoahConcurrentRoots::should_do_concurrent_roots() && is_concurrent_root_in_progress()) {
1682 ShenandoahConcurrentRootsEvacUpdateTask task;
1683 workers()->run_task(&task);
1684 }
1685 }
1686
1687 set_concurrent_root_in_progress(false);
1688 }
1689
// Concurrent reset phase operation: clear the marking bitmap in preparation
// for the next marking cycle.
void ShenandoahHeap::op_reset() {
  reset_mark_bitmap();
}
1693
// Concurrent precleaning phase operation: delegate weak-reference precleaning
// to the concurrent marker.
void ShenandoahHeap::op_preclean() {
  concurrent_mark()->preclean_weak_refs();
}
1697
// Init-traversal pause operation: delegate to the traversal GC driver.
void ShenandoahHeap::op_init_traversal() {
  traversal_gc()->init_traversal_collection();
}
1701
// Concurrent traversal phase operation: delegate to the traversal GC driver.
void ShenandoahHeap::op_traversal() {
  traversal_gc()->concurrent_traversal_collection();
}
2222 assert((r->is_pinned() && r->pin_count() > 0) || (!r->is_pinned() && r->pin_count() == 0),
2223 "Region " SIZE_FORMAT " pinning status is inconsistent", i);
2224 }
2225 }
2226 #endif
2227
// Accessor for the timer used to report GC phase timings.
GCTimer* ShenandoahHeap::gc_timer() const {
  return _gc_timer;
}
2231
2232 void ShenandoahHeap::prepare_concurrent_roots() {
2233 assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
2234 if (ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
2235 set_concurrent_root_in_progress(true);
2236 }
2237 }
2238
2239 void ShenandoahHeap::prepare_concurrent_unloading() {
2240 assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
2241 if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
2242 ShenandoahCodeRoots::prepare_concurrent_unloading();
2243 _unloader.prepare();
2244 }
2245 }
2246
2247 void ShenandoahHeap::finish_concurrent_unloading() {
2248 assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
2249 if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
2250 _unloader.finish();
2251 }
2252 }
2253
2254 #ifdef ASSERT
2255 void ShenandoahHeap::assert_gc_workers(uint nworkers) {
2256 assert(nworkers > 0 && nworkers <= max_workers(), "Sanity");
2257
2258 if (ShenandoahSafepoint::is_at_shenandoah_safepoint()) {
2259 if (UseDynamicNumberOfGCThreads ||
2260 (FLAG_IS_DEFAULT(ParallelGCThreads) && ForceDynamicNumberOfGCThreads)) {
2261 assert(nworkers <= ParallelGCThreads, "Cannot use more than it has");
2262 } else {
|
1538
1539 heuristics()->choose_collection_set(_collection_set);
1540
1541 _free_set->rebuild();
1542 }
1543
1544 if (!is_degenerated_gc_in_progress()) {
1545 prepare_concurrent_roots();
1546 prepare_concurrent_unloading();
1547 }
1548
1549 // If collection set has candidates, start evacuation.
1550 // Otherwise, bypass the rest of the cycle.
1551 if (!collection_set()->is_empty()) {
1552 ShenandoahGCPhase init_evac(ShenandoahPhaseTimings::init_evac);
1553
1554 if (ShenandoahVerify) {
1555 verifier()->verify_before_evacuation();
1556 }
1557
1558 if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
1559 ShenandoahCodeRoots::prepare_concurrent_unloading();
1560 }
1561
1562 set_evacuation_in_progress(true);
1563 // From here on, we need to update references.
1564 set_has_forwarded_objects(true);
1565
1566 if (!is_degenerated_gc_in_progress()) {
1567 evacuate_and_update_roots();
1568 }
1569
1570 if (ShenandoahPacing) {
1571 pacer()->setup_for_evac();
1572 }
1573
1574 if (ShenandoahVerify) {
1575 ShenandoahRootVerifier::RootTypes types = ShenandoahRootVerifier::None;
1576 if (ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
1577 types = ShenandoahRootVerifier::combine(ShenandoahRootVerifier::JNIHandleRoots, ShenandoahRootVerifier::WeakRoots);
1578 types = ShenandoahRootVerifier::combine(types, ShenandoahRootVerifier::CLDGRoots);
1579 }
1580
1581 if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
1643 update_heap_references(true);
1644 }
1645
// Concurrent cleanup phase operation: return regions reclaimed by the cycle
// (collection-set trash) to the free set so mutators can allocate from them.
void ShenandoahHeap::op_cleanup() {
  free_set()->recycle_trash();
}
1649
// Gang task that evacuates/updates concurrent roots while mutators are running:
// OopStorage-backed VM roots and weak roots, plus class loader data roots.
// All root sets are instantiated in concurrent mode (template arg true).
class ShenandoahConcurrentRootsEvacUpdateTask : public AbstractGangTask {
private:
  ShenandoahVMRoots<true /*concurrent*/> _vm_roots;
  ShenandoahWeakRoots<true /*concurrent*/> _weak_roots;
  ShenandoahClassLoaderDataRoots<true /*concurrent*/, false /*single threaded*/> _cld_roots;

public:
  ShenandoahConcurrentRootsEvacUpdateTask() :
    AbstractGangTask("Shenandoah Evacuate/Update Concurrent Roots Task") {
  }

  void work(uint worker_id) {
    // Evacuation can fail under memory pressure; the OOM scope enters the
    // protocol that lets workers coordinate on evacuation OOM.
    ShenandoahEvacOOMScope oom;
    // A single closure handles VM roots, weak roots, and CLD roots here.
    ShenandoahEvacUpdateCleanupRootsClosure cl;
    {
      // NOTE(review): empty explicit template args on oops_do<> look like the
      // closure type may have been dropped in this copy — confirm against the
      // ShenandoahVMRoots/ShenandoahWeakRoots declarations.
      _vm_roots.oops_do<>(&cl);
      _weak_roots.oops_do<>(&cl);
      CLDToOopClosure clds(&cl, ClassLoaderData::_claim_strong);
      _cld_roots.cld_do(&clds);
    }
  }
};
1672
// Concurrent roots phase operation: evacuate/update the roots that can be
// processed alongside mutators, perform concurrent class unloading, then
// clear the in-progress flag unconditionally.
void ShenandoahHeap::op_roots() {
  if (is_concurrent_root_in_progress()) {
    if (ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
      ShenandoahConcurrentRootsEvacUpdateTask task;
      workers()->run_task(&task);
    }
    if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
      _unloader.unload();
    }
  }

  // Cleared even when the phase was skipped, so later phases see a clean state.
  set_concurrent_root_in_progress(false);
}
1686
// Concurrent reset phase operation: clear the marking bitmap in preparation
// for the next marking cycle.
void ShenandoahHeap::op_reset() {
  reset_mark_bitmap();
}
1690
// Concurrent precleaning phase operation: delegate weak-reference precleaning
// to the concurrent marker.
void ShenandoahHeap::op_preclean() {
  concurrent_mark()->preclean_weak_refs();
}
1694
// Init-traversal pause operation: delegate to the traversal GC driver.
void ShenandoahHeap::op_init_traversal() {
  traversal_gc()->init_traversal_collection();
}
1698
// Concurrent traversal phase operation: delegate to the traversal GC driver.
void ShenandoahHeap::op_traversal() {
  traversal_gc()->concurrent_traversal_collection();
}
2219 assert((r->is_pinned() && r->pin_count() > 0) || (!r->is_pinned() && r->pin_count() == 0),
2220 "Region " SIZE_FORMAT " pinning status is inconsistent", i);
2221 }
2222 }
2223 #endif
2224
// Accessor for the timer used to report GC phase timings.
GCTimer* ShenandoahHeap::gc_timer() const {
  return _gc_timer;
}
2228
2229 void ShenandoahHeap::prepare_concurrent_roots() {
2230 assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
2231 if (ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
2232 set_concurrent_root_in_progress(true);
2233 }
2234 }
2235
2236 void ShenandoahHeap::prepare_concurrent_unloading() {
2237 assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
2238 if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
2239 _unloader.prepare();
2240 }
2241 }
2242
2243 void ShenandoahHeap::finish_concurrent_unloading() {
2244 assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
2245 if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
2246 _unloader.finish();
2247 }
2248 }
2249
2250 #ifdef ASSERT
2251 void ShenandoahHeap::assert_gc_workers(uint nworkers) {
2252 assert(nworkers > 0 && nworkers <= max_workers(), "Sanity");
2253
2254 if (ShenandoahSafepoint::is_at_shenandoah_safepoint()) {
2255 if (UseDynamicNumberOfGCThreads ||
2256 (FLAG_IS_DEFAULT(ParallelGCThreads) && ForceDynamicNumberOfGCThreads)) {
2257 assert(nworkers <= ParallelGCThreads, "Cannot use more than it has");
2258 } else {
|