# HG changeset patch
# Parent e3f1f696063f3c4bb12f29aef02058720be8f047

diff -r e3f1f696063f src/hotspot/share/gc/shenandoah/c2/shenandoahBarrierSetC2.cpp
--- a/src/hotspot/share/gc/shenandoah/c2/shenandoahBarrierSetC2.cpp	Wed Mar 04 19:23:13 2020 +0100
+++ b/src/hotspot/share/gc/shenandoah/c2/shenandoahBarrierSetC2.cpp	Mon Mar 23 21:44:46 2020 +0100
@@ -563,9 +563,14 @@
   if (on_weak_ref) {
     // Use the pre-barrier to record the value in the referent field
-    satb_write_barrier_pre(kit, false /* do_load */,
-                           NULL /* obj */, NULL /* adr */, max_juint /* alias_idx */, NULL /* val */, NULL /* val_type */,
-                           load /* pre_val */, T_OBJECT);
+    if (ShenandoahAggressiveReferenceDiscovery) {
+      load = shenandoah_enqueue_barrier(kit, load);
+    } else {
+      satb_write_barrier_pre(kit, false /* do_load */,
+                             NULL /* obj */, NULL /* adr */, max_juint /* alias_idx */, NULL /* val */,
+                             NULL /* val_type */,
+                             load /* pre_val */, T_OBJECT);
+    }
     // Add memory barrier to prevent commoning reads from this field
     // across safepoint since GC can change its value.
     kit->insert_mem_bar(Op_MemBarCPUOrder);
   }
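This hunk changes what C2 emits for the referent load in Reference.get(): with the flag off, the loaded referent is unconditionally recorded through the SATB pre-barrier and therefore always kept alive by concurrent marking; with the flag on, it only gets a ShenandoahEnqueueBarrier node, which later expansion drops entirely if is_redundant() proves the referent never escapes. A minimal sketch of the two strategies, with hypothetical helpers (load_referent, satb_enqueue and enqueue_barrier stand in for the graph nodes built above):

  // Sketch only, not HotSpot code: models the barrier choice above.
  oop reference_get(oop ref) {
    oop referent = load_referent(ref);           // raw load of Reference.referent
    if (ShenandoahAggressiveReferenceDiscovery) {
      referent = enqueue_barrier(referent);      // elided when provably redundant
    } else {
      satb_enqueue(referent);                    // always marks the referent live
    }
    return referent;
  }
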
diff -r e3f1f696063f src/hotspot/share/gc/shenandoah/c2/shenandoahSupport.cpp
--- a/src/hotspot/share/gc/shenandoah/c2/shenandoahSupport.cpp	Wed Mar 04 19:23:13 2020 +0100
+++ b/src/hotspot/share/gc/shenandoah/c2/shenandoahSupport.cpp	Mon Mar 23 21:44:46 2020 +0100
@@ -863,7 +863,7 @@
 }
 
 void ShenandoahBarrierC2Support::test_heap_stable(Node*& ctrl, Node* raw_mem, Node*& heap_stable_ctrl,
-                                                  PhaseIdealLoop* phase) {
+                                                  PhaseIdealLoop* phase, int flags) {
   IdealLoopTree* loop = phase->get_loop(ctrl);
   Node* thread = new ThreadLocalNode();
   phase->register_new_node(thread, ctrl);
@@ -877,7 +877,7 @@
 
   Node* gc_state = new LoadBNode(ctrl, raw_mem, gc_state_addr, gc_state_adr_type, TypeInt::BYTE, MemNode::unordered);
   phase->register_new_node(gc_state, ctrl);
-  Node* heap_stable_and = new AndINode(gc_state, phase->igvn().intcon(ShenandoahHeap::HAS_FORWARDED));
+  Node* heap_stable_and = new AndINode(gc_state, phase->igvn().intcon(flags));
   phase->register_new_node(heap_stable_and, ctrl);
   Node* heap_stable_cmp = new CmpINode(heap_stable_and, phase->igvn().zerocon(T_INT));
   phase->register_new_node(heap_stable_cmp, ctrl);
@@ -891,7 +891,7 @@
   ctrl = new IfTrueNode(heap_stable_iff);
   phase->register_control(ctrl, loop, heap_stable_iff);
 
-  assert(is_heap_stable_test(heap_stable_iff), "Should match the shape");
+  assert(is_heap_state_test(heap_stable_iff, flags), "Should match the shape");
 }
 
 void ShenandoahBarrierC2Support::test_null(Node*& ctrl, Node* val, Node*& null_ctrl, PhaseIdealLoop* phase) {
@@ -1417,7 +1417,7 @@
   Node* raw_mem_phi = PhiNode::make(region, raw_mem, Type::MEMORY, TypeRawPtr::BOTTOM);
 
   // Stable path.
-  test_heap_stable(ctrl, raw_mem, heap_stable_ctrl, phase);
+  test_heap_stable(ctrl, raw_mem, heap_stable_ctrl, phase, ShenandoahHeap::HAS_FORWARDED);
   IfNode* heap_stable_iff = heap_stable_ctrl->in(0)->as_If();
 
   // Heap stable case
@@ -1553,8 +1553,10 @@
     Node* barrier = state->enqueue_barrier(i);
     Node* pre_val = barrier->in(1);
 
-    if (phase->igvn().type(pre_val)->higher_equal(TypePtr::NULL_PTR)) {
-      ShouldNotReachHere();
+    assert(!phase->igvn().type(pre_val)->higher_equal(TypePtr::NULL_PTR), "no known-NULLs here");
+
+    if (((ShenandoahEnqueueBarrierNode*)barrier)->is_redundant()) {
+      phase->igvn().replace_node(barrier, pre_val);
       continue;
     }
 
@@ -1588,7 +1590,7 @@
     Node* phi2 = PhiNode::make(region2, raw_mem, Type::MEMORY, TypeRawPtr::BOTTOM);
 
     // Stable path.
-    test_heap_stable(ctrl, raw_mem, heap_stable_ctrl, phase);
+    test_heap_stable(ctrl, raw_mem, heap_stable_ctrl, phase, ShenandoahHeap::TRAVERSAL | ShenandoahHeap::MARKING);
     region->init_req(_heap_stable, heap_stable_ctrl);
     phi->init_req(_heap_stable, raw_mem);
 
@@ -2148,6 +2150,84 @@
   return NULL;
 }
 
+bool ShenandoahEnqueueBarrierNode::is_redundant() {
+  if (!ShenandoahAggressiveReferenceDiscovery) {
+    return false;
+  }
+  Unique_Node_List visited;
+  Node_Stack stack(0);
+  stack.push(this, 0);
+
+  while (stack.size() > 0) {
+    Node* n = stack.node();
+    if (visited.member(n)) {
+      stack.pop();
+      continue;
+    }
+    visited.push(n);
+    bool visit_users = false;
+    switch (n->Opcode()) {
+      case Op_CallStaticJava:
+        if (n->as_CallStaticJava()->uncommon_trap_request() == 0) {
+          return false;
+        }
+        break;
+      case Op_CallDynamicJava:
+      case Op_CompareAndExchangeN:
+      case Op_CompareAndExchangeP:
+      case Op_CompareAndSwapN:
+      case Op_CompareAndSwapP:
+      case Op_ShenandoahCompareAndSwapN:
+      case Op_ShenandoahCompareAndSwapP:
+      case Op_GetAndSetN:
+      case Op_GetAndSetP:
+      case Op_Return:
+      case Op_StoreN:
+      case Op_StoreP:
+        return false;
+        break;
+      case Op_AddP:
+      case Op_Allocate:
+      case Op_AllocateArray:
+      case Op_ArrayCopy:
+      case Op_CmpP:
+      case Op_LoadL:
+      case Op_SafePoint:
+      case Op_StoreLConditional:
+      case Op_StoreIConditional:
+      case Op_FastUnlock:
+        break;
+      case Op_CastPP:
+      case Op_CheckCastPP:
+      case Op_CMoveN:
+      case Op_CMoveP:
+      case Op_EncodeP:
+      case Op_Phi:
+      case Op_ShenandoahEnqueueBarrier:
+        visit_users = true;
+        break;
+      default: {
+#ifdef ASSERT
+        fatal("Unknown node in is_redundant: %s", NodeClassNames[n->Opcode()]);
+#endif
+        // Default to useful: better to have excess barriers, rather than miss some.
+        return false;
+      }
+    }
+
+    stack.pop();
+    if (visit_users) {
+      for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
+        Node* user = n->fast_out(i);
+        if (user != NULL) {
+          stack.push(user, 0);
+        }
+      }
+    }
+  }
+  return true;
+}
+
 Node* ShenandoahEnqueueBarrierNode::Identity(PhaseGVN* phase) {
   PhaseIterGVN* igvn = phase->is_IterGVN();
 
@@ -3206,7 +3286,6 @@
     case Op_GetAndAddI:
     case Op_GetAndAddB:
     case Op_GetAndAddS:
-    case Op_ShenandoahEnqueueBarrier:
     case Op_FastLock:
    case Op_FastUnlock:
     case Op_Rethrow:
@@ -3284,6 +3363,7 @@
     case Op_CMoveP:
     case Op_Phi:
     case Op_ShenandoahLoadReferenceBarrier:
+    case Op_ShenandoahEnqueueBarrier:
       // Whether or not these need the barriers depends on their users
       visit_users = true;
       break;
diff -r e3f1f696063f src/hotspot/share/gc/shenandoah/c2/shenandoahSupport.hpp
--- a/src/hotspot/share/gc/shenandoah/c2/shenandoahSupport.hpp	Wed Mar 04 19:23:13 2020 +0100
+++ b/src/hotspot/share/gc/shenandoah/c2/shenandoahSupport.hpp	Mon Mar 23 21:44:46 2020 +0100
@@ -58,7 +58,7 @@
   static void follow_barrier_uses(Node* n, Node* ctrl, Unique_Node_List& uses, PhaseIdealLoop* phase);
   static void test_null(Node*& ctrl, Node* val, Node*& null_ctrl, PhaseIdealLoop* phase);
   static void test_heap_stable(Node*& ctrl, Node* raw_mem, Node*& heap_stable_ctrl,
-                               PhaseIdealLoop* phase);
+                               PhaseIdealLoop* phase, int flags);
   static void call_lrb_stub(Node*& ctrl, Node*& val, Node* load_addr, Node*& result_mem, Node* raw_mem, bool is_native, PhaseIdealLoop* phase);
   static Node* clone_null_check(Node*& c, Node* val, Node* unc_ctrl, PhaseIdealLoop* phase);
   static void fix_null_check(Node* unc, Node* unc_ctrl, Node* new_unc_ctrl, Unique_Node_List& uses,
@@ -96,6 +96,7 @@
   Node* Identity(PhaseGVN* phase);
 
   int Opcode() const;
+  bool is_redundant();
 
 private:
   enum { Needed, NotNeeded, MaybeNeeded };
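test_heap_stable() is generalized here to take the gc-state bits to test instead of hard-wiring ShenandoahHeap::HAS_FORWARDED: the load-reference-barrier expansion still tests HAS_FORWARDED, while the enqueue-barrier expansion now tests TRAVERSAL | MARKING. The emitted AndI/CmpI/Bool sequence reduces to the following predicate (a restatement of the IR above, not new logic):

  // "Stable" means none of the requested bits are set in the per-thread
  // gc_state byte, so the barrier can take its fast path.
  static bool is_heap_state_stable(jbyte gc_state, int flags) {
    return (gc_state & flags) == 0;  // e.g. HAS_FORWARDED, or TRAVERSAL | MARKING
  }

is_redundant() itself is a forward walk over the barrier's transitive users: opcodes that publish the value (stores, CAS/exchange, returns, real calls) make the barrier necessary, pure pointer plumbing (casts, phis, CMoves, nested enqueue barriers) forwards the question to its own users, and any unrecognized opcode conservatively keeps the barrier.
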
diff -r e3f1f696063f src/hotspot/share/gc/shenandoah/shenandoahConcurrentMark.cpp
--- a/src/hotspot/share/gc/shenandoah/shenandoahConcurrentMark.cpp	Wed Mar 04 19:23:13 2020 +0100
+++ b/src/hotspot/share/gc/shenandoah/shenandoahConcurrentMark.cpp	Mon Mar 23 21:44:46 2020 +0100
@@ -176,11 +176,14 @@
 class ShenandoahSATBThreadsClosure : public ThreadClosure {
 private:
   ShenandoahConcMarkSATBBufferClosure* _satb_cl;
+  OopClosure* _cl;
+  MarkingCodeBlobClosure* _code_cl;
   int _thread_parity;
 
 public:
-  ShenandoahSATBThreadsClosure(ShenandoahConcMarkSATBBufferClosure* satb_cl) :
+  ShenandoahSATBThreadsClosure(ShenandoahConcMarkSATBBufferClosure* satb_cl, OopClosure* cl, MarkingCodeBlobClosure* code_cl) :
     _satb_cl(satb_cl),
+    _cl(cl), _code_cl(code_cl),
     _thread_parity(Threads::thread_claim_parity()) {}
 
   void do_thread(Thread* thread) {
@@ -188,10 +191,18 @@
     if (thread->claim_oops_do(true, _thread_parity)) {
       JavaThread* jt = (JavaThread*)thread;
       ShenandoahThreadLocalData::satb_mark_queue(jt).apply_closure_and_empty(_satb_cl);
+      if (ShenandoahAggressiveReferenceDiscovery) {
+        ResourceMark rm;
+        thread->oops_do(_cl, _code_cl);
+      }
     }
   } else if (thread->is_VM_thread()) {
     if (thread->claim_oops_do(true, _thread_parity)) {
       ShenandoahBarrierSet::satb_mark_queue_set().shared_satb_queue()->apply_closure_and_empty(_satb_cl);
+      if (ShenandoahAggressiveReferenceDiscovery) {
+        ResourceMark rm;
+        thread->oops_do(_cl, _code_cl);
+      }
     }
   }
 }
@@ -211,6 +222,14 @@
 
   void work(uint worker_id) {
     ShenandoahHeap* heap = ShenandoahHeap::heap();
+    ReferenceProcessor* rp;
+    if (heap->process_references()) {
+      rp = heap->ref_processor();
+      shenandoah_assert_rp_isalive_installed();
+    } else {
+      rp = NULL;
+    }
+
     ShenandoahParallelWorkerSession worker_session(worker_id);
     // First drain remaining SATB buffers.
     // Notice that this is not strictly necessary for mark-compact. But since
@@ -222,16 +241,17 @@
       ShenandoahConcMarkSATBBufferClosure cl(q);
       ShenandoahSATBMarkQueueSet& satb_mq_set = ShenandoahBarrierSet::satb_mark_queue_set();
       while (satb_mq_set.apply_closure_to_completed_buffer(&cl));
-      ShenandoahSATBThreadsClosure tc(&cl);
-      Threads::threads_do(&tc);
-    }
-
-    ReferenceProcessor* rp;
-    if (heap->process_references()) {
-      rp = heap->ref_processor();
-      shenandoah_assert_rp_isalive_installed();
-    } else {
-      rp = NULL;
+      if (heap->has_forwarded_objects()) {
+        ShenandoahMarkResolveRefsClosure resolve_mark_cl(q, rp);
+        MarkingCodeBlobClosure blobsCl(&resolve_mark_cl, !CodeBlobToOopClosure::FixRelocations);
+        ShenandoahSATBThreadsClosure tc(&cl, &resolve_mark_cl, &blobsCl);
+        Threads::threads_do(&tc);
+      } else {
+        ShenandoahMarkRefsClosure mark_cl(q, rp);
+        MarkingCodeBlobClosure blobsCl(&mark_cl, !CodeBlobToOopClosure::FixRelocations);
+        ShenandoahSATBThreadsClosure tc(&cl, &mark_cl, &blobsCl);
+        Threads::threads_do(&tc);
+      }
     }
 
     if (heap->is_degenerated_gc_in_progress()) {
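With the flag on, final mark pays for the laziness at Reference.get(): every claimed thread additionally has its stack and compiled frames scanned with a marking closure, so a referent that was handed out but still lives only in a register or stack slot is discovered here instead of being kept alive eagerly. A condensed view of the do_thread() path above (same names as the patch; thread claiming elided):

  // Sketch: what each claimed Java thread now does at final mark.
  void do_thread_condensed(JavaThread* jt) {
    // Drain the thread's SATB buffer, as before.
    ShenandoahThreadLocalData::satb_mark_queue(jt).apply_closure_and_empty(_satb_cl);
    // New: the referent may never have been SATB-enqueued, so mark
    // everything reachable from the thread itself.
    if (ShenandoahAggressiveReferenceDiscovery) {
      ResourceMark rm;
      jt->oops_do(_cl, _code_cl);  // _cl marks oops, _code_cl walks nmethods
    }
  }

Note that work() picks the oop closure to match the heap state: ShenandoahMarkResolveRefsClosure while forwarded objects may still be around, plain ShenandoahMarkRefsClosure otherwise.
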
diff -r e3f1f696063f src/hotspot/share/gc/shenandoah/shenandoah_globals.hpp
--- a/src/hotspot/share/gc/shenandoah/shenandoah_globals.hpp	Wed Mar 04 19:23:13 2020 +0100
+++ b/src/hotspot/share/gc/shenandoah/shenandoah_globals.hpp	Mon Mar 23 21:44:46 2020 +0100
@@ -151,6 +151,11 @@
           "cleanup policy. This minimizes footprint at expense of more "   \
           "softref churn in applications.")                                \
                                                                            \
+  experimental(bool, ShenandoahAggressiveReferenceDiscovery, false,        \
+          "Aggressively avoid keeping references alive after "             \
+          "Reference.get(), at the expense of some extra latency "         \
+          "caused by an extra thread-stack scan at final mark")            \
+                                                                           \
   experimental(bool, ShenandoahUncommit, true,                             \
           "Allow Shenandoah to uncommit unused memory.")                   \
                                                                            \
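Since the flag is declared experimental(...), it must be unlocked explicitly. A typical invocation for trying it out (MyApp is a placeholder):

  java -XX:+UnlockExperimentalVMOptions \
       -XX:+UseShenandoahGC \
       -XX:+ShenandoahAggressiveReferenceDiscovery \
       MyApp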