457 }
458
// Returns true if the barrier's Oop projection has at least one real
// consumer. Users that are themselves LoadBarrier nodes referencing this
// result only through their Similar edge are bookkeeping, not true uses.
459 bool LoadBarrierNode::has_true_uses() const {
460 Node* out_res = proj_out_or_null(Oop);
461 if (out_res == NULL) {
// No Oop projection at all -> nothing can consume the barrier's result.
462 return false;
463 }
464
465 for (DUIterator_Fast imax, i = out_res->fast_outs(imax); i < imax; i++) {
466 Node* u = out_res->fast_out(i);
// A non-LoadBarrier user, or a LoadBarrier that reaches this result via
// some edge other than Similar, is a genuine use of the loaded oop.
467 if (!u->is_LoadBarrier() || u->in(Similar) != out_res) {
468 return true;
469 }
470 }
471
472 return false;
473 }
474
475 // == Accesses ==
476
// Builds a control/memory diamond around a pointer CompareAndSwap node,
// presumably so the compared-against oop can be healed by a load barrier
// before the final result is produced (the inner diamond is elided from
// this listing -- see NOTE below). Returns an int (bool) phi carrying
// the CAS outcome.
477 Node* ZBarrierSetC2::make_cas_loadbarrier(C2AtomicAccess& access) const {
478 assert(!UseCompressedOops, "Not allowed");
479 CompareAndSwapNode* cas = (CompareAndSwapNode*)access.raw_access();
480 PhaseGVN& gvn = access.kit()->gvn();
481 Compile* C = Compile::current();
482 GraphKit* kit = access.kit();
483
// Unpack the inputs of the original CAS node.
484 Node* in_ctrl = cas->in(MemNode::Control);
485 Node* in_mem = cas->in(MemNode::Memory);
486 Node* in_adr = cas->in(MemNode::Address);
487 Node* in_val = cas->in(MemNode::ValueIn);
488 Node* in_expected = cas->in(LoadStoreConditionalNode::ExpectedIn);
489
490 float likely = PROB_LIKELY(0.999);
491
492 const TypePtr *adr_type = gvn.type(in_adr)->isa_ptr();
493 Compile::AliasType* alias_type = C->alias_type(adr_type);
494 int alias_idx = C->get_alias_index(adr_type);
495
496 // Outer check - true: continue, false: load and check
497 Node* region = new RegionNode(3);
498 Node* phi = new PhiNode(region, TypeInt::BOOL);
499 Node* phi_mem = new PhiNode(region, Type::MEMORY, adr_type);
500
// NOTE(review): listing lines 501-548 are elided from this chunk; they
// build the inner diamond ('then', 'region2', 'phi2', 'phi_mem2') that
// is wired up below.
549 region->set_req(1, then);
550 region->set_req(2, region2);
551 phi->set_req(1, kit->intcon(1));
552 phi->set_req(2, phi2);
553 phi_mem->init_req(2, phi_mem2);
554 kit->set_memory(phi_mem, alias_idx);
555
// Register the new nodes with GVN: inner diamond first, then the outer.
556 gvn.transform(region2);
557 gvn.transform(phi2);
558 gvn.transform(phi_mem2);
559 gvn.transform(region);
560 gvn.transform(phi);
561 gvn.transform(phi_mem);
562
563 kit->set_control(region);
// Keep the CAS ordered with surrounding memory accesses.
564 kit->insert_mem_bar(Op_MemBarCPUOrder);
565
566 return phi;
567 }
568
// Builds a load-barrier diamond around a CompareAndExchangeP node. The
// phi (of the pointer type) presumably merges the exchange result from
// the two paths -- the diamond construction and the return (listing
// lines 590-647) are not visible in this chunk.
569 Node* ZBarrierSetC2::make_cmpx_loadbarrier(C2AtomicAccess& access) const {
570 CompareAndExchangePNode* cmpx = (CompareAndExchangePNode*)access.raw_access();
571 GraphKit* kit = access.kit();
572 PhaseGVN& gvn = kit->gvn();
573 Compile* C = Compile::current();
574
// Unpack the inputs of the original compare-and-exchange node.
575 Node* in_ctrl = cmpx->in(MemNode::Control);
576 Node* in_mem = cmpx->in(MemNode::Memory);
577 Node* in_adr = cmpx->in(MemNode::Address);
578 Node* in_val = cmpx->in(MemNode::ValueIn);
579 Node* in_expected = cmpx->in(LoadStoreConditionalNode::ExpectedIn);
580
581 float likely = PROB_LIKELY(0.999);
582
// Unlike make_cas_loadbarrier, the address type comes from the node
// itself rather than from GVN's view of the address input.
583 const TypePtr *adr_type = cmpx->get_ptr_type();
584 Compile::AliasType* alias_type = C->alias_type(adr_type);
585 int alias_idx = C->get_alias_index(adr_type);
586
587 // Outer check - true: continue, false: load and check
588 Node* region = new RegionNode(3);
589 Node* phi = new PhiNode(region, adr_type);
// NOTE(review): listing lines 590-647 are elided from this chunk.
648 }
649
// Emits a LoadBarrierNode for oop 'val' loaded from address 'adr' and
// wires its projections into the current GraphKit state. Returns the
// barrier's healed Oop projection, or 'val' unchanged if GVN did not
// keep a LoadBarrier node (i.e. the barrier folded away).
650 Node* ZBarrierSetC2::load_barrier(GraphKit* kit, Node* val, Node* adr, bool weak, bool writeback, bool oop_reload_allowed) const {
651 PhaseGVN& gvn = kit->gvn();
652 Node* barrier = new LoadBarrierNode(Compile::current(), kit->control(), kit->memory(TypeRawPtr::BOTTOM), val, adr, weak, writeback, oop_reload_allowed);
653 Node* transformed_barrier = gvn.transform(barrier);
654
655 if (transformed_barrier->is_LoadBarrier()) {
// Only a freshly kept barrier (not one GVN replaced with an existing
// node) supplies a new control projection for the kit.
656 if (barrier == transformed_barrier) {
657 kit->set_control(gvn.transform(new ProjNode(barrier, LoadBarrierNode::Control)));
658 }
659 Node* result = gvn.transform(new ProjNode(transformed_barrier, LoadBarrierNode::Oop));
660 assert(is_gc_barrier_node(result), "sanity");
661 assert(step_over_gc_barrier(result) == val, "sanity");
662 return result;
663 } else {
// GVN eliminated the barrier entirely; the raw value is already safe.
664 return val;
665 }
666 }
667
668 static bool barrier_needed(C2Access access) {
669 return ZBarrierSet::barrier_needed(access.decorators(), access.type());
670 }
671
// Performs the resolved oop load and attaches a ZGC load barrier when
// the access decorators/type require one.
672 Node* ZBarrierSetC2::load_at_resolved(C2Access& access, const Type* val_type) const {
673 Node* p = BarrierSetC2::load_at_resolved(access, val_type);
674 if (!barrier_needed(access)) {
// Barrier-free access (e.g. non-oop): return the plain load.
675 return p;
676 }
677
678 bool weak = (access.decorators() & ON_WEAK_OOP_REF) != 0;
679
680 GraphKit* kit = access.kit();
681 PhaseGVN& gvn = kit->gvn();
682 Node* adr = access.addr().node();
683 Node* heap_base_oop = access.base();
684 bool unsafe = (access.decorators() & C2_UNSAFE_ACCESS) != 0;
685 if (unsafe) {
686 if (!ZVerifyLoadBarriers) {
// Unsafe access, normal mode: barrier with default flags (only three
// arguments passed -- the remaining load_barrier parameters presumably
// default in the header; confirm against the declaration).
687 p = load_barrier(kit, p, adr);
688 } else {
// Verification mode: emit the barrier unconditionally only when the
// base is provably non-null at compile time ...
689 if (!TypePtr::NULL_PTR->higher_equal(gvn.type(heap_base_oop))) {
690 p = load_barrier(kit, p, adr);
691 } else {
// ... otherwise guard the barrier with a runtime null check on the
// base, built with IdealKit.
692 IdealKit ideal(kit);
693 IdealVariable res(ideal);
694 #define __ ideal.
695 __ declarations_done();
696 __ set(res, p);
697 __ if_then(heap_base_oop, BoolTest::ne, kit->null(), PROB_UNLIKELY(0.999)); {
698 kit->sync_kit(ideal);
699 p = load_barrier(kit, p, adr);
700 __ set(res, p);
701 __ sync_kit(kit);
702 } __ end_if();
703 kit->final_sync(ideal);
704 p = __ value(res);
705 #undef __
706 }
707 }
708 return p;
709 } else {
// Regular oop load: weak flag from ON_WEAK_OOP_REF; writeback and
// oop reload allowed.
710 return load_barrier(access.kit(), p, access.addr().node(), weak, true, true);
711 }
712 }
713
// Value-returning compare-and-exchange on an oop field: create the
// platform node via the shared BarrierSetC2 code, then (when barriers
// apply) wrap it in the load-barrier diamond from make_cmpx_loadbarrier.
714 Node* ZBarrierSetC2::atomic_cmpxchg_val_at_resolved(C2AtomicAccess& access, Node* expected_val,
715                                                     Node* new_val, const Type* val_type) const {
716 Node* result = BarrierSetC2::atomic_cmpxchg_val_at_resolved(access, expected_val, new_val, val_type);
717 if (!barrier_needed(access)) {
718 return result;
719 }
720
// Pinning is cleared before the barrier diamond takes over expansion
// (same pattern as the other atomic entry points in this file).
721 access.set_needs_pinning(false);
722 return make_cmpx_loadbarrier(access);
723 }
724
725 Node* ZBarrierSetC2::atomic_cmpxchg_bool_at_resolved(C2AtomicAccess& access, Node* expected_val,
726 Node* new_val, const Type* value_type) const {
727 Node* result = BarrierSetC2::atomic_cmpxchg_bool_at_resolved(access, expected_val, new_val, value_type);
728 if (!barrier_needed(access)) {
729 return result;
730 }
731
732 Node* load_store = access.raw_access();
733 bool weak_cas = (access.decorators() & C2_WEAK_CMPXCHG) != 0;
734 bool expected_is_null = (expected_val->get_ptr_type() == TypePtr::NULL_PTR);
735
736 if (!expected_is_null) {
737 if (weak_cas) {
738 access.set_needs_pinning(false);
739 load_store = make_cas_loadbarrier(access);
740 } else {
741 access.set_needs_pinning(false);
742 load_store = make_cas_loadbarrier(access);
743 }
744 }
745
746 return load_store;
747 }
748
// Atomic exchange of an oop: the returned (previous) value must pass
// through a load barrier. Called with weak=false, writeback=false,
// oop_reload_allowed=false -- presumably because the field no longer
// holds the returned value after the swap, so the barrier must not
// re-load it from memory (confirm against LoadBarrierNode semantics).
749 Node* ZBarrierSetC2::atomic_xchg_at_resolved(C2AtomicAccess& access, Node* new_val, const Type* val_type) const {
750 Node* result = BarrierSetC2::atomic_xchg_at_resolved(access, new_val, val_type);
751 if (!barrier_needed(access)) {
752 return result;
753 }
754
755 Node* load_store = access.raw_access();
756 Node* adr = access.addr().node();
757
758 return load_barrier(access.kit(), load_store, adr, false, false, false);
759 }
760
761 // == Macro Expansion ==
762
// Macro-expands a LoadBarrierNode into its final subgraph. Only the
// prologue and the ZVerifyLoadBarriers early-out are visible in this
// chunk; the expansion proper continues beyond it.
763 void ZBarrierSetC2::expand_loadbarrier_node(PhaseMacroExpand* phase, LoadBarrierNode* barrier) const {
764 Node* in_ctrl = barrier->in(LoadBarrierNode::Control);
765 Node* in_mem = barrier->in(LoadBarrierNode::Memory);
766 Node* in_val = barrier->in(LoadBarrierNode::Oop);
767 Node* in_adr = barrier->in(LoadBarrierNode::Address);
768
769 Node* out_ctrl = barrier->proj_out(LoadBarrierNode::Control);
770 Node* out_res = barrier->proj_out(LoadBarrierNode::Oop);
771
772 PhaseIterGVN &igvn = phase->igvn();
773
774 if (ZVerifyLoadBarriers) {
// Verification mode emits no real barrier here: the node is erased by
// wiring its projections straight through to its inputs.
775 igvn.replace_node(out_res, in_val);
776 igvn.replace_node(out_ctrl, in_ctrl);
777 return;
778 }
|
457 }
458
// Returns true if the barrier's Oop projection has at least one real
// consumer. Users that are themselves LoadBarrier nodes referencing this
// result only through their Similar edge are bookkeeping, not true uses.
459 bool LoadBarrierNode::has_true_uses() const {
460 Node* out_res = proj_out_or_null(Oop);
461 if (out_res == NULL) {
// No Oop projection at all -> nothing can consume the barrier's result.
462 return false;
463 }
464
465 for (DUIterator_Fast imax, i = out_res->fast_outs(imax); i < imax; i++) {
466 Node* u = out_res->fast_out(i);
// A non-LoadBarrier user, or a LoadBarrier that reaches this result via
// some edge other than Similar, is a genuine use of the loaded oop.
467 if (!u->is_LoadBarrier() || u->in(Similar) != out_res) {
468 return true;
469 }
470 }
471
472 return false;
473 }
474
475 // == Accesses ==
476
// Builds a control/memory diamond around a pointer CompareAndSwap node,
// presumably so the compared-against oop can be healed by a load barrier
// (the inner diamond is elided from this listing -- see NOTE below).
// Returns an int (bool) phi carrying the CAS outcome.
477 Node* ZBarrierSetC2::make_cas_loadbarrier(C2AtomicParseAccess& access) const {
478 assert(!UseCompressedOops, "Not allowed");
479 CompareAndSwapNode* cas = (CompareAndSwapNode*)access.raw_access();
// This revision fetches GVN directly from the access object.
480 PhaseGVN& gvn = access.gvn();
481 Compile* C = Compile::current();
482 GraphKit* kit = access.kit();
483
// Unpack the inputs of the original CAS node.
484 Node* in_ctrl = cas->in(MemNode::Control);
485 Node* in_mem = cas->in(MemNode::Memory);
486 Node* in_adr = cas->in(MemNode::Address);
487 Node* in_val = cas->in(MemNode::ValueIn);
488 Node* in_expected = cas->in(LoadStoreConditionalNode::ExpectedIn);
489
490 float likely = PROB_LIKELY(0.999);
491
492 const TypePtr *adr_type = gvn.type(in_adr)->isa_ptr();
493 Compile::AliasType* alias_type = C->alias_type(adr_type);
494 int alias_idx = C->get_alias_index(adr_type);
495
496 // Outer check - true: continue, false: load and check
497 Node* region = new RegionNode(3);
498 Node* phi = new PhiNode(region, TypeInt::BOOL);
499 Node* phi_mem = new PhiNode(region, Type::MEMORY, adr_type);
500
// NOTE(review): listing lines 501-548 are elided from this chunk; they
// build the inner diamond ('then', 'region2', 'phi2', 'phi_mem2') that
// is wired up below.
549 region->set_req(1, then);
550 region->set_req(2, region2);
551 phi->set_req(1, kit->intcon(1));
552 phi->set_req(2, phi2);
553 phi_mem->init_req(2, phi_mem2);
554 kit->set_memory(phi_mem, alias_idx);
555
// Register the new nodes with GVN: inner diamond first, then the outer.
556 gvn.transform(region2);
557 gvn.transform(phi2);
558 gvn.transform(phi_mem2);
559 gvn.transform(region);
560 gvn.transform(phi);
561 gvn.transform(phi_mem);
562
563 kit->set_control(region);
// Keep the CAS ordered with surrounding memory accesses.
564 kit->insert_mem_bar(Op_MemBarCPUOrder);
565
566 return phi;
567 }
568
// Builds a load-barrier diamond around a CompareAndExchangeP node. The
// phi (of the pointer type) presumably merges the exchange result from
// the two paths -- the diamond construction and the return (listing
// lines 590-647) are not visible in this chunk.
569 Node* ZBarrierSetC2::make_cmpx_loadbarrier(C2AtomicParseAccess& access) const {
570 CompareAndExchangePNode* cmpx = (CompareAndExchangePNode*)access.raw_access();
571 GraphKit* kit = access.kit();
572 PhaseGVN& gvn = kit->gvn();
573 Compile* C = Compile::current();
574
// Unpack the inputs of the original compare-and-exchange node.
575 Node* in_ctrl = cmpx->in(MemNode::Control);
576 Node* in_mem = cmpx->in(MemNode::Memory);
577 Node* in_adr = cmpx->in(MemNode::Address);
578 Node* in_val = cmpx->in(MemNode::ValueIn);
579 Node* in_expected = cmpx->in(LoadStoreConditionalNode::ExpectedIn);
580
581 float likely = PROB_LIKELY(0.999);
582
// Unlike make_cas_loadbarrier, the address type comes from the node
// itself rather than from GVN's view of the address input.
583 const TypePtr *adr_type = cmpx->get_ptr_type();
584 Compile::AliasType* alias_type = C->alias_type(adr_type);
585 int alias_idx = C->get_alias_index(adr_type);
586
587 // Outer check - true: continue, false: load and check
588 Node* region = new RegionNode(3);
589 Node* phi = new PhiNode(region, adr_type);
// NOTE(review): listing lines 590-647 are elided from this chunk.
648 }
649
// Emits a LoadBarrierNode for oop 'val' loaded from address 'adr' and
// wires its projections into the current GraphKit state. Returns the
// barrier's healed Oop projection, or 'val' unchanged if GVN did not
// keep a LoadBarrier node (i.e. the barrier folded away).
650 Node* ZBarrierSetC2::load_barrier(GraphKit* kit, Node* val, Node* adr, bool weak, bool writeback, bool oop_reload_allowed) const {
651 PhaseGVN& gvn = kit->gvn();
652 Node* barrier = new LoadBarrierNode(Compile::current(), kit->control(), kit->memory(TypeRawPtr::BOTTOM), val, adr, weak, writeback, oop_reload_allowed);
653 Node* transformed_barrier = gvn.transform(barrier);
654
655 if (transformed_barrier->is_LoadBarrier()) {
// Only a freshly kept barrier (not one GVN replaced with an existing
// node) supplies a new control projection for the kit.
656 if (barrier == transformed_barrier) {
657 kit->set_control(gvn.transform(new ProjNode(barrier, LoadBarrierNode::Control)));
658 }
659 Node* result = gvn.transform(new ProjNode(transformed_barrier, LoadBarrierNode::Oop));
660 assert(is_gc_barrier_node(result), "sanity");
661 assert(step_over_gc_barrier(result) == val, "sanity");
662 return result;
663 } else {
// GVN eliminated the barrier entirely; the raw value is already safe.
664 return val;
665 }
666 }
667
// Returns true if ZGC needs a load barrier for an access with the given
// decorators and basic type (delegates to the shared ZBarrierSet check).
668 static bool barrier_needed(C2Access& access) {
669 return ZBarrierSet::barrier_needed(access.decorators(), access.type());
670 }
671
// Performs the resolved oop load and attaches a ZGC load barrier when
// the access decorators/type require one. This entry is parse-time
// only -- it asserts is_parse_access() before downcasting.
672 Node* ZBarrierSetC2::load_at_resolved(C2Access& access, const Type* val_type) const {
673 Node* p = BarrierSetC2::load_at_resolved(access, val_type);
674 if (!barrier_needed(access)) {
// Barrier-free access (e.g. non-oop): return the plain load.
675 return p;
676 }
677
678 bool weak = (access.decorators() & ON_WEAK_OOP_REF) != 0;
679
680 assert(access.is_parse_access(), "entry not supported at optimization time");
681 C2ParseAccess& parse_access = static_cast<C2ParseAccess&>(access);
682 GraphKit* kit = parse_access.kit();
683 PhaseGVN& gvn = kit->gvn();
684 Node* adr = access.addr().node();
685 Node* heap_base_oop = access.base();
686 bool unsafe = (access.decorators() & C2_UNSAFE_ACCESS) != 0;
687 if (unsafe) {
688 if (!ZVerifyLoadBarriers) {
// Unsafe access, normal mode: barrier with default flags (only three
// arguments passed -- the remaining load_barrier parameters presumably
// default in the header; confirm against the declaration).
689 p = load_barrier(kit, p, adr);
690 } else {
// Verification mode: emit the barrier unconditionally only when the
// base is provably non-null at compile time ...
691 if (!TypePtr::NULL_PTR->higher_equal(gvn.type(heap_base_oop))) {
692 p = load_barrier(kit, p, adr);
693 } else {
// ... otherwise guard the barrier with a runtime null check on the
// base, built with IdealKit.
694 IdealKit ideal(kit);
695 IdealVariable res(ideal);
696 #define __ ideal.
697 __ declarations_done();
698 __ set(res, p);
699 __ if_then(heap_base_oop, BoolTest::ne, kit->null(), PROB_UNLIKELY(0.999)); {
700 kit->sync_kit(ideal);
701 p = load_barrier(kit, p, adr);
702 __ set(res, p);
703 __ sync_kit(kit);
704 } __ end_if();
705 kit->final_sync(ideal);
706 p = __ value(res);
707 #undef __
708 }
709 }
710 return p;
711 } else {
// Regular oop load: weak flag from ON_WEAK_OOP_REF; writeback and
// oop reload allowed.
712 return load_barrier(parse_access.kit(), p, access.addr().node(), weak, true, true);
713 }
714 }
715
// Value-returning compare-and-exchange on an oop field: create the
// platform node via the shared BarrierSetC2 code, then (when barriers
// apply) wrap it in the load-barrier diamond from make_cmpx_loadbarrier.
716 Node* ZBarrierSetC2::atomic_cmpxchg_val_at_resolved(C2AtomicParseAccess& access, Node* expected_val,
717                                                     Node* new_val, const Type* val_type) const {
718 Node* result = BarrierSetC2::atomic_cmpxchg_val_at_resolved(access, expected_val, new_val, val_type);
719 if (!barrier_needed(access)) {
720 return result;
721 }
722
// Pinning is cleared before the barrier diamond takes over expansion
// (same pattern as the other atomic entry points in this file).
723 access.set_needs_pinning(false);
724 return make_cmpx_loadbarrier(access);
725 }
726
727 Node* ZBarrierSetC2::atomic_cmpxchg_bool_at_resolved(C2AtomicParseAccess& access, Node* expected_val,
728 Node* new_val, const Type* value_type) const {
729 Node* result = BarrierSetC2::atomic_cmpxchg_bool_at_resolved(access, expected_val, new_val, value_type);
730 if (!barrier_needed(access)) {
731 return result;
732 }
733
734 Node* load_store = access.raw_access();
735 bool weak_cas = (access.decorators() & C2_WEAK_CMPXCHG) != 0;
736 bool expected_is_null = (expected_val->get_ptr_type() == TypePtr::NULL_PTR);
737
738 if (!expected_is_null) {
739 if (weak_cas) {
740 access.set_needs_pinning(false);
741 load_store = make_cas_loadbarrier(access);
742 } else {
743 access.set_needs_pinning(false);
744 load_store = make_cas_loadbarrier(access);
745 }
746 }
747
748 return load_store;
749 }
750
// Atomic exchange of an oop: the returned (previous) value must pass
// through a load barrier. Called with weak=false, writeback=false,
// oop_reload_allowed=false -- presumably because the field no longer
// holds the returned value after the swap, so the barrier must not
// re-load it from memory (confirm against LoadBarrierNode semantics).
751 Node* ZBarrierSetC2::atomic_xchg_at_resolved(C2AtomicParseAccess& access, Node* new_val, const Type* val_type) const {
752 Node* result = BarrierSetC2::atomic_xchg_at_resolved(access, new_val, val_type);
753 if (!barrier_needed(access)) {
754 return result;
755 }
756
757 Node* load_store = access.raw_access();
758 Node* adr = access.addr().node();
759
// This entry is parse-time only: downcast to reach the GraphKit.
760 assert(access.is_parse_access(), "entry not supported at optimization time");
761 C2ParseAccess& parse_access = static_cast<C2ParseAccess&>(access);
762 return load_barrier(parse_access.kit(), load_store, adr, false, false, false);
763 }
764
765 // == Macro Expansion ==
766
// Macro-expands a LoadBarrierNode into its final subgraph. Only the
// prologue and the ZVerifyLoadBarriers early-out are visible in this
// chunk; the expansion proper continues beyond it.
767 void ZBarrierSetC2::expand_loadbarrier_node(PhaseMacroExpand* phase, LoadBarrierNode* barrier) const {
768 Node* in_ctrl = barrier->in(LoadBarrierNode::Control);
769 Node* in_mem = barrier->in(LoadBarrierNode::Memory);
770 Node* in_val = barrier->in(LoadBarrierNode::Oop);
771 Node* in_adr = barrier->in(LoadBarrierNode::Address);
772
773 Node* out_ctrl = barrier->proj_out(LoadBarrierNode::Control);
774 Node* out_res = barrier->proj_out(LoadBarrierNode::Oop);
775
776 PhaseIterGVN &igvn = phase->igvn();
777
778 if (ZVerifyLoadBarriers) {
// Verification mode emits no real barrier here: the node is erased by
// wiring its projections straight through to its inputs.
779 igvn.replace_node(out_res, in_val);
780 igvn.replace_node(out_ctrl, in_ctrl);
781 return;
782 }
|