// NOTE(review): numbered excerpt (file lines 744-816) of a C2 load-barrier
// expansion routine from ZGC's barrier-set code. This is a fragment:
// `in_ctrl` and `phase` are defined before the excerpt, and the body of the
// trailing `if (basic_load_barrier) {` continues after it.
//
// Unpack the barrier's inputs and its two output projections.
// `in_mem`/`in_adr` are unused in this excerpt -- presumably consumed by the
// expansion code that follows it; verify against the full routine.
744 Node* in_mem = barrier->in(LoadBarrierNode::Memory);
745 Node* in_val = barrier->in(LoadBarrierNode::Oop);
746 Node* in_adr = barrier->in(LoadBarrierNode::Address);
747
748 Node* out_ctrl = barrier->proj_out(LoadBarrierNode::Control);
749 Node* out_res = barrier->proj_out(LoadBarrierNode::Oop);
750
751 PhaseIterGVN &igvn = phase->igvn();
752
// Verification mode: wire the barrier out entirely -- its oop and control
// projections are replaced by the incoming oop and control.
753 if (ZVerifyLoadBarriers) {
754 igvn.replace_node(out_res, in_val);
755 igvn.replace_node(out_ctrl, in_ctrl);
756 return;
757 }
758
759 if (barrier->can_be_eliminated()) {
760 // Clone and pin the load for this barrier below the dominating
761 // barrier: the load cannot be allowed to float above the
762 // dominating barrier
763 Node* load = in_val;
// The oop may arrive wrapped in a DecodeN (narrow-oop decode); peel it off
// here so the underlying Load can be cloned, and re-wrap the clone below.
764 Node* decode = NULL;
765 if (load->is_DecodeN()) {
766 decode = load;
767 load = load->in(1);
768 }
769 if (load->is_Load()) {
770 Node* new_load = load->clone();
771 Node* addp = new_load->in(MemNode::Address);
772 assert(addp->is_AddP() || addp->is_Phi(), "bad address");
// A CastPP pins the cloned load's address under the dominating barrier's
// control; the `true` argument presumably requests a carried control
// dependency -- confirm against CastPPNode's constructor.
773 Node* cast = new CastPPNode(addp, igvn.type(addp), true);
774 Node* ctrl = NULL;
// The Similar edge points at the dominating barrier (or, if that barrier
// was already expanded, at the Phi produced by its expansion).
775 Node* similar = barrier->in(LoadBarrierNode::Similar);
776 if (similar->is_Phi()) {
777 // already expanded
778 ctrl = similar->in(0);
779 } else {
780 assert(similar->is_Proj() && similar->in(0)->is_LoadBarrier(), "unexpected graph shape");
781 ctrl = similar->in(0)->as_LoadBarrier()->proj_out(LoadBarrierNode::Control);
782 }
783 assert(ctrl != NULL, "bad control");
784 cast->set_req(0, ctrl);
785 igvn.transform(cast);
786 new_load->set_req(MemNode::Address, cast);
787 igvn.transform(new_load);
788
789 Node* new_in_val = new_load;
790 if (decode != NULL) {
// Re-apply the DecodeN on top of the cloned load so consumers keep seeing
// a decoded oop.
791 new_in_val = decode->clone();
792 new_in_val->set_req(1, new_load);
793 igvn.transform(new_in_val);
794 }
795
796 igvn.replace_node(out_res, new_in_val);
797 igvn.replace_node(out_ctrl, in_ctrl);
798 return;
799 }
// Oop was not (possibly a decode of) a Load -- fall through and expand.
800 // cannot eliminate
801 }
802
803 // There are two cases that require the basic loadbarrier
804 // 1) When the writeback of a healed oop must be avoided (swap)
805 // 2) When we must guarantee that no reload of is done (swap, cas, cmpx)
806 if (!barrier->is_writeback()) {
807 assert(!barrier->oop_reload_allowed(), "writeback barriers should be marked as requires oop");
808 }
809
// SPARC always takes the basic (non-optimized) expansion path.
810 #ifdef SPARC
811 bool basic_load_barrier = true;
812 #else
813 bool basic_load_barrier = !barrier->oop_reload_allowed();
814 #endif
815
816 if (basic_load_barrier) {
|
// NOTE(review): numbered excerpt (file lines 744-805) of a C2 load-barrier
// expansion routine from ZGC's barrier-set code. This is a fragment:
// `in_ctrl` and `phase` are defined before the excerpt, and the body of the
// trailing `if (basic_load_barrier) {` continues after it.
//
// Unpack the barrier's inputs and its two output projections.
// `in_mem`/`in_adr` are unused in this excerpt -- presumably consumed by the
// expansion code that follows it; verify against the full routine.
744 Node* in_mem = barrier->in(LoadBarrierNode::Memory);
745 Node* in_val = barrier->in(LoadBarrierNode::Oop);
746 Node* in_adr = barrier->in(LoadBarrierNode::Address);
747
748 Node* out_ctrl = barrier->proj_out(LoadBarrierNode::Control);
749 Node* out_res = barrier->proj_out(LoadBarrierNode::Oop);
750
751 PhaseIterGVN &igvn = phase->igvn();
752
// Verification mode: wire the barrier out entirely -- its oop and control
// projections are replaced by the incoming oop and control.
753 if (ZVerifyLoadBarriers) {
754 igvn.replace_node(out_res, in_val);
755 igvn.replace_node(out_ctrl, in_ctrl);
756 return;
757 }
758
759 if (barrier->can_be_eliminated()) {
760 // Clone and pin the load for this barrier below the dominating
761 // barrier: the load cannot be allowed to float above the
762 // dominating barrier
763 Node* load = in_val;
764
// Note: only a direct Load is handled here; an oop arriving through any
// wrapper node falls through to the non-eliminated expansion below.
765 if (load->is_Load()) {
766 Node* new_load = load->clone();
767 Node* addp = new_load->in(MemNode::Address);
768 assert(addp->is_AddP() || addp->is_Phi(), "bad address");
// A CastPP pins the cloned load's address under the dominating barrier's
// control; the `true` argument presumably requests a carried control
// dependency -- confirm against CastPPNode's constructor.
769 Node* cast = new CastPPNode(addp, igvn.type(addp), true);
770 Node* ctrl = NULL;
// The Similar edge points at the dominating barrier (or, if that barrier
// was already expanded, at the Phi produced by its expansion).
771 Node* similar = barrier->in(LoadBarrierNode::Similar);
772 if (similar->is_Phi()) {
773 // already expanded
774 ctrl = similar->in(0);
775 } else {
776 assert(similar->is_Proj() && similar->in(0)->is_LoadBarrier(), "unexpected graph shape");
777 ctrl = similar->in(0)->as_LoadBarrier()->proj_out(LoadBarrierNode::Control);
778 }
779 assert(ctrl != NULL, "bad control");
780 cast->set_req(0, ctrl);
781 igvn.transform(cast);
782 new_load->set_req(MemNode::Address, cast);
783 igvn.transform(new_load);
784
785 igvn.replace_node(out_res, new_load);
786 igvn.replace_node(out_ctrl, in_ctrl);
787 return;
788 }
// Oop was not a Load -- fall through and expand.
789 // cannot eliminate
790 }
791
792 // There are two cases that require the basic loadbarrier
793 // 1) When the writeback of a healed oop must be avoided (swap)
794 // 2) When we must guarantee that no reload of is done (swap, cas, cmpx)
795 if (!barrier->is_writeback()) {
796 assert(!barrier->oop_reload_allowed(), "writeback barriers should be marked as requires oop");
797 }
798
// SPARC always takes the basic (non-optimized) expansion path.
799 #ifdef SPARC
800 bool basic_load_barrier = true;
801 #else
802 bool basic_load_barrier = !barrier->oop_reload_allowed();
803 #endif
804
805 if (basic_load_barrier) {
|