//=============================================================================
// Should LoadNode::Ideal() attempt to remove control edges?
bool LoadNode::can_remove_control() const {
  return true;
}
uint LoadNode::size_of() const { return sizeof(*this); }
uint LoadNode::cmp( const Node &n ) const
{ return !Type::cmp( _type, ((LoadNode&)n)._type ); }
const Type *LoadNode::bottom_type() const { return _type; }
uint LoadNode::ideal_reg() const {
  return _type->ideal_reg();
}

#ifndef PRODUCT
void LoadNode::dump_spec(outputStream *st) const {
  MemNode::dump_spec(st);
  if( !Verbose && !WizardMode ) {
    // standard dump does this in Verbose and WizardMode
    st->print(" #"); _type->dump_on(st);
  }
  if (!depends_only_on_test()) {
    st->print(" (does not depend only on test)");
  }
}
#endif

#ifdef ASSERT
//----------------------------is_immutable_value-------------------------------
// Helper function to allow a raw load without control edge for some cases
bool LoadNode::is_immutable_value(Node* adr) {
  return (adr->is_AddP() && adr->in(AddPNode::Base)->is_top() &&
          adr->in(AddPNode::Address)->Opcode() == Op_ThreadLocal &&
          (adr->in(AddPNode::Offset)->find_intptr_t_con(-1) ==
           in_bytes(JavaThread::osthread_offset())));
}
#endif
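
// Illustrative shape matched by is_immutable_value() above (a sketch, not
// code from this file): a raw LoadP(AddP(top, ThreadLocal, #osthread_offset))
// reads the OSThread* out of the current JavaThread. That field does not
// change once the thread is running, so such a load is safe without a
// control edge.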

//----------------------------LoadNode::make-----------------------------------
// Polymorphic factory method:
Node *LoadNode::make(PhaseGVN& gvn, Node *ctl, Node *mem, Node *adr, const TypePtr* adr_type, const Type *rt, BasicType bt, MemOrd mo,
                     ControlDependency control_dependency, bool unaligned, bool mismatched) {
  // ... (elided) ...

      BasicType ary_elem = ary_t->klass()->as_array_klass()->element_type()->basic_type();
      uint header = arrayOopDesc::base_offset_in_bytes(ary_elem);
      uint shift  = exact_log2(type2aelembytes(ary_elem));

      Node* diff = phase->transform(new SubINode(ac->in(ArrayCopyNode::SrcPos), ac->in(ArrayCopyNode::DestPos)));
#ifdef _LP64
      diff = phase->transform(new ConvI2LNode(diff));
#endif
      diff = phase->transform(new LShiftXNode(diff, phase->intcon(shift)));
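      // Worked example (illustrative numbers only): for an int[] copy,
      // shift == 2; with SrcPos == 4 and DestPos == 1, diff is
      // (4 - 1) << 2 == 12 bytes, so a destination offset X is redirected
      // below to read from source offset X + 12.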

      Node* offset = phase->transform(new AddXNode(addp->in(AddPNode::Offset), diff));
      addp->set_req(AddPNode::Offset, offset);
      ld->set_req(MemNode::Address, phase->transform(addp));

      if (in(0) != NULL) {
        assert(ac->in(0) != NULL, "alloc must have control");
        ld->set_req(0, ac->in(0));
      }
    }
    // load depends on the tests that validate the arraycopy
    ld->as_Load()->_control_dependency = Pinned;
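    // (Pinned means depends_only_on_test() is false: the cloned load must
    // not float above the guards that establish the arraycopy's validity.)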
    return ld;
  }
  return NULL;
}


//---------------------------can_see_stored_value------------------------------
// This routine exists to make sure this set of tests is done the same
// everywhere. We need to make a coordinated change: first LoadNode::Ideal
// will change the graph shape in a way which makes memory live twice at the
// same time (uses the Oracle model of aliasing), then some
// LoadXNode::Identity will fold things back to the equivalence-class model
// of aliasing.
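// Illustrative folding this enables (a sketch, assuming the usual Identity
// caller): given StoreI(ctl, mem, adr, v) followed by LoadI(ctl, store, adr)
// on the same address, the load can see v as the stored value, and
// LoadNode::Identity replaces the load with v outright.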
Node* MemNode::can_see_stored_value(Node* st, PhaseTransform* phase) const {
  Node* ld_adr = in(MemNode::Address);
  intptr_t ld_off = 0;
  AllocateNode* ld_alloc = AllocateNode::Ideal_allocation(ld_adr, phase, ld_off);
  const TypeInstPtr* tp = phase->type(ld_adr)->isa_instptr();
  Compile::AliasType* atp = (tp != NULL) ? phase->C->alias_type(tp) : NULL;
  // This is more general than load from boxing objects.
  // ... (elided) ...

    if (!addr_t->is_known_instance() &&
        addr_t->is_ptr_to_boxed_value()) {
      // Use _idx of address base (could be Phi node) for boxed values.
      intptr_t ignore = 0;
      Node* base = AddPNode::Ideal_base_and_offset(in(Address), phase, ignore);
      this_iid = base->_idx;
    }
    const Type* this_type = bottom_type();
    for (DUIterator_Fast imax, i = region->fast_outs(imax); i < imax; i++) {
      Node* phi = region->fast_out(i);
      if (phi->is_Phi() && phi != mem &&
          phi->as_Phi()->is_same_inst_field(this_type, this_iid, this_index, this_offset)) {
        return phi;
      }
    }
  }

  return this;
}

// Construct an equivalent unsigned load.
Node* LoadNode::convert_to_unsigned_load(PhaseGVN& gvn) {
  BasicType bt = T_ILLEGAL;
  const Type* rt = NULL;
  switch (Opcode()) {
    case Op_LoadUB: return this;
    case Op_LoadUS: return this;
    case Op_LoadB: bt = T_BOOLEAN; rt = TypeInt::UBYTE; break;
    case Op_LoadS: bt = T_CHAR;    rt = TypeInt::CHAR;  break;
    default:
      assert(false, "no unsigned variant");
      return NULL;
  }
  return LoadNode::make(gvn, in(MemNode::Control), in(MemNode::Memory), in(MemNode::Address),
                        adr_type(), rt, bt, _mo, _control_dependency,
                        is_unaligned_access(), is_mismatched_access());
}
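
// Illustrative use (an assumption about callers, not code from this file):
// idealizing "byteArr[i] & 0xFF" can rewrite the LoadB into a LoadUB via
// convert_to_unsigned_load(), after which the masking AndI folds away.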

// Construct an equivalent signed load.
Node* LoadNode::convert_to_signed_load(PhaseGVN& gvn) {
  BasicType bt = T_ILLEGAL;
  const Type* rt = NULL;
  switch (Opcode()) {
    case Op_LoadUB: bt = T_BYTE;  rt = TypeInt::BYTE;  break;
    case Op_LoadUS: bt = T_SHORT; rt = TypeInt::SHORT; break;
    default:
      return this; // All other loads are signed.
  }
  return LoadNode::make(gvn, in(MemNode::Control), in(MemNode::Memory), in(MemNode::Address),
                        adr_type(), rt, bt, _mo, _control_dependency,
                        is_unaligned_access(), is_mismatched_access());
}
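
// Illustrative use (again an assumption about callers): the sign-extension
// idiom "(x << 24) >> 24" applied to an unsigned byte load can be rewritten
// into a plain LoadB via convert_to_signed_load(), making both shifts
// redundant.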

// We're loading from an object which has autobox behaviour.
// If this object is the result of a valueOf call we'll have a phi
// merging a newly allocated object and a load from the cache.
// We want to replace this load with the original incoming
// argument to the valueOf call.
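// Java-level effect (illustrative sketch only):
//   Integer boxed = Integer.valueOf(i);
//   int k = boxed.intValue();
// Once boxing is eliminated, the load of boxed.value folds to i itself.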
Node* LoadNode::eliminate_autobox(PhaseGVN* phase) {
  assert(phase->C->eliminate_boxing(), "sanity");
  intptr_t ignore = 0;
  Node* base = AddPNode::Ideal_base_and_offset(in(Address), phase, ignore);
  if ((base == NULL) || base->is_Phi()) {
    // Push the loads from the phi that comes from valueOf up
    // through it to allow elimination of the loads and the recovery
    // of the original value. It is done in split_through_phi().
    return NULL;
  } else if (base->is_Load() ||
             (base->is_DecodeN() && base->in(1)->is_Load())) {
    // Eliminate the load of boxed value for integer types from the cache
    // array by deriving the value from the index into the array.
    // Capture the offset of the load and then reverse the computation.