src/share/vm/opto/callnode.cpp

rev 8568 : 8086046: escape analysis generates incorrect code as of B67
Summary: a load bypasses an arraycopy that sets the value it reads, once the ArrayCopyNode has been expanded
Reviewed-by:
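For context on the failure mode: when a load searches up the memory graph, it is allowed to step past calls that cannot modify the location it reads, and CallNode::may_modify() answers that question. The sketch below is illustrative only (the function name and loop structure are simplified assumptions, not the actual HotSpot memory-chain code); it shows why a wrong "false" answer for an expanded arraycopy stub lets the load step past the copy and observe the stale, pre-copy value.

    #include "opto/callnode.hpp"
    #include "opto/memnode.hpp"

    // Illustrative sketch only, not the actual HotSpot optimization code:
    // walk up the memory chain, stepping past calls that cannot modify the
    // address being loaded.  If may_modify() wrongly answers false for an
    // expanded arraycopy stub, the walk steps past the copy and the load
    // reads the stale, pre-copy memory state.
    static Node* step_past_harmless_calls(Node* mem, const TypeOopPtr* addr_t,
                                          PhaseTransform* phase) {
      while (mem->is_Proj() && mem->in(0)->is_Call()) {
        CallNode* call = mem->in(0)->as_Call();
        if (call->may_modify(addr_t, phase)) {
          break;                           // the call may write this location
        }
        mem = call->in(TypeFunc::Memory);  // safe to look past the call
      }
      return mem;
    }

The change under review moves both is_call_to_arraycopystub() and the arraycopy handling in may_modify() from CallLeafNode up to CallNode, so the check now covers every call node, not only CallLeaf calls.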


Old version (before the fix):

 707 
 708   case TypeFunc::ReturnAdr:
 709   case TypeFunc::FramePtr:
 710   default:
 711     ShouldNotReachHere();
 712   }
 713   return NULL;
 714 }
 715 
 716 // Do we Match on this edge index or not?  Match no edges
 717 uint CallNode::match_edge(uint idx) const {
 718   return 0;
 719 }
 720 
 721 //
 722 // Determine whether the call could modify the field of the specified
 723 // instance at the specified offset.
 724 //
 725 bool CallNode::may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) {
 726   assert((t_oop != NULL), "sanity");
 727   if (t_oop->is_known_instance()) {
 728     // The instance_id is set only for scalar-replaceable allocations which
 729     // are not passed as arguments according to Escape Analysis.
 730     return false;
 731   }
 732   if (t_oop->is_ptr_to_boxed_value()) {
 733     ciKlass* boxing_klass = t_oop->klass();
 734     if (is_CallStaticJava() && as_CallStaticJava()->is_boxing_method()) {
 735       // Skip unrelated boxing methods.
 736       Node* proj = proj_out(TypeFunc::Parms);
 737       if ((proj == NULL) || (phase->type(proj)->is_instptr()->klass() != boxing_klass)) {
 738         return false;
 739       }
 740     }
 741     if (is_CallJava() && as_CallJava()->method() != NULL) {
 742       ciMethod* meth = as_CallJava()->method();
 743       if (meth->is_accessor()) {
 744         return false;
 745       }
 746       // May modify (by reflection) if a boxing object is passed


 892   if (can_reshape && cg != NULL && cg->is_mh_late_inline() && !cg->already_attempted()) {
 893     // Check whether this MethodHandle call becomes a candidate for inlining
 894     ciMethod* callee = cg->method();
 895     vmIntrinsics::ID iid = callee->intrinsic_id();
 896     if (iid == vmIntrinsics::_invokeBasic) {
 897       if (in(TypeFunc::Parms)->Opcode() == Op_ConP) {
 898         phase->C->prepend_late_inline(cg);
 899         set_generator(NULL);
 900       }
 901     } else {
 902       assert(callee->has_member_arg(), "wrong type of call?");
 903       if (in(TypeFunc::Parms + callee->arg_size() - 1)->Opcode() == Op_ConP) {
 904         phase->C->prepend_late_inline(cg);
 905         set_generator(NULL);
 906       }
 907     }
 908   }
 909   return SafePointNode::Ideal(phase, can_reshape);
 910 }
 911 
 912 
 913 //=============================================================================
 914 uint CallJavaNode::size_of() const { return sizeof(*this); }
 915 uint CallJavaNode::cmp( const Node &n ) const {
 916   CallJavaNode &call = (CallJavaNode&)n;
 917   return CallNode::cmp(call) && _method == call._method;
 918 }
 919 #ifndef PRODUCT
 920 void CallJavaNode::dump_spec(outputStream *st) const {
 921   if( _method ) _method->print_short_name(st);
 922   CallNode::dump_spec(st);
 923 }
 924 #endif
 925 
 926 //=============================================================================
 927 uint CallStaticJavaNode::size_of() const { return sizeof(*this); }
 928 uint CallStaticJavaNode::cmp( const Node &n ) const {
 929   CallStaticJavaNode &call = (CallStaticJavaNode&)n;
 930   return CallJavaNode::cmp(call);
 931 }


 990   return CallNode::cmp(call) && !strcmp(_name,call._name);
 991 }
 992 #ifndef PRODUCT
 993 void CallRuntimeNode::dump_spec(outputStream *st) const {
 994   st->print("# ");
 995   st->print("%s", _name);
 996   CallNode::dump_spec(st);
 997 }
 998 #endif
 999 
1000 //------------------------------calling_convention-----------------------------
1001 void CallRuntimeNode::calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const {
1002   Matcher::c_calling_convention( sig_bt, parm_regs, argcnt );
1003 }
1004 
1005 //=============================================================================
1006 //------------------------------calling_convention-----------------------------
1007 
1008 
1009 //=============================================================================
1010 bool CallLeafNode::is_call_to_arraycopystub() const {
1011   if (_name != NULL && strstr(_name, "arraycopy") != 0) {
1012     return true;
1013   }
1014   return false;
1015 }
1016 
1017 
1018 #ifndef PRODUCT
1019 void CallLeafNode::dump_spec(outputStream *st) const {
1020   st->print("# ");
1021   st->print("%s", _name);
1022   CallNode::dump_spec(st);
1023 }
1024 #endif
1025 
1026 //=============================================================================
1027 
1028 void SafePointNode::set_local(JVMState* jvms, uint idx, Node *c) {
1029   assert(verify_jvms(jvms), "jvms must match");
1030   int loc = jvms->locoff() + idx;
1031   if (in(loc)->is_top() && idx > 0 && !c->is_top() ) {
1032     // If current local idx is top then local idx - 1 could
1033     // be a long/double that needs to be killed since top could
1034     // represent the 2nd half of the long/double.
1035     uint ideal = in(loc -1)->ideal_reg();
1036     if (ideal == Op_RegD || ideal == Op_RegL) {
1037       // set other (low index) half to top


1913     if (!t_oop->isa_aryptr()) {
1914       return true;
1915     }
1916 
1917     const Type* elem = dest_t->is_aryptr()->elem();
1918     if (elem == Type::BOTTOM) {
1919       // An array but we don't know what the elements are
1920       return true;
1921     }
1922 
1923     dest_t = dest_t->add_offset(Type::OffsetBot)->is_oopptr();
1924     uint dest_alias = phase->C->get_alias_index(dest_t);
1925     uint t_oop_alias = phase->C->get_alias_index(t_oop);
1926 
1927     return dest_alias == t_oop_alias;
1928   }
1929 
1930   return true;
1931 }
1932 
1933 bool CallLeafNode::may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) {
1934   if (is_call_to_arraycopystub()) {
1935     const TypeTuple* args = _tf->domain();
1936     Node* dest = NULL;
1937     // Stubs that can be called once an ArrayCopyNode is expanded have
1938     // different signatures. Look for the second pointer argument,
1939     // which is the destination of the copy.
1940     for (uint i = TypeFunc::Parms, j = 0; i < args->cnt(); i++) {
1941       if (args->field_at(i)->isa_ptr()) {
1942         j++;
1943         if (j == 2) {
1944           dest = in(i);
1945           break;
1946         }
1947       }
1948     }
1949     if (!dest->is_top() && may_modify_arraycopy_helper(phase->type(dest)->is_oopptr(), t_oop, phase)) {
1950       return true;
1951     }
1952     return false;
1953   }
1954   return CallNode::may_modify(t_oop, phase);
1955 }


New version (with the fix):

 707 
 708   case TypeFunc::ReturnAdr:
 709   case TypeFunc::FramePtr:
 710   default:
 711     ShouldNotReachHere();
 712   }
 713   return NULL;
 714 }
 715 
 716 // Do we Match on this edge index or not?  Match no edges
 717 uint CallNode::match_edge(uint idx) const {
 718   return 0;
 719 }
 720 
 721 //
 722 // Determine whether the call could modify the field of the specified
 723 // instance at the specified offset.
 724 //
 725 bool CallNode::may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) {
 726   assert((t_oop != NULL), "sanity");
 727   if (is_call_to_arraycopystub()) {
 728     const TypeTuple* args = _tf->domain();
 729     Node* dest = NULL;
 730     // Stubs that can be called once an ArrayCopyNode is expanded have
 731     // different signatures. Look for the second pointer argument,
 732     // which is the destination of the copy.
 733     for (uint i = TypeFunc::Parms, j = 0; i < args->cnt(); i++) {
 734       if (args->field_at(i)->isa_ptr()) {
 735         j++;
 736         if (j == 2) {
 737           dest = in(i);
 738           break;
 739         }
 740       }
 741     }
 742     if (!dest->is_top() && may_modify_arraycopy_helper(phase->type(dest)->is_oopptr(), t_oop, phase)) {
 743       return true;
 744     }
 745     return false;
 746   }
 747   if (t_oop->is_known_instance()) {
 748     // The instance_id is set only for scalar-replaceable allocations which
 749     // are not passed as arguments according to Escape Analysis.
 750     return false;
 751   }
 752   if (t_oop->is_ptr_to_boxed_value()) {
 753     ciKlass* boxing_klass = t_oop->klass();
 754     if (is_CallStaticJava() && as_CallStaticJava()->is_boxing_method()) {
 755       // Skip unrelated boxing methods.
 756       Node* proj = proj_out(TypeFunc::Parms);
 757       if ((proj == NULL) || (phase->type(proj)->is_instptr()->klass() != boxing_klass)) {
 758         return false;
 759       }
 760     }
 761     if (is_CallJava() && as_CallJava()->method() != NULL) {
 762       ciMethod* meth = as_CallJava()->method();
 763       if (meth->is_accessor()) {
 764         return false;
 765       }
 766       // May modify (by reflection) if a boxing object is passed
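The destination-finding loop near the top of may_modify() above relies on a shape property of the expanded stubs' signatures: whatever the exact parameter list, the destination array is the second pointer-typed argument. A self-contained sketch of that scan follows; the signature shapes in the comments (a plain copy roughly like (src, dest, count), a generic copy roughly like (src, src_pos, dest, dest_pos, length)) are given for illustration and are not the exact stub prototypes.

    #include <cstdio>

    // Standalone illustration (not HotSpot code): return the index of the
    // second pointer-typed parameter, mirroring the loop in
    // CallNode::may_modify() that picks out the copy's destination.
    enum Kind { PTR, NON_PTR };

    static int second_pointer_index(const Kind* kinds, int cnt) {
      int seen = 0;
      for (int i = 0; i < cnt; i++) {
        if (kinds[i] == PTR && ++seen == 2) {
          return i;
        }
      }
      return -1;  // no second pointer argument found
    }

    int main() {
      Kind basic[]   = { PTR, PTR, NON_PTR };                    // e.g. (src, dest, count)
      Kind generic[] = { PTR, NON_PTR, PTR, NON_PTR, NON_PTR };  // e.g. (src, src_pos, dest, dest_pos, length)
      printf("%d %d\n", second_pointer_index(basic, 3),          // prints 1: dest
                        second_pointer_index(generic, 5));       // prints 2: dest
      return 0;
    }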


 912   if (can_reshape && cg != NULL && cg->is_mh_late_inline() && !cg->already_attempted()) {
 913     // Check whether this MethodHandle call becomes a candidate for inlining
 914     ciMethod* callee = cg->method();
 915     vmIntrinsics::ID iid = callee->intrinsic_id();
 916     if (iid == vmIntrinsics::_invokeBasic) {
 917       if (in(TypeFunc::Parms)->Opcode() == Op_ConP) {
 918         phase->C->prepend_late_inline(cg);
 919         set_generator(NULL);
 920       }
 921     } else {
 922       assert(callee->has_member_arg(), "wrong type of call?");
 923       if (in(TypeFunc::Parms + callee->arg_size() - 1)->Opcode() == Op_ConP) {
 924         phase->C->prepend_late_inline(cg);
 925         set_generator(NULL);
 926       }
 927     }
 928   }
 929   return SafePointNode::Ideal(phase, can_reshape);
 930 }
 931 
 932 bool CallNode::is_call_to_arraycopystub() const {
 933   if (_name != NULL && strstr(_name, "arraycopy") != 0) {
 934     return true;
 935   }
 936   return false;
 937 }
 938 
 939 //=============================================================================
 940 uint CallJavaNode::size_of() const { return sizeof(*this); }
 941 uint CallJavaNode::cmp( const Node &n ) const {
 942   CallJavaNode &call = (CallJavaNode&)n;
 943   return CallNode::cmp(call) && _method == call._method;
 944 }
 945 #ifndef PRODUCT
 946 void CallJavaNode::dump_spec(outputStream *st) const {
 947   if( _method ) _method->print_short_name(st);
 948   CallNode::dump_spec(st);
 949 }
 950 #endif
 951 
 952 //=============================================================================
 953 uint CallStaticJavaNode::size_of() const { return sizeof(*this); }
 954 uint CallStaticJavaNode::cmp( const Node &n ) const {
 955   CallStaticJavaNode &call = (CallStaticJavaNode&)n;
 956   return CallJavaNode::cmp(call);
 957 }


1016   return CallNode::cmp(call) && !strcmp(_name,call._name);
1017 }
1018 #ifndef PRODUCT
1019 void CallRuntimeNode::dump_spec(outputStream *st) const {
1020   st->print("# ");
1021   st->print("%s", _name);
1022   CallNode::dump_spec(st);
1023 }
1024 #endif
1025 
1026 //------------------------------calling_convention-----------------------------
1027 void CallRuntimeNode::calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const {
1028   Matcher::c_calling_convention( sig_bt, parm_regs, argcnt );
1029 }
1030 
1031 //=============================================================================
1032 //------------------------------calling_convention-----------------------------
1033 
1034 
1035 //=============================================================================
1036 #ifndef PRODUCT
1037 void CallLeafNode::dump_spec(outputStream *st) const {
1038   st->print("# ");
1039   st->print("%s", _name);
1040   CallNode::dump_spec(st);
1041 }
1042 #endif
1043 
1044 //=============================================================================
1045 
1046 void SafePointNode::set_local(JVMState* jvms, uint idx, Node *c) {
1047   assert(verify_jvms(jvms), "jvms must match");
1048   int loc = jvms->locoff() + idx;
1049   if (in(loc)->is_top() && idx > 0 && !c->is_top() ) {
1050     // If current local idx is top then local idx - 1 could
1051     // be a long/double that needs to be killed since top could
1052     // represent the 2nd half of the long/double.
1053     uint ideal = in(loc -1)->ideal_reg();
1054     if (ideal == Op_RegD || ideal == Op_RegL) {
1055       // set other (low index) half to top
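The comment above reflects the JVM's two-slot representation of long and double locals: the ideal graph keeps the value in the lower slot and top in the upper slot, so storing a real value into a slot that currently holds top may clobber the upper half of a long/double in the slot below, which must then be killed. A minimal standalone model of that rule (a sketch of the idea, not the HotSpot data structures):

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Toy model: TOP marks an undefined slot, and a long/double value in slot
    // i implicitly also occupies slot i+1 (which is kept as TOP).
    enum Slot { TOP, INT_VAL, LONG_VAL, DOUBLE_VAL };

    static void set_local(std::vector<Slot>& locals, std::size_t idx, Slot v) {
      if (v != TOP && locals[idx] == TOP && idx > 0 &&
          (locals[idx - 1] == LONG_VAL || locals[idx - 1] == DOUBLE_VAL)) {
        // We are overwriting the upper half of the two-slot value below us,
        // so that value is no longer valid and must be killed.
        locals[idx - 1] = TOP;
      }
      locals[idx] = v;
    }

    int main() {
      std::vector<Slot> locals = { LONG_VAL, TOP, INT_VAL };  // long in slots 0-1, int in slot 2
      set_local(locals, 1, INT_VAL);                          // reuse the long's upper slot
      assert(locals[0] == TOP && locals[1] == INT_VAL);
      return 0;
    }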


1931     if (!t_oop->isa_aryptr()) {
1932       return true;
1933     }
1934 
1935     const Type* elem = dest_t->is_aryptr()->elem();
1936     if (elem == Type::BOTTOM) {
1937       // An array but we don't know what the elements are
1938       return true;
1939     }
1940 
1941     dest_t = dest_t->add_offset(Type::OffsetBot)->is_oopptr();
1942     uint dest_alias = phase->C->get_alias_index(dest_t);
1943     uint t_oop_alias = phase->C->get_alias_index(t_oop);
1944 
1945     return dest_alias == t_oop_alias;
1946   }
1947 
1948   return true;
1949 }
1950 