src/hotspot/share/opto/callnode.cpp

 998       }
 999     }
1000   }
1001   return SafePointNode::Ideal(phase, can_reshape);
1002 }
1003 
1004 bool CallNode::is_call_to_arraycopystub() const {
1005   if (_name != NULL && strstr(_name, "arraycopy") != 0) {
1006     return true;
1007   }
1008   return false;
1009 }
1010 
1011 //=============================================================================
1012 uint CallJavaNode::size_of() const { return sizeof(*this); }
1013 bool CallJavaNode::cmp( const Node &n ) const {
1014   CallJavaNode &call = (CallJavaNode&)n;
1015   return CallNode::cmp(call) && _method == call._method &&
1016          _override_symbolic_info == call._override_symbolic_info;
1017 }
1018 
1019 void CallJavaNode::copy_call_debug_info(PhaseIterGVN* phase, CallNode *oldcall) {
1020   // Copy debug information and adjust JVMState information
1021   uint old_dbg_start = oldcall->tf()->domain_sig()->cnt();
1022   uint new_dbg_start = tf()->domain_sig()->cnt();
1023   int jvms_adj  = new_dbg_start - old_dbg_start;
1024   assert (new_dbg_start == req(), "argument count mismatch");
1025   Compile* C = phase->C;
1026
1027   // A SafePointScalarObject node may be referenced several times in debug info.
1028   // Use a Dict to record cloned nodes so each one is cloned only once.
1029   Dict* sosn_map = new Dict(cmpkey,hashkey);
1030   for (uint i = old_dbg_start; i < oldcall->req(); i++) {
1031     Node* old_in = oldcall->in(i);
1032     // Clone old SafePointScalarObjectNodes, adjusting their field contents.
1033     if (old_in != NULL && old_in->is_SafePointScalarObject()) {
1034       SafePointScalarObjectNode* old_sosn = old_in->as_SafePointScalarObject();
1035       uint old_unique = C->unique();
1036       Node* new_in = old_sosn->clone(sosn_map);
1037       if (old_unique != C->unique()) { // New node?
1038         new_in->set_req(0, C->root()); // reset control edge
1039         new_in = phase->transform(new_in); // Register new node.
1040       }
1041       old_in = new_in;
1042     }
1043     add_req(old_in);
1044   }
1045 
1046   // JVMS may be shared so clone it before we modify it
1047   set_jvms(oldcall->jvms() != NULL ? oldcall->jvms()->clone_deep(C) : NULL);
1048   for (JVMState *jvms = this->jvms(); jvms != NULL; jvms = jvms->caller()) {
1049     jvms->set_map(this);
1050     jvms->set_locoff(jvms->locoff()+jvms_adj);
1051     jvms->set_stkoff(jvms->stkoff()+jvms_adj);
1052     jvms->set_monoff(jvms->monoff()+jvms_adj);
1053     jvms->set_scloff(jvms->scloff()+jvms_adj);
1054     jvms->set_endoff(jvms->endoff()+jvms_adj);
1055   }
1056 }
1057 
1058 #ifdef ASSERT
1059 bool CallJavaNode::validate_symbolic_info() const {
1060   if (method() == NULL) {
1061     return true; // call into runtime or uncommon trap
1062   }
1063   Bytecodes::Code bc = jvms()->method()->java_code_at_bci(_bci);
1064   if (EnableValhalla && (bc == Bytecodes::_if_acmpeq || bc == Bytecodes::_if_acmpne)) {
1065     return true;
1066   }
1067   ciMethod* symbolic_info = jvms()->method()->get_method_at_bci(_bci);
1068   ciMethod* callee = method();
1069   if (symbolic_info->is_method_handle_intrinsic() && !callee->is_method_handle_intrinsic()) {
1070     assert(override_symbolic_info(), "should be set");
1071   }
1072   assert(ciMethod::is_consistent_info(symbolic_info, callee), "inconsistent info");
1073   return true;
1074 }
1075 #endif
1076 
1077 #ifndef PRODUCT


1101 int CallStaticJavaNode::uncommon_trap_request() const {
1102   if (_name != NULL && !strcmp(_name, "uncommon_trap")) {
1103     return extract_uncommon_trap_request(this);
1104   }
1105   return 0;
1106 }
1107 int CallStaticJavaNode::extract_uncommon_trap_request(const Node* call) {
1108 #ifndef PRODUCT
1109   if (!(call->req() > TypeFunc::Parms &&
1110         call->in(TypeFunc::Parms) != NULL &&
1111         call->in(TypeFunc::Parms)->is_Con() &&
1112         call->in(TypeFunc::Parms)->bottom_type()->isa_int())) {
1113     assert(in_dump() != 0, "OK if dumping");
1114     tty->print("[bad uncommon trap]");
1115     return 0;
1116   }
1117 #endif
1118   return call->in(TypeFunc::Parms)->bottom_type()->is_int()->get_con();
1119 }
1120 
1121 bool CallStaticJavaNode::remove_useless_allocation(PhaseGVN *phase, Node* ctl, Node* mem, Node* unc_arg) {
1122   // Split-if can cause the flattened array branch of an array load to
1123   // end in an uncommon trap. In that case, the allocation of the
1124   // loaded value and its initialization are useless: eliminate them
1125   // and use the JVM state of the allocation to create a new uncommon
1126   // trap call at the load.
1127   if (ctl == NULL || ctl->is_top() || mem == NULL || mem->is_top() || !mem->is_MergeMem()) {
1128     return false;
1129   }
1130   PhaseIterGVN* igvn = phase->is_IterGVN();
1131   if (ctl->is_Region()) {
1132     bool res = false;
1133     for (uint i = 1; i < ctl->req(); i++) {
1134       MergeMemNode* mm = mem->clone()->as_MergeMem();
1135       for (MergeMemStream mms(mm); mms.next_non_empty(); ) {
1136         Node* m = mms.memory();
1137         if (m->is_Phi() && m->in(0) == ctl) {
1138           mms.set_memory(m->in(i));
1139         }
1140       }
1141       if (remove_useless_allocation(phase, ctl->in(i), mm, unc_arg)) {
1142         res = true;
1143         if (!ctl->in(i)->is_Region()) {
1144           igvn->replace_input_of(ctl, i, phase->C->top());
1145         }
1146       }
1147       igvn->remove_dead_node(mm);
1148     }
1149     return res;
1150   }
1151   // Verify that the control flow from the allocation to this uncommon trap has the expected shape
1152   Node* c = ctl;
1153   Node* copy = NULL;
1154   Node* alloc = NULL;
1155   for (;;) {
1156     if (c == NULL || c->is_top()) {
1157       return false;
1158     }
1159     if (c->is_Proj() || c->is_Catch() || c->is_MemBar()) {
1160       c = c->in(0);
1161     } else if (c->Opcode() == Op_CallLeaf &&
1162                c->as_Call()->entry_point() == CAST_FROM_FN_PTR(address, OptoRuntime::load_unknown_value)) {
1163       copy = c;
1164       c = c->in(0);
1165     } else if (c->is_Allocate()) {
1166       Node* new_obj = c->as_Allocate()->result_cast();
1167       if (copy == NULL || new_obj == NULL) {
1168         return false;
1169       }
1170       Node* copy_dest = copy->in(TypeFunc::Parms + 2);
1171       if (copy_dest != new_obj) {
1172         return false;
1173       }
1174       alloc = c;
1175       break;
1176     } else {
1177       return false;
1178     }
1179   }
1180
1181   JVMState* jvms = alloc->jvms();
1182   if (phase->C->too_many_traps(jvms->method(), jvms->bci(), Deoptimization::trap_request_reason(uncommon_trap_request()))) {
1183     return false;
1184   }
1185
1186   Node* alloc_mem = alloc->in(TypeFunc::Memory);
1187   if (alloc_mem == NULL || alloc_mem->is_top()) {
1188     return false;
1189   }
1190   if (!alloc_mem->is_MergeMem()) {
1191     alloc_mem = MergeMemNode::make(alloc_mem);
1192   }
1193
1194   // ... and that there is no unexpected side effect on memory between the allocation and the uncommon trap
1195   for (MergeMemStream mms2(mem->as_MergeMem(), alloc_mem->as_MergeMem()); mms2.next_non_empty2(); ) {
1196     Node* m1 = mms2.is_empty() ? mms2.base_memory() : mms2.memory();
1197     Node* m2 = mms2.memory2();
1198
1199     for (uint i = 0; i < 100; i++) {
1200       if (m1 == m2) {
1201         break;
1202       } else if (m1->is_Proj()) {
1203         m1 = m1->in(0);
1204       } else if (m1->is_MemBar()) {
1205         m1 = m1->in(TypeFunc::Memory);
1206       } else if (m1->Opcode() == Op_CallLeaf &&
1207                  m1->as_Call()->entry_point() == CAST_FROM_FN_PTR(address, OptoRuntime::load_unknown_value)) {
1208         if (m1 != copy) {
1209           return false;
1210         }
1211         m1 = m1->in(TypeFunc::Memory);
1212       } else if (m1->is_Allocate()) {
1213         if (m1 != alloc) {
1214           return false;
1215         }
1216         break;
1217       } else {
1218         return false;
1219       }
1220     }
1221   }
1222   if (alloc_mem->outcnt() == 0) {
1223     igvn->remove_dead_node(alloc_mem);
1224   }
1225 
1226   address call_addr = SharedRuntime::uncommon_trap_blob()->entry_point();
1227   CallNode* unc = new CallStaticJavaNode(OptoRuntime::uncommon_trap_Type(), call_addr, "uncommon_trap",
1228                                          jvms->bci(), NULL);
1229   unc->init_req(TypeFunc::Control, alloc->in(0));
1230   unc->init_req(TypeFunc::I_O, alloc->in(TypeFunc::I_O));
1231   unc->init_req(TypeFunc::Memory, alloc->in(TypeFunc::Memory));
1232   unc->init_req(TypeFunc::FramePtr,  alloc->in(TypeFunc::FramePtr));
1233   unc->init_req(TypeFunc::ReturnAdr, alloc->in(TypeFunc::ReturnAdr));
1234   unc->init_req(TypeFunc::Parms+0, unc_arg);
1235   unc->set_cnt(PROB_UNLIKELY_MAG(4));
1236   unc->copy_call_debug_info(igvn, alloc->as_Allocate());
1237
1238   igvn->replace_input_of(alloc, 0, phase->C->top());
1239
1240   igvn->register_new_node_with_optimizer(unc);
1241
1242   Node* ctrl = phase->transform(new ProjNode(unc, TypeFunc::Control));
1243   Node* halt = phase->transform(new HaltNode(ctrl, alloc->in(TypeFunc::FramePtr)));
1244   phase->C->root()->add_req(halt);
1245 
1246   return true;
1247 }
1248 
1249 
1250 Node* CallStaticJavaNode::Ideal(PhaseGVN *phase, bool can_reshape) {
1251   if (can_reshape && uncommon_trap_request() != 0) {
1252     if (remove_useless_allocation(phase, in(0), in(TypeFunc::Memory), in(TypeFunc::Parms))) {
1253       if (!in(0)->is_Region()) {
1254         PhaseIterGVN* igvn = phase->is_IterGVN();
1255         igvn->replace_input_of(this, 0, phase->C->top());
1256       }
1257       return this;
1258     }
1259   }
1260   return CallNode::Ideal(phase, can_reshape);
1261 }
1262 
1263 
1264 #ifndef PRODUCT
1265 void CallStaticJavaNode::dump_spec(outputStream *st) const {
1266   st->print("# Static ");
1267   if (_name != NULL) {
1268     st->print("%s", _name);
1269     int trap_req = uncommon_trap_request();
1270     if (trap_req != 0) {
1271       char buf[100];
1272       st->print("(%s)",
1273                  Deoptimization::format_trap_request(buf, sizeof(buf),
1274                                                      trap_req));
1275     }
1276     st->print(" ");
1277   }
1278   CallJavaNode::dump_spec(st);
1279 }
1280 
1281 void CallStaticJavaNode::dump_compact_spec(outputStream* st) const {
1282   if (_method) {
1283     _method->print_short_name(st);