< prev index next >

src/share/vm/opto/library_call.cpp

Print this page




  // Abort the VM when an intrinsic id reaches a handler that does not
  // expect it; used as the default arm of intrinsic dispatch switches.
  void fatal_unexpected_iid(vmIntrinsics::ID iid) {
    fatal("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid));
  }
 138 
 139   void  set_result(Node* n) { assert(_result == NULL, "only set once"); _result = n; }
 140   void  set_result(RegionNode* region, PhiNode* value);
 141   Node*     result() { return _result; }
 142 
 143   virtual int reexecute_sp() { return _reexecute_sp; }
 144 
 145   // Helper functions to inline natives
 146   Node* generate_guard(Node* test, RegionNode* region, float true_prob);
 147   Node* generate_slow_guard(Node* test, RegionNode* region);
 148   Node* generate_fair_guard(Node* test, RegionNode* region);
 149   Node* generate_negative_guard(Node* index, RegionNode* region,
 150                                 // resulting CastII of index:
 151                                 Node* *pos_index = NULL);
 152   Node* generate_limit_guard(Node* offset, Node* subseq_length,
 153                              Node* array_length,
 154                              RegionNode* region);


 155   Node* generate_current_thread(Node* &tls_output);
 156   Node* load_mirror_from_klass(Node* klass);
 157   Node* load_klass_from_mirror_common(Node* mirror, bool never_see_null,
 158                                       RegionNode* region, int null_path,
 159                                       int offset);
  // Convenience wrapper: load the Klass* stored at
  // java_lang_Class::klass_offset_in_bytes() from a java.lang.Class mirror.
  // Delegates all null/guard handling to load_klass_from_mirror_common.
  Node* load_klass_from_mirror(Node* mirror, bool never_see_null,
                               RegionNode* region, int null_path) {
    int offset = java_lang_Class::klass_offset_in_bytes();
    return load_klass_from_mirror_common(mirror, never_see_null,
                                         region, null_path,
                                         offset);
  }
  // Convenience wrapper: like load_klass_from_mirror, but reads the
  // array-klass slot (java_lang_Class::array_klass_offset_in_bytes())
  // of the mirror instead of the instance-klass slot.
  Node* load_array_klass_from_mirror(Node* mirror, bool never_see_null,
                                     RegionNode* region, int null_path) {
    int offset = java_lang_Class::array_klass_offset_in_bytes();
    return load_klass_from_mirror_common(mirror, never_see_null,
                                         region, null_path,
                                         offset);
  }
 174   Node* generate_access_flags_guard(Node* kls,


  // Guard on the array-kind of 'kls', delegating to
  // generate_array_guard_common with obj_array = true, not_array = true
  // (exact guard polarity is defined there -- see its implementation).
  Node* generate_non_objArray_guard(Node* kls, RegionNode* region) {
    return generate_array_guard_common(kls, region, true, true);
  }
 190   Node* generate_array_guard_common(Node* kls, RegionNode* region,
 191                                     bool obj_array, bool not_array);
 192   Node* generate_virtual_guard(Node* obj_klass, RegionNode* slow_region);
 193   CallJavaNode* generate_method_call(vmIntrinsics::ID method_id,
 194                                      bool is_virtual = false, bool is_static = false);
  // Build a call to the named method with is_virtual = false,
  // is_static = true (a static Java call for the intrinsic's slow path).
  CallJavaNode* generate_method_call_static(vmIntrinsics::ID method_id) {
    return generate_method_call(method_id, false, true);
  }
  // Build a call to the named method with is_virtual = true,
  // is_static = false (a virtual Java call for the intrinsic's slow path).
  CallJavaNode* generate_method_call_virtual(vmIntrinsics::ID method_id) {
    return generate_method_call(method_id, true, false);
  }
 201   Node * load_field_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString, bool is_exact, bool is_static, ciInstanceKlass * fromKls);
 202 
 203   Node* make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2, StrIntrinsicNode::ArgEnc ae);
 204   bool inline_string_compareTo(StrIntrinsicNode::ArgEnc ae);
 205   bool inline_string_indexOf(StrIntrinsicNode::ArgEnc ae);
 206   bool inline_string_indexOfI(StrIntrinsicNode::ArgEnc ae);


 207   bool inline_string_indexOfChar();
 208   bool inline_string_equals(StrIntrinsicNode::ArgEnc ae);
 209   bool inline_string_toBytesU();
 210   bool inline_string_getCharsU();
 211   bool inline_string_copy(bool compress);
 212   bool inline_string_char_access(bool is_store);
 213   Node* round_double_node(Node* n);
 214   bool runtime_math(const TypeFunc* call_type, address funcAddr, const char* funcName);
 215   bool inline_math_native(vmIntrinsics::ID id);
 216   bool inline_trig(vmIntrinsics::ID id);
 217   bool inline_math(vmIntrinsics::ID id);
 218   template <typename OverflowOp>
 219   bool inline_math_overflow(Node* arg1, Node* arg2);
 220   void inline_math_mathExact(Node* math, Node* test);
 221   bool inline_math_addExactI(bool is_increment);
 222   bool inline_math_addExactL(bool is_increment);
 223   bool inline_math_multiplyExactI();
 224   bool inline_math_multiplyExactL();
 225   bool inline_math_negateExactI();
 226   bool inline_math_negateExactL();


 880 // check they generally use Plan B instead of Plan A.
 881 // For the moment we use Plan A.
 882 inline Node* LibraryCallKit::generate_limit_guard(Node* offset,
 883                                                   Node* subseq_length,
 884                                                   Node* array_length,
 885                                                   RegionNode* region) {
 886   if (stopped())
 887     return NULL;                // already stopped
 888   bool zero_offset = _gvn.type(offset) == TypeInt::ZERO;
 889   if (zero_offset && subseq_length->eqv_uncast(array_length))
 890     return NULL;                // common case of whole-array copy
 891   Node* last = subseq_length;
 892   if (!zero_offset)             // last += offset
 893     last = _gvn.transform(new AddINode(last, offset));
 894   Node* cmp_lt = _gvn.transform(new CmpUNode(array_length, last));
 895   Node* bol_lt = _gvn.transform(new BoolNode(cmp_lt, BoolTest::lt));
 896   Node* is_over = generate_guard(bol_lt, region, PROB_MIN);
 897   return is_over;
 898 }
 899 

























 900 
 901 //--------------------------generate_current_thread--------------------
 902 Node* LibraryCallKit::generate_current_thread(Node* &tls_output) {
 903   ciKlass*    thread_klass = env()->Thread_klass();
 904   const Type* thread_type  = TypeOopPtr::make_from_klass(thread_klass)->cast_to_ptr_type(TypePtr::NotNull);
 905   Node* thread = _gvn.transform(new ThreadLocalNode());
 906   Node* p = basic_plus_adr(top()/*!oop*/, thread, in_bytes(JavaThread::threadObj_offset()));
 907   Node* threadObj = make_load(NULL, p, thread_type, T_OBJECT, MemNode::unordered);
 908   tls_output = thread;
 909   return threadObj;
 910 }
 911 
 912 
 913 //------------------------------make_string_method_node------------------------
 914 // Helper method for String intrinsic functions. This version is called with
 915 // str1 and str2 pointing to byte[] nodes containing Latin1 or UTF16 encoded
 916 // characters (depending on 'is_byte'). cnt1 and cnt2 are pointing to Int nodes
 917 // containing the lengths of str1 and str2.
 918 Node* LibraryCallKit::make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2, StrIntrinsicNode::ArgEnc ae) {
 919   Node* result = NULL;


 999   set_control(_gvn.transform(region));
1000   record_for_igvn(region);
1001 
1002   set_result(_gvn.transform(phi));
1003   return true;
1004 }
1005 
1006 //------------------------------inline_array_equals----------------------------
1007 bool LibraryCallKit::inline_array_equals(StrIntrinsicNode::ArgEnc ae) {
1008   assert(ae == StrIntrinsicNode::UU || ae == StrIntrinsicNode::LL, "unsupported array types");
1009   Node* arg1 = argument(0);
1010   Node* arg2 = argument(1);
1011 
1012   const TypeAryPtr* mtype = (ae == StrIntrinsicNode::UU) ? TypeAryPtr::CHARS : TypeAryPtr::BYTES;
1013   set_result(_gvn.transform(new AryEqNode(control(), memory(mtype), arg1, arg2, ae)));
1014   return true;
1015 }
1016 
//------------------------------inline_hasNegatives------------------------------
// Intrinsic for StringCoding.hasNegatives(byte[] ba, int off, int len):
// reports whether any byte in the given range is negative.
bool LibraryCallKit::inline_hasNegatives() {
  // Give up on intrinsification if this site has already deoptimized too often.
  if (too_many_traps(Deoptimization::Reason_intrinsic))  return false;

  assert(callee()->signature()->size() == 3, "hasNegatives has 3 parameters");
  // no receiver since it is static method
  Node* ba         = argument(0);
  Node* offset     = argument(1);
  Node* len        = argument(2);

  // Collect every range-check failure path in one bailout region.
  RegionNode* bailout = new RegionNode(1);
  record_for_igvn(bailout);

  // offset must not be negative.
  generate_negative_guard(offset, bailout);

  // offset + length must not exceed length of ba.
  generate_limit_guard(offset, len, load_array_length(ba), bailout);

  if (bailout->req() > 1) {
    // At least one guard can fail at run time: deoptimize on that path so
    // the interpreter re-executes the call and raises the proper exception.
    PreserveJVMState pjvms(this);
    set_control(_gvn.transform(bailout));
    uncommon_trap(Deoptimization::Reason_intrinsic,
                  Deoptimization::Action_maybe_recompile);
  }
  if (!stopped()) {
    // All checks passed: emit the (possibly vectorized) scan node.
    Node* ba_start = array_element_address(ba, offset, T_BYTE);
    Node* result = new HasNegativesNode(control(), memory(TypeAryPtr::BYTES), ba_start, len);
    set_result(_gvn.transform(result));
  }
  return true;
}
1049 
1050 bool LibraryCallKit::inline_objects_checkIndex() {
1051   Node* index = argument(0);
1052   Node* length = argument(1);
1053   if (too_many_traps(Deoptimization::Reason_intrinsic) || too_many_traps(Deoptimization::Reason_range_check)) {
1054     return false;
1055   }
1056 
1057   Node* len_pos_cmp = _gvn.transform(new CmpINode(length, intcon(0)));
1058   Node* len_pos_bol = _gvn.transform(new BoolNode(len_pos_cmp, BoolTest::ge));
1059 
1060   {
1061     BuildCutout unless(this, len_pos_bol, PROB_MAX);
1062     uncommon_trap(Deoptimization::Reason_intrinsic,
1063                   Deoptimization::Action_make_not_entrant);
1064   }
1065 
1066   if (stopped()) {


1107   RegionNode* result_rgn = new RegionNode(4);
1108   Node*       result_phi = new PhiNode(result_rgn, TypeInt::INT);
1109 
1110   // Get start addr and length of source string
1111   Node* src_start = array_element_address(src, intcon(0), T_BYTE);
1112   Node* src_count = load_array_length(src);
1113 
1114   // Get start addr and length of substring
1115   Node* tgt_start = array_element_address(tgt, intcon(0), T_BYTE);
1116   Node* tgt_count = load_array_length(tgt);
1117 
1118   if (ae == StrIntrinsicNode::UU || ae == StrIntrinsicNode::UL) {
1119     // Divide src size by 2 if String is UTF16 encoded
1120     src_count = _gvn.transform(new RShiftINode(src_count, intcon(1)));
1121   }
1122   if (ae == StrIntrinsicNode::UU) {
1123     // Divide substring size by 2 if String is UTF16 encoded
1124     tgt_count = _gvn.transform(new RShiftINode(tgt_count, intcon(1)));
1125   }
1126 
1127   // Check for substr count > string count
1128   Node* cmp = _gvn.transform(new CmpINode(tgt_count, src_count));
1129   Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::gt));
1130   Node* if_gt = generate_slow_guard(bol, NULL);
1131   if (if_gt != NULL) {
1132     result_phi->init_req(2, intcon(-1));
1133     result_rgn->init_req(2, if_gt);
1134   }
1135 
1136   if (!stopped()) {
1137     // Check for substr count == 0
1138     cmp = _gvn.transform(new CmpINode(tgt_count, intcon(0)));
1139     bol = _gvn.transform(new BoolNode(cmp, BoolTest::eq));
1140     Node* if_zero = generate_slow_guard(bol, NULL);
1141     if (if_zero != NULL) {
1142       result_phi->init_req(3, intcon(0));
1143       result_rgn->init_req(3, if_zero);
1144     }
1145   }
1146 
1147   if (!stopped()) {
1148     Node* result = make_string_method_node(Op_StrIndexOf, src_start, src_count, tgt_start, tgt_count, ae);
1149     result_phi->init_req(1, result);
1150     result_rgn->init_req(1, control());
1151   }
1152   set_control(_gvn.transform(result_rgn));
1153   record_for_igvn(result_rgn);
1154   set_result(_gvn.transform(result_phi));
1155 
1156   return true;
1157 }
1158 
1159 //-----------------------------inline_string_indexOf-----------------------
// Intrinsic for the indexOf variant that takes explicit counts and a
// starting index (StringLatin1/StringUTF16.indexOf(src, srcCount, tgt,
// tgtCount, fromIndex)).  Returns false to fall back to the Java code.
bool LibraryCallKit::inline_string_indexOfI(StrIntrinsicNode::ArgEnc ae) {
  if (!Matcher::has_match_rule(Op_StrIndexOf) || !UseSSE42Intrinsics) {
    return false;  // no hardware support for the StrIndexOf node
  }
  assert(callee()->signature()->size() == 5, "String.indexOf() has 5 arguments");
  Node* src         = argument(0); // byte[]
  Node* src_count   = argument(1);
  Node* tgt         = argument(2); // byte[]
  Node* tgt_count   = argument(3);
  Node* from_index  = argument(4);

  // Java code which calls this method has range checks for from_index value.
  src_count = _gvn.transform(new SubINode(src_count, from_index));

  // Multiply byte array index by 2 if String is UTF16 encoded
  Node* src_offset = (ae == StrIntrinsicNode::LL) ? from_index : _gvn.transform(new LShiftINode(from_index, intcon(1)));

  Node* src_start = array_element_address(src, src_offset, T_BYTE);
  Node* tgt_start = array_element_address(tgt, intcon(0), T_BYTE);

  Node* result = make_string_method_node(Op_StrIndexOf, src_start, src_count, tgt_start, tgt_count, ae);

  // The result is index relative to from_index if substring was found, -1 otherwise.
  // Generate code which will fold into cmove.
  RegionNode* region = new RegionNode(3);
  Node* phi = new PhiNode(region, TypeInt::INT);

  Node* cmp = _gvn.transform(new CmpINode(result, intcon(0)));
  Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::lt));

  Node* if_lt = generate_slow_guard(bol, NULL);
  if (if_lt != NULL) {
    // result == -1: not found, pass -1 through unmodified on path 2.
    phi->init_req(2, result);
    region->init_req(2, if_lt);
  }
  if (!stopped()) {
    // Found: rebase the match index from from_index-relative to absolute.
    result = _gvn.transform(new AddINode(result, from_index));
    phi->init_req(1, result);
    region->init_req(1, control());
  }

  set_control(_gvn.transform(region));
  record_for_igvn(region);
  set_result(_gvn.transform(phi));

  return true;
}
1207 



























//-----------------------------inline_string_indexOfChar-----------------------
// Intrinsic for the single-char indexOf variant
// (src byte[], int ch, int fromIndex, int max).
bool LibraryCallKit::inline_string_indexOfChar() {
  if (!Matcher::has_match_rule(Op_StrIndexOfChar) || !(UseSSE > 4)) {
    return false;  // requires matcher support and SSE > 4
  }
  assert(callee()->signature()->size() == 4, "String.indexOfChar() has 4 arguments");
  Node* src         = argument(0); // byte[]
  Node* tgt         = argument(1); // tgt is int ch
  Node* from_index  = argument(2);
  Node* max         = argument(3);

  // Scale char index to byte offset (2 bytes per UTF16 char).
  Node* src_offset = _gvn.transform(new LShiftINode(from_index, intcon(1)));
  Node* src_start = array_element_address(src, src_offset, T_BYTE);

  // Number of chars left to scan.
  Node* src_count = _gvn.transform(new SubINode(max, from_index));

  RegionNode* region = new RegionNode(3);
  Node* phi = new PhiNode(region, TypeInt::INT);

  Node* result = new StrIndexOfCharNode(control(), memory(TypeAryPtr::BYTES), src_start, src_count, tgt, StrIntrinsicNode::none);
  C->set_has_split_ifs(true); // Has chance for split-if optimization
  // NOTE(review): the node returned by transform() is discarded; 'result'
  // keeps pointing at the node constructed above -- confirm GVN registers
  // it in place and cannot return a replacement here.
  _gvn.transform(result);

  Node* cmp = _gvn.transform(new CmpINode(result, intcon(0)));
  Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::lt));

  Node* if_lt = generate_slow_guard(bol, NULL);
  if (if_lt != NULL) {
    // result == -1: not found, pass -1 through unmodified on path 2.
    phi->init_req(2, result);
    region->init_req(2, if_lt);
  }
  if (!stopped()) {
    // Found: rebase the index from from_index-relative to absolute.
    result = _gvn.transform(new AddINode(result, from_index));
    phi->init_req(1, result);
    region->init_req(1, control());
  }
  set_control(_gvn.transform(region));
  record_for_igvn(region);
  set_result(_gvn.transform(phi));

  return true;
}
1251 //---------------------------inline_string_copy---------------------
1252 // compressIt == true --> generate a compressed copy operation (compress char[]/byte[] to byte[])
1253 //   int StringUTF16.compress(char[] src, int srcOff, byte[] dst, int dstOff, int len)
1254 //   int StringUTF16.compress(byte[] src, int srcOff, byte[] dst, int dstOff, int len)
1255 // compressIt == false --> generate an inflated copy operation (inflate byte[] to char[]/byte[])
1256 //   void StringLatin1.inflate(byte[] src, int srcOff, char[] dst, int dstOff, int len)
1257 //   void StringLatin1.inflate(byte[] src, int srcOff, byte[] dst, int dstOff, int len)
1258 bool LibraryCallKit::inline_string_copy(bool compress) {



1259   int nargs = 5;  // 2 oops, 3 ints
1260   assert(callee()->signature()->size() == nargs, "string copy has 5 arguments");
1261 
1262   Node* src         = argument(0);
1263   Node* src_offset  = argument(1);
1264   Node* dst         = argument(2);
1265   Node* dst_offset  = argument(3);
1266   Node* length      = argument(4);
1267 
1268   // Check for allocation before we add nodes that would confuse
1269   // tightly_coupled_allocation()
1270   AllocateArrayNode* alloc = tightly_coupled_allocation(dst, NULL);
1271 
1272   // Figure out the size and type of the elements we will be copying.
1273   const Type* src_type = src->Value(&_gvn);
1274   const Type* dst_type = dst->Value(&_gvn);
1275   BasicType src_elem = src_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
1276   BasicType dst_elem = dst_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
1277   assert((compress && dst_elem == T_BYTE && (src_elem == T_BYTE || src_elem == T_CHAR)) ||
1278          (!compress && src_elem == T_BYTE && (dst_elem == T_BYTE || dst_elem == T_CHAR)),
1279          "Unsupported array types for inline_string_copy");
1280 







1281   // Convert char[] offsets to byte[] offsets
1282   if (compress && src_elem == T_BYTE) {
1283     src_offset = _gvn.transform(new LShiftINode(src_offset, intcon(1)));
1284   } else if (!compress && dst_elem == T_BYTE) {
1285     dst_offset = _gvn.transform(new LShiftINode(dst_offset, intcon(1)));
1286   }
1287 
1288   Node* src_start = array_element_address(src, src_offset, src_elem);
1289   Node* dst_start = array_element_address(dst, dst_offset, dst_elem);
1290   // 'src_start' points to src array + scaled offset
1291   // 'dst_start' points to dst array + scaled offset
1292   Node* count = NULL;
1293   if (compress) {
1294     count = compress_string(src_start, dst_start, length);
1295   } else {
1296     inflate_string(src_start, dst_start, length);
1297   }
1298 
1299   if (alloc != NULL) {
1300     if (alloc->maybe_set_complete(&_gvn)) {


1312     // escape analysis can go from the MemBarStoreStoreNode to the
1313     // AllocateNode and eliminate the MemBarStoreStoreNode if possible
1314     // based on the escape status of the AllocateNode.
1315     insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out(AllocateNode::RawAddress));
1316   }
1317   if (compress) {
1318     set_result(_gvn.transform(count));
1319   }
1320   return true;
1321 }
1322 
1323 #ifdef _LP64
1324 #define XTOP ,top() /*additional argument*/
1325 #else  //_LP64
1326 #define XTOP        /*no additional argument*/
1327 #endif //_LP64
1328 
1329 //------------------------inline_string_toBytesU--------------------------
1330 // public static byte[] StringUTF16.toBytes(char[] value, int off, int len)
1331 bool LibraryCallKit::inline_string_toBytesU() {



1332   // Get the arguments.
1333   Node* value     = argument(0);
1334   Node* offset    = argument(1);
1335   Node* length    = argument(2);
1336 
1337   Node* newcopy = NULL;
1338 
1339   // Set the original stack and the reexecute bit for the interpreter to reexecute
1340   // the bytecode that invokes StringUTF16.toBytes() if deoptimization happens.
1341   { PreserveReexecuteState preexecs(this);
1342     jvms()->set_should_reexecute(true);
1343 
1344     // Check if a null path was taken unconditionally.
1345     value = null_check(value);
1346 
1347     RegionNode* bailout = new RegionNode(1);
1348     record_for_igvn(bailout);
1349 
1350     // Make sure that resulting byte[] length does not overflow Integer.MAX_VALUE

1351     generate_negative_guard(length, bailout);


1352     generate_limit_guard(length, intcon(0), intcon(max_jint/2), bailout);
1353 
1354     if (bailout->req() > 1) {
1355       PreserveJVMState pjvms(this);
1356       set_control(_gvn.transform(bailout));
1357       uncommon_trap(Deoptimization::Reason_intrinsic,
1358                     Deoptimization::Action_maybe_recompile);
1359     }
1360     if (stopped()) return true;
1361 
1362     // Range checks are done by caller.
1363 
1364     Node* size = _gvn.transform(new LShiftINode(length, intcon(1)));
1365     Node* klass_node = makecon(TypeKlassPtr::make(ciTypeArrayKlass::make(T_BYTE)));
1366     newcopy = new_array(klass_node, size, 0);  // no arguments to push
1367     AllocateArrayNode* alloc = tightly_coupled_allocation(newcopy, NULL);
1368 
1369     // Calculate starting addresses.
1370     Node* src_start = array_element_address(value, offset, T_CHAR);
1371     Node* dst_start = basic_plus_adr(newcopy, arrayOopDesc::base_offset_in_bytes(T_BYTE));
1372 
1373     // Check if src array address is aligned to HeapWordSize (dst is always aligned)
1374     const TypeInt* toffset = gvn().type(offset)->is_int();
1375     bool aligned = toffset->is_con() && ((toffset->get_con() * type2aelembytes(T_CHAR)) % HeapWordSize == 0);
1376 
1377     // Figure out which arraycopy runtime method to call (disjoint, uninitialized).
1378     const char* copyfunc_name = "arraycopy";
1379     address     copyfunc_addr = StubRoutines::select_arraycopy_function(T_CHAR, aligned, true, copyfunc_name, true);
1380     Node* call = make_runtime_call(RC_LEAF|RC_NO_FP,
1381                       OptoRuntime::fast_arraycopy_Type(),
1382                       copyfunc_addr, copyfunc_name, TypeRawPtr::BOTTOM,


1395       // a subsequent store that would make this object accessible by
1396       // other threads.
1397       // Record what AllocateNode this StoreStore protects so that
1398       // escape analysis can go from the MemBarStoreStoreNode to the
1399       // AllocateNode and eliminate the MemBarStoreStoreNode if possible
1400       // based on the escape status of the AllocateNode.
1401       insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out(AllocateNode::RawAddress));
1402     } else {
1403       insert_mem_bar(Op_MemBarCPUOrder);
1404     }
1405   } // original reexecute is set back here
1406 
1407   C->set_has_split_ifs(true); // Has chance for split-if optimization
1408   if (!stopped()) {
1409     set_result(newcopy);
1410   }
1411   return true;
1412 }
1413 
1414 //------------------------inline_string_getCharsU--------------------------
1415 // public void StringUTF16.getChars(byte[] value, int srcBegin, int srcEnd, char dst[], int dstBegin)
1416 bool LibraryCallKit::inline_string_getCharsU() {
1417   if (too_many_traps(Deoptimization::Reason_intrinsic))  return false;


1418 
1419   // Get the arguments.
1420   Node* value     = argument(0);
1421   Node* src_begin = argument(1);
1422   Node* src_end   = argument(2); // exclusive offset (i < src_end)
1423   Node* dst       = argument(3);
1424   Node* dst_begin = argument(4);
1425 
1426   // Check for allocation before we add nodes that would confuse
1427   // tightly_coupled_allocation()
1428   AllocateArrayNode* alloc = tightly_coupled_allocation(dst, NULL);
1429 
1430   // Check if a null path was taken unconditionally.
1431   value = null_check(value);
1432   dst = null_check(dst);
1433   if (stopped()) {
1434     return true;
1435   }
1436 
1437   // Range checks are done by caller.
1438 
1439   // Get length and convert char[] offset to byte[] offset
1440   Node* length = _gvn.transform(new SubINode(src_end, src_begin));
1441   src_begin = _gvn.transform(new LShiftINode(src_begin, intcon(1)));
1442 







1443   if (!stopped()) {
1444     // Calculate starting addresses.
1445     Node* src_start = array_element_address(value, src_begin, T_BYTE);
1446     Node* dst_start = array_element_address(dst, dst_begin, T_CHAR);
1447 
1448     // Check if array addresses are aligned to HeapWordSize
1449     const TypeInt* tsrc = gvn().type(src_begin)->is_int();
1450     const TypeInt* tdst = gvn().type(dst_begin)->is_int();
1451     bool aligned = tsrc->is_con() && ((tsrc->get_con() * type2aelembytes(T_BYTE)) % HeapWordSize == 0) &&
1452                    tdst->is_con() && ((tdst->get_con() * type2aelembytes(T_CHAR)) % HeapWordSize == 0);
1453 
1454     // Figure out which arraycopy runtime method to call (disjoint, uninitialized).
1455     const char* copyfunc_name = "arraycopy";
1456     address     copyfunc_addr = StubRoutines::select_arraycopy_function(T_CHAR, aligned, true, copyfunc_name, true);
1457     Node* call = make_runtime_call(RC_LEAF|RC_NO_FP,
1458                       OptoRuntime::fast_arraycopy_Type(),
1459                       copyfunc_addr, copyfunc_name, TypeRawPtr::BOTTOM,
1460                       src_start, dst_start, ConvI2X(length) XTOP);
1461     // Do not let reads from the cloned object float above the arraycopy.
1462     if (alloc != NULL) {
1463       if (alloc->maybe_set_complete(&_gvn)) {
1464         // "You break it, you buy it."
1465         InitializeNode* init = alloc->initialization();




  // Abort the VM when an intrinsic id reaches a handler that does not
  // expect it; used as the default arm of intrinsic dispatch switches.
  void fatal_unexpected_iid(vmIntrinsics::ID iid) {
    fatal("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid));
  }
 138 
 139   void  set_result(Node* n) { assert(_result == NULL, "only set once"); _result = n; }
 140   void  set_result(RegionNode* region, PhiNode* value);
 141   Node*     result() { return _result; }
 142 
 143   virtual int reexecute_sp() { return _reexecute_sp; }
 144 
 145   // Helper functions to inline natives
 146   Node* generate_guard(Node* test, RegionNode* region, float true_prob);
 147   Node* generate_slow_guard(Node* test, RegionNode* region);
 148   Node* generate_fair_guard(Node* test, RegionNode* region);
 149   Node* generate_negative_guard(Node* index, RegionNode* region,
 150                                 // resulting CastII of index:
 151                                 Node* *pos_index = NULL);
 152   Node* generate_limit_guard(Node* offset, Node* subseq_length,
 153                              Node* array_length,
 154                              RegionNode* region);
 155   void  generate_string_range_check(Node* array, Node* offset,
 156                                     Node* length, bool char_count);
 157   Node* generate_current_thread(Node* &tls_output);
 158   Node* load_mirror_from_klass(Node* klass);
 159   Node* load_klass_from_mirror_common(Node* mirror, bool never_see_null,
 160                                       RegionNode* region, int null_path,
 161                                       int offset);
  // Convenience wrapper: load the Klass* stored at
  // java_lang_Class::klass_offset_in_bytes() from a java.lang.Class mirror.
  // Delegates all null/guard handling to load_klass_from_mirror_common.
  Node* load_klass_from_mirror(Node* mirror, bool never_see_null,
                               RegionNode* region, int null_path) {
    int offset = java_lang_Class::klass_offset_in_bytes();
    return load_klass_from_mirror_common(mirror, never_see_null,
                                         region, null_path,
                                         offset);
  }
  // Convenience wrapper: like load_klass_from_mirror, but reads the
  // array-klass slot (java_lang_Class::array_klass_offset_in_bytes())
  // of the mirror instead of the instance-klass slot.
  Node* load_array_klass_from_mirror(Node* mirror, bool never_see_null,
                                     RegionNode* region, int null_path) {
    int offset = java_lang_Class::array_klass_offset_in_bytes();
    return load_klass_from_mirror_common(mirror, never_see_null,
                                         region, null_path,
                                         offset);
  }
 176   Node* generate_access_flags_guard(Node* kls,


  // Guard on the array-kind of 'kls', delegating to
  // generate_array_guard_common with obj_array = true, not_array = true
  // (exact guard polarity is defined there -- see its implementation).
  Node* generate_non_objArray_guard(Node* kls, RegionNode* region) {
    return generate_array_guard_common(kls, region, true, true);
  }
 192   Node* generate_array_guard_common(Node* kls, RegionNode* region,
 193                                     bool obj_array, bool not_array);
 194   Node* generate_virtual_guard(Node* obj_klass, RegionNode* slow_region);
 195   CallJavaNode* generate_method_call(vmIntrinsics::ID method_id,
 196                                      bool is_virtual = false, bool is_static = false);
  // Build a call to the named method with is_virtual = false,
  // is_static = true (a static Java call for the intrinsic's slow path).
  CallJavaNode* generate_method_call_static(vmIntrinsics::ID method_id) {
    return generate_method_call(method_id, false, true);
  }
  // Build a call to the named method with is_virtual = true,
  // is_static = false (a virtual Java call for the intrinsic's slow path).
  CallJavaNode* generate_method_call_virtual(vmIntrinsics::ID method_id) {
    return generate_method_call(method_id, true, false);
  }
 203   Node * load_field_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString, bool is_exact, bool is_static, ciInstanceKlass * fromKls);
 204 
 205   Node* make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2, StrIntrinsicNode::ArgEnc ae);
 206   bool inline_string_compareTo(StrIntrinsicNode::ArgEnc ae);
 207   bool inline_string_indexOf(StrIntrinsicNode::ArgEnc ae);
 208   bool inline_string_indexOfI(StrIntrinsicNode::ArgEnc ae);
 209   Node* make_indexOf_node(Node* src_start, Node* src_count, Node* tgt_start, Node* tgt_count,
 210                           RegionNode* region, Node* phi, StrIntrinsicNode::ArgEnc ae);
 211   bool inline_string_indexOfChar();
 212   bool inline_string_equals(StrIntrinsicNode::ArgEnc ae);
 213   bool inline_string_toBytesU();
 214   bool inline_string_getCharsU();
 215   bool inline_string_copy(bool compress);
 216   bool inline_string_char_access(bool is_store);
 217   Node* round_double_node(Node* n);
 218   bool runtime_math(const TypeFunc* call_type, address funcAddr, const char* funcName);
 219   bool inline_math_native(vmIntrinsics::ID id);
 220   bool inline_trig(vmIntrinsics::ID id);
 221   bool inline_math(vmIntrinsics::ID id);
 222   template <typename OverflowOp>
 223   bool inline_math_overflow(Node* arg1, Node* arg2);
 224   void inline_math_mathExact(Node* math, Node* test);
 225   bool inline_math_addExactI(bool is_increment);
 226   bool inline_math_addExactL(bool is_increment);
 227   bool inline_math_multiplyExactI();
 228   bool inline_math_multiplyExactL();
 229   bool inline_math_negateExactI();
 230   bool inline_math_negateExactL();


 884 // check they generally use Plan B instead of Plan A.
 885 // For the moment we use Plan A.
// Emit a guard that the subsequence [offset, offset + subseq_length)
// fits inside an array of 'array_length' elements; one unsigned compare
// covers both the range check and int-add overflow.  Returns the control
// projection of the failing path (wired into 'region'), or NULL when the
// guard is statically unnecessary.
inline Node* LibraryCallKit::generate_limit_guard(Node* offset,
                                                  Node* subseq_length,
                                                  Node* array_length,
                                                  RegionNode* region) {
  if (stopped())
    return NULL;                // already stopped
  bool zero_offset = _gvn.type(offset) == TypeInt::ZERO;
  if (zero_offset && subseq_length->eqv_uncast(array_length))
    return NULL;                // common case of whole-array copy
  Node* last = subseq_length;
  if (!zero_offset)             // last += offset
    last = _gvn.transform(new AddINode(last, offset));
  // Unsigned "array_length < last" means the subsequence overruns.
  Node* cmp_lt = _gvn.transform(new CmpUNode(array_length, last));
  Node* bol_lt = _gvn.transform(new BoolNode(cmp_lt, BoolTest::lt));
  Node* is_over = generate_guard(bol_lt, region, PROB_MIN);
  return is_over;
}
 903 
// Emit range checks for the given String.value byte array:
// offset and count must be non-negative and offset + count must not
// exceed the array length.  When char_count is true, 'count' is given
// in chars and is scaled to bytes first.  Failing paths deoptimize.
void LibraryCallKit::generate_string_range_check(Node* array, Node* offset, Node* count, bool char_count) {
  if (stopped()) {
    return; // already stopped
  }
  // Collect every failing guard path in one bailout region.
  RegionNode* bailout = new RegionNode(1);
  record_for_igvn(bailout);
  if (char_count) {
    // Convert char count to byte count
    count = _gvn.transform(new LShiftINode(count, intcon(1)));
  }

  // Offset and count must not be negative
  generate_negative_guard(offset, bailout);
  generate_negative_guard(count, bailout);
  // Offset + count must not exceed length of array
  generate_limit_guard(offset, count, load_array_length(array), bailout);

  if (bailout->req() > 1) {
    // Some guard may fail at run time: trap on that path so the
    // interpreter re-executes the call and raises the proper exception.
    PreserveJVMState pjvms(this);
    set_control(_gvn.transform(bailout));
    uncommon_trap(Deoptimization::Reason_intrinsic,
                  Deoptimization::Action_maybe_recompile);
  }
}
 929 
 930 //--------------------------generate_current_thread--------------------
 931 Node* LibraryCallKit::generate_current_thread(Node* &tls_output) {
 932   ciKlass*    thread_klass = env()->Thread_klass();
 933   const Type* thread_type  = TypeOopPtr::make_from_klass(thread_klass)->cast_to_ptr_type(TypePtr::NotNull);
 934   Node* thread = _gvn.transform(new ThreadLocalNode());
 935   Node* p = basic_plus_adr(top()/*!oop*/, thread, in_bytes(JavaThread::threadObj_offset()));
 936   Node* threadObj = make_load(NULL, p, thread_type, T_OBJECT, MemNode::unordered);
 937   tls_output = thread;
 938   return threadObj;
 939 }
 940 
 941 
 942 //------------------------------make_string_method_node------------------------
 943 // Helper method for String intrinsic functions. This version is called with
 944 // str1 and str2 pointing to byte[] nodes containing Latin1 or UTF16 encoded
 945 // characters (depending on 'is_byte'). cnt1 and cnt2 are pointing to Int nodes
 946 // containing the lengths of str1 and str2.
 947 Node* LibraryCallKit::make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2, StrIntrinsicNode::ArgEnc ae) {
 948   Node* result = NULL;


1028   set_control(_gvn.transform(region));
1029   record_for_igvn(region);
1030 
1031   set_result(_gvn.transform(phi));
1032   return true;
1033 }
1034 
1035 //------------------------------inline_array_equals----------------------------
1036 bool LibraryCallKit::inline_array_equals(StrIntrinsicNode::ArgEnc ae) {
1037   assert(ae == StrIntrinsicNode::UU || ae == StrIntrinsicNode::LL, "unsupported array types");
1038   Node* arg1 = argument(0);
1039   Node* arg2 = argument(1);
1040 
1041   const TypeAryPtr* mtype = (ae == StrIntrinsicNode::UU) ? TypeAryPtr::CHARS : TypeAryPtr::BYTES;
1042   set_result(_gvn.transform(new AryEqNode(control(), memory(mtype), arg1, arg2, ae)));
1043   return true;
1044 }
1045 
1046 //------------------------------inline_hasNegatives------------------------------
1047 bool LibraryCallKit::inline_hasNegatives() {
1048   if (too_many_traps(Deoptimization::Reason_intrinsic)) {
1049     return false;
1050   }
1051 
1052   assert(callee()->signature()->size() == 3, "hasNegatives has 3 parameters");
1053   // no receiver since it is static method
1054   Node* ba         = argument(0);
1055   Node* offset     = argument(1);
1056   Node* len        = argument(2);
1057 
1058   // Range checks
1059   generate_string_range_check(ba, offset, len, false);
1060   if (stopped()) {
1061     return true;










1062   }

1063   Node* ba_start = array_element_address(ba, offset, T_BYTE);
1064   Node* result = new HasNegativesNode(control(), memory(TypeAryPtr::BYTES), ba_start, len);
1065   set_result(_gvn.transform(result));

1066   return true;
1067 }
1068 
1069 bool LibraryCallKit::inline_objects_checkIndex() {
1070   Node* index = argument(0);
1071   Node* length = argument(1);
1072   if (too_many_traps(Deoptimization::Reason_intrinsic) || too_many_traps(Deoptimization::Reason_range_check)) {
1073     return false;
1074   }
1075 
1076   Node* len_pos_cmp = _gvn.transform(new CmpINode(length, intcon(0)));
1077   Node* len_pos_bol = _gvn.transform(new BoolNode(len_pos_cmp, BoolTest::ge));
1078 
1079   {
1080     BuildCutout unless(this, len_pos_bol, PROB_MAX);
1081     uncommon_trap(Deoptimization::Reason_intrinsic,
1082                   Deoptimization::Action_make_not_entrant);
1083   }
1084 
1085   if (stopped()) {


1126   RegionNode* result_rgn = new RegionNode(4);
1127   Node*       result_phi = new PhiNode(result_rgn, TypeInt::INT);
1128 
1129   // Get start addr and length of source string
1130   Node* src_start = array_element_address(src, intcon(0), T_BYTE);
1131   Node* src_count = load_array_length(src);
1132 
1133   // Get start addr and length of substring
1134   Node* tgt_start = array_element_address(tgt, intcon(0), T_BYTE);
1135   Node* tgt_count = load_array_length(tgt);
1136 
1137   if (ae == StrIntrinsicNode::UU || ae == StrIntrinsicNode::UL) {
1138     // Divide src size by 2 if String is UTF16 encoded
1139     src_count = _gvn.transform(new RShiftINode(src_count, intcon(1)));
1140   }
1141   if (ae == StrIntrinsicNode::UU) {
1142     // Divide substring size by 2 if String is UTF16 encoded
1143     tgt_count = _gvn.transform(new RShiftINode(tgt_count, intcon(1)));
1144   }
1145 
1146   Node* result = make_indexOf_node(src_start, src_count, tgt_start, tgt_count, result_rgn, result_phi, ae);
1147   if (result != NULL) {
1148     result_phi->init_req(3, result);
1149     result_rgn->init_req(3, control());




















1150   }
1151   set_control(_gvn.transform(result_rgn));
1152   record_for_igvn(result_rgn);
1153   set_result(_gvn.transform(result_phi));
1154 
1155   return true;
1156 }
1157 
//-----------------------------inline_string_indexOf-----------------------
// Intrinsify substring search with a starting index:
//   indexOf(byte[] src, int srcCount, byte[] tgt, int tgtCount, int fromIndex)
// 'ae' describes the src/tgt encodings; counts and fromIndex are in chars,
// and byte offsets are derived by scaling wherever the array is UTF16.
// Returns the absolute char index of the match, or -1.
bool LibraryCallKit::inline_string_indexOfI(StrIntrinsicNode::ArgEnc ae) {
  if (too_many_traps(Deoptimization::Reason_intrinsic)) {
    return false;
  }
  if (!Matcher::has_match_rule(Op_StrIndexOf) || !UseSSE42Intrinsics) {
    return false;
  }
  assert(callee()->signature()->size() == 5, "String.indexOf() has 5 arguments");
  Node* src         = argument(0); // byte[]
  Node* src_count   = argument(1); // char count
  Node* tgt         = argument(2); // byte[]
  Node* tgt_count   = argument(3); // char count
  Node* from_index  = argument(4); // char index

  // Multiply byte array index by 2 if String is UTF16 encoded
  Node* src_offset = (ae == StrIntrinsicNode::LL) ? from_index : _gvn.transform(new LShiftINode(from_index, intcon(1)));
  // Chars remaining in src from from_index to the end.
  src_count = _gvn.transform(new SubINode(src_count, from_index));
  Node* src_start = array_element_address(src, src_offset, T_BYTE);
  Node* tgt_start = array_element_address(tgt, intcon(0), T_BYTE);

  // Range checks
  generate_string_range_check(src, src_offset, src_count, ae != StrIntrinsicNode::LL);
  generate_string_range_check(tgt, intcon(0), tgt_count, ae == StrIntrinsicNode::UU);
  if (stopped()) {
    // A range guard always fails; the range-check trap handles it.
    return true;
  }

  // Merge region: slots 1-2 are filled by make_indexOf_node's fast-path
  // guards, slot 3 is the not-found (-1) path, slot 4 the found path.
  RegionNode* region = new RegionNode(5);
  Node* phi = new PhiNode(region, TypeInt::INT);

  Node* result = make_indexOf_node(src_start, src_count, tgt_start, tgt_count, region, phi, ae);
  if (result != NULL) {
    // The result is index relative to from_index if substring was found, -1 otherwise.
    // Generate code which will fold into cmove.
    Node* cmp = _gvn.transform(new CmpINode(result, intcon(0)));
    Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::lt));

    Node* if_lt = generate_slow_guard(bol, NULL);
    if (if_lt != NULL) {
      // result == -1
      phi->init_req(3, result);
      region->init_req(3, if_lt);
    }
    if (!stopped()) {
      // Found: rebase the relative index to an absolute char index.
      result = _gvn.transform(new AddINode(result, from_index));
      phi->init_req(4, result);
      region->init_req(4, control());
    }
  }

  set_control(_gvn.transform(region));
  record_for_igvn(region);
  set_result(_gvn.transform(phi));

  return true;
}
1215 
// Create StrIndexOfNode with fast path checks
// Emits the two cheap guards into region/phi slots 1 and 2:
//   slot 1: needle longer than haystack  -> result -1
//   slot 2: empty needle                 -> result 0
// If control is still live afterwards, returns the StrIndexOf node for the
// general search; returns NULL when the guards consumed all control paths
// (callers fill slots 3+ only for a non-NULL result).
Node* LibraryCallKit::make_indexOf_node(Node* src_start, Node* src_count, Node* tgt_start, Node* tgt_count,
                                        RegionNode* region, Node* phi, StrIntrinsicNode::ArgEnc ae) {
  // Check for substr count > string count
  Node* cmp = _gvn.transform(new CmpINode(tgt_count, src_count));
  Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::gt));
  Node* if_gt = generate_slow_guard(bol, NULL);
  if (if_gt != NULL) {
    // Needle cannot fit in the haystack: result is -1.
    phi->init_req(1, intcon(-1));
    region->init_req(1, if_gt);
  }
  if (!stopped()) {
    // Check for substr count == 0
    cmp = _gvn.transform(new CmpINode(tgt_count, intcon(0)));
    bol = _gvn.transform(new BoolNode(cmp, BoolTest::eq));
    Node* if_zero = generate_slow_guard(bol, NULL);
    if (if_zero != NULL) {
      // Empty needle matches at index 0.
      phi->init_req(2, intcon(0));
      region->init_req(2, if_zero);
    }
  }
  if (!stopped()) {
    return make_string_method_node(Op_StrIndexOf, src_start, src_count, tgt_start, tgt_count, ae);
  }
  return NULL;
}
1242 
//-----------------------------inline_string_indexOfChar-----------------------
// Intrinsify single-char search in a UTF16-encoded byte[]:
//   indexOfChar(byte[] src, int ch, int fromIndex, int max)
// Returns the absolute char index of the first occurrence, or -1.
bool LibraryCallKit::inline_string_indexOfChar() {
  if (too_many_traps(Deoptimization::Reason_intrinsic)) {
    return false;
  }
  if (!Matcher::has_match_rule(Op_StrIndexOfChar) || !(UseSSE > 4)) {
    return false;
  }
  assert(callee()->signature()->size() == 4, "String.indexOfChar() has 4 arguments");
  Node* src         = argument(0); // byte[]
  Node* tgt         = argument(1); // tgt is int ch
  Node* from_index  = argument(2);
  Node* max         = argument(3);

  // Scale the char index to a byte offset (UTF16: 2 bytes per char).
  Node* src_offset = _gvn.transform(new LShiftINode(from_index, intcon(1)));
  Node* src_start = array_element_address(src, src_offset, T_BYTE);
  // Number of chars to search: [from_index, max).
  Node* src_count = _gvn.transform(new SubINode(max, from_index));

  // Range checks
  generate_string_range_check(src, src_offset, src_count, true);
  if (stopped()) {
    // A range guard always fails; the range-check trap handles it.
    return true;
  }

  // Merge region: slot 1 = found path, slot 2 = not-found (-1) path.
  RegionNode* region = new RegionNode(3);
  Node* phi = new PhiNode(region, TypeInt::INT);

  Node* result = new StrIndexOfCharNode(control(), memory(TypeAryPtr::BYTES), src_start, src_count, tgt, StrIntrinsicNode::none);
  C->set_has_split_ifs(true); // Has chance for split-if optimization
  _gvn.transform(result);

  Node* cmp = _gvn.transform(new CmpINode(result, intcon(0)));
  Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::lt));

  Node* if_lt = generate_slow_guard(bol, NULL);
  if (if_lt != NULL) {
    // result == -1
    phi->init_req(2, result);
    region->init_req(2, if_lt);
  }
  if (!stopped()) {
    // Found: rebase the relative index to an absolute char index.
    result = _gvn.transform(new AddINode(result, from_index));
    phi->init_req(1, result);
    region->init_req(1, control());
  }
  set_control(_gvn.transform(region));
  record_for_igvn(region);
  set_result(_gvn.transform(phi));

  return true;
}
1294 //---------------------------inline_string_copy---------------------
1295 // compressIt == true --> generate a compressed copy operation (compress char[]/byte[] to byte[])
1296 //   int StringUTF16.compress(char[] src, int srcOff, byte[] dst, int dstOff, int len)
1297 //   int StringUTF16.compress(byte[] src, int srcOff, byte[] dst, int dstOff, int len)
1298 // compressIt == false --> generate an inflated copy operation (inflate byte[] to char[]/byte[])
1299 //   void StringLatin1.inflate(byte[] src, int srcOff, char[] dst, int dstOff, int len)
1300 //   void StringLatin1.inflate(byte[] src, int srcOff, byte[] dst, int dstOff, int len)
1301 bool LibraryCallKit::inline_string_copy(bool compress) {
1302   if (too_many_traps(Deoptimization::Reason_intrinsic)) {
1303     return false;
1304   }
1305   int nargs = 5;  // 2 oops, 3 ints
1306   assert(callee()->signature()->size() == nargs, "string copy has 5 arguments");
1307 
1308   Node* src         = argument(0);
1309   Node* src_offset  = argument(1);
1310   Node* dst         = argument(2);
1311   Node* dst_offset  = argument(3);
1312   Node* length      = argument(4);
1313 
1314   // Check for allocation before we add nodes that would confuse
1315   // tightly_coupled_allocation()
1316   AllocateArrayNode* alloc = tightly_coupled_allocation(dst, NULL);
1317 
1318   // Figure out the size and type of the elements we will be copying.
1319   const Type* src_type = src->Value(&_gvn);
1320   const Type* dst_type = dst->Value(&_gvn);
1321   BasicType src_elem = src_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
1322   BasicType dst_elem = dst_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
1323   assert((compress && dst_elem == T_BYTE && (src_elem == T_BYTE || src_elem == T_CHAR)) ||
1324          (!compress && src_elem == T_BYTE && (dst_elem == T_BYTE || dst_elem == T_CHAR)),
1325          "Unsupported array types for inline_string_copy");
1326 
1327   // Range checks
1328   generate_string_range_check(src, src_offset, length, compress && src_elem == T_BYTE);
1329   generate_string_range_check(dst, dst_offset, length, !compress && dst_elem == T_BYTE);
1330   if (stopped()) {
1331     return true;
1332   }
1333 
1334   // Convert char[] offsets to byte[] offsets
1335   if (compress && src_elem == T_BYTE) {
1336     src_offset = _gvn.transform(new LShiftINode(src_offset, intcon(1)));
1337   } else if (!compress && dst_elem == T_BYTE) {
1338     dst_offset = _gvn.transform(new LShiftINode(dst_offset, intcon(1)));
1339   }
1340 
1341   Node* src_start = array_element_address(src, src_offset, src_elem);
1342   Node* dst_start = array_element_address(dst, dst_offset, dst_elem);
1343   // 'src_start' points to src array + scaled offset
1344   // 'dst_start' points to dst array + scaled offset
1345   Node* count = NULL;
1346   if (compress) {
1347     count = compress_string(src_start, dst_start, length);
1348   } else {
1349     inflate_string(src_start, dst_start, length);
1350   }
1351 
1352   if (alloc != NULL) {
1353     if (alloc->maybe_set_complete(&_gvn)) {


1365     // escape analysis can go from the MemBarStoreStoreNode to the
1366     // AllocateNode and eliminate the MemBarStoreStoreNode if possible
1367     // based on the escape status of the AllocateNode.
1368     insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out(AllocateNode::RawAddress));
1369   }
1370   if (compress) {
1371     set_result(_gvn.transform(count));
1372   }
1373   return true;
1374 }
1375 
1376 #ifdef _LP64
1377 #define XTOP ,top() /*additional argument*/
1378 #else  //_LP64
1379 #define XTOP        /*no additional argument*/
1380 #endif //_LP64
1381 
1382 //------------------------inline_string_toBytesU--------------------------
1383 // public static byte[] StringUTF16.toBytes(char[] value, int off, int len)
1384 bool LibraryCallKit::inline_string_toBytesU() {
1385   if (too_many_traps(Deoptimization::Reason_intrinsic)) {
1386     return false;
1387   }
1388   // Get the arguments.
1389   Node* value     = argument(0);
1390   Node* offset    = argument(1);
1391   Node* length    = argument(2);
1392 
1393   Node* newcopy = NULL;
1394 
1395   // Set the original stack and the reexecute bit for the interpreter to reexecute
1396   // the bytecode that invokes StringUTF16.toBytes() if deoptimization happens.
1397   { PreserveReexecuteState preexecs(this);
1398     jvms()->set_should_reexecute(true);
1399 
1400     // Check if a null path was taken unconditionally.
1401     value = null_check(value);
1402 
1403     RegionNode* bailout = new RegionNode(1);
1404     record_for_igvn(bailout);
1405 
1406     // Range checks
1407     generate_negative_guard(offset, bailout);
1408     generate_negative_guard(length, bailout);
1409     generate_limit_guard(offset, length, load_array_length(value), bailout);
1410     // Make sure that resulting byte[] length does not overflow Integer.MAX_VALUE
1411     generate_limit_guard(length, intcon(0), intcon(max_jint/2), bailout);
1412 
1413     if (bailout->req() > 1) {
1414       PreserveJVMState pjvms(this);
1415       set_control(_gvn.transform(bailout));
1416       uncommon_trap(Deoptimization::Reason_intrinsic,
1417                     Deoptimization::Action_maybe_recompile);
1418     }
1419     if (stopped()) {
1420       return true;
1421     }
1422 
1423     Node* size = _gvn.transform(new LShiftINode(length, intcon(1)));
1424     Node* klass_node = makecon(TypeKlassPtr::make(ciTypeArrayKlass::make(T_BYTE)));
1425     newcopy = new_array(klass_node, size, 0);  // no arguments to push
1426     AllocateArrayNode* alloc = tightly_coupled_allocation(newcopy, NULL);
1427 
1428     // Calculate starting addresses.
1429     Node* src_start = array_element_address(value, offset, T_CHAR);
1430     Node* dst_start = basic_plus_adr(newcopy, arrayOopDesc::base_offset_in_bytes(T_BYTE));
1431 
1432     // Check if src array address is aligned to HeapWordSize (dst is always aligned)
1433     const TypeInt* toffset = gvn().type(offset)->is_int();
1434     bool aligned = toffset->is_con() && ((toffset->get_con() * type2aelembytes(T_CHAR)) % HeapWordSize == 0);
1435 
1436     // Figure out which arraycopy runtime method to call (disjoint, uninitialized).
1437     const char* copyfunc_name = "arraycopy";
1438     address     copyfunc_addr = StubRoutines::select_arraycopy_function(T_CHAR, aligned, true, copyfunc_name, true);
1439     Node* call = make_runtime_call(RC_LEAF|RC_NO_FP,
1440                       OptoRuntime::fast_arraycopy_Type(),
1441                       copyfunc_addr, copyfunc_name, TypeRawPtr::BOTTOM,


1454       // a subsequent store that would make this object accessible by
1455       // other threads.
1456       // Record what AllocateNode this StoreStore protects so that
1457       // escape analysis can go from the MemBarStoreStoreNode to the
1458       // AllocateNode and eliminate the MemBarStoreStoreNode if possible
1459       // based on the escape status of the AllocateNode.
1460       insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out(AllocateNode::RawAddress));
1461     } else {
1462       insert_mem_bar(Op_MemBarCPUOrder);
1463     }
1464   } // original reexecute is set back here
1465 
1466   C->set_has_split_ifs(true); // Has chance for split-if optimization
1467   if (!stopped()) {
1468     set_result(newcopy);
1469   }
1470   return true;
1471 }
1472 
1473 //------------------------inline_string_getCharsU--------------------------
1474 // public void StringUTF16.getChars(byte[] src, int srcBegin, int srcEnd, char dst[], int dstBegin)
1475 bool LibraryCallKit::inline_string_getCharsU() {
1476   if (too_many_traps(Deoptimization::Reason_intrinsic)) {
1477     return false;
1478   }
1479 
1480   // Get the arguments.
1481   Node* src       = argument(0);
1482   Node* src_begin = argument(1);
1483   Node* src_end   = argument(2); // exclusive offset (i < src_end)
1484   Node* dst       = argument(3);
1485   Node* dst_begin = argument(4);
1486 
1487   // Check for allocation before we add nodes that would confuse
1488   // tightly_coupled_allocation()
1489   AllocateArrayNode* alloc = tightly_coupled_allocation(dst, NULL);
1490 
1491   // Check if a null path was taken unconditionally.
1492   src = null_check(src);
1493   dst = null_check(dst);
1494   if (stopped()) {
1495     return true;
1496   }
1497 


1498   // Get length and convert char[] offset to byte[] offset
1499   Node* length = _gvn.transform(new SubINode(src_end, src_begin));
1500   src_begin = _gvn.transform(new LShiftINode(src_begin, intcon(1)));
1501 
1502   // Range checks
1503   generate_string_range_check(src, src_begin, length, true);
1504   generate_string_range_check(dst, dst_begin, length, false);
1505   if (stopped()) {
1506     return true;
1507   }
1508 
1509   if (!stopped()) {
1510     // Calculate starting addresses.
1511     Node* src_start = array_element_address(src, src_begin, T_BYTE);
1512     Node* dst_start = array_element_address(dst, dst_begin, T_CHAR);
1513 
1514     // Check if array addresses are aligned to HeapWordSize
1515     const TypeInt* tsrc = gvn().type(src_begin)->is_int();
1516     const TypeInt* tdst = gvn().type(dst_begin)->is_int();
1517     bool aligned = tsrc->is_con() && ((tsrc->get_con() * type2aelembytes(T_BYTE)) % HeapWordSize == 0) &&
1518                    tdst->is_con() && ((tdst->get_con() * type2aelembytes(T_CHAR)) % HeapWordSize == 0);
1519 
1520     // Figure out which arraycopy runtime method to call (disjoint, uninitialized).
1521     const char* copyfunc_name = "arraycopy";
1522     address     copyfunc_addr = StubRoutines::select_arraycopy_function(T_CHAR, aligned, true, copyfunc_name, true);
1523     Node* call = make_runtime_call(RC_LEAF|RC_NO_FP,
1524                       OptoRuntime::fast_arraycopy_Type(),
1525                       copyfunc_addr, copyfunc_name, TypeRawPtr::BOTTOM,
1526                       src_start, dst_start, ConvI2X(length) XTOP);
1527     // Do not let reads from the cloned object float above the arraycopy.
1528     if (alloc != NULL) {
1529       if (alloc->maybe_set_complete(&_gvn)) {
1530         // "You break it, you buy it."
1531         InitializeNode* init = alloc->initialization();


< prev index next >