src/share/vm/opto/library_call.cpp

Old:

1222   // Figure out the size and type of the elements we will be copying.
1223   const Type* src_type = src->Value(&_gvn);
1224   const Type* dst_type = dst->Value(&_gvn);
1225   BasicType src_elem = src_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
1226   BasicType dst_elem = dst_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
1227   assert((compress && dst_elem == T_BYTE && (src_elem == T_BYTE || src_elem == T_CHAR)) ||
1228          (!compress && src_elem == T_BYTE && (dst_elem == T_BYTE || dst_elem == T_CHAR)),
1229          "Unsupported array types for inline_string_copy");
1230 
1231   // Convert char[] offsets to byte[] offsets
1232   if (compress && src_elem == T_BYTE) {
1233     src_offset = _gvn.transform(new LShiftINode(src_offset, intcon(1)));
1234   } else if (!compress && dst_elem == T_BYTE) {
1235     dst_offset = _gvn.transform(new LShiftINode(dst_offset, intcon(1)));
1236   }
1237 
1238   Node* src_start = array_element_address(src, src_offset, src_elem);
1239   Node* dst_start = array_element_address(dst, dst_offset, dst_elem);
1240   // 'src_start' points to src array + scaled offset
1241   // 'dst_start' points to dst array + scaled offset
1242   Node* count;
1243   if (compress) {
1244     count = compress_string(src_start, dst_start, length);
1245   } else {
1246     inflate_string(src_start, dst_start, length);
1247   }
1248 
1249   if (alloc != NULL) {
1250     if (alloc->maybe_set_complete(&_gvn)) {
1251       // "You break it, you buy it."
1252       InitializeNode* init = alloc->initialization();
1253       assert(init->is_complete(), "we just did this");
1254       init->set_complete_with_arraycopy();
1255       assert(dst->is_CheckCastPP(), "sanity");
1256       assert(dst->in(0)->in(0) == init, "dest pinned");
1257     }
1258     // Do not let stores that initialize this object be reordered with
1259     // a subsequent store that would make this object accessible by
1260     // other threads.
1261     // Record what AllocateNode this StoreStore protects so that
1262     // escape analysis can go from the MemBarStoreStoreNode to the

New:

1222   // Figure out the size and type of the elements we will be copying.
1223   const Type* src_type = src->Value(&_gvn);
1224   const Type* dst_type = dst->Value(&_gvn);
1225   BasicType src_elem = src_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
1226   BasicType dst_elem = dst_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
1227   assert((compress && dst_elem == T_BYTE && (src_elem == T_BYTE || src_elem == T_CHAR)) ||
1228          (!compress && src_elem == T_BYTE && (dst_elem == T_BYTE || dst_elem == T_CHAR)),
1229          "Unsupported array types for inline_string_copy");
1230 
1231   // Convert char[] offsets to byte[] offsets
1232   if (compress && src_elem == T_BYTE) {
1233     src_offset = _gvn.transform(new LShiftINode(src_offset, intcon(1)));
1234   } else if (!compress && dst_elem == T_BYTE) {
1235     dst_offset = _gvn.transform(new LShiftINode(dst_offset, intcon(1)));
1236   }
1237 
1238   Node* src_start = array_element_address(src, src_offset, src_elem);
1239   Node* dst_start = array_element_address(dst, dst_offset, dst_elem);
1240   // 'src_start' points to src array + scaled offset
1241   // 'dst_start' points to dst array + scaled offset
1242   Node* count = NULL;
1243   if (compress) {
1244     count = compress_string(src_start, dst_start, length);
1245   } else {
1246     inflate_string(src_start, dst_start, length);
1247   }
1248 
1249   if (alloc != NULL) {
1250     if (alloc->maybe_set_complete(&_gvn)) {
1251       // "You break it, you buy it."
1252       InitializeNode* init = alloc->initialization();
1253       assert(init->is_complete(), "we just did this");
1254       init->set_complete_with_arraycopy();
1255       assert(dst->is_CheckCastPP(), "sanity");
1256       assert(dst->in(0)->in(0) == init, "dest pinned");
1257     }
1258     // Do not let stores that initialize this object be reordered with
1259     // a subsequent store that would make this object accessible by
1260     // other threads.
1261     // Record what AllocateNode this StoreStore protects so that
1262     // escape analysis can go from the MemBarStoreStoreNode to the
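
For context, here is a minimal stand-alone C++ sketch of the copy semantics behind the two branches above (compress_string at line 1244, inflate_string at line 1246). This is not HotSpot code: the helper names and the exact return convention of the compression count are illustrative assumptions.

// Sketch only (not HotSpot code); the names and the failure contract of
// the count are assumptions for illustration.
#include <cstddef>
#include <cstdint>

// Compress UTF-16 chars to Latin-1 bytes. Assumed contract: returns the
// number of chars processed; a result smaller than 'len' means a char
// above 0xFF was hit, so the data cannot be stored compressed.
static size_t compress_chars(const uint16_t* src, uint8_t* dst, size_t len) {
  for (size_t i = 0; i < len; i++) {
    if (src[i] > 0xFF) {
      return i;               // not representable in Latin-1; give up
    }
    dst[i] = (uint8_t)src[i];
  }
  return len;                 // every char fit in a single byte
}

// Inflate Latin-1 bytes back to UTF-16 chars. This direction cannot
// fail, which is why only the compress branch yields a count node.
static void inflate_bytes(const uint8_t* src, uint16_t* dst, size_t len) {
  for (size_t i = 0; i < len; i++) {
    dst[i] = src[i];          // zero-extend 0x00..0xFF
  }
}

// The LShiftINode(offset, intcon(1)) at lines 1233/1235: when UTF-16
// chars live in a byte[] backing array, a char index is doubled to get
// a byte offset, since each char occupies two bytes.
static size_t char_index_to_byte_offset(size_t char_index) {
  return char_index << 1;
}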

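The comment block that this excerpt cuts off at line 1262 concerns publication ordering. As a stand-alone analogy (again not HotSpot code), the C++ fragment below shows the reordering hazard a StoreStore barrier rules out; the release store stands in for the MemBarStoreStoreNode.

#include <atomic>

struct Buffer {
  int  len;
  char data[16];
};

std::atomic<Buffer*> g_shared{nullptr};

void publish() {
  Buffer* b = new Buffer();
  b->len = 16;                // initializing stores
  b->data[0] = 'x';
  // The release store keeps the initializing stores above from being
  // reordered past the store that makes 'b' reachable by other threads;
  // with a plain store, a reader could observe uninitialized fields.
  g_shared.store(b, std::memory_order_release);
}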
