src/hotspot/share/opto/library_call.cpp

4301   Node* raw_obj = alloc_obj->in(1);
4302   assert(alloc_obj->is_CheckCastPP() && raw_obj->is_Proj() && raw_obj->in(0)->is_Allocate(), "");
4303 
4304   AllocateNode* alloc = NULL;
4305   if (ReduceBulkZeroing) {
4306     // We will be completely responsible for initializing this object -
4307     // mark Initialize node as complete.
4308     alloc = AllocateNode::Ideal_allocation(alloc_obj, &_gvn);
4309     // The object was just allocated - there should not be any stores!
4310     guarantee(alloc != NULL && alloc->maybe_set_complete(&_gvn), "");
4311     // Mark as complete_with_arraycopy so that on AllocateNode
4312     // expansion, we know this AllocateNode is initialized by an array
4313     // copy and a StoreStore barrier exists after the array copy.
4314     alloc->initialization()->set_complete_with_arraycopy();
4315   }
4316 
4317   // Copy the fastest available way.
4318   // TODO: generate field copies for small objects instead.
4319   Node* size = _gvn.transform(obj_size);
4320 
4321   access_clone(control(), obj, alloc_obj, size, is_array);
4322 
4323   // Do not let reads from the cloned object float above the arraycopy.
4324   if (alloc != NULL) {
4325     // Do not let stores that initialize this object be reordered with
4326     // a subsequent store that would make this object accessible by
4327     // other threads.
4328     // Record what AllocateNode this StoreStore protects so that
4329     // escape analysis can go from the MemBarStoreStoreNode to the
4330     // AllocateNode and eliminate the MemBarStoreStoreNode if possible
4331     // based on the escape status of the AllocateNode.
4332     insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));
4333   } else {
4334     insert_mem_bar(Op_MemBarCPUOrder);
4335   }
4336 }
4337 
4338 //------------------------inline_native_clone----------------------------
4339 // protected native Object java.lang.Object.clone();
4340 //
4341 // Here are the simple edge cases:
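
The barrier choice above (MemBarStoreStore when the allocation is known, MemBarCPUOrder otherwise) follows the usual safe-publication rule: the stores that fill in the clone must not be reordered past the store that later makes the clone reachable from another thread. As a minimal, standalone illustration of that ordering property (plain C++11 atomics, not HotSpot code; the types and names below are invented for the sketch), a release store plays the role of the StoreStore barrier on the writer side:

#include <atomic>
#include <cassert>
#include <thread>

// Illustration only: shows the ordering a StoreStore barrier before
// publication provides on the writer side. All names are invented.
struct Cloned {
  int field = 0;
};

static std::atomic<Cloned*> g_published{nullptr};

static void writer() {
  Cloned* c = new Cloned();
  c->field = 42;  // initializing store (stands in for the clone's arraycopy)
  // The release store keeps the initializing store above from being
  // reordered past the publishing store -- the property the
  // MemBarStoreStore enforces for the freshly allocated clone.
  g_published.store(c, std::memory_order_release);
}

static void reader() {
  Cloned* c = g_published.load(std::memory_order_acquire);
  if (c != nullptr) {
    assert(c->field == 42);  // must observe the fully initialized clone
  }
}

int main() {
  std::thread t1(writer);
  std::thread t2(reader);
  t1.join();
  t2.join();
  delete g_published.load();
  return 0;
}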




4301   Node* raw_obj = alloc_obj->in(1);
4302   assert(alloc_obj->is_CheckCastPP() && raw_obj->is_Proj() && raw_obj->in(0)->is_Allocate(), "");
4303 
4304   AllocateNode* alloc = NULL;
4305   if (ReduceBulkZeroing) {
4306     // We will be completely responsible for initializing this object -
4307     // mark Initialize node as complete.
4308     alloc = AllocateNode::Ideal_allocation(alloc_obj, &_gvn);
4309     // The object was just allocated - there should not be any stores!
4310     guarantee(alloc != NULL && alloc->maybe_set_complete(&_gvn), "");
4311     // Mark as complete_with_arraycopy so that on AllocateNode
4312     // expansion, we know this AllocateNode is initialized by an array
4313     // copy and a StoreStore barrier exists after the array copy.
4314     alloc->initialization()->set_complete_with_arraycopy();
4315   }
4316 
4317   // Copy the fastest available way.
4318   // TODO: generate field copies for small objects instead.
4319   Node* size = _gvn.transform(obj_size);
4320 
4321   // Exclude the header but include the array length to copy by 8-byte words.
4322   // Can't use base_offset_in_bytes(bt) since basic type is unknown.
4323   int base_off = is_array ? arrayOopDesc::length_offset_in_bytes() :
4324                             instanceOopDesc::base_offset_in_bytes();
4325   // base_off:
4326   // 8  - 32-bit VM
4327   // 12 - 64-bit VM, compressed klass
4328   // 16 - 64-bit VM, normal klass
4329   if (base_off % BytesPerLong != 0) {
4330     assert(UseCompressedClassPointers, "");
4331     if (is_array) {
4332       // Exclude the length to copy by 8-byte words.
4333       base_off += sizeof(int);
4334     } else {
4335       // Include the klass to copy by 8-byte words.
4336       base_off = instanceOopDesc::klass_offset_in_bytes();
4337     }
4338     assert(base_off % BytesPerLong == 0, "expect 8-byte alignment");
4339   }
4340   Node* src_base  = basic_plus_adr(obj,  base_off);
4341   Node* dst_base = basic_plus_adr(alloc_obj, base_off);
4342 
4343   // Compute the length also, if needed:
4344   Node* countx = size;
4345   countx = _gvn.transform(new SubXNode(countx, MakeConX(base_off)));
4346   countx = _gvn.transform(new URShiftXNode(countx, intcon(LogBytesPerLong)));
4347 
4348   access_clone(control(), src_base, dst_base, countx, is_array);
4349 
4350   // Do not let reads from the cloned object float above the arraycopy.
4351   if (alloc != NULL) {
4352     // Do not let stores that initialize this object be reordered with
4353     // a subsequent store that would make this object accessible by
4354     // other threads.
4355     // Record what AllocateNode this StoreStore protects so that
4356     // escape analysis can go from the MemBarStoreStoreNode to the
4357     // AllocateNode and eliminate the MemBarStoreStoreNode if possible
4358     // based on the escape status of the AllocateNode.
4359     insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));
4360   } else {
4361     insert_mem_bar(Op_MemBarCPUOrder);
4362   }
4363 }
4364 
4365 //------------------------inline_native_clone----------------------------
4366 // protected native Object java.lang.Object.clone();
4367 //
4368 // Here are the simple edge cases:
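
The base_off / countx arithmetic in the variant above can be checked by hand. The standalone sketch below is not from the patch: the offsets are hard-coded assumptions matching the commented layout (64-bit VM with compressed class pointers: mark word at offset 0, klass at 8, instance fields from 12), and the 24-byte object size is an arbitrary example.

#include <cstdio>

// Standalone sketch of the payload-offset and word-count arithmetic for the
// non-array branch. All offsets and the object size are illustrative
// assumptions, not values queried from a running VM.
int main() {
  const int BytesPerLong    = 8;
  const int LogBytesPerLong = 3;

  const int klass_offset      = 8;   // stand-in for instanceOopDesc::klass_offset_in_bytes()
  const int instance_base_off = 12;  // stand-in for instanceOopDesc::base_offset_in_bytes()

  int base_off = instance_base_off;
  if (base_off % BytesPerLong != 0) {
    // Not 8-byte aligned: start the copy at the aligned klass field instead,
    // mirroring the non-array branch above.
    base_off = klass_offset;
  }

  long size   = 24;                                    // assumed object size in bytes
  long countx = (size - base_off) >> LogBytesPerLong;  // 8-byte words to copy

  printf("copy %ld 8-byte words starting at offset %d\n", countx, base_off);
  return 0;  // prints: copy 2 8-byte words starting at offset 8
}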

