src/share/vm/opto/library_call.cpp

rev 3688 : 7054512: Compress class pointers after perm gen removal
Summary: Support compressed class pointers in the compilers.
Reviewed-by:


4364     // expansion, we know this AllocateNode is initialized by an array
4365     // copy and a StoreStore barrier exists after the array copy.
4366     alloc->initialization()->set_complete_with_arraycopy();
4367   }
4368 
4369   // Copy the fastest available way.
4370   // TODO: generate fields copies for small objects instead.
4371   Node* src  = obj;
4372   Node* dest = alloc_obj;
4373   Node* size = _gvn.transform(obj_size);
4374 
4375   // Exclude the header but include array length to copy by 8 bytes words.
4376   // Can't use base_offset_in_bytes(bt) since basic type is unknown.
4377   int base_off = is_array ? arrayOopDesc::length_offset_in_bytes() :
4378                             instanceOopDesc::base_offset_in_bytes();
4379   // base_off:
4380   // 8  - 32-bit VM
4381   // 12 - 64-bit VM, compressed klass
4382   // 16 - 64-bit VM, normal klass
4383   if (base_off % BytesPerLong != 0) {
- 4384     assert(UseCompressedOops, "");
+ 4384     assert(UseCompressedKlassPointers, "");
4385     if (is_array) {
4386       // Exclude length to copy by 8 bytes words.
4387       base_off += sizeof(int);
4388     } else {
4389       // Include klass to copy by 8 bytes words.
4390       base_off = instanceOopDesc::klass_offset_in_bytes();
4391     }
4392     assert(base_off % BytesPerLong == 0, "expect 8 bytes alignment");
4393   }
4394   src  = basic_plus_adr(src,  base_off);
4395   dest = basic_plus_adr(dest, base_off);
4396 
4397   // Compute the length also, if needed:
4398   Node* countx = size;
4399   countx = _gvn.transform( new (C) SubXNode(countx, MakeConX(base_off)) );
4400   countx = _gvn.transform( new (C) URShiftXNode(countx, intcon(LogBytesPerLong) ));
4401 
4402   const TypePtr* raw_adr_type = TypeRawPtr::BOTTOM;
4403   bool disjoint_bases = true;
4404   generate_unchecked_arraycopy(raw_adr_type, T_LONG, disjoint_bases,
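
For reference, a minimal standalone sketch (plain C++, not VM code) of the
base_off logic in the hunk above, modeling only the 64-bit case. The
hard-coded offsets and the helper name compute_base_off are illustrative
stand-ins for the real arrayOopDesc/instanceOopDesc queries, whose values
depend on the VM build:

  #include <cassert>

  const int BytesPerLong = 8;

  int compute_base_off(bool is_array, bool compressed_klass) {
    // 64-bit header layout (see the comment block in the hunk above):
    // mark word at bytes 0..7, klass word at offset 8 (4 bytes when
    // compressed, 8 bytes otherwise).
    int klass_offset  = 8;                            // instanceOopDesc::klass_offset_in_bytes()
    int first_field   = compressed_klass ? 12 : 16;   // instanceOopDesc::base_offset_in_bytes()
    int length_offset = compressed_klass ? 12 : 16;   // arrayOopDesc::length_offset_in_bytes()

    int base_off = is_array ? length_offset : first_field;
    if (base_off % BytesPerLong != 0) {  // only the compressed-klass layout gets here
      if (is_array) {
        base_off += sizeof(int);  // skip the 4-byte length; it is initialized separately
      } else {
        base_off = klass_offset;  // back up to the klass word; the clone's destination
                                  // already holds the same klass, so re-copying is harmless
      }
    }
    assert(base_off % BytesPerLong == 0 && "expect 8 bytes alignment");
    return base_off;
  }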




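The SubXNode/URShiftXNode pair above computes, at the IR level, the number
of 8-byte words between base_off and the end of the object. A plain-C++
restatement of that arithmetic (copy_word_count is a hypothetical name, not
a VM function):

  #include <cstddef>

  const int LogBytesPerLong = 3;  // log2(BytesPerLong)

  size_t copy_word_count(size_t obj_size_in_bytes, int base_off) {
    // Mirrors: countx = (size - base_off) >> LogBytesPerLong.
    // Heap object sizes are 8-byte aligned, so once base_off is also
    // 8-byte aligned the shift drops no bytes.
    return (obj_size_in_bytes - base_off) >> LogBytesPerLong;
  }

For example, a 24-byte instance with base_off == 8 gives (24 - 8) >> 3 == 2
words, i.e. the 16 payload bytes after the header.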