src/share/vm/c1/c1_Runtime1.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File
*** old/src/share/vm/c1/c1_Runtime1.cpp	Thu Nov 25 07:06:11 2010
--- new/src/share/vm/c1/c1_Runtime1.cpp	Thu Nov 25 07:06:11 2010

*** 1172,1182 ****
--- 1172,1182 ----
      // Potential problem: memmove is not guaranteed to be word atomic
      // Revisit in Merlin
      memmove(dst_addr, src_addr, length << l2es);
      return ac_ok;
    } else if (src->is_objArray() && dst->is_objArray()) {
-     if (UseCompressedOops) { // will need for tiered
      narrowOop *src_addr = objArrayOop(src)->obj_at_addr<narrowOop>(src_pos);
      narrowOop *dst_addr = objArrayOop(dst)->obj_at_addr<narrowOop>(dst_pos);
      return obj_arraycopy_work(src, src_addr, dst, dst_addr, length);
    } else {
      oop *src_addr = objArrayOop(src)->obj_at_addr<oop>(src_pos);
*** 1208,1221 ****
--- 1208,1222 ----
    BarrierSet* bs = Universe::heap()->barrier_set();
    assert(bs->has_write_ref_array_opt(), "Barrier set must have ref array opt");
    assert(bs->has_write_ref_array_pre_opt(), "For pre-barrier as well.");
    if (UseCompressedOops) {
      bs->write_ref_array_pre((narrowOop*)dst, num);
+     Copy::conjoint_oops_atomic((narrowOop*) src, (narrowOop*) dst, num);
    } else {
      bs->write_ref_array_pre((oop*)dst, num);
    }
    Copy::conjoint_oops_atomic((oop*) src, (oop*) dst, num);
+   }
    bs->write_ref_array(dst, num);
  JRT_END

  #ifndef PRODUCT

src/share/vm/c1/c1_Runtime1.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File