
src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp

remove c1 runtime1 medium slowpath

*** 2172,2183 ****
    // have to fall back to the JNI stub
    __ stp(dst, dst_pos, Address(sp, 0*BytesPerWord));
    __ stp(length, src_pos, Address(sp, 2*BytesPerWord));
    __ str(src, Address(sp, 4*BytesPerWord));
-   address C_entry = CAST_FROM_FN_PTR(address, Runtime1::arraycopy);
    address copyfunc_addr = StubRoutines::generic_arraycopy();
    // The arguments are in java calling convention so we shift them
    // to C convention
    assert_different_registers(c_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4);
    __ mov(c_rarg0, j_rarg0);
--- 2172,2183 ----
    // have to fall back to the JNI stub
    __ stp(dst, dst_pos, Address(sp, 0*BytesPerWord));
    __ stp(length, src_pos, Address(sp, 2*BytesPerWord));
    __ str(src, Address(sp, 4*BytesPerWord));
    address copyfunc_addr = StubRoutines::generic_arraycopy();
+   assert(copyfunc_addr != NULL, "generic arraycopy stub required");
    // The arguments are in java calling convention so we shift them
    // to C convention
    assert_different_registers(c_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4);
    __ mov(c_rarg0, j_rarg0);
***************
*** 2186,2223 ****
    assert_different_registers(c_rarg2, j_rarg3, j_rarg4);
    __ mov(c_rarg2, j_rarg2);
    assert_different_registers(c_rarg3, j_rarg4);
    __ mov(c_rarg3, j_rarg3);
    __ mov(c_rarg4, j_rarg4);
-   if (copyfunc_addr == NULL) { // Use C version if stub was not generated
-     __ mov(rscratch1, RuntimeAddress(C_entry));
-     __ blrt(rscratch1, 5, 0, 1);
-   } else {
  #ifndef PRODUCT
!     if (PrintC1Statistics) {
!       __ incrementw(ExternalAddress((address)&Runtime1::_generic_arraycopystub_cnt));
!     }
! #endif
!     __ far_call(RuntimeAddress(copyfunc_addr));
    }
    __ cbz(r0, *stub->continuation());
    // Reload values from the stack so they are where the stub
    // expects them.
    __ ldp(dst, dst_pos, Address(sp, 0*BytesPerWord));
    __ ldp(length, src_pos, Address(sp, 2*BytesPerWord));
    __ ldr(src, Address(sp, 4*BytesPerWord));
!   if (copyfunc_addr != NULL) {
!     // r0 is -1^K where K == partial copied count
!     __ eonw(rscratch1, r0, 0);
!     // adjust length down and src/end pos up by partial copied count
!     __ subw(length, length, rscratch1);
!     __ addw(src_pos, src_pos, rscratch1);
!     __ addw(dst_pos, dst_pos, rscratch1);
!   }
    __ b(*stub->entry());
    __ bind(*stub->continuation());
    return;
  }
--- 2186,2216 ----
    assert_different_registers(c_rarg2, j_rarg3, j_rarg4);
    __ mov(c_rarg2, j_rarg2);
    assert_different_registers(c_rarg3, j_rarg4);
    __ mov(c_rarg3, j_rarg3);
    __ mov(c_rarg4, j_rarg4);
  #ifndef PRODUCT
!   if (PrintC1Statistics) {
!     __ incrementw(ExternalAddress((address)&Runtime1::_generic_arraycopystub_cnt));
    }
+ #endif
+   __ far_call(RuntimeAddress(copyfunc_addr));
    __ cbz(r0, *stub->continuation());
    // Reload values from the stack so they are where the stub
    // expects them.
    __ ldp(dst, dst_pos, Address(sp, 0*BytesPerWord));
    __ ldp(length, src_pos, Address(sp, 2*BytesPerWord));
    __ ldr(src, Address(sp, 4*BytesPerWord));
!   // r0 is -1^K where K == partial copied count
!   __ eonw(rscratch1, r0, 0);
!   // adjust length down and src/end pos up by partial copied count
!   __ subw(length, length, rscratch1);
!   __ addw(src_pos, src_pos, rscratch1);
!   __ addw(dst_pos, dst_pos, rscratch1);
    __ b(*stub->entry());
    __ bind(*stub->continuation());
    return;
  }
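Note on the adjustment at the end of the second hunk: per the comments in the generated code, the generic arraycopy stub returns 0 in r0 when the whole copy succeeded, and otherwise -1^K (the bitwise NOT of K), where K is the number of elements copied before the stub gave up. The eonw with a zero operand recovers K, and length/src_pos/dst_pos are then moved past the already-copied prefix before branching to the slow-path stub. Below is a minimal C++ sketch of that arithmetic only; the decode_partial_count helper and the concrete numbers are made up for illustration and are not HotSpot code.

  #include <cstdint>
  #include <cassert>

  // Hypothetical helper (illustration only): recover K from the stub's
  // return value, mirroring the bitwise NOT done by eonw in the hunk above.
  static int32_t decode_partial_count(int32_t stub_result) {
    return ~stub_result;  // the stub is assumed to return ~K on a partial copy
  }

  int main() {
    int32_t length = 100, src_pos = 0, dst_pos = 0;

    int32_t stub_result = ~25;  // pretend the stub copied 25 elements, then stopped
    if (stub_result != 0) {     // 0 would mean the copy completed
      int32_t k = decode_partial_count(stub_result);
      length  -= k;             // fewer elements remain to copy
      src_pos += k;             // both positions advance past the copied prefix
      dst_pos += k;
    }
    assert(length == 75 && src_pos == 25 && dst_pos == 25);
    return 0;
  }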