
src/hotspot/cpu/arm/c1_LIRAssembler_arm.cpp

remove c1 runtime1 medium slowpath
     // pass length argument on SP[0]
     __ str(length, Address(SP, -2*wordSize, pre_indexed));  // 2 words for a proper stack alignment
 #endif // AARCH64
 
     address copyfunc_addr = StubRoutines::generic_arraycopy();
-    if (copyfunc_addr == NULL) { // Use C version if stub was not generated
-      __ call(CAST_FROM_FN_PTR(address, Runtime1::arraycopy));
-    } else {
+    assert(copyfunc_addr != NULL, "generic arraycopy stub required");
 #ifndef PRODUCT
-      if (PrintC1Statistics) {
-        __ inc_counter((address)&Runtime1::_generic_arraycopystub_cnt, tmp, tmp2);
-      }
-#endif // !PRODUCT
-      // the stub is in the code cache so close enough
-      __ call(copyfunc_addr, relocInfo::runtime_call_type);
+    if (PrintC1Statistics) {
+      __ inc_counter((address)&Runtime1::_generic_arraycopystub_cnt, tmp, tmp2);
     }
+#endif // !PRODUCT
+    // the stub is in the code cache so close enough
+    __ call(copyfunc_addr, relocInfo::runtime_call_type);
 
 #ifdef AARCH64
     __ raw_pop(length, ZR);
 #else
     __ add(SP, SP, 2*wordSize);
 #endif // AARCH64
 
     __ cbz_32(R0, *stub->continuation());
 
-    if (copyfunc_addr != NULL) {
-      __ mvn_32(tmp, R0);
-      restore_from_reserved_area(R0, R1, R2, R3);  // load saved arguments in slow case only
-      __ sub_32(length, length, tmp);
-      __ add_32(src_pos, src_pos, tmp);
-      __ add_32(dst_pos, dst_pos, tmp);
-    } else {
-      restore_from_reserved_area(R0, R1, R2, R3);  // load saved arguments in slow case only
-    }
+    __ mvn_32(tmp, R0);
+    restore_from_reserved_area(R0, R1, R2, R3);  // load saved arguments in slow case only
+    __ sub_32(length, length, tmp);
+    __ add_32(src_pos, src_pos, tmp);
+    __ add_32(dst_pos, dst_pos, tmp);
 
     __ b(*stub->entry());
 
     __ bind(*stub->continuation());
     return;
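
For readers of the fixup sequence above: a minimal C++ sketch of the same arithmetic, assuming the generic arraycopy stub's usual convention of returning 0 on success or the bitwise complement of the number of elements already copied on a partial copy. The helper name and plain int parameters are hypothetical, for illustration only; the real code works on registers.

    // Hypothetical helper, not part of the patch: mirrors the assembly fixup.
    static void fixup_partial_arraycopy(int ret, int& src_pos, int& dst_pos, int& length) {
      if (ret == 0) return;     // cbz_32(R0, *stub->continuation()): everything was copied
      int copied = ~ret;        // mvn_32(tmp, R0): recover the element count already copied
      length  -= copied;        // sub_32(length, length, tmp)
      src_pos += copied;        // add_32(src_pos, src_pos, tmp)
      dst_pos += copied;        // add_32(dst_pos, dst_pos, tmp)
      // the remaining range is handed to the C1 slow-path stub (b(*stub->entry()))
    }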
    