--- old/src/cpu/aarch64/vm/c1_LIRAssembler_aarch64.cpp	2014-12-09 13:49:46.403162394 -0500
+++ new/src/cpu/aarch64/vm/c1_LIRAssembler_aarch64.cpp	2014-12-09 13:49:46.053193390 -0500
@@ -297,7 +297,7 @@
   // Note: RECEIVER must still contain the receiver!
   Label dont;
   __ br(Assembler::EQ, dont);
-  __ b(RuntimeAddress(SharedRuntime::get_ic_miss_stub()));
+  __ far_jump(RuntimeAddress(SharedRuntime::get_ic_miss_stub()));
 
   // We align the verified entry point unless the method body
   // (including its inline cache check) will fit in a single 64-byte
@@ -344,7 +344,7 @@
   default: ShouldNotReachHere();
   }
 
-  __ bl(RuntimeAddress(target));
+  __ far_call(RuntimeAddress(target));
   add_call_info_here(info);
 }
 
@@ -390,8 +390,7 @@
   __ verify_not_null_oop(r0);
 
   // search an exception handler (r0: exception oop, r3: throwing pc)
-  __ bl(RuntimeAddress(Runtime1::entry_for(Runtime1::handle_exception_from_callee_id)));
-  __ should_not_reach_here();
+  __ far_call(RuntimeAddress(Runtime1::entry_for(Runtime1::handle_exception_from_callee_id)));  __ should_not_reach_here();
   guarantee(code_offset() - offset <= exception_handler_size, "overflow");
   __ end_a_stub();
 
@@ -446,7 +445,7 @@
   // remove the activation and dispatch to the unwind handler
   __ block_comment("remove_frame and dispatch to the unwind handler");
   __ remove_frame(initial_frame_size_in_bytes());
-  __ b(RuntimeAddress(Runtime1::entry_for(Runtime1::unwind_exception_id)));
+  __ far_jump(RuntimeAddress(Runtime1::entry_for(Runtime1::unwind_exception_id)));
 
   // Emit the slow path assembly
   if (stub != NULL) {
@@ -476,7 +475,7 @@
   int offset = code_offset();
 
   __ adr(lr, pc());
-  __ b(RuntimeAddress(SharedRuntime::deopt_blob()->unpack()));
+  __ far_jump(RuntimeAddress(SharedRuntime::deopt_blob()->unpack()));
   guarantee(code_offset() - offset <= deopt_handler_size, "overflow");
   __ end_a_stub();
 
@@ -954,7 +953,7 @@
   default: ShouldNotReachHere();
   }
 
-  __ bl(RuntimeAddress(target));
+  __ far_call(RuntimeAddress(target));
   add_call_info_here(info);
 }
 
@@ -1425,7 +1424,7 @@
     __ br(Assembler::EQ, *success_target);
 
     __ stp(klass_RInfo, k_RInfo, Address(__ pre(sp, -2 * wordSize)));
-    __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
+    __ far_call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
     __ ldr(klass_RInfo, Address(__ post(sp, 2 * wordSize)));
     // result is a boolean
     __ cbzw(klass_RInfo, *failure_target);
@@ -1436,7 +1435,7 @@
     __ check_klass_subtype_fast_path(klass_RInfo, k_RInfo, Rtmp1, success_target, failure_target, NULL);
     // call out-of-line instance of __ check_klass_subtype_slow_path(...):
     __ stp(klass_RInfo, k_RInfo, Address(__ pre(sp, -2 * wordSize)));
-    __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
+    __ far_call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
     __ ldp(k_RInfo, klass_RInfo, Address(__ post(sp, 2 * wordSize)));
     // result is a boolean
     __ cbz(k_RInfo, *failure_target);
@@ -1526,7 +1525,7 @@
     __ check_klass_subtype_fast_path(klass_RInfo, k_RInfo, Rtmp1, success_target, failure_target, NULL);
     // call out-of-line instance of __ check_klass_subtype_slow_path(...):
     __ stp(klass_RInfo, k_RInfo, Address(__ pre(sp, -2 * wordSize)));
-    __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
+    __ far_call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
     __ ldp(k_RInfo, klass_RInfo, Address(__ post(sp, 2 * wordSize)));
     // result is a boolean
     __ cbzw(k_RInfo, *failure_target);
@@ -2017,7 +2016,7 @@
 
 
 void LIR_Assembler::call(LIR_OpJavaCall* op, relocInfo::relocType rtype) {
-  __ bl(Address(op->addr(), rtype));
+  __ trampoline_call(Address(op->addr(), rtype));
   add_call_info(code_offset(), op->info());
 }
 
@@ -2046,7 +2045,8 @@
 
   __ relocate(static_stub_Relocation::spec(call_pc));
   __ mov_metadata(rmethod, (Metadata*)NULL);
-  __ b(__ pc());
+  __ movptr(rscratch1, 0);
+  __ br(rscratch1);
 
   assert(__ offset() - start <= call_stub_size, "stub too big");
   __ end_a_stub();
@@ -2076,7 +2076,7 @@
   } else {
     unwind_id = Runtime1::handle_exception_nofpu_id;
   }
-  __ bl(RuntimeAddress(Runtime1::entry_for(unwind_id)));
+  __ far_call(RuntimeAddress(Runtime1::entry_for(unwind_id)));
 
   // FIXME: enough room for two byte trap ????
   __ nop();
@@ -2239,7 +2239,7 @@
       __ incrementw(ExternalAddress((address)&Runtime1::_generic_arraycopystub_cnt));
     }
 #endif
-    __ bl(RuntimeAddress(copyfunc_addr));
+    __ far_call(RuntimeAddress(copyfunc_addr));
   }
 
   __ cbz(r0, *stub->continuation());
@@ -2352,7 +2352,7 @@
     __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, NULL);
 
     __ PUSH(src, dst);
-    __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
+    __ far_call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
     __ POP(src, dst);
 
     __ cbnz(src, cont);
@@ -2402,7 +2402,7 @@
       __ load_klass(c_rarg4, dst);
       __ ldr(c_rarg4, Address(c_rarg4, ObjArrayKlass::element_klass_offset()));
      __ ldrw(c_rarg3, Address(c_rarg4, Klass::super_check_offset_offset()));
-      __ call(RuntimeAddress(copyfunc_addr));
+      __ far_call(RuntimeAddress(copyfunc_addr));
 
 #ifndef PRODUCT
       if (PrintC1Statistics) {
@@ -2517,7 +2517,7 @@
 
   CodeBlob *cb = CodeCache::find_blob(entry);
   if (cb) {
-    __ bl(RuntimeAddress(entry));
+    __ far_call(RuntimeAddress(entry));
   } else {
     __ call_VM_leaf(entry, 3);
   }
@@ -2855,7 +2855,7 @@
 
   CodeBlob *cb = CodeCache::find_blob(dest);
  if (cb) {
-    __ bl(RuntimeAddress(dest));
+    __ far_call(RuntimeAddress(dest));
  } else {
    __ mov(rscratch1, RuntimeAddress(dest));
    int len = args->length();
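Background on the pattern applied throughout this patch: an AArch64 B/BL instruction encodes a 26-bit signed word offset, so it can only reach targets within roughly +/-128 MB of the branch site, which a runtime stub outside the code cache (or a large code cache) may exceed; far_call and far_jump instead materialize the destination in a register and branch through it when the short form cannot be guaranteed to reach. The following standalone C++ sketch illustrates only that near/far decision; it is not the HotSpot MacroAssembler implementation, and the Emitter type and its methods (bl, mov_address, blr) are hypothetical names used for illustration.

#include <cstdint>
#include <cstdio>

// Hypothetical emitter used only to illustrate the near/far branch decision.
struct Emitter {
  uint64_t pc;                                  // address the next instruction is emitted at
  void bl(int64_t offset)        { std::printf("bl   #%+lld\n", (long long)offset); }
  void mov_address(int reg, uint64_t target) {  // stands in for a MOVZ/MOVK-style sequence
    std::printf("mov  x%d, #0x%llx\n", reg, (unsigned long long)target);
  }
  void blr(int reg)              { std::printf("blr  x%d\n", reg); }
};

// Sketch of the far-call idea: emit a single BL when the target is provably
// within its reach, otherwise build the address in a scratch register and
// branch-and-link through that register.
static void far_call_sketch(Emitter& as, uint64_t target, int scratch_reg) {
  const int64_t offset = (int64_t)(target - as.pc);
  const int64_t reach  = 128LL * 1024 * 1024;   // BL reaches roughly +/-128 MB
  if (offset >= -reach && offset < reach) {
    as.bl(offset);                              // near call: one instruction
  } else {
    as.mov_address(scratch_reg, target);        // far call: materialize the address...
    as.blr(scratch_reg);                        // ...then call through the register
  }
}

int main() {
  Emitter as{0x1000};
  far_call_sketch(as, 0x2000, 9);               // in range  -> bl
  far_call_sketch(as, 0x400000000ULL, 9);       // out of range -> mov + blr
}

The far_jump substitutions follow the same reasoning without the link register, and trampoline_call addresses the same reach limit for Java call sites by keeping the call itself a BL while routing out-of-range targets through a nearby trampoline stub.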