src/hotspot/cpu/arm/relocInfo_arm.cpp
*** old/src/hotspot/cpu/arm/relocInfo_arm.cpp	Mon Sep 17 10:30:33 2018
--- new/src/hotspot/cpu/arm/relocInfo_arm.cpp	Mon Sep 17 10:30:32 2018

***************
*** 33,57 ****
  #include "runtime/safepoint.hpp"
  
  void Relocation::pd_set_data_value(address x, intptr_t o, bool verify_only) {
  
    NativeMovConstReg* ni = nativeMovConstReg_at(addr());
- #if defined(AARCH64) && defined(COMPILER2)
-   if (ni->is_movz()) {
-     assert(type() == relocInfo::oop_type, "!");
-     if (verify_only) {
-       uintptr_t d = ni->data();
-       guarantee((d >> 32) == 0, "not narrow oop");
-       narrowOop no = d;
-       oop o = CompressedOops::decode(no);
-       guarantee(cast_from_oop<intptr_t>(o) == (intptr_t)x, "instructions must match");
-     } else {
-       ni->set_data((intptr_t)x);
-     }
-     return;
-   }
- #endif
    if (verify_only) {
      guarantee(ni->data() == (intptr_t)(x + o), "instructions must match");
    } else {
      ni->set_data((intptr_t)(x + o));
    }
--- 33,42 ----
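With the AArch64-and-COMPILER2 movz path deleted, pd_set_data_value is left with just the generic arm32 logic. For reference, a sketch of the function as it reads after this hunk, reassembled from the context lines above (not itself part of the webrev):

    void Relocation::pd_set_data_value(address x, intptr_t o, bool verify_only) {
      NativeMovConstReg* ni = nativeMovConstReg_at(addr());
      if (verify_only) {
        // Verification pass: the constant already encoded in the
        // instruction(s) must match the relocated value x + o.
        guarantee(ni->data() == (intptr_t)(x + o), "instructions must match");
      } else {
        // Patch the instruction(s) materializing the constant.
        ni->set_data((intptr_t)(x + o));
      }
    }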
***************
*** 67,91 ****
      adj = orig_addr - pc;
    }
  
    RawNativeInstruction* ni = rawNativeInstruction_at(pc);
  
- #if (!defined(AARCH64))
!   if (NOT_AARCH64(ni->is_add_lr()) AARCH64_ONLY(ni->is_adr_aligned_lr())) {
!     // On arm32, skip the optional 'add LR, PC, #offset'
      // (Allowing the jump support code to handle fat_call)
      pc = ni->next_raw_instruction_address();
      ni = nativeInstruction_at(pc);
    }
- #endif
  
!   if (AARCH64_ONLY(ni->is_call()) NOT_AARCH64(ni->is_bl())) {
!     // For arm32, fat_call are handled by is_jump for the new 'ni',
      // requiring only to support is_bl.
-     //
-     // For AARCH64, skipping a leading adr is not sufficient
-     // to reduce calls to a simple bl.
      return rawNativeCall_at(pc)->destination(adj);
    }
  
    if (ni->is_jump()) {
      return rawNativeJump_at(pc)->jump_destination(adj);
--- 52,71 ----
      adj = orig_addr - pc;
    }
  
    RawNativeInstruction* ni = rawNativeInstruction_at(pc);
  
!   if (ni->is_add_lr()) {
!     // Skip the optional 'add LR, PC, #offset'
      // (Allowing the jump support code to handle fat_call)
      pc = ni->next_raw_instruction_address();
      ni = nativeInstruction_at(pc);
    }
  
!   if (ni->is_bl()) {
!     // Fat_call are handled by is_jump for the new 'ni',
      // requiring only to support is_bl.
      return rawNativeCall_at(pc)->destination(adj);
    }
  
    if (ni->is_jump()) {
      return rawNativeJump_at(pc)->jump_destination(adj);
***************
*** 96,120 ****
  
  void Relocation::pd_set_call_destination(address x) {
    address pc = addr();
    NativeInstruction* ni = nativeInstruction_at(pc);
  
- #if (!defined(AARCH64))
!   if (NOT_AARCH64(ni->is_add_lr()) AARCH64_ONLY(ni->is_adr_aligned_lr())) {
!     // On arm32, skip the optional 'add LR, PC, #offset'
      // (Allowing the jump support code to handle fat_call)
      pc = ni->next_raw_instruction_address();
      ni = nativeInstruction_at(pc);
    }
- #endif
  
!   if (AARCH64_ONLY(ni->is_call()) NOT_AARCH64(ni->is_bl())) {
!     // For arm32, fat_call are handled by is_jump for the new 'ni',
      // requiring only to support is_bl.
-     //
-     // For AARCH64, skipping a leading adr is not sufficient
-     // to reduce calls to a simple bl.
      rawNativeCall_at(pc)->set_destination(x);
      return;
    }
  
    if (ni->is_jump()) { // raw jump
--- 76,95 ----
  
  void Relocation::pd_set_call_destination(address x) {
    address pc = addr();
    NativeInstruction* ni = nativeInstruction_at(pc);
  
!   if (ni->is_add_lr()) {
!     // Skip the optional 'add LR, PC, #offset'
      // (Allowing the jump support code to handle fat_call)
      pc = ni->next_raw_instruction_address();
      ni = nativeInstruction_at(pc);
    }
  
!   if (ni->is_bl()) {
!     // Fat_call are handled by is_jump for the new 'ni',
      // requiring only to support is_bl.
      rawNativeCall_at(pc)->set_destination(x);
      return;
    }
  
    if (ni->is_jump()) { // raw jump
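The second and third hunks make the same simplification in pd_call_destination and pd_set_call_destination: the #if (!defined(AARCH64)) guard and the AARCH64_ONLY/NOT_AARCH64 selectors go away, leaving the plain arm32 sequence of skipping an optional 'add LR, PC, #offset' before handling a direct bl. For reference, a sketch of pd_set_call_destination as it reads after the change, reassembled from the new-side lines above (the unchanged is_jump tail is elided here, as it is in the webrev):

    void Relocation::pd_set_call_destination(address x) {
      address pc = addr();
      NativeInstruction* ni = nativeInstruction_at(pc);

      if (ni->is_add_lr()) {
        // Skip the optional 'add LR, PC, #offset'
        // (Allowing the jump support code to handle fat_call)
        pc = ni->next_raw_instruction_address();
        ni = nativeInstruction_at(pc);
      }

      if (ni->is_bl()) {
        // Fat_call are handled by is_jump for the new 'ni',
        // requiring only to support is_bl.
        rawNativeCall_at(pc)->set_destination(x);
        return;
      }

      if (ni->is_jump()) { // raw jump
        // ... unchanged jump handling, not shown in this hunk
      }
    }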
***************
*** 136,154 ****
  void poll_Relocation::fix_relocation_after_move(const CodeBuffer* src, CodeBuffer* dest) {
  }
  
  void metadata_Relocation::pd_fix_value(address x) {
    assert(! addr_in_const(), "Do not use");
- #ifdef AARCH64
- #ifdef COMPILER2
-   NativeMovConstReg* ni = nativeMovConstReg_at(addr());
-   if (ni->is_mov_slow()) {
-     return;
-   }
- #endif
-   set_value(x);
- #else
    if (!VM_Version::supports_movw()) {
      set_value(x);
  #ifdef ASSERT
    } else {
      // the movw/movt data should be correct
--- 111,120 ----
***************
*** 163,169 ****
    // correct and the table is copied shortly afterward.
    //
    // assert(ni->data() == (int)x, "metadata relocation mismatch");
  #endif
    }
- #endif // !AARCH64
  }
--- 129,134 ----
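The last two hunks strip the #ifdef AARCH64 branch and its matching closing #endif from metadata_Relocation::pd_fix_value, leaving only the arm32 movw/movt logic. A sketch of the resulting shape, reassembled from the context lines above (the unchanged body of the ASSERT branch sits between the two hunks and is not shown in this webrev, so it is elided):

    void metadata_Relocation::pd_fix_value(address x) {
      assert(! addr_in_const(), "Do not use");
      if (!VM_Version::supports_movw()) {
        set_value(x);
    #ifdef ASSERT
      } else {
        // the movw/movt data should be correct
        // ... unchanged assertion code, not shown in this webrev ...
        // assert(ni->data() == (int)x, "metadata relocation mismatch");
    #endif
      }
    }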
