src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp

rev 49736 : 8185505: AArch64: Port AOT to AArch64
Reviewed-by: duke
   1 /*
   2  * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
   3  * Copyright (c) 2014, Red Hat Inc. All rights reserved.
   4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   5  *
   6  * This code is free software; you can redistribute it and/or modify it
   7  * under the terms of the GNU General Public License version 2 only, as
   8  * published by the Free Software Foundation.
   9  *
  10  * This code is distributed in the hope that it will be useful, but WITHOUT
  11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  13  * version 2 for more details (a copy is included in the LICENSE file that
  14  * accompanied this code).
  15  *
  16  * You should have received a copy of the GNU General Public License version
  17  * 2 along with this work; if not, write to the Free Software Foundation,
  18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  19  *
  20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  21  * or visit www.oracle.com if you need additional information or have any
  22  * questions.
  23  *
  24  */
  25 
  26 #include "precompiled.hpp"
  27 #include "asm/macroAssembler.hpp"
  28 #include "memory/resourceArea.hpp"
  29 #include "nativeInst_aarch64.hpp"
  30 #include "oops/oop.inline.hpp"
  31 #include "runtime/handles.hpp"
  32 #include "runtime/sharedRuntime.hpp"
  33 #include "runtime/stubRoutines.hpp"
  34 #include "utilities/ostream.hpp"
  35 #ifdef COMPILER1
  36 #include "c1/c1_Runtime1.hpp"
  37 #endif
  38 
  39 void NativeCall::verify() { ; }
  40 
  41 address NativeCall::destination() const {
  42   address addr = (address)this;
  43   address destination = instruction_address() + displacement();
  44 
  45   // Do we use a trampoline stub for this call?
  46   CodeBlob* cb = CodeCache::find_blob_unsafe(addr);   // Else we would get an assertion if the nmethod is a zombie.
  47   assert(cb && cb->is_nmethod(), "sanity");
  48   nmethod *nm = (nmethod *)cb;
  49   if (nm->stub_contains(destination) && is_NativeCallTrampolineStub_at(destination)) {
  50     // Yes we do, so get the destination from the trampoline stub.
  51     const address trampoline_stub_addr = destination;
  52     destination = nativeCallTrampolineStub_at(trampoline_stub_addr)->destination();
  53   }
  54 
  55   return destination;
  56 }
  57 
  58 // Similar to replace_mt_safe, but just changes the destination. The
  59 // important thing is that free-running threads are able to execute this
  60 // call instruction at all times.
  61 //
  62 // Used in the runtime linkage of calls; see class CompiledIC.
  63 //
  64 // Add parameter assert_lock to switch off assertion
  65 // during code generation, where no patching lock is needed.
  66 void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
  67   assert(!assert_lock ||
  68          (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()),
  69          "concurrent code patching");
  70 
  71   ResourceMark rm;
  72   int code_size = NativeInstruction::instruction_size;
  73   address addr_call = addr_at(0);
  74   assert(NativeCall::is_call_at(addr_call), "unexpected code at call site");
  75 
  76   // Patch the constant in the call's trampoline stub.
  77   address trampoline_stub_addr = get_trampoline();
  78   if (trampoline_stub_addr != NULL) {
  79     assert (! is_NativeCallTrampolineStub_at(dest), "chained trampolines");
  80     nativeCallTrampolineStub_at(trampoline_stub_addr)->set_destination(dest);
  81   }
  82 
  83   // Patch the call.
  84   if (Assembler::reachable_from_branch_at(addr_call, dest)) {
  85     set_destination(dest);
  86   } else {
  87     assert (trampoline_stub_addr != NULL, "we need a trampoline");
  88     set_destination(trampoline_stub_addr);
  89   }
  90 
  91   ICache::invalidate_range(addr_call, instruction_size);
  92 }
  93 
  94 address NativeCall::get_trampoline() {
  95   address call_addr = addr_at(0);
  96 
  97   CodeBlob *code = CodeCache::find_blob(call_addr);
  98   assert(code != NULL, "Could not find the containing code blob");
  99 
 100   address bl_destination
 101     = MacroAssembler::pd_call_destination(call_addr);
 102   if (code->contains(bl_destination) &&
 103       is_NativeCallTrampolineStub_at(bl_destination))
 104     return bl_destination;
 105 
 106   // If the codeBlob is not an nmethod, it is because we got here from the
 107   // CodeBlob constructor, which is called within the nmethod constructor.
 108   return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code);
 109 }
 110 
 111 // Inserts a native call instruction at a given pc
 112 void NativeCall::insert(address code_pos, address entry) { Unimplemented(); }
 113 
 114 //-------------------------------------------------------------------
 115 
 116 void NativeMovConstReg::verify() {
 117   // make sure code pattern is actually mov reg64, imm64 instructions
 118 }
 119 
 120 
 121 intptr_t NativeMovConstReg::data() const {
 122   // das(uint64_t(instruction_address()),2);
 123   address addr = MacroAssembler::target_addr_for_insn(instruction_address());
 124   if (maybe_cpool_ref(instruction_address())) {
 125     return *(intptr_t*)addr;
 126   } else {
 127     return (intptr_t)addr;
 128   }


 303 bool NativeInstruction::is_movk() {
 304   return Instruction_aarch64::extract(int_at(0), 30, 23) == 0b11100101;
 305 }
 306 
 307 bool NativeInstruction::is_sigill_zombie_not_entrant() {
 308   return uint_at(0) == 0xd4bbd5a1; // dcps1 #0xdead
 309 }
 310 
 311 void NativeIllegalInstruction::insert(address code_pos) {
 312   *(juint*)code_pos = 0xd4bbd5a1; // dcps1 #0xdead
 313 }
 314 
 315 //-------------------------------------------------------------------
 316 
 317 // MT-safe inserting of a jump over a jump or a nop (used by
 318 // nmethod::make_not_entrant_or_zombie)
 319 
 320 void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
 321 
 322   assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");
 323   assert(nativeInstruction_at(verified_entry)->is_jump_or_nop()
 324          || nativeInstruction_at(verified_entry)->is_sigill_zombie_not_entrant(),
 325          "Aarch64 cannot replace non-jump with jump");
 326 
 327   // Patch this nmethod atomically.
 328   if (Assembler::reachable_from_branch_at(verified_entry, dest)) {
 329     ptrdiff_t disp = dest - verified_entry;
 330     guarantee(disp < 1 << 27 && disp > - (1 << 27), "branch overflow");
 331 
 332     unsigned int insn = (0b000101 << 26) | ((disp >> 2) & 0x3ffffff);
 333     *(unsigned int*)verified_entry = insn;
 334   } else {
 335     // We use an illegal instruction for marking a method as
 336     // not_entrant or zombie.
 337     NativeIllegalInstruction::insert(verified_entry);
 338   }
 339 
 340   ICache::invalidate_range(verified_entry, instruction_size);
 341 }
 342 
 343 void NativeGeneralJump::verify() {  }
 344 
 345 void NativeGeneralJump::insert_unconditional(address code_pos, address entry) {

src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp: rev 49736 (new version; old version above)

   1 /*
   2  * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
   3  * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
   4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   5  *
   6  * This code is free software; you can redistribute it and/or modify it
   7  * under the terms of the GNU General Public License version 2 only, as
   8  * published by the Free Software Foundation.
   9  *
  10  * This code is distributed in the hope that it will be useful, but WITHOUT
  11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  13  * version 2 for more details (a copy is included in the LICENSE file that
  14  * accompanied this code).
  15  *
  16  * You should have received a copy of the GNU General Public License version
  17  * 2 along with this work; if not, write to the Free Software Foundation,
  18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  19  *
  20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  21  * or visit www.oracle.com if you need additional information or have any
  22  * questions.
  23  *
  24  */
  25 
  26 #include "precompiled.hpp"
  27 #include "asm/macroAssembler.hpp"
  28 #include "memory/resourceArea.hpp"
  29 #include "nativeInst_aarch64.hpp"
  30 #include "oops/oop.inline.hpp"
  31 #include "runtime/handles.hpp"
  32 #include "runtime/sharedRuntime.hpp"
  33 #include "runtime/stubRoutines.hpp"
  34 #include "utilities/ostream.hpp"
  35 #ifdef COMPILER1
  36 #include "c1/c1_Runtime1.hpp"
  37 #endif
  38 
  39 void NativeCall::verify() {
  40   assert(NativeCall::is_call_at((address)this), "unexpected code at call site");
  41 }
  42 
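      // Flush a freshly patched word from the instruction cache so every core
      // refetches the new encoding before executing it.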
  43 void NativeInstruction::wrote(int offset) {
  44   ICache::invalidate_word(addr_at(offset));
  45 }
  46 
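      // NativeLoadGot: an adrp-based instruction sequence that loads a 64-bit
      // value from a GOT slot; used by AOT-compiled code.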
  47 void NativeLoadGot::report_and_fail() const {
  48   tty->print_cr("Addr: " INTPTR_FORMAT, p2i(instruction_address()));
  49   fatal("not an adrp-based GOT load");
  50 }
  51 
  52 void NativeLoadGot::verify() const {
  53   assert(is_adrp_at((address)this), "must be adrp");
  54 }
  55 
  56 address NativeLoadGot::got_address() const {
  57   return MacroAssembler::target_addr_for_insn((address)this);
  58 }
  59 
  60 intptr_t NativeLoadGot::data() const {
  61   return *(intptr_t *) got_address();
  62 }
  63 
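      // Assumed layout of an AOT PLT entry, inferred from the accessors below:
      //   virtual call:  [GOT load][GOT jump][resolver stub]
      //   static call:   [GOT jump][c2i stub: GOT load + GOT jump][resolver stub]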
  64 address NativePltCall::destination() const {
  65   NativeGotJump* jump = nativeGotJump_at(plt_jump());
  66   return *(address*)MacroAssembler::target_addr_for_insn((address)jump);
  67 }
  68 
  69 address NativePltCall::plt_entry() const {
  70   return MacroAssembler::target_addr_for_insn((address)this);
  71 }
  72 
  73 address NativePltCall::plt_jump() const {
  74   address entry = plt_entry();
  75   // Virtual PLT code has a move instruction first
  76   if (((NativeGotJump*)entry)->is_GotJump()) {
  77     return entry;
  78   } else {
  79     return nativeLoadGot_at(entry)->next_instruction_address();
  80   }
  81 }
  82 
  83 address NativePltCall::plt_load_got() const {
  84   address entry = plt_entry();
  85   if (!((NativeGotJump*)entry)->is_GotJump()) {
  86     // Virtual PLT code has a move instruction first
  87     return entry;
  88   } else {
  89     // Static PLT code has the move instruction second (it is part of the c2i stub)
  90     return nativeGotJump_at(entry)->next_instruction_address();
  91   }
  92 }
  93 
  94 address NativePltCall::plt_c2i_stub() const {
  95   address entry = plt_load_got();
  96   // This method should be called only for static calls, which have a C2I stub.
  97   NativeLoadGot* load = nativeLoadGot_at(entry);  // the cast helper verifies the load pattern in debug builds
  98   return entry;
  99 }
 100 
 101 address NativePltCall::plt_resolve_call() const {
 102   NativeGotJump* jump = nativeGotJump_at(plt_jump());
 103   address entry = jump->next_instruction_address();
 104   if (((NativeGotJump*)entry)->is_GotJump()) {
 105     return entry;
 106   } else {
 107     // the c2i stub is two instructions (GOT load + GOT jump); skip both
 108     entry = nativeLoadGot_at(entry)->next_instruction_address();
 109     return nativeGotJump_at(entry)->next_instruction_address();
 110   }
 111 }
 112 
 113 void NativePltCall::reset_to_plt_resolve_call() {
 114   set_destination_mt_safe(plt_resolve_call());
 115 }
 116 
 117 void NativePltCall::set_destination_mt_safe(address dest) {
 118   // We are rewriting the value in the GOT; the slot is 8-byte aligned, so the store is atomic.
 119   NativeGotJump* jump = nativeGotJump_at(plt_jump());
 120   address* got = (address *) jump->got_address();
 121   *got = dest;
 122 }
 123 
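      // Reset the c2i stub to its unresolved state: zero the value the method
      // loader reads and point the jump at an obviously invalid target.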
 124 void NativePltCall::set_stub_to_clean() {
 125   NativeLoadGot* method_loader = nativeLoadGot_at(plt_c2i_stub());
 126   NativeGotJump* jump          = nativeGotJump_at(method_loader->next_instruction_address());
 127   method_loader->set_data(0);
 128   jump->set_jump_destination((address)-1);
 129 }
 130 
 131 void NativePltCall::verify() const {
 132   assert(NativeCall::is_call_at((address)this), "unexpected code at call site");
 133 }
 134 
 135 address NativeGotJump::got_address() const {
 136   return MacroAssembler::target_addr_for_insn((address)this);
 137 }
 138 
 139 address NativeGotJump::destination() const {
 140   address *got_entry = (address *) got_address();
 141   return *got_entry;
 142 }
 143 
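      // Recognize a GOT-jump sequence by its final instruction, a "br x16"
      // expected at byte offset 3 * instruction_size (the fourth word).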
 144 bool NativeGotJump::is_GotJump() const {
 145   NativeInstruction *insn =
 146     nativeInstruction_at(addr_at(3 * NativeInstruction::instruction_size));
 147   return insn->encoding() == 0xd61f0200; // br x16
 148 }
 149 
 150 void NativeGotJump::verify() const {
 151   assert(is_adrp_at((address)this), "must be adrp");
 152 }
 153 
 154 address NativeCall::destination() const {
 155   address addr = (address)this;
 156   address destination = instruction_address() + displacement();
 157 
 158   // Do we use a trampoline stub for this call?
 159   CodeBlob* cb = CodeCache::find_blob_unsafe(addr);   // Else we would get an assertion if the nmethod is a zombie.
 160   assert(cb && cb->is_nmethod(), "sanity");
 161   nmethod *nm = (nmethod *)cb;
 162   if (nm->stub_contains(destination) && is_NativeCallTrampolineStub_at(destination)) {
 163     // Yes we do, so get the destination from the trampoline stub.
 164     const address trampoline_stub_addr = destination;
 165     destination = nativeCallTrampolineStub_at(trampoline_stub_addr)->destination();
 166   }
 167 
 168   return destination;
 169 }
 170 
 171 // Similar to replace_mt_safe, but just changes the destination. The
 172 // important thing is that free-running threads are able to execute this
 173 // call instruction at all times.
 174 //
 175 // Used in the runtime linkage of calls; see class CompiledIC.
 176 //
 177 // Add parameter assert_lock to switch off assertion
 178 // during code generation, where no patching lock is needed.
 179 void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
 180   assert(!assert_lock ||
 181          (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()),
 182          "concurrent code patching");
 183 
 184   ResourceMark rm;
 185   int code_size = NativeInstruction::instruction_size;
 186   address addr_call = addr_at(0);
 187   bool reachable = Assembler::reachable_from_branch_at(addr_call, dest);
 188   assert(NativeCall::is_call_at(addr_call), "unexpected code at call site");
 189 
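        // Retarget the trampoline before the branch itself: a thread whose bl
        // already routes through the trampoline must never load a stale target.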
 190   // Patch the constant in the call's trampoline stub.
 191   address trampoline_stub_addr = get_trampoline();
 192   if (trampoline_stub_addr != NULL) {
 193     assert (! is_NativeCallTrampolineStub_at(dest), "chained trampolines");
 194     nativeCallTrampolineStub_at(trampoline_stub_addr)->set_destination(dest);
 195   }
 196 
 197   // Patch the call.
 198   if (reachable) {
 199     set_destination(dest);
 200   } else {
 201     assert (trampoline_stub_addr != NULL, "we need a trampoline");
 202     set_destination(trampoline_stub_addr);
 203   }
 204 
 205   ICache::invalidate_range(addr_call, instruction_size);
 206 }
 207 
 208 address NativeCall::get_trampoline() {
 209   address call_addr = addr_at(0);
 210 
 211   CodeBlob *code = CodeCache::find_blob(call_addr);
 212   assert(code != NULL, "Could not find the containing code blob");
 213 
 214   address bl_destination
 215     = MacroAssembler::pd_call_destination(call_addr);
 216   if (code->contains(bl_destination) &&
 217       is_NativeCallTrampolineStub_at(bl_destination))
 218     return bl_destination;
 219 
 220   if (code->is_nmethod()) {
 221     return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code);
 222   }
 223 
 224   return NULL;
 225 }
 226 
 227 // Inserts a native call instruction at a given pc
 228 void NativeCall::insert(address code_pos, address entry) { Unimplemented(); }
 229 
 230 //-------------------------------------------------------------------
 231 
 232 void NativeMovConstReg::verify() {
 233   // make sure code pattern is actually mov reg64, imm64 instructions
 234 }
 235 
 236 
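      // The 64-bit constant is either read from the constant-pool slot the
      // instruction references or materialized directly in the code stream;
      // maybe_cpool_ref distinguishes the two patterns.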
 237 intptr_t NativeMovConstReg::data() const {
 238   // das(uint64_t(instruction_address()),2);
 239   address addr = MacroAssembler::target_addr_for_insn(instruction_address());
 240   if (maybe_cpool_ref(instruction_address())) {
 241     return *(intptr_t*)addr;
 242   } else {
 243     return (intptr_t)addr;
 244   }


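      // MOVK: bits 30..23 carry the fixed opcode pattern 0b11100101
      // (opc = 11, then 100101); sf, hw, imm16 and Rd occupy the other bits.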
 419 bool NativeInstruction::is_movk() {
 420   return Instruction_aarch64::extract(int_at(0), 30, 23) == 0b11100101;
 421 }
 422 
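      // dcps1 traps when executed from user mode, raising SIGILL; the 0xdead
      // immediate lets the signal handler recognize the not_entrant/zombie marker.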
 423 bool NativeInstruction::is_sigill_zombie_not_entrant() {
 424   return uint_at(0) == 0xd4bbd5a1; // dcps1 #0xdead
 425 }
 426 
 427 void NativeIllegalInstruction::insert(address code_pos) {
 428   *(juint*)code_pos = 0xd4bbd5a1; // dcps1 #0xdead
 429 }
 430 
 431 //-------------------------------------------------------------------
 432 
 433 // MT-safe inserting of a jump over a jump or a nop (used by
 434 // nmethod::make_not_entrant_or_zombie)
 435 
 436 void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
 437 
 438   assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");
 439 
 440 #ifdef ASSERT
 441   // This may be the temporary nmethod generated while we're AOT
 442   // compiling.  Such an nmethod doesn't begin with a NOP but with an ADRP.
 443   if (! (CalculateClassFingerprint && UseAOT && is_adrp_at(verified_entry))) {
 444     assert(nativeInstruction_at(verified_entry)->is_jump_or_nop()
 445            || nativeInstruction_at(verified_entry)->is_sigill_zombie_not_entrant(),
 446            "Aarch64 cannot replace non-jump with jump");
 447   }
 448 #endif
 449 
 450   // Patch this nmethod atomically.
 451   if (Assembler::reachable_from_branch_at(verified_entry, dest)) {
 452     ptrdiff_t disp = dest - verified_entry;
 453     guarantee(disp < 1 << 27 && disp > - (1 << 27), "branch overflow");
 454 
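          // Hand-assemble "b dest": opcode 0b000101 in bits 31..26 and
          // imm26 = word offset (disp >> 2), masked to 26 bits.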
 455     unsigned int insn = (0b000101 << 26) | ((disp >> 2) & 0x3ffffff);
 456     *(unsigned int*)verified_entry = insn;
 457   } else {
 458     // We use an illegal instruction for marking a method as
 459     // not_entrant or zombie.
 460     NativeIllegalInstruction::insert(verified_entry);
 461   }
 462 
 463   ICache::invalidate_range(verified_entry, instruction_size);
 464 }
 465 
 466 void NativeGeneralJump::verify() {  }
 467 
 468 void NativeGeneralJump::insert_unconditional(address code_pos, address entry) {