/*
 * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2012, 2015 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "memory/resourceArea.hpp"
#include "nativeInst_ppc.hpp"
#include "oops/compressedOops.inline.hpp"
#include "oops/oop.hpp"
#include "runtime/handles.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/ostream.hpp"
#ifdef COMPILER1
#include "c1/c1_Runtime1.hpp"
#endif

// We use an illtrap for marking a method as not_entrant or zombie iff !UseSIGTRAP
// Work around a C++ compiler bug which changes 'this'
bool NativeInstruction::is_sigill_zombie_not_entrant_at(address addr) {
  assert(!UseSIGTRAP, "precondition");
  if (*(int*)addr != 0 /*illtrap*/) return false;
  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (cb == NULL || !cb->is_nmethod()) return false;
  nmethod *nm = (nmethod *)cb;
  // This method is not_entrant or zombie iff the illtrap instruction is
  // located at the verified entry point.
  return nm->verified_entry_point() == addr;
}

#ifdef ASSERT
void NativeInstruction::verify() {
  // Make sure code pattern is actually an instruction address.
  address addr = addr_at(0);
  if (addr == 0 || ((intptr_t)addr & 3) != 0) {
    fatal("not an instruction address");
  }
}
#endif // ASSERT

// Extract call destination from a NativeCall. The call might use a trampoline stub.
address NativeCall::destination() const {
  address addr = (address)this;
  address destination = Assembler::bxx_destination(addr);

  // Do we use a trampoline stub for this call?
  // Trampoline stubs are located behind the main code.
  if (destination > addr) {
    // Filter out recursive method invocation (call to verified/unverified entry point).
    CodeBlob* cb = CodeCache::find_blob_unsafe(addr);   // Else we get assertion if nmethod is zombie.
    assert(cb && cb->is_nmethod(), "sanity");
    nmethod *nm = (nmethod *)cb;
    if (nm->stub_contains(destination) && is_NativeCallTrampolineStub_at(destination)) {
      // Yes we do, so get the destination from the trampoline stub.
      const address trampoline_stub_addr = destination;
      destination = NativeCallTrampolineStub_at(trampoline_stub_addr)->destination(nm);
    }
  }

  return destination;
}
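
// Illustrative only (shapes and labels below are assumptions, not literal
// assembler output): a call whose target is out of direct 'bl' range is
// routed through a trampoline stub located behind the main code:
//
//   bl <trampoline>                  // at the call site
//   ...
//   <trampoline>:                    // in the stub section
//     load real target from ctable   // 8-byte slot holding the destination
//     mtctr / bctr                   // indirect branch to it
//
// destination() follows exactly this indirection: if the 'bl' target lies in
// the stub section and decodes as a trampoline stub, it returns the address
// stored in the stub's ctable slot rather than the 'bl' target itself.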

// Similar to replace_mt_safe, but just changes the destination. The
// important thing is that free-running threads are able to execute this
// call instruction at all times. Thus, the displacement field must be
// instruction-word-aligned.
//
// Used in the runtime linkage of calls; see class CompiledIC.
//
// Add parameter assert_lock to switch off assertion
// during code generation, where no patching lock is needed.
void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
  assert(!assert_lock ||
         (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()),
         "concurrent code patching");

  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  address addr_call = addr_at(0);
  assert(MacroAssembler::is_bl(*(int*)addr_call), "unexpected code at call-site");

  CodeBuffer cb(addr_call, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);

  // Patch the call.
  if (!ReoptimizeCallSequences || !a->is_within_range_of_b(dest, addr_call)) {
    address trampoline_stub_addr = get_trampoline();

    // We did not find a trampoline stub because the current codeblob
    // does not provide this information. The branch will be patched
    // later during a final fixup, when all necessary information is
    // available.
    if (trampoline_stub_addr == 0)
      return;

    // Patch the constant in the call's trampoline stub.
    NativeCallTrampolineStub_at(trampoline_stub_addr)->set_destination(dest);
    dest = trampoline_stub_addr;
  }

  OrderAccess::release();
  a->bl(dest);

  ICache::ppc64_flush_icache_bytes(addr_call, code_size);
}
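
// Note on the MT-safety claim above (an observation, not literal assembler
// output): the patch only ever rewrites the single 'bl' instruction word,
// toggling the call site between
//   bl <dest>          // direct, when dest is within bl's +/-32 MB reach
//   bl <trampoline>    // otherwise; the trampoline's ctable slot is set first
// Because instruction words are 4-byte aligned, the store is atomic, and the
// preceding OrderAccess::release() orders the ctable-slot update before it,
// so free-running threads see either the old or the new call, never a blend.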

address NativeCall::get_trampoline() {
  address call_addr = addr_at(0);

  CodeBlob *code = CodeCache::find_blob(call_addr);
  assert(code != NULL, "Could not find the containing code blob");

  // There are no relocations available when the code gets relocated
  // because of CodeBuffer expansion.
  if (code->relocation_size() == 0)
    return NULL;

  address bl_destination = Assembler::bxx_destination(call_addr);
  if (code->contains(bl_destination) &&
      is_NativeCallTrampolineStub_at(bl_destination))
    return bl_destination;

  // If the codeBlob is not a nmethod, this is because we get here from the
  // CodeBlob constructor, which is called within the nmethod constructor.
  return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code);
}

#ifdef ASSERT
void NativeCall::verify() {
  address addr = addr_at(0);

  if (!NativeCall::is_call_at(addr)) {
    tty->print_cr("not a NativeCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr - 20, addr + 20, tty);
    fatal("not a NativeCall at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT

#ifdef ASSERT
void NativeFarCall::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeFarCall::is_far_call_at(addr)) {
    tty->print_cr("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT

address NativeMovConstReg::next_instruction_address() const {
#ifdef ASSERT
  CodeBlob* nm = CodeCache::find_blob(instruction_address());
  assert(!MacroAssembler::is_set_narrow_oop(addr_at(0), nm->content_begin()), "Should not patch narrow oop here");
#endif

  if (MacroAssembler::is_load_const_from_method_toc_at(addr_at(0))) {
    return addr_at(load_const_from_method_toc_instruction_size);
  } else {
    return addr_at(load_const_instruction_size);
  }
}

intptr_t NativeMovConstReg::data() const {
  address   addr = addr_at(0);

  if (MacroAssembler::is_load_const_at(addr)) {
    return MacroAssembler::get_const(addr);
  }

  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) {
    narrowOop no = (narrowOop)MacroAssembler::get_narrow_oop(addr, cb->content_begin());
    return cast_from_oop<intptr_t>(CompressedOops::decode(no));
  } else {
    assert(MacroAssembler::is_load_const_from_method_toc_at(addr), "must be load_const_from_pool");

    address ctable = cb->content_begin();
    int offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    return *(intptr_t *)(ctable + offset);
  }
}

address NativeMovConstReg::set_data_plain(intptr_t data, CodeBlob *cb) {
  address addr         = instruction_address();
  address next_address = NULL;
  if (!cb) cb = CodeCache::find_blob(addr);

  if (cb != NULL && MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    // A load from the method's TOC (ctable).
    assert(cb->is_nmethod(), "must be nmethod");
    const address ctable = cb->content_begin();
    const int toc_offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    *(intptr_t *)(ctable + toc_offset) = data;
    next_address = addr + BytesPerInstWord;
  } else if (cb != NULL &&
             MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) {
    // A calculation relative to the global TOC.
    if (MacroAssembler::get_address_of_calculate_address_from_global_toc_at(addr, cb->content_begin()) !=
        (address)data) {
      const address inst2_addr = addr;
      const address inst1_addr =
        MacroAssembler::patch_calculate_address_from_global_toc_at(inst2_addr, cb->content_begin(),
                                                                   (address)data);
      assert(inst1_addr != NULL && inst1_addr < inst2_addr, "first instruction must be found");
      const int range = inst2_addr - inst1_addr + BytesPerInstWord;
      ICache::ppc64_flush_icache_bytes(inst1_addr, range);
    }
    next_address = addr + 1 * BytesPerInstWord;
  } else if (MacroAssembler::is_load_const_at(addr)) {
    // A normal 5 instruction load_const code sequence.
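    // Illustrative shape of such a sequence (the register is an assumption;
    // the exact encoding comes from MacroAssembler::load_const):
    //   lis  r3, c(63:48)
    //   ori  r3, r3, c(47:32)
    //   sldi r3, r3, 32
    //   oris r3, r3, c(31:16)
    //   ori  r3, r3, c(15:0)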
    if (MacroAssembler::get_const(addr) != (long)data) {
      // This is not mt safe, ok in methods like CodeBuffer::copy_code().
      MacroAssembler::patch_const(addr, (long)data);
      ICache::ppc64_flush_icache_bytes(addr, load_const_instruction_size);
    }
    next_address = addr + 5 * BytesPerInstWord;
  } else if (MacroAssembler::is_bl(* (int*) addr)) {
    // A single branch-and-link instruction.
    ResourceMark rm;
    const int code_size = 1 * BytesPerInstWord;
    CodeBuffer cb(addr, code_size + 1);
    MacroAssembler* a = new MacroAssembler(&cb);
    a->bl((address) data);
    ICache::ppc64_flush_icache_bytes(addr, code_size);
    next_address = addr + code_size;
  } else {
    ShouldNotReachHere();
  }

  return next_address;
}

void NativeMovConstReg::set_data(intptr_t data) {
  // Store the value into the instruction stream.
  CodeBlob *cb = CodeCache::find_blob(instruction_address());
  address next_address = set_data_plain(data, cb);

  // Also store the value into an oop_Relocation cell, if any.
  if (cb && cb->is_nmethod()) {
    RelocIterator iter((nmethod *) cb, instruction_address(), next_address);
    oop* oop_addr = NULL;
    Metadata** metadata_addr = NULL;
    while (iter.next()) {
      if (iter.type() == relocInfo::oop_type) {
        oop_Relocation *r = iter.oop_reloc();
        if (oop_addr == NULL) {
          oop_addr = r->oop_addr();
          *oop_addr = cast_to_oop(data);
        } else {
          assert(oop_addr == r->oop_addr(), "must be only one set-oop here");
        }
      }
      if (iter.type() == relocInfo::metadata_type) {
        metadata_Relocation *r = iter.metadata_reloc();
        if (metadata_addr == NULL) {
          metadata_addr = r->metadata_addr();
          *metadata_addr = (Metadata*)data;
        } else {
          assert(metadata_addr == r->metadata_addr(), "must be only one set-metadata here");
        }
      }
    }
  }
}

void NativeMovConstReg::set_narrow_oop(narrowOop data, CodeBlob *code /* = NULL */) {
  address   inst2_addr = addr_at(0);
  CodeBlob* cb = (code) ? code : CodeCache::find_blob(instruction_address());
  if (MacroAssembler::get_narrow_oop(inst2_addr, cb->content_begin()) == (long)data)
    return;
  const address inst1_addr =
    MacroAssembler::patch_set_narrow_oop(inst2_addr, cb->content_begin(), (long)data);
  assert(inst1_addr != NULL && inst1_addr < inst2_addr, "first instruction must be found");
  const int range = inst2_addr - inst1_addr + BytesPerInstWord;
  ICache::ppc64_flush_icache_bytes(inst1_addr, range);
}

// Do not use an assertion here. Let clients decide whether they only
// want this when assertions are enabled.
#ifdef ASSERT
void NativeMovConstReg::verify() {
  address   addr = addr_at(0);
  if (! MacroAssembler::is_load_const_at(addr) &&
      ! MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    CodeBlob* cb = CodeCache::find_blob_unsafe(addr);   // find_nmethod() asserts if nmethod is zombie.
    if (! (cb != NULL && MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) &&
        ! (cb != NULL && MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) &&
        ! MacroAssembler::is_bl(*((int*) addr))) {
      tty->print_cr("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
      // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
      fatal("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
    }
  }
}
#endif // ASSERT

void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  CodeBuffer cb(verified_entry, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
#ifdef COMPILER2
  assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");
#endif
  // Patch this nmethod atomically. Always use illtrap/trap in debug build.
  if (DEBUG_ONLY(false &&) a->is_within_range_of_b(dest, a->pc())) {
    a->b(dest);
  } else {
    // The signal handler will continue at dest=OptoRuntime::handle_wrong_method_stub().
    if (TrapBasedNotEntrantChecks) {
      // We use a special trap for marking a method as not_entrant or zombie.
      a->trap_zombie_not_entrant();
    } else {
      // We use an illtrap for marking a method as not_entrant or zombie.
      a->illtrap();
    }
  }
  ICache::ppc64_flush_icache_bytes(verified_entry, code_size);
}

#ifdef ASSERT
void NativeJump::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeJump::is_jump_at(addr)) {
    tty->print_cr("not a NativeJump at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal("not a NativeJump at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT


void NativeGeneralJump::insert_unconditional(address code_pos, address entry) {
  CodeBuffer cb(code_pos, BytesPerInstWord + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
  a->b(entry);
  ICache::ppc64_flush_icache_bytes(code_pos, NativeGeneralJump::instruction_size);
}

// MT-safe patching of a jmp instruction.
void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) {
  // Bytes beyond offset NativeGeneralJump::instruction_size are copied by caller.

  // Finally patch out the jump.
  volatile juint *jump_addr = (volatile juint*)instr_addr;
  // Release not needed because caller uses invalidate_range after copying the remaining bytes.
  //OrderAccess::release_store(jump_addr, *((juint*)code_buffer));
  *jump_addr = *((juint*)code_buffer); // atomically store code over branch instruction
  ICache::ppc64_flush_icache_bytes(instr_addr, NativeGeneralJump::instruction_size);
}
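
// Note (an observation about this code, not a documented contract): the jump
// occupies a single 4-byte-aligned instruction word, so the plain store above
// is atomic and racing threads execute either the old or the new instruction.
// A sketch of the assumed caller protocol:
//   1. copy the bytes beyond the first instruction word from code_buffer,
//   2. invalidate_range over them (hence no release barrier is needed here),
//   3. call replace_mt_safe() to switch the first word atomically.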


//-------------------------------------------------------------------

// Call trampoline stubs.
//
// Layout and instructions of a call trampoline stub:
//    0:  load the TOC (part 1)
//    4:  load the TOC (part 2)
//    8:  load the call target from the constant pool (part 1)
//  [12:  load the call target from the constant pool (part 2, optional)]
//   ..:  branch via CTR
//
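// An illustrative instance (registers and offsets are assumptions; the real
// sequence is emitted by MacroAssembler):
//    0:  addis r11, r2, ctable@ha       // load the TOC (part 1)
//    4:  addi  r11, r11, ctable@l       // load the TOC (part 2)
//    8:  ld    r12, toc_offset(r11)     // load the call target from the pool
//   ..:  mtctr r12
//   ..:  bctr                           // branch via CTR
//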

address NativeCallTrampolineStub::encoded_destination_addr() const {
  address instruction_addr = addr_at(0 * BytesPerInstWord);
  if (!MacroAssembler::is_ld_largeoffset(instruction_addr)) {
    instruction_addr = addr_at(2 * BytesPerInstWord);
    assert(MacroAssembler::is_ld_largeoffset(instruction_addr),
           "must be a ld with large offset (from the constant pool)");
  }
  return instruction_addr;
}

address NativeCallTrampolineStub::destination(nmethod *nm) const {
  CodeBlob* cb = nm ? nm : CodeCache::find_blob_unsafe(addr_at(0));
  address ctable = cb->content_begin();

  return *(address*)(ctable + destination_toc_offset());
}

int NativeCallTrampolineStub::destination_toc_offset() const {
  return MacroAssembler::get_ld_largeoffset_offset(encoded_destination_addr());
}

void NativeCallTrampolineStub::set_destination(address new_destination) {
  CodeBlob* cb = CodeCache::find_blob(addr_at(0));
  address ctable = cb->content_begin();

  *(address*)(ctable + destination_toc_offset()) = new_destination;
}
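
// Note: set_destination() rewrites only the naturally aligned 8-byte ctable
// slot that the stub's 'ld' reads at run time. No instruction bytes change,
// so no icache flush is required, and the aligned 64-bit store is atomic for
// concurrently executing threads (an observation, not a documented contract).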