/*
 * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2012, 2015 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "memory/resourceArea.hpp"
#include "nativeInst_ppc.hpp"
#include "oops/compressedOops.inline.hpp"
#include "oops/oop.hpp"
#include "runtime/handles.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/ostream.hpp"
#ifdef COMPILER1
#include "c1/c1_Runtime1.hpp"
#endif

// We use an illtrap for marking a method as not_entrant or zombie iff !UseSIGTRAP
// Work around a C++ compiler bug which changes 'this'
bool NativeInstruction::is_sigill_zombie_not_entrant_at(address addr) {
  assert(!UseSIGTRAP, "precondition");
  if (*(int*)addr != 0 /*illtrap*/) return false;
  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (cb == NULL || !cb->is_nmethod()) return false;
  nmethod *nm = (nmethod *)cb;
  // This method is not_entrant or zombie iff the illtrap instruction is
  // located at the verified entry point.
  return nm->verified_entry_point() == addr;
}
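
// Illustrative picture of the pattern checked above (a sketch; what precedes
// and follows the entry point varies per nmethod):
//
//   nm->verified_entry_point():  .long 0x00000000   // illtrap word stored over
//                                                   // the first instruction
//   ...                                             // rest of the method body
//
// Executing the zero word raises SIGILL; the signal handler then calls this
// predicate to distinguish "method was marked not_entrant/zombie" from a
// genuine illegal instruction.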

#ifdef ASSERT
void NativeInstruction::verify() {
  // Make sure code pattern is actually an instruction address.
  address addr = addr_at(0);
  if (addr == 0 || ((intptr_t)addr & 3) != 0) {
    fatal("not an instruction address");
  }
}
#endif // ASSERT

// Extract call destination from a NativeCall. The call might use a trampoline stub.
address NativeCall::destination() const {
  address addr = (address)this;
  address destination = Assembler::bxx_destination(addr);

  // Do we use a trampoline stub for this call?
  // Trampoline stubs are located behind the main code.
  if (destination > addr) {
    // Filter out recursive method invocation (call to verified/unverified entry point).
    CodeBlob* cb = CodeCache::find_blob_unsafe(addr);   // Else we get assertion if nmethod is zombie.
    assert(cb && cb->is_nmethod(), "sanity");
    nmethod *nm = (nmethod *)cb;
    if (nm->stub_contains(destination) && is_NativeCallTrampolineStub_at(destination)) {
      // Yes we do, so get the destination from the trampoline stub.
      const address trampoline_stub_addr = destination;
      destination = NativeCallTrampolineStub_at(trampoline_stub_addr)->destination(nm);
    }
  }

  return destination;
}
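
// The two call shapes destination() distinguishes, sketched (illustrative;
// see the trampoline stub section at the end of this file for the stub):
//
//   near call:  bl <target>               // target encoded in the bl itself
//
//   far call:   bl <trampoline_stub>      // stub lies behind the main code,
//                                         // hence the 'destination > addr' check
//     trampoline_stub:
//               <load target from ctable> // patched constant, read back via
//               <branch via CTR>          // NativeCallTrampolineStub::destination()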

// Similar to replace_mt_safe, but just changes the destination. The
// important thing is that free-running threads are able to execute this
// call instruction at all times. Thus, the displacement field must be
// instruction-word-aligned.
//
// Used in the runtime linkage of calls; see class CompiledIC.
//
// Add parameter assert_lock to switch off assertion
// during code generation, where no patching lock is needed.
void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
  assert(!assert_lock ||
         (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()),
         "concurrent code patching");

  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  address addr_call = addr_at(0);
  assert(MacroAssembler::is_bl(*(int*)addr_call), "unexpected code at call-site");

  CodeBuffer cb(addr_call, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);

  // Patch the call.
  if (!ReoptimizeCallSequences || !a->is_within_range_of_b(dest, addr_call)) {
    address trampoline_stub_addr = get_trampoline();

    // We did not find a trampoline stub because the current codeblob
    // does not provide this information. The branch will be patched
    // later during a final fixup, when all necessary information is
    // available.
    if (trampoline_stub_addr == 0)
      return;

    // Patch the constant in the call's trampoline stub.
    NativeCallTrampolineStub_at(trampoline_stub_addr)->set_destination(dest);
    dest = trampoline_stub_addr;
  }

  OrderAccess::release();
  a->bl(dest);

  ICache::ppc64_flush_icache_bytes(addr_call, code_size);
}
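
// Sketch of the two patch paths taken above (illustrative only):
//
//   dest in range:      rewrite the bl in place  ->  bl <dest>
//
//   dest out of range:  1) store dest into the trampoline's ctable slot
//                       2) retarget the bl       ->  bl <trampoline_stub>
//
// Either way only one instruction-word-aligned word is stored, which PPC64
// performs as a single atomic access, so a free-running thread always
// executes either the complete old or the complete new call.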

address NativeCall::get_trampoline() {
  address call_addr = addr_at(0);

  CodeBlob *code = CodeCache::find_blob(call_addr);
  assert(code != NULL, "Could not find the containing code blob");

  // There are no relocations available when the code gets relocated
  // because of CodeBuffer expansion.
  if (code->relocation_size() == 0)
    return NULL;

  address bl_destination = Assembler::bxx_destination(call_addr);
  if (code->contains(bl_destination) &&
      is_NativeCallTrampolineStub_at(bl_destination))
    return bl_destination;

  // If the codeBlob is not an nmethod, this is because we get here from the
  // CodeBlob constructor, which is called within the nmethod constructor.
  return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code);
}

#ifdef ASSERT
void NativeCall::verify() {
  address addr = addr_at(0);

  if (!NativeCall::is_call_at(addr)) {
    tty->print_cr("not a NativeCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr - 20, addr + 20, tty);
    fatal("not a NativeCall at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT

#ifdef ASSERT
void NativeFarCall::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeFarCall::is_far_call_at(addr)) {
    tty->print_cr("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT

address NativeMovConstReg::next_instruction_address() const {
#ifdef ASSERT
  CodeBlob* nm = CodeCache::find_blob(instruction_address());
  assert(!MacroAssembler::is_set_narrow_oop(addr_at(0), nm->content_begin()), "Should not patch narrow oop here");
#endif

  if (MacroAssembler::is_load_const_from_method_toc_at(addr_at(0))) {
    return addr_at(load_const_from_method_toc_instruction_size);
  } else {
    return addr_at(load_const_instruction_size);
  }
}

intptr_t NativeMovConstReg::data() const {
  address   addr = addr_at(0);

  if (MacroAssembler::is_load_const_at(addr)) {
    return MacroAssembler::get_const(addr);
  }

  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) {
    narrowOop no = (narrowOop)MacroAssembler::get_narrow_oop(addr, cb->content_begin());
    return cast_from_oop<intptr_t>(CompressedOops::decode(no));
  } else {
    assert(MacroAssembler::is_load_const_from_method_toc_at(addr), "must be load_const_from_pool");

    address ctable = cb->content_begin();
    int offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    return *(intptr_t *)(ctable + offset);
  }
}
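
// The three encodings data() decodes, side by side (the mnemonics are an
// assumption for illustration; the is_* predicates above are authoritative):
//
//   load_const (5 instructions):     lis/ori/sldi/oris/ori  -> 64-bit immediate
//   set_narrow_oop (2 instructions): lis/ori-style pair     -> compressed oop,
//                                                              decoded to an oop
//   load_const_from_method_toc:      ld Rd, offset(Rtoc)    -> value fetched
//                                                              from the ctable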

address NativeMovConstReg::set_data_plain(intptr_t data, CodeBlob *cb) {
  address addr         = instruction_address();
  address next_address = NULL;
  if (!cb) cb = CodeCache::find_blob(addr);

  if (cb != NULL && MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    // A load from the method's TOC (ctable).
    assert(cb->is_nmethod(), "must be nmethod");
    const address ctable = cb->content_begin();
    const int toc_offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    *(intptr_t *)(ctable + toc_offset) = data;
    next_address = addr + BytesPerInstWord;
  } else if (cb != NULL &&
             MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) {
    // A calculation relative to the global TOC.
    if (MacroAssembler::get_address_of_calculate_address_from_global_toc_at(addr, cb->content_begin()) !=
        (address)data) {
      const address inst2_addr = addr;
      const address inst1_addr =
        MacroAssembler::patch_calculate_address_from_global_toc_at(inst2_addr, cb->content_begin(),
                                                                   (address)data);
      assert(inst1_addr != NULL && inst1_addr < inst2_addr, "first instruction must be found");
      const int range = inst2_addr - inst1_addr + BytesPerInstWord;
      ICache::ppc64_flush_icache_bytes(inst1_addr, range);
    }
    next_address = addr + 1 * BytesPerInstWord;
  } else if (MacroAssembler::is_load_const_at(addr)) {
    // A normal 5 instruction load_const code sequence.
    if (MacroAssembler::get_const(addr) != (long)data) {
      // This is not mt safe, ok in methods like CodeBuffer::copy_code().
      MacroAssembler::patch_const(addr, (long)data);
      ICache::ppc64_flush_icache_bytes(addr, load_const_instruction_size);
    }
    next_address = addr + 5 * BytesPerInstWord;
  } else if (MacroAssembler::is_bl(* (int*) addr)) {
    // A single branch-and-link instruction.
    ResourceMark rm;
    const int code_size = 1 * BytesPerInstWord;
    CodeBuffer buf(addr, code_size + 1);
    MacroAssembler* a = new MacroAssembler(&buf);
    a->bl((address) data);
    ICache::ppc64_flush_icache_bytes(addr, code_size);
    next_address = addr + code_size;
  } else {
    ShouldNotReachHere();
  }

  return next_address;
}
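
// set_data_plain() is deliberately not MT-safe in the load_const case (see
// the comment above); callers either run single-threaded, as in
// CodeBuffer::copy_code(), or go through set_data() below, which also keeps
// relocation cells in sync. A hypothetical usage sketch:
//
//   NativeMovConstReg* mov = nativeMovConstReg_at(pc);  // pc: start of sequence
//   mov->set_data((intptr_t)new_value);                 // patch code + reloc cells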

void NativeMovConstReg::set_data(intptr_t data) {
  // Store the value into the instruction stream.
  CodeBlob *cb = CodeCache::find_blob(instruction_address());
  address next_address = set_data_plain(data, cb);

  // Also store the value into an oop_Relocation cell, if any.
  if (cb && cb->is_nmethod()) {
    RelocIterator iter((nmethod *) cb, instruction_address(), next_address);
    oop* oop_addr = NULL;
    Metadata** metadata_addr = NULL;
    while (iter.next()) {
      if (iter.type() == relocInfo::oop_type) {
        oop_Relocation *r = iter.oop_reloc();
        if (oop_addr == NULL) {
          oop_addr = r->oop_addr();
          *oop_addr = cast_to_oop(data);
        } else {
          assert(oop_addr == r->oop_addr(), "must be only one set-oop here");
        }
      }
      if (iter.type() == relocInfo::metadata_type) {
        metadata_Relocation *r = iter.metadata_reloc();
        if (metadata_addr == NULL) {
          metadata_addr = r->metadata_addr();
          *metadata_addr = (Metadata*)data;
        } else {
          assert(metadata_addr == r->metadata_addr(), "must be only one set-metadata here");
        }
      }
    }
  }
}

void NativeMovConstReg::set_narrow_oop(narrowOop data, CodeBlob *code /* = NULL */) {
  address   inst2_addr = addr_at(0);
  CodeBlob* cb = (code) ? code : CodeCache::find_blob(instruction_address());
  if (MacroAssembler::get_narrow_oop(inst2_addr, cb->content_begin()) == (long)data)
    return;
  const address inst1_addr =
    MacroAssembler::patch_set_narrow_oop(inst2_addr, cb->content_begin(), (long)data);
  assert(inst1_addr != NULL && inst1_addr < inst2_addr, "first instruction must be found");
  const int range = inst2_addr - inst1_addr + BytesPerInstWord;
  ICache::ppc64_flush_icache_bytes(inst1_addr, range);
}
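
// The narrow oop is spread over a two-instruction sequence: inst1 is located
// by patch_set_narrow_oop() and both words are flushed together. A sketch of
// the assumed shape (the emitter counterpart of patch_set_narrow_oop() in
// MacroAssembler is authoritative):
//
//   inst1_addr:  lis  Rd, narrow_oop@high16     // upper halfword of the value
//   inst2_addr:  ori  Rd, Rd, narrow_oop@low16  // lower halfword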

// Do not use an assertion here. Let clients decide whether they only
// want this when assertions are enabled.
#ifdef ASSERT
void NativeMovConstReg::verify() {
  address   addr = addr_at(0);
  if (! MacroAssembler::is_load_const_at(addr) &&
      ! MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    CodeBlob* cb = CodeCache::find_blob_unsafe(addr);   // find_nmethod() asserts if nmethod is zombie.
    if (! (cb != NULL && MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) &&
        ! (cb != NULL && MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) &&
        ! MacroAssembler::is_bl(*((int*) addr))) {
      tty->print_cr("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
      // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
      fatal("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
    }
  }
}
#endif // ASSERT

void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  CodeBuffer cb(verified_entry, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
#ifdef COMPILER2
  assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");
#endif
  // Patch this nmethod atomically. Always use illtrap/trap in debug build.
  if (DEBUG_ONLY(false &&) a->is_within_range_of_b(dest, a->pc())) {
    a->b(dest);
  } else {
    // The signal handler will continue at dest=OptoRuntime::handle_wrong_method_stub().
    if (TrapBasedNotEntrantChecks) {
      // We use a special trap for marking a method as not_entrant or zombie.
      a->trap_zombie_not_entrant();
    } else {
      // We use an illtrap for marking a method as not_entrant or zombie.
      a->illtrap();
    }
  }
  ICache::ppc64_flush_icache_bytes(verified_entry, code_size);
}
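
// The three one-word patches emitted above, side by side (sketch; the trap
// encoding is whatever trap_zombie_not_entrant() emits):
//
//   dest in range (product):    b <handle_wrong_method stub>  // plain branch
//   TrapBasedNotEntrantChecks:  conditional-trap instruction  // handled via SIGTRAP
//   otherwise:                  .long 0 (illtrap)             // handled via SIGILL,
//                                                             // see is_sigill_zombie_not_entrant_at()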

#ifdef ASSERT
void NativeJump::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeJump::is_jump_at(addr)) {
    tty->print_cr("not a NativeJump at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal("not a NativeJump at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT


void NativeGeneralJump::insert_unconditional(address code_pos, address entry) {
  CodeBuffer cb(code_pos, BytesPerInstWord + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
  a->b(entry);
  ICache::ppc64_flush_icache_bytes(code_pos, NativeGeneralJump::instruction_size);
}

// MT-safe patching of a jmp instruction.
void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) {
  // Bytes beyond offset NativeGeneralJump::instruction_size are copied by caller.

  // Finally patch out the jump.
  volatile juint *jump_addr = (volatile juint*)instr_addr;
  // Release not needed because caller uses invalidate_range after copying the remaining bytes.
  //OrderAccess::release_store(jump_addr, *((juint*)code_buffer));
  *jump_addr = *((juint*)code_buffer); // atomically store code over branch instruction
  ICache::ppc64_flush_icache_bytes(instr_addr, NativeGeneralJump::instruction_size);
}
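
// Why the single store above suffices (sketch of the assumed caller protocol,
// cf. the comments inside the method; 'copy_bytes' stands in for whatever
// copy the caller performs):
//
//   copy_bytes(instr_addr + 4, code_buffer + 4, n - 4);          // 1) tail first
//   ICache::invalidate_range(instr_addr + 4, n - 4);             // 2) flush the tail
//   NativeGeneralJump::replace_mt_safe(instr_addr, code_buffer); // 3) word 0 last
//
// The jump occupies one 4-byte-aligned instruction word, and aligned word
// stores are atomic on PPC64, so a racing thread sees either the old branch
// or the new first instruction, never a torn mix.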


//-------------------------------------------------------------------

// Call trampoline stubs.
//
// Layout and instructions of a call trampoline stub:
//    0:  load the TOC (part 1)
//    4:  load the TOC (part 2)
//    8:  load the call target from the constant pool (part 1)
//  [12:  load the call target from the constant pool (part 2, optional)]
//   ..:  branch via CTR
//
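// A concrete instance of this layout (registers and mnemonics are
// illustrative assumptions; MacroAssembler::emit_trampoline_stub is the
// authoritative encoding):
//
//    0:  addis Rtoc, ...            // load the TOC (part 1)
//    4:  addi  Rtoc, Rtoc, ...      // load the TOC (part 2)
//    8:  ld    Rtgt, offset(Rtoc)   // load the call target from the ctable
//                                   // (one more word for the large-offset form)
//   ..:  mtctr Rtgt                 // move target into the count register
//        bctr                       // branch via CTR
//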

address NativeCallTrampolineStub::encoded_destination_addr() const {
  address instruction_addr = addr_at(0 * BytesPerInstWord);
  if (!MacroAssembler::is_ld_largeoffset(instruction_addr)) {
    instruction_addr = addr_at(2 * BytesPerInstWord);
    assert(MacroAssembler::is_ld_largeoffset(instruction_addr),
           "must be a ld with large offset (from the constant pool)");
  }
  return instruction_addr;
}

address NativeCallTrampolineStub::destination(nmethod *nm) const {
  CodeBlob* cb = nm ? nm : CodeCache::find_blob_unsafe(addr_at(0));
  address ctable = cb->content_begin();

  return *(address*)(ctable + destination_toc_offset());
}

int NativeCallTrampolineStub::destination_toc_offset() const {
  return MacroAssembler::get_ld_largeoffset_offset(encoded_destination_addr());
}

void NativeCallTrampolineStub::set_destination(address new_destination) {
  CodeBlob* cb = CodeCache::find_blob(addr_at(0));
  address ctable = cb->content_begin();

  *(address*)(ctable + destination_toc_offset()) = new_destination;
}
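
// Hypothetical retargeting sequence tying the accessors together; this
// mirrors what NativeCall::set_destination_mt_safe() does for out-of-range
// destinations:
//
//   NativeCall* call = nativeCall_at(call_pc);
//   address stub = call->get_trampoline();
//   if (stub != NULL) {
//     NativeCallTrampolineStub_at(stub)->set_destination(new_target);
//     // The bl already points at the stub, so only the ctable slot changes.
//   }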