1 /*
   2  * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
   4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   5  *
   6  * This code is free software; you can redistribute it and/or modify it
   7  * under the terms of the GNU General Public License version 2 only, as
   8  * published by the Free Software Foundation.
   9  *
  10  * This code is distributed in the hope that it will be useful, but WITHOUT
  11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  13  * version 2 for more details (a copy is included in the LICENSE file that
  14  * accompanied this code).
  15  *
  16  * You should have received a copy of the GNU General Public License version
  17  * 2 along with this work; if not, write to the Free Software Foundation,
  18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  19  *
  20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  21  * or visit www.oracle.com if you need additional information or have any
  22  * questions.
  23  *
  24  */
  25 
  26 #include "precompiled.hpp"
  27 #include "asm/macroAssembler.inline.hpp"
  28 #include "code/compiledIC.hpp"
  29 #include "code/icBuffer.hpp"
  30 #include "code/nmethod.hpp"
  31 #include "memory/resourceArea.hpp"
  32 #include "runtime/mutexLocker.hpp"
  33 #include "runtime/safepoint.hpp"
  34 
  35 // ----------------------------------------------------------------------------
  36 
#define __ _masm.
// Emit the out-of-line stub through which a static call reaches the
// interpreter.  The stub is placed in the stubs section of 'cbuf' and is
// patched later (method metadata + destination) when the call site is
// converted from calling compiled code to calling interpreted code.
// 'mark' is the address of the call instruction this stub belongs to; if
// NULL, the code buffer's current insts mark is used.  Returns the start
// address of the stub, or NULL if the code buffer could not be expanded.
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // mov rmethod, 0
  // jmp -4 # to self

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size());
  int offset = __ offset();
  if (base == NULL) {
    return NULL;  // CodeBuffer::expand failed
  }
  // static stub relocation stores the instruction address of the call
  __ relocate(static_stub_Relocation::spec(mark));

#if INCLUDE_AOT
  // Don't create a Metadata reloc if we're generating immutable PIC.
  if (cbuf.immutable_PIC()) {
    // Both constants are zapped (zero) here and patched at resolution
    // time: the method in rmethod, the branch target in rscratch1.
    __ movptr(rmethod, 0);
    __ movptr(rscratch1, 0);
    __ br(rscratch1);

  } else
#endif
  {
    __ emit_static_call_stub();
  }

  // Verify the emitted code fits the size promised by to_interp_stub_size().
  assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
  __ end_a_stub();
  return base;
}
#undef __
  78 
  79 int CompiledStaticCall::to_interp_stub_size() {
  80   // isb; movk; movz; movz; movk; movz; movz; br
  81   return 8 * NativeInstruction::instruction_size;
  82 }
  83 
  84 int CompiledStaticCall::to_trampoline_stub_size() {
  85   // Somewhat pessimistically, we count 3 instructions here (although
  86   // there are only two) because we sometimes emit an alignment nop.
  87   // Trampoline stubs are always word aligned.
  88   return 3 * NativeInstruction::instruction_size + wordSize;
  89 }
  90 
  91 // Relocation entries for call stub, compiled java to interpreter.
  92 int CompiledStaticCall::reloc_to_interp_stub() {
  93   return 4; // 3 in emit_to_interp_stub + 1 in emit_call
  94 }
  95 
  96 #if INCLUDE_AOT
#define __ _masm.
// Emit the out-of-line stub through which a static call reaches AOT code.
// The stub is fixed up when the call site is converted from calling
// compiled code to calling AOT code.  'mark' is the address of the call
// instruction this stub belongs to; if NULL, the code buffer's current
// insts mark is used.  No-op when AOT is disabled.
void CompiledStaticCall::emit_to_aot_stub(CodeBuffer &cbuf, address mark) {
  if (!UseAOT) {
    return;
  }
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling aot code.
  // mov r, imm64_aot_code_address
  // jmp r

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  // Unlike emit_to_interp_stub, failure to allocate stub space is fatal
  // here rather than being reported to the caller.
  address base =
  __ start_a_stub(to_aot_stub_size());
  guarantee(base != NULL, "out of space");

  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark, true /* is_aot */));
  // Load destination AOT code address.
  __ movptr(rscratch1, 0);  // address is zapped till fixup time.
  // This is recognized as unresolved by relocs/nativeinst/ic code.
  __ br(rscratch1);

  assert(__ pc() - base <= to_aot_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
}
#undef __
 132 
 133 int CompiledStaticCall::to_aot_stub_size() {
 134   if (UseAOT) {
 135     return 5 * 4;  // movz; movk; movk; movk; br
 136   } else {
 137     return 0;
 138   }
 139 }
 140 
 141 // Relocation entries for call stub, compiled java to aot.
 142 int CompiledStaticCall::reloc_to_aot_stub() {
 143   if (UseAOT) {
 144     return 5 * 4;  // movz; movk; movk; movk; br
 145   } else {
 146     return 0;
 147   }
 148 }
 149 #endif // INCLUDE_AOT
 150 
// Patch this static call's stub so it dispatches 'callee' through the
// interpreter at 'entry', then redirect the call instruction to the stub.
// The patching order (method data, then jump, then icache flush, then the
// call itself) is what makes the transition MT-safe: concurrent threads
// either take the old path or the fully-initialized new one.
void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub(false /* is_aot */);
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  // The method-holder load sits one instruction into the stub (after the
  // instruction covered by the static stub relocation).
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);

#ifdef ASSERT
  NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());

  // A generated lambda form might be deleted from the Lambdaform
  // cache in MethodTypeForm.  If a jit compiled lambdaform method
  // becomes not entrant and the cache access returns null, the new
  // resolve will lead to a new generated LambdaForm.
  Method* volatile old_method = reinterpret_cast<Method*>(method_holder->data());
  assert(old_method == NULL || old_method == callee() ||
         callee->is_compiled_lambda_form() ||
         !old_method->method_holder()->is_loader_alive() ||
         old_method->is_old(),  // may be race patching deoptimized nmethod due to redefinition.
         "a) MT-unsafe modification of inline cache");

  // -1 is the "zapped"/clean destination; anything else must already be
  // the entry we are about to install.
  volatile address destination = jump->jump_destination();
  assert(destination == (address)-1 || destination == entry,
         "b) MT-unsafe modification of inline cache");
#endif

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  NativeGeneralJump::insert_unconditional(method_holder->next_instruction_address(), entry);
  // Flush the patched stub range before any thread can execute it.
  ICache::invalidate_range(stub, to_interp_stub_size());
  // Update jump to call.
  set_destination_mt_safe(stub);
}
 192 
 193 void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
 194   // Reset stub.
 195   address stub = static_stub->addr();
 196   assert(stub != NULL, "stub not found");
 197   assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
 198   // Creation also verifies the object.
 199   NativeMovConstReg* method_holder
 200     = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
 201   method_holder->set_data(0);
 202 }
 203 
 204 //-----------------------------------------------------------------------------
 205 // Non-product mode code
 206 #ifndef PRODUCT
 207 
// Sanity-check this static call site and its stub; non-product builds only.
void CompiledDirectStaticCall::verify() {
  // Verify call.
  _call->verify();
  _call->verify_alignment();

  // Verify stub.
  address stub = find_stub(false /* is_aot */);
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
  // 'jump' is intentionally unused beyond construction: nativeJump_at
  // verifies the instruction as a side effect of creating the object.
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
 224 
 225 #endif // !PRODUCT