1 /*
   2  * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
   4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   5  *
   6  * This code is free software; you can redistribute it and/or modify it
   7  * under the terms of the GNU General Public License version 2 only, as
   8  * published by the Free Software Foundation.
   9  *
  10  * This code is distributed in the hope that it will be useful, but WITHOUT
  11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  13  * version 2 for more details (a copy is included in the LICENSE file that
  14  * accompanied this code).
  15  *
  16  * You should have received a copy of the GNU General Public License version
  17  * 2 along with this work; if not, write to the Free Software Foundation,
  18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  19  *
  20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  21  * or visit www.oracle.com if you need additional information or have any
  22  * questions.
  23  *
  24  */
  25 
  26 #include "precompiled.hpp"
  27 #include "asm/macroAssembler.inline.hpp"
  28 #include "code/compiledIC.hpp"
  29 #include "code/icBuffer.hpp"
  30 #include "code/nmethod.hpp"
  31 #include "memory/resourceArea.hpp"
  32 #include "runtime/mutexLocker.hpp"
  33 #include "runtime/safepoint.hpp"
  34 
  35 // ----------------------------------------------------------------------------
  36 
  37 #define __ _masm.
// Emit the out-of-line stub a static call site jumps through while it is
// bound to the interpreter.  The stub loads rmethod and branches through
// rscratch1; both the Method* and the branch target are zapped to zero here
// and patched later (see set_to_interpreted() below).  Returns the stub's
// start address, or NULL if the code buffer could not be expanded.
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // mov rmethod, 0
  // jmp -4 # to self

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size());
  int offset = __ offset();
  if (base == NULL) {
    return NULL;  // CodeBuffer::expand failed
  }
  // static stub relocation stores the instruction address of the call
  __ relocate(static_stub_Relocation::spec(mark));

#if INCLUDE_AOT
  // Don't create a Metadata reloc if we're generating immutable PIC.
  if (cbuf.immutable_PIC()) {
    __ movptr(rmethod, 0);
  } else {
    __ mov_metadata(rmethod, (Metadata*)NULL);
  }
#else
  __ mov_metadata(rmethod, (Metadata*)NULL);
#endif
  // Branch target is zapped as well; fixed up when the call is resolved.
  __ movptr(rscratch1, 0);
  __ br(rscratch1);

  // The emitted code must fit in the size budget claimed by to_interp_stub_size().
  assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
  __ end_a_stub();
  return base;
}
  77 #undef __
  78 
  79 int CompiledStaticCall::to_interp_stub_size() {
  80   return 7 * NativeInstruction::instruction_size;
  81 }
  82 
  83 int CompiledStaticCall::to_trampoline_stub_size() {
  84   // Somewhat pessimistically, we count 3 instructions here (although
  85   // there are only two) because we sometimes emit an alignment nop.
  86   // Trampoline stubs are always word aligned.
  87   return 3 * NativeInstruction::instruction_size + wordSize;
  88 }
  89 
  90 // Relocation entries for call stub, compiled java to interpreter.
  91 int CompiledStaticCall::reloc_to_interp_stub() {
  92   return 4; // 3 in emit_to_interp_stub + 1 in emit_call
  93 }
  94 
  95 #if INCLUDE_AOT
  96 #define __ _masm.
// Emit the out-of-line stub a static call site jumps through while it is
// bound to AOT code.  The stub is a movptr (target zapped to zero until
// fixup) followed by an indirect branch; the zero constant is what the
// relocs/nativeinst/ic machinery recognizes as "unresolved".  No-op when
// AOT is disabled.
void CompiledStaticCall::emit_to_aot_stub(CodeBuffer &cbuf, address mark) {
  if (!UseAOT) {
    return;
  }
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling aot code.
  // mov r, imm64_aot_code_address
  // jmp r

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base =
  __ start_a_stub(to_aot_stub_size());
  // Unlike emit_to_interp_stub(), running out of stub space is fatal here.
  guarantee(base != NULL, "out of space");

  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark, true /* is_aot */));
  // Load destination AOT code address.
  __ movptr(rscratch1, 0);  // address is zapped till fixup time.
  // This is recognized as unresolved by relocs/nativeinst/ic code.
  __ br(rscratch1);

  assert(__ pc() - base <= to_aot_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
}
 130 #undef __
 131 
 132 int CompiledStaticCall::to_aot_stub_size() {
 133   if (UseAOT) {
 134     return 5 * 4;  // movz; movk; movk; movk; br
 135   } else {
 136     return 0;
 137   }
 138 }
 139 
// Relocation entries for call stub, compiled java to aot.
// NOTE(review): the returned value mirrors to_aot_stub_size() (5 instructions
// * 4 bytes).  As a count of relocation *entries* that looks like a
// conservative over-estimate carried over from the size function — confirm
// before tightening it.
int CompiledStaticCall::reloc_to_aot_stub() {
  if (UseAOT) {
    return 5 * 4;  // movz; movk; movk; movk; br
  } else {
    return 0;
  }
}
 148 #endif // INCLUDE_AOT
 149 
// Bind this static call to the interpreter: patch the stub created by
// emit_to_interp_stub() with the callee Method* and the given entry point,
// then redirect the call instruction to the stub.  The stub is fully
// patched and its icache range invalidated *before* the call is redirected,
// so concurrent executors never see a half-patched stub.
void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub(false /* is_aot */);
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
#ifndef PRODUCT
  NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());

  // read the value once
  volatile intptr_t data = method_holder->data();
  // A non-zero Method* that differs from the callee (or a jump target that
  // differs from entry) means someone else raced us patching this stub.
  assert(data == 0 || data == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(data == 0 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");
#endif
  // Update stub.
  method_holder->set_data((intptr_t)callee());
  NativeGeneralJump::insert_unconditional(method_holder->next_instruction_address(), entry);
  ICache::invalidate_range(stub, to_interp_stub_size());
  // Update jump to call.
  set_destination_mt_safe(stub);
}
 180 
 181 void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
 182   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
 183   // Reset stub.
 184   address stub = static_stub->addr();
 185   assert(stub != NULL, "stub not found");
 186   // Creation also verifies the object.
 187   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
 188   method_holder->set_data(0);
 189 }
 190 
 191 //-----------------------------------------------------------------------------
 192 // Non-product mode code
 193 #ifndef PRODUCT
 194 
// Non-product sanity check: verify the call instruction and its alignment,
// verify the shape of the to-interpreter stub, and check that the call is
// in one of its legal states.
void CompiledDirectStaticCall::verify() {
  // Verify call.
  _call->verify();
  _call->verify_alignment();

  // Verify stub.
  address stub = find_stub(false /* is_aot */);
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.  The 'jump' local is deliberately
  // unused beyond construction: nativeJump_at exists here purely for its
  // verification side effect.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
 210 
 211 #endif // !PRODUCT