1 /*
   2  * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.inline.hpp"
  27 #include "code/compiledIC.hpp"
  28 #include "code/icBuffer.hpp"
  29 #include "code/nativeInst.hpp"
  30 #include "code/nmethod.hpp"
  31 #include "memory/resourceArea.hpp"
  32 #include "runtime/mutexLocker.hpp"
  33 #include "runtime/safepoint.hpp"
  34 
  35 // ----------------------------------------------------------------------------
  36 #if COMPILER2_OR_JVMCI
  37 #define __ _masm.
// Emit the call stub for a compiled-Java-to-interpreter static call.
// Returns the stub's start address, or NULL if the code buffer could not
// be expanded.  Layout: a single ldr-literal that loads the callee
// Method* into Rmethod (literal starts out NULL), followed by a jump
// whose destination starts out as the "unset" sentinel -1.  Both are
// patched later by CompiledDirectStaticCall::set_to_interpreted().
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
  // Stub is fixed up when the corresponding call is converted from calling
  // compiled code to calling interpreted code.
  // set (empty), R9
  // b -1

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // get mark within main instrs section
  }

  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size());
  if (base == NULL) {
    return NULL;  // CodeBuffer::expand failed
  }

  // static stub relocation stores the instruction address of the call
  __ relocate(static_stub_Relocation::spec(mark));

  // Literal slot for the callee Method*; NULL until set_to_interpreted()
  // patches it via NativeMovConstReg::set_data().
  InlinedMetadata object_literal(NULL);
  // single instruction, see NativeMovConstReg::next_instruction_address() in
  // CompiledStaticCall::set_to_interpreted()
  __ ldr_literal(Rmethod, object_literal);

  __ set_inst_mark(); // Who uses this?

  bool near_range = __ cache_fully_reachable();
  InlinedAddress dest((address)-1);
  address branch_site = __ pc();
  if (near_range) {
    // A branch to its own address is the encoding NativeJump reports as
    // the -1 (unset) destination.
    __ b(branch_site); // special NativeJump -1 destination
  } else {
    // Can't trash LR, FP, or argument registers
    __ indirect_jump(dest, Rtemp);
  }
  __ bind_literal(object_literal); // includes spec_for_immediate reloc
  if (!near_range) {
    __ bind_literal(dest); // special NativeJump -1 destination
  }

  // The emitted stub must not exceed the size budget reserved by callers.
  assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore code_end.
  __ end_a_stub();
  return base;
}
  86 #undef __
  87 
  88 // Relocation entries for call stub, compiled java to interpreter.
  89 int CompiledStaticCall::reloc_to_interp_stub() {
  90   return 10;  // 4 in emit_to_interp_stub + 1 in Java_Static_Call
  91 }
  92 #endif // COMPILER2_OR_JVMCI
  93 
  94 int CompiledStaticCall::to_trampoline_stub_size() {
  95   // ARM doesn't use trampolines.
  96   return 0;
  97 }
  98 
  99 // size of C2 call stub, compiled java to interpretor
 100 int CompiledStaticCall::to_interp_stub_size() {
 101   return 8 * NativeInstruction::instruction_size;
 102 }
 103 
// Convert this call site to dispatch through its to-interpreter stub:
// patch the stub's Method* literal and jump destination, flush the
// icache, then redirect the compiled call to the stub.  The patching
// order below is deliberate — do not reorder.
void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub(/*is_aot*/ false);
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  // The stub begins with the Method*-load instruction, immediately
  // followed by the jump (layout fixed by emit_to_interp_stub).
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

#ifdef ASSERT
  // A generated lambda form might be deleted from the Lambdaform
  // cache in MethodTypeForm.  If a jit compiled lambdaform method
  // becomes not entrant and the cache access returns null, the new
  // resolve will lead to a new generated LambdaForm.
  Method* old_method = reinterpret_cast<Method*>(method_holder->data());
  assert(old_method == NULL || old_method == callee() ||
         callee->is_compiled_lambda_form() ||
         !old_method->method_holder()->is_loader_alive() ||
         old_method->is_old(),  // may be race patching deoptimized nmethod due to redefinition.
         "a) MT-unsafe modification of inline cache");

  // -1 is the "unset" sentinel emitted by emit_to_interp_stub.
  address destination = jump->jump_destination();
  assert(destination == (address)-1 || destination == entry,
         "b) MT-unsafe modification of inline cache");
#endif

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Make the patched stub visible to instruction fetch before any
  // thread can be routed through it.
  ICache::invalidate_range(stub, to_interp_stub_size());

  // Update jump to call.
  set_destination_mt_safe(stub);
}
 145 
// Reset the to-interpreter stub to its freshly-emitted state: NULL
// Method* literal and the -1 "unset" jump destination (the values
// emit_to_interp_stub originally produced).
void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Patching is only safe under the CompiledIC lock / at a safepoint.
  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}
 157 
 158 //-----------------------------------------------------------------------------
 159 // Non-product mode code
 160 #ifndef PRODUCT
 161 
// Sanity-check this call site and its to-interpreter stub.
// Asserts on any inconsistency; non-product builds only.
void CompiledDirectStaticCall::verify() {
  // Verify call.
  _call->verify();
  _call->verify_alignment();

  // Verify stub.
  address stub = find_stub(/*is_aot*/ false);
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  // The two locals below are intentionally otherwise unused: the
  // nativeMovConstReg_at / nativeJump_at constructions themselves
  // perform the instruction-pattern verification.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
 177 
 178 #endif // !PRODUCT