/*
 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"
#ifdef COMPILER2
#include "opto/matcher.hpp"
#endif

// Release the CompiledICHolder* associated with this call site if there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  return is_icholder_entry(call->destination());
}

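// A minimal sketch (a hypothetical caller, not part of this file) of how the
// two helpers above are typically driven, assuming a RelocIterator walk over
// an nmethod's virtual call sites:
//
//   RelocIterator iter(nm);  // nm: some nmethod*
//   while (iter.next()) {
//     if (iter.type() == relocInfo::virtual_call_type) {
//       virtual_call_Relocation* r = iter.virtual_call_reloc();
//       if (CompiledIC::is_icholder_call_site(r)) {
//         CompiledIC::cleanup_call_site(r);  // queue the holder for release
//       }
//     }
//   }
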
// ----------------------------------------------------------------------------

// A PPC CompiledStaticCall looks like this:
//
// >>>> consts
//
// [call target1]
// [IC cache]
// [call target2]
//
// <<<< consts
// >>>> insts
//
// bl offset26               -+  -+   // 24-bit LI field, +/-32 MB reach
//                            |   |
// <<<< insts                 |   |
// >>>> stubs                 |   |
//                            |   |- trampoline_stub_Reloc
// trampoline stub:           | <-+
//   r2 = toc                 |
//   r2 = [r2 + offset]       |       // Load call target1 from const section
//   mtctr r2                 |
//   bctr                     |- static_stub_Reloc
// comp_to_interp_stub:   <---+
//   r1 = toc
//   ICreg = [r1 + IC_offset]         // Load IC from const section
//   r1    = [r1 + offset]            // Load call target2 from const section
//   mtctr r1
//   bctr
//
// <<<< stubs
//
// The call instruction in the code either
// - branches directly to the compiled method if the offset is encodable in the instruction,
// - branches to the trampoline stub if the offset to the compiled method is not encodable, or
// - branches to the compiled_to_interp stub if the target is interpreted.
//
// Furthermore, there are three relocations from the loads to the constants in
// the constant section.
//
// The use of r1 vs. r2 in the stubs makes it possible to tell them apart
// (see is_NativeCallTrampolineStub_at()).

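// A minimal sketch (an illustration only; the authoritative check is
// is_NativeCallTrampolineStub_at()) of how the scratch register of a stub's
// first instruction could be decoded to tell the two stub kinds apart:
//
//   static bool stub_uses_R12(address stub) {
//     int insn = *(int*)stub;
//     int rt   = (insn >> 21) & 0x1f;  // RT field (bits 6..10) of the load
//     return rt == R12_scratch2->encoding();
//   }
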
const int IC_pos_in_java_to_interp_stub = 8;
#define __ _masm.
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark/* = NULL*/) {
#ifdef COMPILER2
  if (mark == NULL) {
    // Get the mark within the main instructions section; it is set to the address of the call.
    mark = cbuf.insts_mark();
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  // Start the stub.
  address stub = __ start_a_stub(CompiledStaticCall::to_interp_stub_size());
  if (stub == NULL) {
    return NULL; // CodeCache is full
  }

  // For java_to_interp stubs we use R11_scratch1 as scratch register
  // and in call trampoline stubs we use R12_scratch2. This way we
  // can distinguish them (see is_NativeCallTrampolineStub_at()).
  Register reg_scratch = R11_scratch1;

  // Create a static stub relocation which relates this stub
  // with the call instruction at insts_call_instruction_offset in the
  // instructions code-section.
  __ relocate(static_stub_Relocation::spec(mark));
  const int stub_start_offset = __ offset();

  // Now, create the stub's code:
  // - load the TOC
  // - load the inline cache oop from the constant pool
  // - load the call target from the constant pool
  // - call
  __ calculate_address_from_global_toc(reg_scratch, __ method_toc());
  AddressLiteral ic = __ allocate_metadata_address((Metadata *)NULL);
  __ load_const_from_method_toc(as_Register(Matcher::inline_cache_reg_encode()), ic, reg_scratch);

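  // Emit the branch to the (not yet known) call target: with
  // ReoptimizeCallSequences a directly patchable 64-bit branch is used,
  // otherwise the target is loaded from the method TOC and reached via CTR.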
  if (ReoptimizeCallSequences) {
    __ b64_patchable((address)-1, relocInfo::none);
  } else {
    AddressLiteral a((address)-1);
    __ load_const_from_method_toc(reg_scratch, a, reg_scratch);
    __ mtctr(reg_scratch);
    __ bctr();
  }

  // FIXME: Assert that the stub can be identified and patched.

  // Java_to_interp_stub_size should be good.
  assert((__ offset() - stub_start_offset) <= CompiledStaticCall::to_interp_stub_size(),
         "should be good size");
  assert(!is_NativeCallTrampolineStub_at(__ addr_at(stub_start_offset)),
         "must not confuse java_to_interp with trampoline stubs");

  // End the stub.
  __ end_a_stub();
  return stub;
#else
  ShouldNotReachHere();
  return NULL; // unreachable; keeps the non-COMPILER2 build warning-free
#endif
}
#undef __

// Size of the java_to_interp stub. This does not need to be exact, but it
// must be greater than or equal to the real size of the stub.
// Used for optimization in Compile::shorten_branches.
int CompiledStaticCall::to_interp_stub_size() {
  return 12 * BytesPerInstWord;
}
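// For reference: BytesPerInstWord is 4 on PPC, so 48 bytes are reserved. The
// assert in emit_to_interp_stub() checks that the emitted stub actually
// stays within this bound.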

// Number of relocation entries for the call stub (compiled java to interpreter).
// Used for optimization in Compile::shorten_branches.
int CompiledStaticCall::reloc_to_interp_stub() {
  return 5;
}

void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + IC_pos_in_java_to_interp_stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

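  // Note: the stub is fully populated above before the call instruction is
  // redirected below, so a thread racing through the call site never
  // branches into a half-initialized stub.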
  // Update jump to call.
  set_destination_mt_safe(stub);
}

void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + IC_pos_in_java_to_interp_stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
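  // 0 and (address)-1 are the "clean" sentinel values; they are exactly the
  // states that the MT-safety asserts in set_to_interpreted() accept.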
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}

//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

void CompiledStaticCall::verify() {
  // Verify call.
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub.
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + IC_pos_in_java_to_interp_stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}

#endif // !PRODUCT