/*
 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"
#ifdef COMPILER2
#include "opto/matcher.hpp"
#endif

// Release the CompiledICHolder* associated with this call site if there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  return is_icholder_entry(call->destination());
}

// ----------------------------------------------------------------------------

// A PPC CompiledStaticCall looks like this:
//
// >>>> consts
//
// [call target1]
// [IC cache]
// [call target2]
//
// <<<< consts
// >>>> insts
//
// bl offset26               -+  -+             // 24-bit LI field (<<2): +/-32 MB reach
//                            |   |
// <<<< insts                 |   |
// >>>> stubs                 |   |
//                            |   |- trampoline_stub_Reloc
// trampoline stub:           | <-+
//   r2 = toc                 |
//   r2 = [r2 + offset]       |       // Load call target1 from const section
//   mtctr r2                 |
//   bctr                     |- static_stub_Reloc
// comp_to_interp_stub:   <---+
//   r1 = toc
//   ICreg = [r1 + IC_offset]         // Load IC from const section
//   r1    = [r1 + offset]            // Load call target2 from const section
//   mtctr r1
//   bctr
//
// <<<< stubs
//
// The call instruction in the code either
// - branches directly to a compiled method if the offset is encodable in the instruction,
// - branches to the trampoline stub if the offset to the compiled method is not encodable, or
// - branches to the compiled_to_interp stub if the target is interpreted.
//
// Furthermore, there are three relocations from the loads to the constants in
// the constant section.
//
// The use of r1 and r2 in the stubs makes it possible to distinguish them.

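// Layout note (a sketch, not verified against the emitter): the stub emitted
// below starts with calculate_address_from_global_toc(), assumed here to take
// two 4-byte instructions, so the IC load starts 8 bytes into the stub.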
const int IC_pos_in_java_to_interp_stub = 8;
#define __ _masm.
void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
#ifdef COMPILER2
  // Get the mark within main instrs section which is set to the address of the call.
  address call_addr = cbuf.insts_mark();

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  // Start the stub.
  address stub = __ start_a_stub(CompiledStaticCall::to_interp_stub_size());
  if (stub == NULL) {
    Compile::current()->env()->record_out_of_memory_failure();
    return;
  }

  // For java_to_interp stubs we use R11_scratch1 as scratch register
  // and in call trampoline stubs we use R12_scratch2. This way we
  // can distinguish them (see is_NativeCallTrampolineStub_at()).
  Register reg_scratch = R11_scratch1;

  // Create a static stub relocation which relates this stub
  // with the call instruction at call_addr in the main
  // instructions code-section.
  __ relocate(static_stub_Relocation::spec(call_addr));
  const int stub_start_offset = __ offset();

  // Now, create the stub's code:
  // - load the TOC
  // - load the inline cache oop from the constant pool
  // - load the call target from the constant pool
  // - call
  __ calculate_address_from_global_toc(reg_scratch, __ method_toc());
  AddressLiteral ic = __ allocate_metadata_address((Metadata *)NULL);
  __ load_const_from_method_toc(as_Register(Matcher::inline_cache_reg_encode()), ic, reg_scratch);

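  // Emit the branch to the interpreter entry. With ReoptimizeCallSequences we
  // emit a patchable 64-bit branch that can later be bound directly; otherwise
  // the target is loaded from the method's TOC and reached via mtctr/bctr.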
  if (ReoptimizeCallSequences) {
    __ b64_patchable((address)-1, relocInfo::none);
  } else {
    AddressLiteral a((address)-1);
    __ load_const_from_method_toc(reg_scratch, a, reg_scratch);
    __ mtctr(reg_scratch);
    __ bctr();
  }

  // FIXME: Assert that the stub can be identified and patched.

  // The stub must fit within to_interp_stub_size.
  assert((__ offset() - stub_start_offset) <= CompiledStaticCall::to_interp_stub_size(),
         "should be good size");
  assert(!is_NativeCallTrampolineStub_at(__ addr_at(stub_start_offset)),
         "must not confuse java_to_interp with trampoline stubs");

  // End the stub.
  __ end_a_stub();
#else
  ShouldNotReachHere();
#endif
}
#undef __

// Size of the java_to_interp stub. This doesn't need to be accurate, but it
// must be greater than or equal to the real size of the stub.
// Used for optimization in Compile::shorten_branches.
int CompiledStaticCall::to_interp_stub_size() {
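  // A rough budget, not an exact count: the TOC calculation, two constant-pool
  // loads, and the mtctr/bctr sequence stay well under twelve instructions, so
  // 12 * BytesPerInstWord leaves headroom.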
  return 12 * BytesPerInstWord;
}

// Number of relocation entries for the call stub, compiled java to interpreter.
// Used for optimization in Compile::shorten_branches.
int CompiledStaticCall::reloc_to_interp_stub() {
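  // An upper bound; presumably the static_stub relocation plus the entries
  // emitted for the TOC-based loads in emit_to_interp_stub() above.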
  return 5;
}

void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + IC_pos_in_java_to_interp_stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call.
  set_destination_mt_safe(stub);
}

void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + IC_pos_in_java_to_interp_stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
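  // Reset to the 'clean' sentinels (0 and -1) that set_to_interpreted()
  // checks in its MT-safety asserts.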
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}

//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

void CompiledStaticCall::verify() {
  // Verify call.
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub.
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + IC_pos_in_java_to_interp_stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}

#endif // !PRODUCT