/*
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"
#ifdef COMPILER2
#include "opto/matcher.hpp"
#endif

// Release the CompiledICHolder* associated with this call site if there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  return is_icholder_entry(call->destination());
}

//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.

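// Note: callers normally obtain a CompiledIC through the CompiledIC_at()
// helpers declared in compiledIC.hpp rather than invoking this constructor
// directly. A hedged usage sketch, assuming the (nmethod*, address) helper
// declared there:
//
//   CompiledIC* ic = CompiledIC_at(nm, call_site_addr);
//   if (!ic->is_optimized()) {
//     // Non-optimized virtual call site: a register holds the cached value.
//   }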
CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
  : _ic_call(call)
{
  address ic_call = call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");

  // Search for the ic_call at the given address.
  RelocIterator iter(nm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");
  if (iter.type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* r = iter.virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(r->cached_value());
  } else {
    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be an optimized virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}

// ----------------------------------------------------------------------------

// A PPC CompiledStaticCall looks like this:
//
// >>>> consts
//
// [call target1]
// [IC cache]
// [call target2]
//
// <<<< consts
// >>>> insts
//
// bl offset24               -+  -+             // I-form bl: 24-bit LI field, +/-32 MB reach
//                            |   |
// <<<< insts                 |   |
// >>>> stubs                 |   |
//                            |   |- trampoline_stub_Reloc
// trampoline stub:           | <-+
//   r2 = toc                 |
//   r2 = [r2 + offset]       |       // Load call target1 from const section
//   mtctr r2                 |
//   bctr                     |- static_stub_Reloc
// comp_to_interp_stub:   <---+
//   r1 = toc
//   ICreg = [r1 + IC_offset]         // Load IC from const section
//   r1    = [r1 + offset]            // Load call target2 from const section
//   mtctr r1
//   bctr
//
// <<<< stubs
//
// The call instruction in the code either
// - branches directly to a compiled method, if the offset is encodable in the
//   instruction,
// - branches to the trampoline stub, if the offset to the compiled method is
//   not encodable, or
// - branches to the compiled_to_interp stub, if the target is interpreted.
//
// Furthermore, there are three relocations from the loads to the constants in
// the constant section.
//
// The use of different scratch registers in the two stubs (r1 and r2 above,
// standing for R11_scratch1 and R12_scratch2; see below) makes it possible
// to distinguish them.

const int IC_pos_in_java_to_interp_stub = 8;
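// A hedged note on the value 8: the stub emitted below begins with
// calculate_address_from_global_toc(), which is assumed here to expand to an
// addis/addi pair (two 4-byte instructions), so the NativeMovConstReg
// materializing the IC starts at byte offset 8 into the stub.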
#define __ _masm.
void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
#ifdef COMPILER2
  // Get the mark within the main instructions section, which is set to the
  // address of the call.
  address call_addr = cbuf.insts_mark();

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  // Start the stub.
  address stub = __ start_a_stub(CompiledStaticCall::to_interp_stub_size());
  if (stub == NULL) {
    Compile::current()->env()->record_out_of_memory_failure();
    return;
  }

  // For java_to_interp stubs we use R11_scratch1 as scratch register
  // and in call trampoline stubs we use R12_scratch2. This way we
  // can distinguish them (see is_NativeCallTrampolineStub_at()).
  Register reg_scratch = R11_scratch1;

  // Create a static stub relocation which associates this stub
  // with the call instruction at call_addr in the instructions
  // code section.
  __ relocate(static_stub_Relocation::spec(call_addr));
  const int stub_start_offset = __ offset();

  // Now, create the stub's code:
  // - load the TOC
  // - load the inline cache oop from the constant pool
  // - load the call target from the constant pool
  // - call
  __ calculate_address_from_global_toc(reg_scratch, __ method_toc());
  AddressLiteral ic = __ allocate_metadata_address((Metadata *)NULL);
  __ load_const_from_method_toc(as_Register(Matcher::inline_cache_reg_encode()), ic, reg_scratch);

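  // Emit the branch to the (not yet known) call target. A hedged summary of
  // the two paths below: with ReoptimizeCallSequences a patchable 64-bit
  // branch sequence is emitted in place; otherwise the target is loaded from
  // the method's TOC and reached via the count register (mtctr/bctr),
  // matching the comp_to_interp_stub layout sketched above.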
  if (ReoptimizeCallSequences) {
    __ b64_patchable((address)-1, relocInfo::none);
  } else {
    AddressLiteral a((address)-1);
    __ load_const_from_method_toc(reg_scratch, a, reg_scratch);
    __ mtctr(reg_scratch);
    __ bctr();
  }

  // FIXME: Assert that the stub can be identified and patched.

  // The emitted stub must not exceed to_interp_stub_size().
  assert((__ offset() - stub_start_offset) <= CompiledStaticCall::to_interp_stub_size(),
         "should be good size");
  assert(!is_NativeCallTrampolineStub_at(__ addr_at(stub_start_offset)),
         "must not confuse java_to_interp with trampoline stubs");

  // End the stub.
  __ end_a_stub();
#else
  ShouldNotReachHere();
#endif
}
#undef __

// Size of the java_to_interp stub. This doesn't need to be accurate, but it
// must be larger than or equal to the real size of the stub.
// Used for optimization in Compile::Shorten_branches.
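// With BytesPerInstWord == 4 on PPC, this reserves 12 * 4 = 48 bytes, a
// conservative bound for either stub variant emitted in emit_to_interp_stub().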
int CompiledStaticCall::to_interp_stub_size() {
  return 12 * BytesPerInstWord;
}

// Relocation entries for call stub, compiled java to interpreter.
// Used for optimization in Compile::Shorten_branches.
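// Presumably the static_stub relocation plus the entries generated for the
// TOC-relative loads in the stub; like the size estimate above, this only
// needs to be a conservative upper bound on the real count.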
int CompiledStaticCall::reloc_to_interp_stub() {
  return 5;
}

void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  instruction_address(),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + IC_pos_in_java_to_interp_stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call.
  set_destination_mt_safe(stub);
}

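// Reset the stub to its clean state: a zero method holder and a jump
// destination of (address)-1. These are exactly the sentinel values that the
// MT-safety asserts in set_to_interpreted() above accept.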
void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + IC_pos_in_java_to_interp_stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}

//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

void CompiledStaticCall::verify() {
  // Verify call.
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub.
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + IC_pos_in_java_to_interp_stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}

#endif // !PRODUCT