/*
 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "code/codeBehaviours.hpp"
#include "code/codeCache.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "code/vtableStubs.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/linkResolver.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "oops/method.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/icache.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/events.hpp"


// Every time a compiled IC is changed or its type is being accessed,
// either the CompiledIC_lock must be held or we must be at a safe point.
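//
// A typical guarded access looks roughly like this (an illustrative sketch;
// CompiledICLocker and CompiledIC_at are declared in compiledIC.hpp, and
// cm / call_addr are placeholders for a CompiledMethod* and an ic_call site):
//
//   {
//     CompiledICLocker ml(cm);
//     CompiledIC* ic = CompiledIC_at(cm, call_addr);
//     if (!ic->is_clean()) {
//       ...   // inspect or transition the inline cache
//     }
//   }   // protection ends when the locker goes out of scope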

CompiledICLocker::CompiledICLocker(CompiledMethod* method)
  : _method(method),
    _behaviour(CompiledICProtectionBehaviour::current()),
    _locked(_behaviour->lock(_method)) {
}

CompiledICLocker::~CompiledICLocker() {
  if (_locked) {
    _behaviour->unlock(_method);
  }
}

bool CompiledICLocker::is_safe(CompiledMethod* method) {
  return CompiledICProtectionBehaviour::current()->is_safe(method);
}

bool CompiledICLocker::is_safe(address code) {
  CodeBlob* cb = CodeCache::find_blob_unsafe(code);
  assert(cb != NULL && cb->is_compiled(), "must be compiled");
  CompiledMethod* cm = cb->as_compiled_method();
  return CompiledICProtectionBehaviour::current()->is_safe(cm);
}

//-----------------------------------------------------------------------------
// Low-level access to an inline cache. Private, since they might not be
// MT-safe to use.

void* CompiledIC::cached_value() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  assert (!is_optimized(), "an optimized virtual call does not have a cached metadata");

  if (!is_in_transition_state()) {
    void* data = get_data();
    // If the metadata value here were allowed to be a raw zero it would be
    // indistinguishable from a transient NULL left by a patching race; a cleared
    // IC stores Universe::non_oop_word() instead, which the assert below checks.
    assert(data != NULL || Universe::non_oop_word() == NULL,
           "no raw nulls in CompiledIC metadatas, because of patching races");
    return (data == (void*)Universe::non_oop_word()) ? NULL : data;
  } else {
    return InlineCacheBuffer::cached_value_for((CompiledIC *)this);
  }
}


void CompiledIC::internal_set_ic_destination(address entry_point, bool is_icstub, void* cache, bool is_icholder) {
  assert(entry_point != NULL, "must set legal entry point");
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  assert (!is_optimized() || cache == NULL, "an optimized virtual call does not have a cached metadata");
  assert (cache == NULL || cache != (Metadata*)badOopVal, "invalid metadata");

  assert(!is_icholder || is_icholder_entry(entry_point), "must be");

  // Don't use ic_destination for this test since that forwards
  // through ICBuffer instead of returning the actual current state of
  // the CompiledIC.
  if (is_icholder_entry(_call->destination())) {
    // When patching for the ICStub case the cached value isn't
    // overwritten until the ICStub is copied into the CompiledIC during
    // the next safepoint.  Make sure that the CompiledICHolder* is
    // marked for release at this point since it won't be identifiable
    // once the entry point is overwritten.
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)get_data());
  }

  if (TraceCompiledIC) {
    tty->print("  ");
    print_compiled_ic();
    tty->print(" changing destination to " INTPTR_FORMAT, p2i(entry_point));
    if (!is_optimized()) {
      tty->print(" changing cached %s to " INTPTR_FORMAT, is_icholder ? "icholder" : "metadata", p2i((address)cache));
    }
    if (is_icstub) {
      tty->print(" (icstub)");
    }
    tty->cr();
  }

  {
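    // The CodeBlob lookup below is only used by the assert; the actual patching
    // of the call is done MT-safely by set_destination_mt_safe().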
    CodeBlob* cb = CodeCache::find_blob_unsafe(_call->instruction_address());
    assert(cb != NULL && cb->is_compiled(), "must be compiled");
    _call->set_destination_mt_safe(entry_point);
  }

  if (is_optimized() || is_icstub) {
    // Optimized call sites don't have a cache value and ICStub call
    // sites only change the entry point.  Changing the value in that
    // case could lead to MT safety issues.
    assert(cache == NULL, "must be null");
    return;
  }

  if (cache == NULL)  cache = (void*)Universe::non_oop_word();

  set_data((intptr_t)cache);
}


void CompiledIC::set_ic_destination(ICStub* stub) {
  internal_set_ic_destination(stub->code_begin(), true, NULL, false);
}



address CompiledIC::ic_destination() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  if (!is_in_transition_state()) {
    return _call->destination();
  } else {
    return InlineCacheBuffer::ic_destination_for((CompiledIC *)this);
  }
}


bool CompiledIC::is_in_transition_state() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  return InlineCacheBuffer::contains(_call->destination());
}


bool CompiledIC::is_icholder_call() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  return !_is_optimized && is_icholder_entry(ic_destination());
}

// Returns native address of 'call' instruction in inline-cache. Used by
// the InlineCacheBuffer when it needs to find the stub.
address CompiledIC::stub_address() const {
  assert(is_in_transition_state(), "should only be called when we are in a transition state");
  return _call->destination();
}

// Clears the IC stub if the compiled IC is in transition state
void CompiledIC::clear_ic_stub() {
  if (is_in_transition_state()) {
    ICStub* stub = ICStub_from_destination_address(stub_address());
    stub->clear();
  }
}

//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.

void CompiledIC::initialize_from_iter(RelocIterator* iter) {
  assert(iter->addr() == _call->instruction_address(), "must find ic_call");

  if (iter->type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* r = iter->virtual_call_reloc();
    _is_optimized = false;
    _value = _call->get_load_instruction(r);
  } else {
    assert(iter->type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}

CompiledIC::CompiledIC(CompiledMethod* cm, NativeCall* call)
  : _method(cm)
{
  _call = _method->call_wrapper_at((address) call);
  address ic_call = _call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(cm != NULL, "must pass compiled method");
  assert(cm->contains(ic_call), "must be in compiled method");

  // Search for the ic_call at the given address.
  RelocIterator iter(cm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");

  initialize_from_iter(&iter);
}

CompiledIC::CompiledIC(RelocIterator* iter)
  : _method(iter->code())
{
  _call = _method->call_wrapper_at(iter->addr());
  address ic_call = _call->instruction_address();

  CompiledMethod* nm = iter->code();
  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass compiled method");
  assert(nm->contains(ic_call), "must be in compiled method");

  initialize_from_iter(iter);
}

// This function may fail for two reasons: either due to running out of vtable
// stubs, or due to running out of IC stubs in an attempted transition to a
// transitional state. The needs_ic_stub_refill value will be set if the failure
// was due to running out of IC stubs, in which case the caller will refill IC
// stubs and retry.
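//
// The caller's retry loop looks roughly like this (an illustrative sketch, not
// a quote of the actual call site; ICRefillVerifier and refill_ic_stubs() are
// declared in icBuffer.hpp):
//
//   for (;;) {
//     ICRefillVerifier refill_verifier;
//     bool needs_ic_stub_refill = false;
//     if (ic->set_to_megamorphic(&call_info, bc, needs_ic_stub_refill, CHECK_NULL)) {
//       break;                                // transition succeeded
//     }
//     if (!needs_ic_stub_refill) {
//       break;                                // ran out of vtable/itable stubs
//     }
//     InlineCacheBuffer::refill_ic_stubs();   // free up IC stub space, then retry
//   }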
bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode,
                                    bool& needs_ic_stub_refill, TRAPS) {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
  assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");

  address entry;
  if (call_info->call_kind() == CallInfo::itable_call) {
    assert(bytecode == Bytecodes::_invokeinterface, "");
    int itable_index = call_info->itable_index();
    entry = VtableStubs::find_itable_stub(itable_index);
    if (entry == NULL) {
      return false;
    }
#ifdef ASSERT
    int index = call_info->resolved_method()->itable_index();
    assert(index == itable_index, "CallInfo pre-computes this");
    InstanceKlass* k = call_info->resolved_method()->method_holder();
    assert(k->verify_itable_index(itable_index), "sanity check");
#endif //ASSERT
    CompiledICHolder* holder = new CompiledICHolder(call_info->resolved_method()->method_holder(),
                                                    call_info->resolved_klass(), false);
    holder->claim();
    if (!InlineCacheBuffer::create_transition_stub(this, holder, entry)) {
      delete holder;
      needs_ic_stub_refill = true;
      return false;
    }
  } else {
    assert(call_info->call_kind() == CallInfo::vtable_call, "either itable or vtable");
    // Can be different than selected_method->vtable_index(), due to package-private etc.
    int vtable_index = call_info->vtable_index();
    assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
    entry = VtableStubs::find_vtable_stub(vtable_index);
    if (entry == NULL) {
      return false;
    }
    if (!InlineCacheBuffer::create_transition_stub(this, NULL, entry)) {
      needs_ic_stub_refill = true;
      return false;
    }
  }

  if (TraceICs) {
    ResourceMark rm;
    assert(!call_info->selected_method().is_null(), "Unexpected null selected method");
    tty->print_cr ("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
                   p2i(instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));
  }

  // We can't check this anymore. With lazy deopt we could have already
  // cleaned this IC entry before we even return. This is possible if
  // we ran out of space in the inline cache buffer trying to do the
  // set_next and we safepointed to free up space. This is a benign
  // race because the IC entry was complete when we safepointed so
  // cleaning it immediately is harmless.
  // assert(is_megamorphic(), "sanity check");
  return true;
}


// true if destination is megamorphic stub
bool CompiledIC::is_megamorphic() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  assert(!is_optimized(), "an optimized call cannot be megamorphic");

  // Cannot rely on cached_value. It is either an interface or a method.
  return VtableStubs::entry_point(ic_destination()) != NULL;
}

bool CompiledIC::is_call_to_compiled() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");

  // Use unsafe, since an inline cache might point to a zombie method. However, the zombie
  // method is guaranteed to still exist, since we only remove methods after all inline caches
  // have been cleaned up.
  CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
  bool is_monomorphic = (cb != NULL && cb->is_compiled());
  // Check that the cached_value is a klass for non-optimized monomorphic calls.
  // This assertion is invalid for compiler1: a call that does not look optimized (no static stub) can be used
  // for calling directly to the vep without using the inline cache (i.e., cached_value == NULL).
  // For JVMCI this occurs because CHA is only used to improve inlining, so call sites which could be optimized
  // virtual calls (because there are no currently loaded subclasses of a type) are left as virtual call sites.
#ifdef ASSERT
  CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address());
  bool is_c1_or_jvmci_method = caller->is_compiled_by_c1() || caller->is_compiled_by_jvmci();
  assert( is_c1_or_jvmci_method ||
         !is_monomorphic ||
         is_optimized() ||
         !caller->is_alive() ||
         (cached_metadata() != NULL && cached_metadata()->is_klass()), "sanity check");
#endif // ASSERT
  return is_monomorphic;
}


bool CompiledIC::is_call_to_interpreted() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  // It is a call to the interpreter if the destination is either a stub (when the
  // call is optimized) or an I2C adapter blob.
  bool is_call_to_interpreted = false;
  if (!is_optimized()) {
    // Must use unsafe because the destination can be a zombie (and we're cleaning)
    // and the print_compiled_ic code wants to know if the site (in the non-zombie case)
    // calls into the interpreter.
    CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
    is_call_to_interpreted = (cb != NULL && cb->is_adapter_blob());
    assert(!is_call_to_interpreted || (is_icholder_call() && cached_icholder() != NULL), "sanity check");
  } else {
    // Check if we are calling into our own codeblob (i.e., to a stub)
    address dest = ic_destination();
#ifdef ASSERT
    {
      _call->verify_resolve_call(dest);
    }
#endif /* ASSERT */
    is_call_to_interpreted = _call->is_call_to_interpreted(dest);
  }
  return is_call_to_interpreted;
}

bool CompiledIC::set_to_clean(bool in_use) {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  if (TraceInlineCacheClearing || TraceICs) {
    tty->print_cr("IC@" INTPTR_FORMAT ": set to clean", p2i(instruction_address()));
    print();
  }

  address entry = _call->get_resolve_call_stub(is_optimized());

  // A zombie transition will always be safe, since the metadata has already been set to NULL, so
  // we only need to patch the destination
  bool safe_transition = _call->is_safe_for_patching() || !in_use || is_optimized() || SafepointSynchronize::is_at_safepoint();

  if (safe_transition) {
    // Kill any leftover stub we might have too
    clear_ic_stub();
    if (is_optimized()) {
      set_ic_destination(entry);
    } else {
      set_ic_destination_and_value(entry, (void*)NULL);
    }
  } else {
    // Unsafe transition - create stub.
    if (!InlineCacheBuffer::create_transition_stub(this, NULL, entry)) {
      return false;
    }
  }
  // We can't check this anymore. With lazy deopt we could have already
  // cleaned this IC entry before we even return. This is possible if
  // we ran out of space in the inline cache buffer trying to do the
  // set_next and we safepointed to free up space. This is a benign
  // race because the IC entry was complete when we safepointed so
  // cleaning it immediately is harmless.
  // assert(is_clean(), "sanity check");
  return true;
}

bool CompiledIC::is_clean() const {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  bool is_clean = false;
  address dest = ic_destination();
  is_clean = dest == _call->get_resolve_call_stub(is_optimized());
  assert(!is_clean || is_optimized() || cached_value() == NULL, "sanity check");
  return is_clean;
}

bool CompiledIC::set_to_monomorphic(CompiledICInfo& info) {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  // Updating a cache to the wrong entry can cause bugs that are very hard
  // to track down - if a cache entry becomes invalid we just clean it. In
  // this way it is always the same code path that is responsible for
  // updating and resolving an inline cache.
  //
  // The above is no longer true. SharedRuntime::fixup_callers_callsite will change optimized
  // callsites. In addition ic_miss code will update a site to monomorphic if it determines
  // that a monomorphic call to the interpreter can now be monomorphic to compiled code.
  //
  // In both of these cases the only thing being modified is the jump/call target and these
  // transitions are mt_safe.

  Thread *thread = Thread::current();
  if (info.to_interpreter() || info.to_aot()) {
    // Call to interpreter
    if (info.is_optimized() && is_optimized()) {
      assert(is_clean(), "unsafe IC path");
      // the call analysis (callee structure) specifies that the call is optimized
      // (either because of CHA or the static target is final)
      // At code generation time, this call has been emitted as static call
      // Call via stub
      assert(info.cached_metadata() != NULL && info.cached_metadata()->is_method(), "sanity check");
      methodHandle method (thread, (Method*)info.cached_metadata());
      _call->set_to_interpreted(method, info);

      if (TraceICs) {
         ResourceMark rm(thread);
         tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to %s: %s",
           p2i(instruction_address()),
           (info.to_aot() ? "aot" : "interpreter"),
           method->print_value_string());
      }
    } else {
      // Call via method-klass-holder
      CompiledICHolder* holder = info.claim_cached_icholder();
      if (!InlineCacheBuffer::create_transition_stub(this, holder, info.entry())) {
        delete holder;
        return false;
      }
      if (TraceICs) {
         ResourceMark rm(thread);
         tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to interpreter via icholder ", p2i(instruction_address()));
      }
    }
  } else {
    // Call to compiled code
    bool static_bound = info.is_optimized() || (info.cached_metadata() == NULL);
#ifdef ASSERT
    CodeBlob* cb = CodeCache::find_blob_unsafe(info.entry());
    assert (cb != NULL && cb->is_compiled(), "must be compiled!");
#endif /* ASSERT */

    // This is MT safe if we come from a clean-cache and go through a
    // non-verified entry point
    bool safe = SafepointSynchronize::is_at_safepoint() ||
                (!is_in_transition_state() && (info.is_optimized() || static_bound || is_clean()));

    if (!safe) {
      if (!InlineCacheBuffer::create_transition_stub(this, info.cached_metadata(), info.entry())) {
        return false;
      }
    } else {
      if (is_optimized()) {
        set_ic_destination(info.entry());
      } else {
        set_ic_destination_and_value(info.entry(), info.cached_metadata());
      }
    }

    if (TraceICs) {
      ResourceMark rm(thread);
      assert(info.cached_metadata() == NULL || info.cached_metadata()->is_klass(), "must be");
      tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to compiled (rcvr klass = %s) %s",
        p2i(instruction_address()),
        (info.cached_metadata() != NULL) ? ((Klass*)info.cached_metadata())->print_value_string() : "NULL",
        (safe) ? "" : " via stub");
    }
  }
  // We can't check this anymore. With lazy deopt we could have already
  // cleaned this IC entry before we even return. This is possible if
  // we ran out of space in the inline cache buffer trying to do the
  // set_next and we safepointed to free up space. This is a benign
  // race because the IC entry was complete when we safepointed so
  // cleaning it immediately is harmless.
  // assert(is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
  return true;
}


// is_optimized: Compiler has generated an optimized call (i.e. fixed, no inline cache)
// static_bound: The call can be static bound. If it isn't also optimized, the property
// wasn't provable at time of compilation. An optimized call will have any necessary
// null check, while a static_bound won't. A static_bound (but not optimized) must
// therefore use the unverified entry point.
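//
// Typical use on the resolve path (an illustrative sketch with placeholder
// variables; the real call sites live in SharedRuntime):
//
//   CompiledICInfo virtual_call_info;
//   CompiledIC::compute_monomorphic_entry(callee_method, receiver_klass,
//                                         inline_cache->is_optimized(), static_bound,
//                                         caller_is_nmethod, virtual_call_info, CHECK);
//   if (inline_cache->is_clean()) {
//     if (!inline_cache->set_to_monomorphic(virtual_call_info)) {
//       return false;   // out of IC stubs; the caller refills and retries
//     }
//   }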
void CompiledIC::compute_monomorphic_entry(const methodHandle& method,
                                           Klass* receiver_klass,
                                           bool is_optimized,
                                           bool static_bound,
                                           bool caller_is_nmethod,
                                           CompiledICInfo& info,
                                           TRAPS) {
  CompiledMethod* method_code = method->code();

  address entry = NULL;
  if (method_code != NULL && method_code->is_in_use()) {
    assert(method_code->is_compiled(), "must be compiled");
    // Call to compiled code
    //
    // Note: the following problem exists with Compiler1:
    //   - at compile time we may or may not know if the destination is final
    //   - if we know that the destination is final (is_optimized), we will emit
    //     an optimized virtual call (no inline cache), and need a Method* to make
    //     a call to the interpreter
    //   - if we don't know if the destination is final, we emit a standard
    //     virtual call, and use CompiledICHolder to call interpreted code
    //     (no static call stub has been generated)
    //   - If we notice here that the call is static bound, we convert the
    //     call into what looks like an optimized virtual call, but we must
    //     use the unverified entry point (since there will be no null check
    //     on a call when the target isn't loaded). This causes problems when
    //     verifying the IC because it looks vanilla but is optimized. Code in
    //     is_call_to_interpreted is aware of this and weakens its asserts.
    if (is_optimized) {
      entry      = method_code->verified_entry_point();
    } else {
      entry      = method_code->entry_point();
    }
  }
  bool far_c2a = entry != NULL && caller_is_nmethod && method_code->is_far_code();
  if (entry != NULL && !far_c2a) {
    // Call to near compiled code (nmethod or aot).
    info.set_compiled_entry(entry, is_optimized ? NULL : receiver_klass, is_optimized);
  } else {
    if (is_optimized) {
      if (far_c2a) {
        // Call to aot code from nmethod.
        info.set_aot_entry(entry, method());
      } else {
        // Use stub entry
        info.set_interpreter_entry(method()->get_c2i_entry(), method());
      }
    } else {
      // Use icholder entry
      assert(method_code == NULL || method_code->is_compiled(), "must be compiled");
      CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass);
      info.set_icholder_entry(method()->get_c2i_unverified_entry(), holder);
    }
  }
  assert(info.is_optimized() == is_optimized, "must agree");
}


bool CompiledIC::is_icholder_entry(address entry) {
  CodeBlob* cb = CodeCache::find_blob_unsafe(entry);
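  // Calls that reach the interpreter through a CompiledICHolder use the callee's
  // c2i unverified entry, which lives in an adapter blob.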
  if (cb != NULL && cb->is_adapter_blob()) {
    return true;
  }
  // itable stubs also use CompiledICHolder
  if (cb != NULL && cb->is_vtable_blob()) {
    VtableStub* s = VtableStubs::entry_point(entry);
    return (s != NULL) && s->is_itable_stub();
  }

  return false;
}

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site, const CompiledMethod* cm) {
  // This call site might have become stale so inspect it carefully.
  address dest = cm->call_wrapper_at(call_site->addr())->destination();
  return is_icholder_entry(dest);
}

// Release the CompiledICHolder* associated with this call site if there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site, const CompiledMethod* cm) {
  assert(cm->is_nmethod(), "must be nmethod");
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}

// ----------------------------------------------------------------------------

bool CompiledStaticCall::set_to_clean(bool in_use) {
  // in_use is unused but needed to match template function in CompiledMethod
  assert(CompiledICLocker::is_safe(instruction_address()), "mt unsafe call");
  // Reset call site
  set_destination_mt_safe(resolve_call_stub());

  // Do not reset stub here:  It is too expensive to call find_stub.
  // Instead, rely on caller (nmethod::clear_inline_caches) to clear
  // both the call and its stub.
  return true;
}

bool CompiledStaticCall::is_clean() const {
  return destination() == resolve_call_stub();
}

bool CompiledStaticCall::is_call_to_compiled() const {
  return CodeCache::contains(destination());
}

bool CompiledDirectStaticCall::is_call_to_interpreted() const {
  // It is a call to interpreted code if it calls to a stub. Hence, the destination
  // must be in the stub part of the nmethod that contains the call
  CompiledMethod* cm = CodeCache::find_compiled(instruction_address());
  return cm->stub_contains(destination());
}

bool CompiledDirectStaticCall::is_call_to_far() const {
  // It is a call to an aot method if it calls to a stub. Hence, the destination
  // must be in the stub part of the nmethod that contains the call
  CodeBlob* desc = CodeCache::find_blob(instruction_address());
  return desc->as_compiled_method()->stub_contains(destination());
}

void CompiledStaticCall::set_to_compiled(address entry) {
  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("%s@" INTPTR_FORMAT ": set_to_compiled " INTPTR_FORMAT,
        name(),
        p2i(instruction_address()),
        p2i(entry));
  }
  // Call to compiled code
  assert(CodeCache::contains(entry), "wrong entry point");
  set_destination_mt_safe(entry);
}

void CompiledStaticCall::set(const StaticCallInfo& info) {
  assert(CompiledICLocker::is_safe(instruction_address()), "mt unsafe call");
  // Updating a cache to the wrong entry can cause bugs that are very hard
  // to track down - if a cache entry becomes invalid we just clean it. In
  // this way it is always the same code path that is responsible for
  // updating and resolving an inline cache.
  assert(is_clean(), "do not update a call entry - use clean");

  if (info._to_interpreter) {
    // Call to interpreted code
    set_to_interpreted(info.callee(), info.entry());
#if INCLUDE_AOT
  } else if (info._to_aot) {
    // Call to far code
    set_to_far(info.callee(), info.entry());
#endif
  } else {
    set_to_compiled(info.entry());
  }
}

// Compute settings for a CompiledStaticCall. Since we might have to set
// the stub when calling to the interpreter, we need to return arguments.
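//
// Typical use (an illustrative sketch with placeholder variables; ssc stands
// for the CompiledStaticCall found at the call site):
//
//   StaticCallInfo static_call_info;
//   CompiledStaticCall::compute_entry(callee_method, caller_is_nmethod, static_call_info);
//   if (ssc->is_clean()) {
//     ssc->set(static_call_info);
//   }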
void CompiledStaticCall::compute_entry(const methodHandle& m, bool caller_is_nmethod, StaticCallInfo& info) {
  CompiledMethod* m_code = m->code();
  info._callee = m;
  if (m_code != NULL && m_code->is_in_use()) {
    if (caller_is_nmethod && m_code->is_far_code()) {
      // Call to far aot code from nmethod.
      info._to_aot = true;
    } else {
      info._to_aot = false;
    }
    info._to_interpreter = false;
    info._entry  = m_code->verified_entry_point();
  } else {
    // Callee is interpreted code.  In any case entering the interpreter
    // puts a converter-frame on the stack to save arguments.
    assert(!m->is_method_handle_intrinsic(), "Compiled code should never call interpreter MH intrinsics");
    info._to_interpreter = true;
    info._entry      = m()->get_c2i_entry();
  }
}

address CompiledDirectStaticCall::find_stub_for(address instruction, bool is_aot) {
  // Find reloc. information containing this call-site
  RelocIterator iter((nmethod*)NULL, instruction);
  while (iter.next()) {
    if (iter.addr() == instruction) {
      switch(iter.type()) {
        case relocInfo::static_call_type:
          return iter.static_call_reloc()->static_stub(is_aot);
        // We check here for opt_virtual_call_type, since we reuse the code
        // from the CompiledIC implementation
        case relocInfo::opt_virtual_call_type:
          return iter.opt_virtual_call_reloc()->static_stub(is_aot);
        case relocInfo::poll_type:
        case relocInfo::poll_return_type: // A safepoint can't overlap a call.
        default:
          ShouldNotReachHere();
      }
    }
  }
  return NULL;
}

address CompiledDirectStaticCall::find_stub(bool is_aot) {
  return CompiledDirectStaticCall::find_stub_for(instruction_address(), is_aot);
}

address CompiledDirectStaticCall::resolve_call_stub() const {
  return SharedRuntime::get_resolve_static_call_stub();
}

//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

void CompiledIC::verify() {
  _call->verify();
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted()
          || is_optimized() || is_megamorphic(), "sanity check");
}

void CompiledIC::print() {
  print_compiled_ic();
  tty->cr();
}

void CompiledIC::print_compiled_ic() {
  tty->print("Inline cache at " INTPTR_FORMAT ", calling %s " INTPTR_FORMAT " cached_value " INTPTR_FORMAT,
             p2i(instruction_address()), is_call_to_interpreted() ? "interpreted " : "", p2i(ic_destination()), p2i(is_optimized() ? NULL : cached_value()));
}

void CompiledDirectStaticCall::print() {
  tty->print("static call at " INTPTR_FORMAT " -> ", p2i(instruction_address()));
  if (is_clean()) {
    tty->print("clean");
  } else if (is_call_to_compiled()) {
    tty->print("compiled");
  } else if (is_call_to_far()) {
    tty->print("far");
  } else if (is_call_to_interpreted()) {
    tty->print("interpreted");
  }
  tty->cr();
}

#endif // !PRODUCT