
src/share/vm/runtime/sharedRuntime.cpp





   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"

  26 #include "classfile/systemDictionary.hpp"
  27 #include "classfile/vmSymbols.hpp"
  28 #include "code/codeCache.hpp"
  29 #include "code/compiledIC.hpp"
  30 #include "code/codeCacheExtensions.hpp"
  31 #include "code/scopeDesc.hpp"
  32 #include "code/vtableStubs.hpp"
  33 #include "compiler/abstractCompiler.hpp"
  34 #include "compiler/compileBroker.hpp"
  35 #include "compiler/disassembler.hpp"
  36 #include "gc/shared/gcLocker.inline.hpp"
  37 #include "interpreter/interpreter.hpp"
  38 #include "interpreter/interpreterRuntime.hpp"
  39 #include "memory/universe.inline.hpp"
  40 #include "oops/oop.inline.hpp"
  41 #include "prims/forte.hpp"
  42 #include "prims/jvmtiExport.hpp"
  43 #include "prims/jvmtiRedefineClassesTrace.hpp"
  44 #include "prims/methodHandles.hpp"
  45 #include "prims/nativeLookup.hpp"
  46 #include "runtime/arguments.hpp"
  47 #include "runtime/atomic.inline.hpp"
  48 #include "runtime/biasedLocking.hpp"

  49 #include "runtime/handles.inline.hpp"
  50 #include "runtime/init.hpp"
  51 #include "runtime/interfaceSupport.hpp"
  52 #include "runtime/javaCalls.hpp"
  53 #include "runtime/sharedRuntime.hpp"
  54 #include "runtime/stubRoutines.hpp"
  55 #include "runtime/vframe.hpp"
  56 #include "runtime/vframeArray.hpp"
  57 #include "utilities/copy.hpp"
  58 #include "utilities/dtrace.hpp"
  59 #include "utilities/events.hpp"
  60 #include "utilities/hashtable.inline.hpp"
  61 #include "utilities/macros.hpp"
  62 #include "utilities/xmlstream.hpp"
  63 #ifdef COMPILER1
  64 #include "c1/c1_Runtime1.hpp"
  65 #endif
  66 
  67 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  68 


  76 
  77 DeoptimizationBlob* SharedRuntime::_deopt_blob;
  78 SafepointBlob*      SharedRuntime::_polling_page_vectors_safepoint_handler_blob;
  79 SafepointBlob*      SharedRuntime::_polling_page_safepoint_handler_blob;
  80 SafepointBlob*      SharedRuntime::_polling_page_return_handler_blob;
  81 
  82 #ifdef COMPILER2
  83 UncommonTrapBlob*   SharedRuntime::_uncommon_trap_blob;
  84 #endif // COMPILER2
  85 
  86 
  87 //----------------------------generate_stubs-----------------------------------
  88 void SharedRuntime::generate_stubs() {
  89   _wrong_method_blob                   = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method),          "wrong_method_stub");
  90   _wrong_method_abstract_blob          = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract), "wrong_method_abstract_stub");
  91   _ic_miss_blob                        = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss),  "ic_miss_stub");
  92   _resolve_opt_virtual_call_blob       = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C),   "resolve_opt_virtual_call");
  93   _resolve_virtual_call_blob           = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C),       "resolve_virtual_call");
  94   _resolve_static_call_blob            = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C),        "resolve_static_call");
  95 
  96 #ifdef COMPILER2
  97   // Vectors are generated only by C2.
  98   if (is_wide_vector(MaxVectorSize)) {

  99     _polling_page_vectors_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_VECTOR_LOOP);
 100   }
 101 #endif // COMPILER2
 102   _polling_page_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_LOOP);
 103   _polling_page_return_handler_blob    = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_RETURN);
 104 
 105   generate_deopt_blob();
 106 
 107 #ifdef COMPILER2
 108   generate_uncommon_trap_blob();
 109 #endif // COMPILER2
 110 }
 111 
 112 #include <math.h>
 113 
 114 // Implementation of SharedRuntime
 115 
 116 #ifndef PRODUCT
 117 // For statistics
 118 int SharedRuntime::_ic_miss_ctr = 0;
 119 int SharedRuntime::_wrong_method_ctr = 0;
 120 int SharedRuntime::_resolve_static_ctr = 0;
 121 int SharedRuntime::_resolve_virtual_ctr = 0;


 444 JRT_END
 445 
 446 
 447 JRT_LEAF(jdouble, SharedRuntime::l2d(jlong x))
 448   return (jdouble)x;
 449 JRT_END
 450 
 451 // Exception handling across interpreter/compiler boundaries
 452 //
 453 // exception_handler_for_return_address(...) returns the continuation address.
 454 // The continuation address is the entry point of the exception handler of the
  455 // previous frame, as determined by the return address.
 456 
 457 address SharedRuntime::raw_exception_handler_for_return_address(JavaThread* thread, address return_address) {
 458   assert(frame::verify_return_pc(return_address), err_msg("must be a return address: " INTPTR_FORMAT, return_address));
 459   assert(thread->frames_to_pop_failed_realloc() == 0 || Interpreter::contains(return_address), "missed frames to pop?");
 460 
 461   // Reset method handle flag.
 462   thread->set_is_method_handle_return(false);
 463 






 464   // The fastest case first
 465   CodeBlob* blob = CodeCache::find_blob(return_address);
 466   nmethod* nm = (blob != NULL) ? blob->as_nmethod_or_null() : NULL;
 467   if (nm != NULL) {
 468     // Set flag if return address is a method handle call site.
 469     thread->set_is_method_handle_return(nm->is_method_handle_return(return_address));
 470     // native nmethods don't have exception handlers
 471     assert(!nm->is_native_method(), "no exception handler");
 472     assert(nm->header_begin() != nm->exception_begin(), "no exception handler");
 473     if (nm->is_deopt_pc(return_address)) {
 474       // If we come here because of a stack overflow, the stack may be
 475       // unguarded. Reguard the stack otherwise if we return to the
 476       // deopt blob and the stack bang causes a stack overflow we
 477       // crash.
 478       bool guard_pages_enabled = thread->stack_yellow_zone_enabled();
 479       if (!guard_pages_enabled) guard_pages_enabled = thread->reguard_stack();
 480       assert(guard_pages_enabled, "stack banging in deopt blob may cause crash");
 481       return SharedRuntime::deopt_blob()->unpack_with_exception();
 482     } else {
 483       return nm->exception_begin();


 509 }
 510 
 511 
 512 JRT_LEAF(address, SharedRuntime::exception_handler_for_return_address(JavaThread* thread, address return_address))
 513   return raw_exception_handler_for_return_address(thread, return_address);
 514 JRT_END
 515 
 516 
 517 address SharedRuntime::get_poll_stub(address pc) {
 518   address stub;
 519   // Look up the code blob
 520   CodeBlob *cb = CodeCache::find_blob(pc);
 521 
 522   // Should be an nmethod
 523   assert(cb && cb->is_nmethod(), "safepoint polling: pc must refer to an nmethod");
 524 
 525   // Look up the relocation information
 526   assert(((nmethod*)cb)->is_at_poll_or_poll_return(pc),
 527     "safepoint polling: type must be poll");
 528 
 529   assert(((NativeInstruction*)pc)->is_safepoint_poll(),
 530     "Only polling locations are used for safepoint");





 531 
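        // Pick the matching handler stub: polls at returns, polls with wide
        // vectors in use, and ordinary loop polls are each handled by a
        // separate SafepointBlob (created in generate_stubs() above).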
 532   bool at_poll_return = ((nmethod*)cb)->is_at_poll_return(pc);
 533   bool has_wide_vectors = ((nmethod*)cb)->has_wide_vectors();
 534   if (at_poll_return) {
 535     assert(SharedRuntime::polling_page_return_handler_blob() != NULL,
 536            "polling page return stub not created yet");
 537     stub = SharedRuntime::polling_page_return_handler_blob()->entry_point();
 538   } else if (has_wide_vectors) {
 539     assert(SharedRuntime::polling_page_vectors_safepoint_handler_blob() != NULL,
 540            "polling page vectors safepoint stub not created yet");
 541     stub = SharedRuntime::polling_page_vectors_safepoint_handler_blob()->entry_point();
 542   } else {
 543     assert(SharedRuntime::polling_page_safepoint_handler_blob() != NULL,
 544            "polling page safepoint stub not created yet");
 545     stub = SharedRuntime::polling_page_safepoint_handler_blob()->entry_point();
 546   }
 547 #ifndef PRODUCT
 548   if (TraceSafepoint) {
 549     char buf[256];
 550     jio_snprintf(buf, sizeof(buf),


 600 
 601     // RC_TRACE macro has an embedded ResourceMark
 602     RC_TRACE_WITH_THREAD(0x00001000, thread,
 603                          ("calling obsolete method '%s'",
 604                           method->name_and_sig_as_C_string()));
 605     if (RC_TRACE_ENABLED(0x00002000)) {
 606       // this option is provided to debug calls to obsolete methods
 607       guarantee(false, "faulting at call to an obsolete method.");
 608     }
 609   }
 610   return 0;
 611 JRT_END
 612 
  613 // ret_pc points into the caller; we are returning the caller's exception handler
  614 // for the given exception
 615 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
 616                                                     bool force_unwind, bool top_frame_only) {
 617   assert(nm != NULL, "must exist");
 618   ResourceMark rm;
 619 



























 620   ScopeDesc* sd = nm->scope_desc_at(ret_pc);
 621   // determine handler bci, if any
 622   EXCEPTION_MARK;
 623 
 624   int handler_bci = -1;
 625   int scope_depth = 0;
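        // Walk outward through the inlined scopes at ret_pc: each pass below asks
        // one method in the inlining chain for a handler covering the current bci,
        // then moves on to its caller scope when none is found.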
 626   if (!force_unwind) {
 627     int bci = sd->bci();
 628     bool recursive_exception = false;
 629     do {
 630       bool skip_scope_increment = false;
 631       // exception handler lookup
 632       KlassHandle ek (THREAD, exception->klass());
 633       methodHandle mh(THREAD, sd->method());
 634       handler_bci = Method::fast_exception_handler_bci_for(mh, ek, bci, THREAD);
 635       if (HAS_PENDING_EXCEPTION) {
 636         recursive_exception = true;
 637         // We threw an exception while trying to find the exception handler.
 638         // Transfer the new exception to the exception handle which will
 639         // be set into thread local storage, and do another lookup for an


 720 JRT_ENTRY(void, SharedRuntime::throw_NullPointerException_at_call(JavaThread* thread))
 721   // This entry point is effectively only used for NullPointerExceptions which occur at inline
  722   // cache sites (when the callee activation is not yet set up), so we are at a call site
 723   throw_and_post_jvmti_exception(thread, vmSymbols::java_lang_NullPointerException());
 724 JRT_END
 725 
 726 JRT_ENTRY(void, SharedRuntime::throw_StackOverflowError(JavaThread* thread))
 727   // We avoid using the normal exception construction in this case because
 728   // it performs an upcall to Java, and we're already out of stack space.
 729   Klass* k = SystemDictionary::StackOverflowError_klass();
 730   oop exception_oop = InstanceKlass::cast(k)->allocate_instance(CHECK);
 731   Handle exception (thread, exception_oop);
 732   if (StackTraceInThrowable) {
 733     java_lang_Throwable::fill_in_stack_trace(exception);
 734   }
 735   // Increment counter for hs_err file reporting
 736   Atomic::inc(&Exceptions::_stack_overflow_errors);
 737   throw_and_post_jvmti_exception(thread, exception);
 738 JRT_END
 739 









 740 address SharedRuntime::continuation_for_implicit_exception(JavaThread* thread,
 741                                                            address pc,
 742                                                            SharedRuntime::ImplicitExceptionKind exception_kind)
 743 {
 744   address target_pc = NULL;
 745 
 746   if (Interpreter::contains(pc)) {
 747 #ifdef CC_INTERP
 748     // C++ interpreter doesn't throw implicit exceptions
 749     ShouldNotReachHere();
 750 #else
 751     switch (exception_kind) {
 752       case IMPLICIT_NULL:           return Interpreter::throw_NullPointerException_entry();
 753       case IMPLICIT_DIVIDE_BY_ZERO: return Interpreter::throw_ArithmeticException_entry();
 754       case STACK_OVERFLOW:          return Interpreter::throw_StackOverflowError_entry();
 755       default:                      ShouldNotReachHere();
 756     }
 757 #endif // !CC_INTERP
 758   } else {
 759     switch (exception_kind) {


 789             Events::log_exception(thread, "AbstractMethodError at " INTPTR_FORMAT, pc);
 790             return StubRoutines::throw_AbstractMethodError_entry();
 791           } else {
 792             Events::log_exception(thread, "NullPointerException at vtable entry " INTPTR_FORMAT, pc);
 793             return StubRoutines::throw_NullPointerException_at_call_entry();
 794           }
 795         } else {
 796           CodeBlob* cb = CodeCache::find_blob(pc);
 797 
  798           // If the code blob is NULL, return NULL so the signal handler reports the SEGV error.
 799           if (cb == NULL) return NULL;
 800 
 801           // Exception happened in CodeCache. Must be either:
 802           // 1. Inline-cache check in C2I handler blob,
 803           // 2. Inline-cache check in nmethod, or
 804           // 3. Implicit null exception in nmethod
 805 
 806           if (!cb->is_nmethod()) {
 807             bool is_in_blob = cb->is_adapter_blob() || cb->is_method_handles_adapter_blob();
 808             if (!is_in_blob) {
 809               cb->print();
 810               fatal(err_msg("exception happened outside interpreter, nmethods and vtable stubs at pc " INTPTR_FORMAT, pc));
 811             }
 812             Events::log_exception(thread, "NullPointerException in code blob at " INTPTR_FORMAT, pc);
 813             // There is no handler here, so we will simply unwind.
 814             return StubRoutines::throw_NullPointerException_at_call_entry();
 815           }
 816 
 817           // Otherwise, it's an nmethod.  Consult its exception handlers.
 818           nmethod* nm = (nmethod*)cb;
 819           if (nm->inlinecache_check_contains(pc)) {
 820             // exception happened inside inline-cache check code
 821             // => the nmethod is not yet active (i.e., the frame
 822             // is not set up yet) => use return address pushed by
 823             // caller => don't push another return address
 824             Events::log_exception(thread, "NullPointerException in IC check " INTPTR_FORMAT, pc);
 825             return StubRoutines::throw_NullPointerException_at_call_entry();
 826           }
 827 
 828           if (nm->method()->is_method_handle_intrinsic()) {
 829             // exception happened inside MH dispatch code, similar to a vtable stub
 830             Events::log_exception(thread, "NullPointerException in MH adapter " INTPTR_FORMAT, pc);
 831             return StubRoutines::throw_NullPointerException_at_call_entry();
 832           }
 833 
 834 #ifndef PRODUCT
 835           _implicit_null_throws++;
 836 #endif









 837           target_pc = nm->continuation_for_implicit_exception(pc);



 838           // If there's an unexpected fault, target_pc might be NULL,
 839           // in which case we want to fall through into the normal
 840           // error handling code.
 841         }
 842 
 843         break; // fall through
 844       }
 845 
 846 
 847       case IMPLICIT_DIVIDE_BY_ZERO: {
 848         nmethod* nm = CodeCache::find_nmethod(pc);
 849         guarantee(nm != NULL, "must have containing nmethod for implicit division-by-zero exceptions");
 850 #ifndef PRODUCT
 851         _implicit_div0_throws++;
 852 #endif





 853         target_pc = nm->continuation_for_implicit_exception(pc);



 854         // If there's an unexpected fault, target_pc might be NULL,
 855         // in which case we want to fall through into the normal
 856         // error handling code.
 857         break; // fall through
 858       }
 859 
 860       default: ShouldNotReachHere();
 861     }
 862 
 863     assert(exception_kind == IMPLICIT_NULL || exception_kind == IMPLICIT_DIVIDE_BY_ZERO, "wrong implicit exception kind");
 864 
 865     // for AbortVMOnException flag
 866     NOT_PRODUCT(Exceptions::debug_check_abort("java.lang.NullPointerException"));
 867     if (exception_kind == IMPLICIT_NULL) {




 868       Events::log_exception(thread, "Implicit null exception at " INTPTR_FORMAT " to " INTPTR_FORMAT, pc, target_pc);
 869     } else {




 870       Events::log_exception(thread, "Implicit division by zero exception at " INTPTR_FORMAT " to " INTPTR_FORMAT, pc, target_pc);
 871     }
 872     return target_pc;
 873   }
 874 
 875   ShouldNotReachHere();
 876   return NULL;
 877 }
 878 
 879 
 880 /**
  881  * Throws a java/lang/UnsatisfiedLinkError.  The address of this method is
 882  * installed in the native function entry of all native Java methods before
 883  * they get linked to their actual native methods.
 884  *
 885  * \note
  886  * This method actually never gets called!  The reason is that
 887  * the interpreter's native entries call NativeLookup::lookup() which
 888  * throws the exception when the lookup fails.  The exception is then
 889  * caught and forwarded on the return from NativeLookup::lookup() call


 899 
 900 address SharedRuntime::native_method_throw_unsatisfied_link_error_entry() {
 901   return CAST_FROM_FN_PTR(address, &throw_unsatisfied_link_error);
 902 }
 903 
 904 
 905 #ifndef PRODUCT
 906 JRT_ENTRY(intptr_t, SharedRuntime::trace_bytecode(JavaThread* thread, intptr_t preserve_this_value, intptr_t tos, intptr_t tos2))
 907   const frame f = thread->last_frame();
 908   assert(f.is_interpreted_frame(), "must be an interpreted frame");
 909 #ifndef PRODUCT
 910   methodHandle mh(THREAD, f.interpreter_frame_method());
 911   BytecodeTracer::trace(mh, f.interpreter_frame_bcp(), tos, tos2);
 912 #endif // !PRODUCT
 913   return preserve_this_value;
 914 JRT_END
 915 #endif // !PRODUCT
 916 
 917 JRT_ENTRY_NO_ASYNC(void, SharedRuntime::register_finalizer(JavaThread* thread, oopDesc* obj))
 918   assert(obj->is_oop(), "must be a valid oop");










 919   assert(obj->klass()->has_finalizer(), "shouldn't be here otherwise");
 920   InstanceKlass::register_finalizer(instanceOop(obj), CHECK);
 921 JRT_END
 922 
 923 
 924 jlong SharedRuntime::get_java_tid(Thread* thread) {
 925   if (thread != NULL) {
 926     if (thread->is_Java_thread()) {
 927       oop obj = ((JavaThread*)thread)->threadObj();
 928       return (obj == NULL) ? 0 : java_lang_Thread::thread_id(obj);
 929     }
 930   }
 931   return 0;
 932 }
 933 
 934 /**
 935  * This function ought to be a void function, but cannot be because
 936  * it gets turned into a tail-call on sparc, which runs into dtrace bug
 937  * 6254741.  Once that is fixed we can remove the dummy return value.
 938  */


1140 
1141   CodeBlob* caller_cb = caller_frame.cb();
1142   guarantee(caller_cb != NULL && caller_cb->is_nmethod(), "must be called from nmethod");
1143   nmethod* caller_nm = caller_cb->as_nmethod_or_null();
1144 
1145   // make sure caller is not getting deoptimized
1146   // and removed before we are done with it.
1147   // CLEANUP - with lazy deopt shouldn't need this lock
1148   nmethodLocker caller_lock(caller_nm);
1149 
1150   // determine call info & receiver
1151   // note: a) receiver is NULL for static calls
1152   //       b) an exception is thrown if receiver is NULL for non-static calls
1153   CallInfo call_info;
1154   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1155   Handle receiver = find_callee_info(thread, invoke_code,
1156                                      call_info, CHECK_(methodHandle()));
1157   methodHandle callee_method = call_info.selected_method();
1158 
1159   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||

1160          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1161          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1162          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1163 
1164   assert(caller_nm->is_alive(), "It should be alive");
1165 
1166 #ifndef PRODUCT
1167   // tracing/debugging/statistics
1168   int *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1169                 (is_virtual) ? (&_resolve_virtual_ctr) :
1170                                (&_resolve_static_ctr);
1171   Atomic::inc(addr);
1172 
1173   if (TraceCallFixup) {
1174     ResourceMark rm(thread);
1175     tty->print("resolving %s%s (%s) call to",
1176       (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1177       Bytecodes::name(invoke_code));
1178     callee_method->print_short_name(tty);
1179     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT, caller_frame.pc(), callee_method->code());


1350   // return compiled code entry point after potential safepoints
1351   assert(callee_method->verified_code_entry() != NULL, " Jump to zero!");
1352   return callee_method->verified_code_entry();
1353 JRT_END
1354 
1355 
1356 // Resolve a virtual call that can be statically bound (e.g., always
1357 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1358 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread *thread))
1359   methodHandle callee_method;
1360   JRT_BLOCK
1361     callee_method = SharedRuntime::resolve_helper(thread, true, true, CHECK_NULL);
1362     thread->set_vm_result_2(callee_method());
1363   JRT_BLOCK_END
1364   // return compiled code entry point after potential safepoints
1365   assert(callee_method->verified_code_entry() != NULL, " Jump to zero!");
1366   return callee_method->verified_code_entry();
1367 JRT_END
1368 
1369 
1370 
1371 
1372 
1373 methodHandle SharedRuntime::handle_ic_miss_helper(JavaThread *thread, TRAPS) {
1374   ResourceMark rm(thread);
1375   CallInfo call_info;
1376   Bytecodes::Code bc;
1377 
1378   // receiver is NULL for static calls. An exception is thrown for NULL
1379   // receivers for non-static calls
1380   Handle receiver = find_callee_info(thread, bc, call_info,
1381                                      CHECK_(methodHandle()));
 1382   // Compiler1 can produce virtual call sites that can actually be statically bound.
 1383   // If we fell thru to below we would think that the site was going megamorphic
 1384   // when in fact the site can never miss. Worse, because we'd think it was megamorphic
 1385   // we'd try to do a vtable dispatch; however, methods that can be statically bound
 1386   // don't have vtable entries (vtable_index < 0) and we'd blow up. So we force a
 1387   // reresolution of the call site (as if we did a handle_wrong_method and not a
 1388   // plain ic_miss) and the site will be converted to an optimized virtual call site,
 1389   // never to miss again. I don't believe C2 will produce code like this, but if it
 1390   // did, this would still be the correct thing to do for it too, hence no ifdef.
1391   //
1392   if (call_info.resolved_method()->can_be_statically_bound()) {


1476         // We have a path that was monomorphic but was going interpreted
1477         // and now we have (or had) a compiled entry. We correct the IC
1478         // by using a new icBuffer.
1479         CompiledICInfo info;
1480         KlassHandle receiver_klass(THREAD, receiver()->klass());
1481         inline_cache->compute_monomorphic_entry(callee_method,
1482                                                 receiver_klass,
1483                                                 inline_cache->is_optimized(),
1484                                                 false,
1485                                                 info, CHECK_(methodHandle()));
1486         inline_cache->set_to_monomorphic(info);
1487       } else if (!inline_cache->is_megamorphic() && !inline_cache->is_clean()) {
1488         // Potential change to megamorphic
1489         bool successful = inline_cache->set_to_megamorphic(&call_info, bc, CHECK_(methodHandle()));
1490         if (!successful) {
1491           inline_cache->set_to_clean();
1492         }
1493       } else {
1494         // Either clean or megamorphic
1495       }


1496     }
1497   } // Release CompiledIC_lock
1498 
1499   return callee_method;
1500 }
1501 
1502 //
1503 // Resets a call-site in compiled code so it will get resolved again.
 1504 // This routine handles virtual call sites, optimized virtual call
 1505 // sites, and static call sites. Typically used to change a call site's
 1506 // destination from compiled to interpreted.
1507 //
1508 methodHandle SharedRuntime::reresolve_call_site(JavaThread *thread, TRAPS) {
1509   ResourceMark rm(thread);
1510   RegisterMap reg_map(thread, false);
1511   frame stub_frame = thread->last_frame();
1512   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1513   frame caller = stub_frame.sender(&reg_map);
1514 
1515   // Do nothing if the frame isn't a live compiled frame.
1516   // nmethod could be deoptimized by the time we get here
1517   // so no update to the caller is needed.
1518 
1519   if (caller.is_compiled_frame() && !caller.is_deoptimized_frame()) {
1520 
1521     address pc = caller.pc();
1522 




1523     // Default call_addr is the location of the "basic" call.
 1524     // Determine the address of the call we are reresolving. With
1525     // Inline Caches we will always find a recognizable call.
1526     // With Inline Caches disabled we may or may not find a
1527     // recognizable call. We will always find a call for static
1528     // calls and for optimized virtual calls. For vanilla virtual
1529     // calls it depends on the state of the UseInlineCaches switch.
1530     //
1531     // With Inline Caches disabled we can get here for a virtual call
1532     // for two reasons:
1533     //   1 - calling an abstract method. The vtable for abstract methods
1534     //       will run us thru handle_wrong_method and we will eventually
 1535     //       end up in the interpreter to throw the AbstractMethodError.
1536     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1537     //       call and between the time we fetch the entry address and
1538     //       we jump to it the target gets deoptimized. Similar to 1
 1539     //       we will wind up in the interpreter (thru a c2i with c2).
1540     //
1541     address call_addr = NULL;
1542     {
1543       // Get call instruction under lock because another thread may be
1544       // busy patching it.
1545       MutexLockerEx ml_patch(Patching_lock, Mutex::_no_safepoint_check_flag);
1546       // Location of call instruction
1547       if (NativeCall::is_call_before(pc)) {
1548         NativeCall *ncall = nativeCall_before(pc);
1549         call_addr = ncall->instruction_address();
1550       }
1551     }
1552 
1553     // Check for static or virtual call
1554     bool is_static_call = false;
1555     nmethod* caller_nm = CodeCache::find_nmethod(pc);
1556     // Make sure nmethod doesn't get deoptimized and removed until
1557     // this is done with it.
1558     // CLEANUP - with lazy deopt shouldn't need this lock
1559     nmethodLocker nmlock(caller_nm);
1560 
1561     if (call_addr != NULL) {
1562       RelocIterator iter(caller_nm, call_addr, call_addr+1);
1563       int ret = iter.next(); // Get item
1564       if (ret) {
1565         assert(iter.addr() == call_addr, "must find call");
1566         if (iter.type() == relocInfo::static_call_type) {
1567           is_static_call = true;
1568         } else {
1569           assert(iter.type() == relocInfo::virtual_call_type ||
1570                  iter.type() == relocInfo::opt_virtual_call_type
 1571                 , "unexpected relocInfo type");
1572         }
1573       } else {
 1574         assert(!UseInlineCaches, "relocation info must exist for this address");
1575       }


2550 
2551 
2552 /**
2553  * Create a native wrapper for this native method.  The wrapper converts the
2554  * Java-compiled calling convention to the native convention, handles
 2555  * arguments, and transitions to native.  On return from the native code we
 2556  * transition back to Java, blocking if a safepoint is in progress.
2557  */
2558 void AdapterHandlerLibrary::create_native_wrapper(methodHandle method) {
2559   ResourceMark rm;
2560   nmethod* nm = NULL;
2561 
2562   assert(method->is_native(), "must be native");
2563   assert(method->is_method_handle_intrinsic() ||
2564          method->has_native_function(), "must have something valid to call!");
2565 
2566   {
2567     // Perform the work while holding the lock, but perform any printing outside the lock
2568     MutexLocker mu(AdapterHandlerLibrary_lock);
2569     // See if somebody beat us to it
2570     nm = method->code();
2571     if (nm != NULL) {
2572       return;
2573     }
2574 
2575     const int compile_id = CompileBroker::assign_compile_id(method, CompileBroker::standard_entry_bci);
2576     assert(compile_id > 0, "Must generate native wrapper");
2577 
2578 
2579     ResourceMark rm;
2580     BufferBlob*  buf = buffer_blob(); // the temporary code buffer in CodeCache
2581     if (buf != NULL) {
2582       CodeBuffer buffer(buf);
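            // Scratch space for relocation records; a double[] is used so the
            // buffer handed to initialize_shared_locs() is 8-byte aligned before
            // being carved into relocInfo entries.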
2583       double locs_buf[20];
2584       buffer.insts()->initialize_shared_locs((relocInfo*)locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
2585       MacroAssembler _masm(&buffer);
2586 
2587       // Fill in the signature array, for the calling-convention call.
2588       const int total_args_passed = method->size_of_parameters();
2589 
2590       BasicType* sig_bt = NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
2591       VMRegPair*   regs = NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);


2793        kptr2 = fr.next_monitor_in_interpreter_frame(kptr2) ) {
2794     if (kptr2->obj() != NULL) {         // Avoid 'holes' in the monitor array
2795       BasicLock *lock = kptr2->lock();
2796       // Inflate so the displaced header becomes position-independent
2797       if (lock->displaced_header()->is_unlocked())
2798         ObjectSynchronizer::inflate_helper(kptr2->obj());
2799       // Now the displaced header is free to move
2800       buf[i++] = (intptr_t)lock->displaced_header();
2801       buf[i++] = cast_from_oop<intptr_t>(kptr2->obj());
2802     }
2803   }
2804   assert(i - max_locals == active_monitor_count*2, "found the expected number of monitors");
2805 
2806   return buf;
2807 JRT_END
2808 
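      // OSR_migration_begin() above returns a C-heap buffer holding the
      // interpreter frame's locals and monitors; the compiled OSR entry copies
      // the values out and then calls here to free it.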
2809 JRT_LEAF(void, SharedRuntime::OSR_migration_end( intptr_t* buf) )
2810   FREE_C_HEAP_ARRAY(intptr_t, buf);
2811 JRT_END
2812 
2813 bool AdapterHandlerLibrary::contains(CodeBlob* b) {
2814   AdapterHandlerTableIterator iter(_adapters);
2815   while (iter.has_next()) {
2816     AdapterHandlerEntry* a = iter.next();
2817     if (b == CodeCache::find_blob(a->get_i2c_entry())) return true;
2818   }
2819   return false;
2820 }
2821 
2822 void AdapterHandlerLibrary::print_handler_on(outputStream* st, CodeBlob* b) {
2823   AdapterHandlerTableIterator iter(_adapters);
2824   while (iter.has_next()) {
2825     AdapterHandlerEntry* a = iter.next();
2826     if (b == CodeCache::find_blob(a->get_i2c_entry())) {
2827       st->print("Adapter for signature: ");
 2828       a->print_adapter_on(st);
2829       return;
2830     }
2831   }
2832   assert(false, "Should have found handler");
2833 }
2834 
2835 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
2836   st->print_cr("AHE@" INTPTR_FORMAT ": %s i2c: " INTPTR_FORMAT " c2i: " INTPTR_FORMAT " c2iUV: " INTPTR_FORMAT,
2837                (intptr_t) this, fingerprint()->as_string(),
2838                get_i2c_entry(), get_c2i_entry(), get_c2i_unverified_entry());
2839 
2840 }
2841 
2842 #ifndef PRODUCT


   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/stringTable.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "classfile/vmSymbols.hpp"
  29 #include "code/codeCache.hpp"
  30 #include "code/compiledIC.hpp"
  31 #include "code/codeCacheExtensions.hpp"
  32 #include "code/scopeDesc.hpp"
  33 #include "code/vtableStubs.hpp"
  34 #include "compiler/abstractCompiler.hpp"
  35 #include "compiler/compileBroker.hpp"
  36 #include "compiler/disassembler.hpp"
  37 #include "gc/shared/gcLocker.inline.hpp"
  38 #include "interpreter/interpreter.hpp"
  39 #include "interpreter/interpreterRuntime.hpp"
  40 #include "memory/universe.inline.hpp"
  41 #include "oops/oop.inline.hpp"
  42 #include "prims/forte.hpp"
  43 #include "prims/jvmtiExport.hpp"
  44 #include "prims/jvmtiRedefineClassesTrace.hpp"
  45 #include "prims/methodHandles.hpp"
  46 #include "prims/nativeLookup.hpp"
  47 #include "runtime/arguments.hpp"
  48 #include "runtime/atomic.inline.hpp"
  49 #include "runtime/biasedLocking.hpp"
  50 #include "runtime/compilationPolicy.hpp"
  51 #include "runtime/handles.inline.hpp"
  52 #include "runtime/init.hpp"
  53 #include "runtime/interfaceSupport.hpp"
  54 #include "runtime/javaCalls.hpp"
  55 #include "runtime/sharedRuntime.hpp"
  56 #include "runtime/stubRoutines.hpp"
  57 #include "runtime/vframe.hpp"
  58 #include "runtime/vframeArray.hpp"
  59 #include "utilities/copy.hpp"
  60 #include "utilities/dtrace.hpp"
  61 #include "utilities/events.hpp"
  62 #include "utilities/hashtable.inline.hpp"
  63 #include "utilities/macros.hpp"
  64 #include "utilities/xmlstream.hpp"
  65 #ifdef COMPILER1
  66 #include "c1/c1_Runtime1.hpp"
  67 #endif
  68 
  69 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  70 


  78 
  79 DeoptimizationBlob* SharedRuntime::_deopt_blob;
  80 SafepointBlob*      SharedRuntime::_polling_page_vectors_safepoint_handler_blob;
  81 SafepointBlob*      SharedRuntime::_polling_page_safepoint_handler_blob;
  82 SafepointBlob*      SharedRuntime::_polling_page_return_handler_blob;
  83 
  84 #ifdef COMPILER2
  85 UncommonTrapBlob*   SharedRuntime::_uncommon_trap_blob;
  86 #endif // COMPILER2
  87 
  88 
  89 //----------------------------generate_stubs-----------------------------------
  90 void SharedRuntime::generate_stubs() {
  91   _wrong_method_blob                   = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method),          "wrong_method_stub");
  92   _wrong_method_abstract_blob          = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract), "wrong_method_abstract_stub");
  93   _ic_miss_blob                        = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss),  "ic_miss_stub");
  94   _resolve_opt_virtual_call_blob       = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C),   "resolve_opt_virtual_call");
  95   _resolve_virtual_call_blob           = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C),       "resolve_virtual_call");
  96   _resolve_static_call_blob            = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C),        "resolve_static_call");
  97 
  98 #if defined(COMPILER2) || INCLUDE_JVMCI
  99   // Vectors are generated only by C2 and JVMCI.
 100   bool support_wide = is_wide_vector(MaxVectorSize);
 101   if (support_wide) {
 102     _polling_page_vectors_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_VECTOR_LOOP);
 103   }
 104 #endif // COMPILER2 || INCLUDE_JVMCI
 105   _polling_page_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_LOOP);
 106   _polling_page_return_handler_blob    = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_RETURN);
 107 
 108   generate_deopt_blob();
 109 
 110 #ifdef COMPILER2
 111   generate_uncommon_trap_blob();
 112 #endif // COMPILER2
 113 }
 114 
 115 #include <math.h>
 116 
 117 // Implementation of SharedRuntime
 118 
 119 #ifndef PRODUCT
 120 // For statistics
 121 int SharedRuntime::_ic_miss_ctr = 0;
 122 int SharedRuntime::_wrong_method_ctr = 0;
 123 int SharedRuntime::_resolve_static_ctr = 0;
 124 int SharedRuntime::_resolve_virtual_ctr = 0;


 447 JRT_END
 448 
 449 
 450 JRT_LEAF(jdouble, SharedRuntime::l2d(jlong x))
 451   return (jdouble)x;
 452 JRT_END
 453 
 454 // Exception handling across interpreter/compiler boundaries
 455 //
 456 // exception_handler_for_return_address(...) returns the continuation address.
 457 // The continuation address is the entry point of the exception handler of the
  458 // previous frame, as determined by the return address.
 459 
 460 address SharedRuntime::raw_exception_handler_for_return_address(JavaThread* thread, address return_address) {
 461   assert(frame::verify_return_pc(return_address), err_msg("must be a return address: " INTPTR_FORMAT, return_address));
 462   assert(thread->frames_to_pop_failed_realloc() == 0 || Interpreter::contains(return_address), "missed frames to pop?");
 463 
 464   // Reset method handle flag.
 465   thread->set_is_method_handle_return(false);
 466 
 467 #if INCLUDE_JVMCI
  468   // JVMCI's ExceptionHandlerStub expects the thread-local exception PC to be clear,
  469   // and other exception handler continuations do not read it.
 470   thread->set_exception_pc(NULL);
 471 #endif
 472 
 473   // The fastest case first
 474   CodeBlob* blob = CodeCache::find_blob(return_address);
 475   nmethod* nm = (blob != NULL) ? blob->as_nmethod_or_null() : NULL;
 476   if (nm != NULL) {
 477     // Set flag if return address is a method handle call site.
 478     thread->set_is_method_handle_return(nm->is_method_handle_return(return_address));
 479     // native nmethods don't have exception handlers
 480     assert(!nm->is_native_method(), "no exception handler");
 481     assert(nm->header_begin() != nm->exception_begin(), "no exception handler");
 482     if (nm->is_deopt_pc(return_address)) {
 483       // If we come here because of a stack overflow, the stack may be
 484       // unguarded. Reguard the stack otherwise if we return to the
 485       // deopt blob and the stack bang causes a stack overflow we
 486       // crash.
 487       bool guard_pages_enabled = thread->stack_yellow_zone_enabled();
 488       if (!guard_pages_enabled) guard_pages_enabled = thread->reguard_stack();
 489       assert(guard_pages_enabled, "stack banging in deopt blob may cause crash");
 490       return SharedRuntime::deopt_blob()->unpack_with_exception();
 491     } else {
 492       return nm->exception_begin();


 518 }
 519 
 520 
 521 JRT_LEAF(address, SharedRuntime::exception_handler_for_return_address(JavaThread* thread, address return_address))
 522   return raw_exception_handler_for_return_address(thread, return_address);
 523 JRT_END
 524 
 525 
 526 address SharedRuntime::get_poll_stub(address pc) {
 527   address stub;
 528   // Look up the code blob
 529   CodeBlob *cb = CodeCache::find_blob(pc);
 530 
 531   // Should be an nmethod
 532   assert(cb && cb->is_nmethod(), "safepoint polling: pc must refer to an nmethod");
 533 
 534   // Look up the relocation information
 535   assert(((nmethod*)cb)->is_at_poll_or_poll_return(pc),
 536     "safepoint polling: type must be poll");
 537 
 538 #ifdef ASSERT
 539   if (!((NativeInstruction*)pc)->is_safepoint_poll()) {
 540     tty->print_cr("bad pc: " PTR_FORMAT, p2i(pc));
 541     Disassembler::decode(cb);
 542     fatal("Only polling locations are used for safepoint");
 543   }
 544 #endif
 545 
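        // Pick the matching handler stub: polls at returns, polls with wide
        // vectors in use, and ordinary loop polls are each handled by a
        // separate SafepointBlob (created in generate_stubs() above).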
 546   bool at_poll_return = ((nmethod*)cb)->is_at_poll_return(pc);
 547   bool has_wide_vectors = ((nmethod*)cb)->has_wide_vectors();
 548   if (at_poll_return) {
 549     assert(SharedRuntime::polling_page_return_handler_blob() != NULL,
 550            "polling page return stub not created yet");
 551     stub = SharedRuntime::polling_page_return_handler_blob()->entry_point();
 552   } else if (has_wide_vectors) {
 553     assert(SharedRuntime::polling_page_vectors_safepoint_handler_blob() != NULL,
 554            "polling page vectors safepoint stub not created yet");
 555     stub = SharedRuntime::polling_page_vectors_safepoint_handler_blob()->entry_point();
 556   } else {
 557     assert(SharedRuntime::polling_page_safepoint_handler_blob() != NULL,
 558            "polling page safepoint stub not created yet");
 559     stub = SharedRuntime::polling_page_safepoint_handler_blob()->entry_point();
 560   }
 561 #ifndef PRODUCT
 562   if (TraceSafepoint) {
 563     char buf[256];
 564     jio_snprintf(buf, sizeof(buf),


 614 
 615     // RC_TRACE macro has an embedded ResourceMark
 616     RC_TRACE_WITH_THREAD(0x00001000, thread,
 617                          ("calling obsolete method '%s'",
 618                           method->name_and_sig_as_C_string()));
 619     if (RC_TRACE_ENABLED(0x00002000)) {
 620       // this option is provided to debug calls to obsolete methods
 621       guarantee(false, "faulting at call to an obsolete method.");
 622     }
 623   }
 624   return 0;
 625 JRT_END
 626 
  627 // ret_pc points into the caller; we are returning the caller's exception handler
  628 // for the given exception
 629 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
 630                                                     bool force_unwind, bool top_frame_only) {
 631   assert(nm != NULL, "must exist");
 632   ResourceMark rm;
 633 
 634 #if INCLUDE_JVMCI
 635   if (nm->is_compiled_by_jvmci()) {
 636     // lookup exception handler for this pc
 637     int catch_pco = ret_pc - nm->code_begin();
 638     ExceptionHandlerTable table(nm);
 639     HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
 640     if (t != NULL) {
 641       return nm->code_begin() + t->pco();
 642     } else {
 643       // there is no exception handler for this pc => deoptimize
 644       nm->make_not_entrant();
 645 
 646       // Use Deoptimization::deoptimize for all of its side-effects:
 647       // revoking biases of monitors, gathering traps statistics, logging...
 648       // it also patches the return pc but we do not care about that
 649       // since we return a continuation to the deopt_blob below.
 650       JavaThread* thread = JavaThread::current();
 651       RegisterMap reg_map(thread, UseBiasedLocking);
 652       frame runtime_frame = thread->last_frame();
 653       frame caller_frame = runtime_frame.sender(&reg_map);
 654       Deoptimization::deoptimize(thread, caller_frame, &reg_map, Deoptimization::Reason_not_compiled_exception_handler);
 655 
 656       return SharedRuntime::deopt_blob()->unpack_with_exception_in_tls();
 657     }
 658   }
 659 #endif // INCLUDE_JVMCI
 660 
 661   ScopeDesc* sd = nm->scope_desc_at(ret_pc);
 662   // determine handler bci, if any
 663   EXCEPTION_MARK;
 664 
 665   int handler_bci = -1;
 666   int scope_depth = 0;
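        // Walk outward through the inlined scopes at ret_pc: each pass below asks
        // one method in the inlining chain for a handler covering the current bci,
        // then moves on to its caller scope when none is found.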
 667   if (!force_unwind) {
 668     int bci = sd->bci();
 669     bool recursive_exception = false;
 670     do {
 671       bool skip_scope_increment = false;
 672       // exception handler lookup
 673       KlassHandle ek (THREAD, exception->klass());
 674       methodHandle mh(THREAD, sd->method());
 675       handler_bci = Method::fast_exception_handler_bci_for(mh, ek, bci, THREAD);
 676       if (HAS_PENDING_EXCEPTION) {
 677         recursive_exception = true;
 678         // We threw an exception while trying to find the exception handler.
 679         // Transfer the new exception to the exception handle which will
 680         // be set into thread local storage, and do another lookup for an


 761 JRT_ENTRY(void, SharedRuntime::throw_NullPointerException_at_call(JavaThread* thread))
 762   // This entry point is effectively only used for NullPointerExceptions which occur at inline
  763   // cache sites (when the callee activation is not yet set up), so we are at a call site
 764   throw_and_post_jvmti_exception(thread, vmSymbols::java_lang_NullPointerException());
 765 JRT_END
 766 
 767 JRT_ENTRY(void, SharedRuntime::throw_StackOverflowError(JavaThread* thread))
 768   // We avoid using the normal exception construction in this case because
 769   // it performs an upcall to Java, and we're already out of stack space.
 770   Klass* k = SystemDictionary::StackOverflowError_klass();
 771   oop exception_oop = InstanceKlass::cast(k)->allocate_instance(CHECK);
 772   Handle exception (thread, exception_oop);
 773   if (StackTraceInThrowable) {
 774     java_lang_Throwable::fill_in_stack_trace(exception);
 775   }
 776   // Increment counter for hs_err file reporting
 777   Atomic::inc(&Exceptions::_stack_overflow_errors);
 778   throw_and_post_jvmti_exception(thread, exception);
 779 JRT_END
 780 
 781 #if INCLUDE_JVMCI
 782 address SharedRuntime::deoptimize_for_implicit_exception(JavaThread* thread, address pc, nmethod* nm, int deopt_reason) {
 783   assert(deopt_reason > Deoptimization::Reason_none && deopt_reason < Deoptimization::Reason_LIMIT, "invalid deopt reason");
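        // Stash the faulting pc and a synthesized trap request in thread-local
        // state; the uncommon trap blob entry returned below reads both when it
        // builds the deoptimization.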
 784   thread->set_jvmci_implicit_exception_pc(pc);
 785   thread->set_pending_deoptimization(Deoptimization::make_trap_request((Deoptimization::DeoptReason)deopt_reason, Deoptimization::Action_reinterpret));
 786   return (SharedRuntime::deopt_blob()->implicit_exception_uncommon_trap());
 787 }
 788 #endif // INCLUDE_JVMCI
 789 
 790 address SharedRuntime::continuation_for_implicit_exception(JavaThread* thread,
 791                                                            address pc,
 792                                                            SharedRuntime::ImplicitExceptionKind exception_kind)
 793 {
 794   address target_pc = NULL;
 795 
 796   if (Interpreter::contains(pc)) {
 797 #ifdef CC_INTERP
 798     // C++ interpreter doesn't throw implicit exceptions
 799     ShouldNotReachHere();
 800 #else
 801     switch (exception_kind) {
 802       case IMPLICIT_NULL:           return Interpreter::throw_NullPointerException_entry();
 803       case IMPLICIT_DIVIDE_BY_ZERO: return Interpreter::throw_ArithmeticException_entry();
 804       case STACK_OVERFLOW:          return Interpreter::throw_StackOverflowError_entry();
 805       default:                      ShouldNotReachHere();
 806     }
 807 #endif // !CC_INTERP
 808   } else {
 809     switch (exception_kind) {


 839             Events::log_exception(thread, "AbstractMethodError at " INTPTR_FORMAT, pc);
 840             return StubRoutines::throw_AbstractMethodError_entry();
 841           } else {
 842             Events::log_exception(thread, "NullPointerException at vtable entry " INTPTR_FORMAT, pc);
 843             return StubRoutines::throw_NullPointerException_at_call_entry();
 844           }
 845         } else {
 846           CodeBlob* cb = CodeCache::find_blob(pc);
 847 
  848           // If the code blob is NULL, return NULL so the signal handler reports the SEGV error.
 849           if (cb == NULL) return NULL;
 850 
 851           // Exception happened in CodeCache. Must be either:
 852           // 1. Inline-cache check in C2I handler blob,
 853           // 2. Inline-cache check in nmethod, or
 854           // 3. Implicit null exception in nmethod
 855 
 856           if (!cb->is_nmethod()) {
 857             bool is_in_blob = cb->is_adapter_blob() || cb->is_method_handles_adapter_blob();
 858             if (!is_in_blob) {
 859               // Allow normal crash reporting to handle this
 860               return NULL;
 861             }
 862             Events::log_exception(thread, "NullPointerException in code blob at " INTPTR_FORMAT, pc);
 863             // There is no handler here, so we will simply unwind.
 864             return StubRoutines::throw_NullPointerException_at_call_entry();
 865           }
 866 
 867           // Otherwise, it's an nmethod.  Consult its exception handlers.
 868           nmethod* nm = (nmethod*)cb;
 869           if (nm->inlinecache_check_contains(pc)) {
 870             // exception happened inside inline-cache check code
 871             // => the nmethod is not yet active (i.e., the frame
 872             // is not set up yet) => use return address pushed by
 873             // caller => don't push another return address
 874             Events::log_exception(thread, "NullPointerException in IC check " INTPTR_FORMAT, pc);
 875             return StubRoutines::throw_NullPointerException_at_call_entry();
 876           }
 877 
 878           if (nm->method()->is_method_handle_intrinsic()) {
 879             // exception happened inside MH dispatch code, similar to a vtable stub
 880             Events::log_exception(thread, "NullPointerException in MH adapter " INTPTR_FORMAT, pc);
 881             return StubRoutines::throw_NullPointerException_at_call_entry();
 882           }
 883 
 884 #ifndef PRODUCT
 885           _implicit_null_throws++;
 886 #endif
 887 #if INCLUDE_JVMCI
 888           if (nm->is_compiled_by_jvmci() && nm->pc_desc_at(pc) != NULL) {
 889             // If there's no PcDesc then we'll die way down inside of
 890             // deopt instead of just getting normal error reporting,
 891             // so only go there if it will succeed.
 892             return deoptimize_for_implicit_exception(thread, pc, nm, Deoptimization::Reason_null_check);
 893           } else {
 894 #endif // INCLUDE_JVMCI
 895           assert (nm->is_nmethod(), "Expect nmethod");
 896           target_pc = nm->continuation_for_implicit_exception(pc);
 897 #if INCLUDE_JVMCI
 898           }
 899 #endif // INCLUDE_JVMCI
 900           // If there's an unexpected fault, target_pc might be NULL,
 901           // in which case we want to fall through into the normal
 902           // error handling code.
 903         }
 904 
 905         break; // fall through
 906       }
 907 
 908 
 909       case IMPLICIT_DIVIDE_BY_ZERO: {
 910         nmethod* nm = CodeCache::find_nmethod(pc);
 911         guarantee(nm != NULL, "must have containing compiled method for implicit division-by-zero exceptions");
 912 #ifndef PRODUCT
 913         _implicit_div0_throws++;
 914 #endif
 915 #if INCLUDE_JVMCI
 916         if (nm->is_compiled_by_jvmci() && nm->pc_desc_at(pc) != NULL) {
 917           return deoptimize_for_implicit_exception(thread, pc, nm, Deoptimization::Reason_div0_check);
 918         } else {
 919 #endif // INCLUDE_JVMCI
 920         target_pc = nm->continuation_for_implicit_exception(pc);
 921 #if INCLUDE_JVMCI
 922         }
 923 #endif // INCLUDE_JVMCI
 924         // If there's an unexpected fault, target_pc might be NULL,
 925         // in which case we want to fall through into the normal
 926         // error handling code.
 927         break; // fall through
 928       }
 929 
 930       default: ShouldNotReachHere();
 931     }
 932 
 933     assert(exception_kind == IMPLICIT_NULL || exception_kind == IMPLICIT_DIVIDE_BY_ZERO, "wrong implicit exception kind");
 934 


 935     if (exception_kind == IMPLICIT_NULL) {
 936 #ifndef PRODUCT
 937       // for AbortVMOnException flag
 938       Exceptions::debug_check_abort("java.lang.NullPointerException");
 939 #endif //PRODUCT
 940       Events::log_exception(thread, "Implicit null exception at " INTPTR_FORMAT " to " INTPTR_FORMAT, pc, target_pc);
 941     } else {
 942 #ifndef PRODUCT
 943       // for AbortVMOnException flag
 944       Exceptions::debug_check_abort("java.lang.ArithmeticException");
 945 #endif //PRODUCT
 946       Events::log_exception(thread, "Implicit division by zero exception at " INTPTR_FORMAT " to " INTPTR_FORMAT, pc, target_pc);
 947     }
 948     return target_pc;
 949   }
 950 
 951   ShouldNotReachHere();
 952   return NULL;
 953 }
 954 
 955 
 956 /**
  957  * Throws a java/lang/UnsatisfiedLinkError.  The address of this method is
 958  * installed in the native function entry of all native Java methods before
 959  * they get linked to their actual native methods.
 960  *
 961  * \note
  962  * This method actually never gets called!  The reason is that
 963  * the interpreter's native entries call NativeLookup::lookup() which
 964  * throws the exception when the lookup fails.  The exception is then
 965  * caught and forwarded on the return from NativeLookup::lookup() call


 975 
 976 address SharedRuntime::native_method_throw_unsatisfied_link_error_entry() {
 977   return CAST_FROM_FN_PTR(address, &throw_unsatisfied_link_error);
 978 }
 979 
 980 
 981 #ifndef PRODUCT
 982 JRT_ENTRY(intptr_t, SharedRuntime::trace_bytecode(JavaThread* thread, intptr_t preserve_this_value, intptr_t tos, intptr_t tos2))
 983   const frame f = thread->last_frame();
 984   assert(f.is_interpreted_frame(), "must be an interpreted frame");
 985 #ifndef PRODUCT
 986   methodHandle mh(THREAD, f.interpreter_frame_method());
 987   BytecodeTracer::trace(mh, f.interpreter_frame_bcp(), tos, tos2);
 988 #endif // !PRODUCT
 989   return preserve_this_value;
 990 JRT_END
 991 #endif // !PRODUCT
 992 
 993 JRT_ENTRY_NO_ASYNC(void, SharedRuntime::register_finalizer(JavaThread* thread, oopDesc* obj))
 994   assert(obj->is_oop(), "must be a valid oop");
 995 #if INCLUDE_JVMCI
 996   // This removes the requirement for JVMCI compilers to emit code
 997   // performing a dynamic check that obj has a finalizer before
 998   // calling this routine. There should be no performance impact
 999   // for C1 since it emits a dynamic check. C2 and the interpreter
1000   // uses other runtime routines for registering finalizers.
1001   if (!obj->klass()->has_finalizer()) {
1002     return;
1003   }
1004 #endif // INCLUDE_JVMCI
1005   assert(obj->klass()->has_finalizer(), "shouldn't be here otherwise");
1006   InstanceKlass::register_finalizer(instanceOop(obj), CHECK);
1007 JRT_END
1008 
1009 
1010 jlong SharedRuntime::get_java_tid(Thread* thread) {
1011   if (thread != NULL) {
1012     if (thread->is_Java_thread()) {
1013       oop obj = ((JavaThread*)thread)->threadObj();
1014       return (obj == NULL) ? 0 : java_lang_Thread::thread_id(obj);
1015     }
1016   }
1017   return 0;
1018 }
1019 
1020 /**
1021  * This function ought to be a void function, but cannot be because
1022  * it gets turned into a tail-call on sparc, which runs into dtrace bug
1023  * 6254741.  Once that is fixed we can remove the dummy return value.
1024  */


1226 
1227   CodeBlob* caller_cb = caller_frame.cb();
1228   guarantee(caller_cb != NULL && caller_cb->is_nmethod(), "must be called from nmethod");
1229   nmethod* caller_nm = caller_cb->as_nmethod_or_null();
1230 
1231   // make sure caller is not getting deoptimized
1232   // and removed before we are done with it.
1233   // CLEANUP - with lazy deopt shouldn't need this lock
1234   nmethodLocker caller_lock(caller_nm);
1235 
1236   // determine call info & receiver
1237   // note: a) receiver is NULL for static calls
1238   //       b) an exception is thrown if receiver is NULL for non-static calls
1239   CallInfo call_info;
1240   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1241   Handle receiver = find_callee_info(thread, invoke_code,
1242                                      call_info, CHECK_(methodHandle()));
1243   methodHandle callee_method = call_info.selected_method();
1244 
1245   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1246          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1247          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1248          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1249          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1250 
1251   assert(caller_nm->is_alive(), "It should be alive");
1252 
1253 #ifndef PRODUCT
1254   // tracing/debugging/statistics
1255   int *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1256                 (is_virtual) ? (&_resolve_virtual_ctr) :
1257                                (&_resolve_static_ctr);
1258   Atomic::inc(addr);
1259 
1260   if (TraceCallFixup) {
1261     ResourceMark rm(thread);
1262     tty->print("resolving %s%s (%s) call to",
1263       (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1264       Bytecodes::name(invoke_code));
1265     callee_method->print_short_name(tty);
1266     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT, p2i(caller_frame.pc()), p2i(callee_method->code()));


1437   // return compiled code entry point after potential safepoints
1438   assert(callee_method->verified_code_entry() != NULL, " Jump to zero!");
1439   return callee_method->verified_code_entry();
1440 JRT_END
1441 
1442 
1443 // Resolve a virtual call that can be statically bound (e.g., always
1444 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1445 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread *thread))
1446   methodHandle callee_method;
1447   JRT_BLOCK
1448     callee_method = SharedRuntime::resolve_helper(thread, true, true, CHECK_NULL);
1449     thread->set_vm_result_2(callee_method());
1450   JRT_BLOCK_END
1451   // return compiled code entry point after potential safepoints
1452   assert(callee_method->verified_code_entry() != NULL, " Jump to zero!");
1453   return callee_method->verified_code_entry();
1454 JRT_END
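// A hedged sketch of the surrounding control flow: the resolve stub built
// by generate_resolve_blob() conceptually does
//
//   address entry = SharedRuntime::resolve_opt_virtual_call_C(thread);
//   // (a pending exception is forwarded instead of jumping)
//   goto *entry;   // tail-jump to the verified entry, arguments intact
//
// so execution continues as if the call site had been patched all along.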
1455 
1456 
1457 methodHandle SharedRuntime::handle_ic_miss_helper(JavaThread *thread, TRAPS) {
1458   ResourceMark rm(thread);
1459   CallInfo call_info;
1460   Bytecodes::Code bc;
1461 
1462   // receiver is NULL for static calls. An exception is thrown for NULL
1463   // receivers for non-static calls
1464   Handle receiver = find_callee_info(thread, bc, call_info,
1465                                      CHECK_(methodHandle()));
1466   // Compiler1 can produce virtual call sites that can actually be statically bound.
1467   // If we fell thru to below we would think that the site was going megamorphic
1468   // when in fact the site can never miss. Worse, because we'd think it was
1469   // megamorphic we'd try to do a vtable dispatch; however, methods that can be
1470   // statically bound don't have vtable entries (vtable_index < 0) and we'd blow up.
1471   // So we force a reresolution of the call site (as if we did a handle_wrong_method
1472   // and not a plain ic_miss) and the site will be converted to an optimized virtual
1473   // call site, never to miss again. I don't believe C2 will produce code like this
1474   // but if it did this would still be the correct thing to do for it too, hence no
1475   // ifdef.
1476   if (call_info.resolved_method()->can_be_statically_bound()) {


1560         // We have a path that was monomorphic but was going interpreted
1561         // and now we have (or had) a compiled entry. We correct the IC
1562         // by using a new icBuffer.
1563         CompiledICInfo info;
1564         KlassHandle receiver_klass(THREAD, receiver()->klass());
1565         inline_cache->compute_monomorphic_entry(callee_method,
1566                                                 receiver_klass,
1567                                                 inline_cache->is_optimized(),
1568                                                 false,
1569                                                 info, CHECK_(methodHandle()));
1570         inline_cache->set_to_monomorphic(info);
1571       } else if (!inline_cache->is_megamorphic() && !inline_cache->is_clean()) {
1572         // Potential change to megamorphic
1573         bool successful = inline_cache->set_to_megamorphic(&call_info, bc, CHECK_(methodHandle()));
1574         if (!successful) {
1575           inline_cache->set_to_clean();
1576         }
1577       } else {
1578         // Either clean or megamorphic
1579       }
1580     } else {
1581       fatal("Unimplemented");
1582     }
1583   } // Release CompiledIC_lock
1584 
1585   return callee_method;
1586 }
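// Summary of the inline-cache handling above (a restatement of the code,
// not new behavior):
//   - monomorphic-to-interpreted, with a compiled entry now available:
//     recompute and install a fresh monomorphic entry via a new icBuffer;
//   - neither megamorphic nor clean: attempt the megamorphic transition,
//     falling back to a clean (re-resolvable) state if it cannot be installed;
//   - already clean or megamorphic: leave the call site alone.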
1587 
1588 //
1589 // Resets a call-site in compiled code so it will get resolved again.
1590 // This routine handles virtual call sites, optimized virtual call
1591 // sites, and static call sites. Typically used to change a call site's
1592 // destination from compiled to interpreted.
1593 //
1594 methodHandle SharedRuntime::reresolve_call_site(JavaThread *thread, TRAPS) {
1595   ResourceMark rm(thread);
1596   RegisterMap reg_map(thread, false);
1597   frame stub_frame = thread->last_frame();
1598   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1599   frame caller = stub_frame.sender(&reg_map);
1600 
1601   // Do nothing if the frame isn't a live compiled frame.
1602   // nmethod could be deoptimized by the time we get here
1603   // so no update to the caller is needed.
1604 
1605   if (caller.is_compiled_frame() && !caller.is_deoptimized_frame()) {
1606 
1607     address pc = caller.pc();
1608 
1609     // Check for static or virtual call
1610     bool is_static_call = false;
1611     nmethod* caller_nm = CodeCache::find_nmethod(pc);
1612 
1613     // Default call_addr is the location of the "basic" call.
1614     // Determine the address of the call we are reresolving. With
1615     // Inline Caches we will always find a recognizable call.
1616     // With Inline Caches disabled we may or may not find a
1617     // recognizable call. We will always find a call for static
1618     // calls and for optimized virtual calls. For vanilla virtual
1619     // calls it depends on the state of the UseInlineCaches switch.
1620     //
1621     // With Inline Caches disabled we can get here for a virtual call
1622     // for two reasons:
1623     //   1 - calling an abstract method. The vtable for abstract methods
1624     //       will run us thru handle_wrong_method and we will eventually
1625     //       end up in the interpreter to throw the AbstractMethodError.
1626     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1627     //       call and, between the time we fetch the entry address and the
1628     //       time we jump to it, the target gets deoptimized. Similar to 1,
1629     //       we will wind up in the interpreter (thru a c2i with c2).
1630     //
1631     address call_addr = NULL;
1632     {
1633       // Get call instruction under lock because another thread may be
1634       // busy patching it.
1635       MutexLockerEx ml_patch(Patching_lock, Mutex::_no_safepoint_check_flag);
1636       // Location of call instruction
1637       if (NativeCall::is_call_before(pc)) {
1638         NativeCall *ncall = nativeCall_before(pc);
1639         call_addr = ncall->instruction_address();
1640       }
1641     }
1642     // Make sure nmethod doesn't get deoptimized and removed until
1643     // this is done with it.
1644     // CLEANUP - with lazy deopt shouldn't need this lock
1645     nmethodLocker nmlock(caller_nm);
1646 
1647     if (call_addr != NULL) {
1648       RelocIterator iter(caller_nm, call_addr, call_addr+1);
1649       int ret = iter.next(); // Get item
1650       if (ret) {
1651         assert(iter.addr() == call_addr, "must find call");
1652         if (iter.type() == relocInfo::static_call_type) {
1653           is_static_call = true;
1654         } else {
1655           assert(iter.type() == relocInfo::virtual_call_type ||
1656                  iter.type() == relocInfo::opt_virtual_call_type
1657                 , "unexpected relocInfo type");
1658         }
1659       } else {
1660         assert(!UseInlineCaches, "relocation info must exist for this address");
1661       }


2636 
2637 
2638 /**
2639  * Create a native wrapper for this native method.  The wrapper converts the
2640  * Java-compiled calling convention to the native convention, handles
2641  * arguments, and transitions to native.  On return from the native we transition
2642  * back to java blocking if a safepoint is in progress.
2643  */
2644 void AdapterHandlerLibrary::create_native_wrapper(methodHandle method) {
2645   ResourceMark rm;
2646   nmethod* nm = NULL;
2647 
2648   assert(method->is_native(), "must be native");
2649   assert(method->is_method_handle_intrinsic() ||
2650          method->has_native_function(), "must have something valid to call!");
2651 
2652   {
2653     // Perform the work while holding the lock, but perform any printing outside the lock
2654     MutexLocker mu(AdapterHandlerLibrary_lock);
2655     // See if somebody beat us to it
2656     if (method->code() != NULL) {
2657       return;
2658     }
2659 
2660     const int compile_id = CompileBroker::assign_compile_id(method, CompileBroker::standard_entry_bci);
2661     assert(compile_id > 0, "Must generate native wrapper");
2662 
2663 
2664     ResourceMark rm;
2665     BufferBlob*  buf = buffer_blob(); // the temporary code buffer in CodeCache
2666     if (buf != NULL) {
2667       CodeBuffer buffer(buf);
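      // A double[] keeps this scratch relocation area 64-bit aligned; the
      // sizeof quotient below converts its byte size into the number of
      // relocInfo slots handed to initialize_shared_locs().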
2668       double locs_buf[20];
2669       buffer.insts()->initialize_shared_locs((relocInfo*)locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
2670       MacroAssembler _masm(&buffer);
2671 
2672       // Fill in the signature array, for the calling-convention call.
2673       const int total_args_passed = method->size_of_parameters();
2674 
2675       BasicType* sig_bt = NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
2676       VMRegPair*   regs = NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
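      // A hedged sketch of how sig_bt is populated before the (elided) call
      // into the platform calling convention; SignatureStream is the
      // existing signature iterator assumed here:
      //
      //   int i = 0;
      //   if (!method->is_static()) {
      //     sig_bt[i++] = T_OBJECT;             // receiver ('this') first
      //   }
      //   for (SignatureStream ss(method->signature()); !ss.at_return_type(); ss.next()) {
      //     sig_bt[i++] = ss.type();
      //     if (ss.type() == T_LONG || ss.type() == T_DOUBLE) {
      //       sig_bt[i++] = T_VOID;             // longs/doubles use two slots
      //     }
      //   }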


2878        kptr2 = fr.next_monitor_in_interpreter_frame(kptr2) ) {
2879     if (kptr2->obj() != NULL) {         // Avoid 'holes' in the monitor array
2880       BasicLock *lock = kptr2->lock();
2881       // Inflate so the displaced header becomes position-independent
2882       if (lock->displaced_header()->is_unlocked())
2883         ObjectSynchronizer::inflate_helper(kptr2->obj());
2884       // Now the displaced header is free to move
2885       buf[i++] = (intptr_t)lock->displaced_header();
2886       buf[i++] = cast_from_oop<intptr_t>(kptr2->obj());
2887     }
2888   }
2889   assert(i - max_locals == active_monitor_count*2, "found the expected number of monitors");
2890 
2891   return buf;
2892 JRT_END
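// Layout of the OSR migration buffer built above (a summary of the code,
// not a separate spec):
//
//   buf[0 .. max_locals)                the interpreter frame's locals
//   buf[max_locals .. max_locals + 2*n) n owned-monitor pairs, each
//                                       { displaced mark word, owner oop },
//                                       skipping 'holes' in the monitor array
//
// The buffer is C-heap allocated and is freed by OSR_migration_end() below.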
2893 
2894 JRT_LEAF(void, SharedRuntime::OSR_migration_end( intptr_t* buf) )
2895   FREE_C_HEAP_ARRAY(intptr_t, buf);
2896 JRT_END
2897 
2898 bool AdapterHandlerLibrary::contains(const CodeBlob* b) {
2899   AdapterHandlerTableIterator iter(_adapters);
2900   while (iter.has_next()) {
2901     AdapterHandlerEntry* a = iter.next();
2902     if (b == CodeCache::find_blob(a->get_i2c_entry())) return true;
2903   }
2904   return false;
2905 }
2906 
2907 void AdapterHandlerLibrary::print_handler_on(outputStream* st, const CodeBlob* b) {
2908   AdapterHandlerTableIterator iter(_adapters);
2909   while (iter.has_next()) {
2910     AdapterHandlerEntry* a = iter.next();
2911     if (b == CodeCache::find_blob(a->get_i2c_entry())) {
2912       st->print("Adapter for signature: ");
2913       a->print_adapter_on(st);
2914       return;
2915     }
2916   }
2917   assert(false, "Should have found handler");
2918 }
2919 
2920 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
2921   st->print_cr("AHE@" INTPTR_FORMAT ": %s i2c: " INTPTR_FORMAT " c2i: " INTPTR_FORMAT " c2iUV: " INTPTR_FORMAT,
2922                p2i(this), fingerprint()->as_string(),
2923                p2i(get_i2c_entry()), p2i(get_c2i_entry()), p2i(get_c2i_unverified_entry()));
2924 
2925 }
2926 
2927 #ifndef PRODUCT