src/share/vm/runtime/sharedRuntime.cpp
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"

  26 #include "classfile/systemDictionary.hpp"
  27 #include "classfile/vmSymbols.hpp"
  28 #include "code/codeCache.hpp"
  29 #include "code/compiledIC.hpp"
  30 #include "code/codeCacheExtensions.hpp"
  31 #include "code/scopeDesc.hpp"
  32 #include "code/vtableStubs.hpp"
  33 #include "compiler/abstractCompiler.hpp"
  34 #include "compiler/compileBroker.hpp"
  35 #include "compiler/compilerOracle.hpp"
  36 #include "compiler/disassembler.hpp"
  37 #include "gc/shared/gcLocker.inline.hpp"
  38 #include "interpreter/interpreter.hpp"
  39 #include "interpreter/interpreterRuntime.hpp"
  40 #include "memory/universe.inline.hpp"
  41 #include "oops/oop.inline.hpp"
  42 #include "prims/forte.hpp"
  43 #include "prims/jvmtiExport.hpp"
  44 #include "prims/jvmtiRedefineClassesTrace.hpp"
  45 #include "prims/methodHandles.hpp"
  46 #include "prims/nativeLookup.hpp"
  47 #include "runtime/arguments.hpp"
  48 #include "runtime/atomic.inline.hpp"
  49 #include "runtime/biasedLocking.hpp"

  50 #include "runtime/handles.inline.hpp"
  51 #include "runtime/init.hpp"
  52 #include "runtime/interfaceSupport.hpp"
  53 #include "runtime/javaCalls.hpp"
  54 #include "runtime/sharedRuntime.hpp"
  55 #include "runtime/stubRoutines.hpp"
  56 #include "runtime/vframe.hpp"
  57 #include "runtime/vframeArray.hpp"
  58 #include "utilities/copy.hpp"
  59 #include "utilities/dtrace.hpp"
  60 #include "utilities/events.hpp"
  61 #include "utilities/hashtable.inline.hpp"
  62 #include "utilities/macros.hpp"
  63 #include "utilities/xmlstream.hpp"
  64 #ifdef COMPILER1
  65 #include "c1/c1_Runtime1.hpp"
  66 #endif
  67 
  68 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  69 


  77 
  78 DeoptimizationBlob* SharedRuntime::_deopt_blob;
  79 SafepointBlob*      SharedRuntime::_polling_page_vectors_safepoint_handler_blob;
  80 SafepointBlob*      SharedRuntime::_polling_page_safepoint_handler_blob;
  81 SafepointBlob*      SharedRuntime::_polling_page_return_handler_blob;
  82 
  83 #ifdef COMPILER2
  84 UncommonTrapBlob*   SharedRuntime::_uncommon_trap_blob;
  85 #endif // COMPILER2
  86 
  87 
  88 //----------------------------generate_stubs-----------------------------------
  89 void SharedRuntime::generate_stubs() {
  90   _wrong_method_blob                   = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method),          "wrong_method_stub");
  91   _wrong_method_abstract_blob          = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract), "wrong_method_abstract_stub");
  92   _ic_miss_blob                        = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss),  "ic_miss_stub");
  93   _resolve_opt_virtual_call_blob       = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C),   "resolve_opt_virtual_call");
  94   _resolve_virtual_call_blob           = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C),       "resolve_virtual_call");
  95   _resolve_static_call_blob            = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C),        "resolve_static_call");
  96 
  97 #ifdef COMPILER2
  98   // Vectors are generated only by C2.
  99   if (is_wide_vector(MaxVectorSize)) {

 100     _polling_page_vectors_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_VECTOR_LOOP);
 101   }
 102 #endif // COMPILER2
 103   _polling_page_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_LOOP);
 104   _polling_page_return_handler_blob    = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_RETURN);
 105 
 106   generate_deopt_blob();
 107 
 108 #ifdef COMPILER2
 109   generate_uncommon_trap_blob();
 110 #endif // COMPILER2
 111 }
 112 
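Each entry in generate_stubs above pairs a C++ runtime routine with a generated blob, passing the routine through CAST_FROM_FN_PTR. A minimal sketch of why such a macro exists at all: ISO C++ has no direct conversion from a function pointer to a data pointer, so the cast detours through an integral type. HotSpot's real macro uses its own carrier type (address_word); std::uintptr_t plays that role here, and the names are illustrative.

    #include <cstdint>

    typedef unsigned char* address;

    // Detour through an integral type; HotSpot uses address_word instead.
    #define CAST_FROM_FN_PTR(new_type, func_ptr) \
      ((new_type)((std::uintptr_t)(func_ptr)))

    static void handle_wrong_method_stub_body() {}  // hypothetical entry point

    address entry = CAST_FROM_FN_PTR(address, handle_wrong_method_stub_body);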
 113 #include <math.h>
 114 
 115 // Implementation of SharedRuntime
 116 
 117 #ifndef PRODUCT
 118 // For statistics
 119 int SharedRuntime::_ic_miss_ctr = 0;


 445 JRT_END
 446 
 447 
 448 JRT_LEAF(jdouble, SharedRuntime::l2d(jlong x))
 449   return (jdouble)x;
 450 JRT_END
 451 
 452 // Exception handling across interpreter/compiler boundaries
 453 //
 454 // exception_handler_for_return_address(...) returns the continuation address.
 455 // The continuation address is the entry point of the exception handler of the
 456 // previous frame, determined by the return address.
 457 
 458 address SharedRuntime::raw_exception_handler_for_return_address(JavaThread* thread, address return_address) {
 459   assert(frame::verify_return_pc(return_address), err_msg("must be a return address: " INTPTR_FORMAT, return_address));
 460   assert(thread->frames_to_pop_failed_realloc() == 0 || Interpreter::contains(return_address), "missed frames to pop?");
 461 
 462   // Reset method handle flag.
 463   thread->set_is_method_handle_return(false);
 464 






 465   // The fastest case first
 466   CodeBlob* blob = CodeCache::find_blob(return_address);
 467   nmethod* nm = (blob != NULL) ? blob->as_nmethod_or_null() : NULL;
 468   if (nm != NULL) {
 469     // Set flag if return address is a method handle call site.
 470     thread->set_is_method_handle_return(nm->is_method_handle_return(return_address));
 471     // native nmethods don't have exception handlers
 472     assert(!nm->is_native_method(), "no exception handler");
 473     assert(nm->header_begin() != nm->exception_begin(), "no exception handler");
 474     if (nm->is_deopt_pc(return_address)) {
 475       // If we come here because of a stack overflow, the stack may be
 476       // unguarded. Reguard the stack; otherwise, if we return to the
 477       // deopt blob and the stack bang causes a stack overflow, we
 478       // crash.
 479       bool guard_pages_enabled = thread->stack_yellow_zone_enabled();
 480       if (!guard_pages_enabled) guard_pages_enabled = thread->reguard_stack();
 481       assert(guard_pages_enabled, "stack banging in deopt blob may cause crash");
 482       return SharedRuntime::deopt_blob()->unpack_with_exception();
 483     } else {
 484       return nm->exception_begin();


 510 }
 511 
 512 
 513 JRT_LEAF(address, SharedRuntime::exception_handler_for_return_address(JavaThread* thread, address return_address))
 514   return raw_exception_handler_for_return_address(thread, return_address);
 515 JRT_END
 516 
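The dispatch above (and its truncated continuation) boils down to: find the code blob covering the return address, special-case deopt pcs, otherwise hand back that blob's exception entry. A self-contained toy version of that shape, with all types as simplified stand-ins for the code cache machinery:

    #include <cstddef>
    #include <map>

    typedef unsigned char* address;

    // Toy model of raw_exception_handler_for_return_address.
    struct ToyCodeBlob {
      address code_begin;
      address code_end;
      address exception_begin;                           // handler entry for this blob
      bool is_deopt_pc(address) const { return false; }  // simplified
    };

    address continuation_for(const std::map<address, ToyCodeBlob>& code_cache,
                             address return_address,
                             address deopt_unpack_with_exception) {
      for (std::map<address, ToyCodeBlob>::const_iterator it = code_cache.begin();
           it != code_cache.end(); ++it) {
        const ToyCodeBlob& b = it->second;
        if (return_address >= b.code_begin && return_address < b.code_end) {
          if (b.is_deopt_pc(return_address))
            return deopt_unpack_with_exception;  // unpack_with_exception case
          return b.exception_begin;              // nm->exception_begin() case
        }
      }
      return NULL;  // not in the code cache; other cases omitted
    }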
 517 
 518 address SharedRuntime::get_poll_stub(address pc) {
 519   address stub;
 520   // Look up the code blob
 521   CodeBlob *cb = CodeCache::find_blob(pc);
 522 
 523   // Should be an nmethod
 524   assert(cb && cb->is_nmethod(), "safepoint polling: pc must refer to an nmethod");
 525 
 526   // Look up the relocation information
 527   assert(((nmethod*)cb)->is_at_poll_or_poll_return(pc),
 528     "safepoint polling: type must be poll");
 529 
 530   assert(((NativeInstruction*)pc)->is_safepoint_poll(),
 531     "Only polling locations are used for safepoint");



 532 
 533   bool at_poll_return = ((nmethod*)cb)->is_at_poll_return(pc);
 534   bool has_wide_vectors = ((nmethod*)cb)->has_wide_vectors();
 535   if (at_poll_return) {
 536     assert(SharedRuntime::polling_page_return_handler_blob() != NULL,
 537            "polling page return stub not created yet");
 538     stub = SharedRuntime::polling_page_return_handler_blob()->entry_point();
 539   } else if (has_wide_vectors) {
 540     assert(SharedRuntime::polling_page_vectors_safepoint_handler_blob() != NULL,
 541            "polling page vectors safepoint stub not created yet");
 542     stub = SharedRuntime::polling_page_vectors_safepoint_handler_blob()->entry_point();
 543   } else {
 544     assert(SharedRuntime::polling_page_safepoint_handler_blob() != NULL,
 545            "polling page safepoint stub not created yet");
 546     stub = SharedRuntime::polling_page_safepoint_handler_blob()->entry_point();
 547   }
 548 #ifndef PRODUCT
 549   if (TraceSafepoint) {
 550     char buf[256];
 551     jio_snprintf(buf, sizeof(buf),


 601 
 602     // RC_TRACE macro has an embedded ResourceMark
 603     RC_TRACE_WITH_THREAD(0x00001000, thread,
 604                          ("calling obsolete method '%s'",
 605                           method->name_and_sig_as_C_string()));
 606     if (RC_TRACE_ENABLED(0x00002000)) {
 607       // this option is provided to debug calls to obsolete methods
 608       guarantee(false, "faulting at call to an obsolete method.");
 609     }
 610   }
 611   return 0;
 612 JRT_END
 613 
 614 // ret_pc points into caller; we are returning caller's exception handler
 615 // for the given exception
 616 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
 617                                                     bool force_unwind, bool top_frame_only) {
 618   assert(nm != NULL, "must exist");
 619   ResourceMark rm;
 620 



























 621   ScopeDesc* sd = nm->scope_desc_at(ret_pc);
 622   // determine handler bci, if any
 623   EXCEPTION_MARK;
 624 
 625   int handler_bci = -1;
 626   int scope_depth = 0;
 627   if (!force_unwind) {
 628     int bci = sd->bci();
 629     bool recursive_exception = false;
 630     do {
 631       bool skip_scope_increment = false;
 632       // exception handler lookup
 633       KlassHandle ek (THREAD, exception->klass());
 634       methodHandle mh(THREAD, sd->method());
 635       handler_bci = Method::fast_exception_handler_bci_for(mh, ek, bci, THREAD);
 636       if (HAS_PENDING_EXCEPTION) {
 637         recursive_exception = true;
 638         // We threw an exception while trying to find the exception handler.
 639         // Transfer the new exception to the exception handle which will
 640         // be set into thread local storage, and do another lookup for an


 721 JRT_ENTRY(void, SharedRuntime::throw_NullPointerException_at_call(JavaThread* thread))
 722   // This entry point is effectively only used for NullPointerExceptions which occur at inline
 723   // cache sites (when the callee activation is not yet set up), so we are at a call site.
 724   throw_and_post_jvmti_exception(thread, vmSymbols::java_lang_NullPointerException());
 725 JRT_END
 726 
 727 JRT_ENTRY(void, SharedRuntime::throw_StackOverflowError(JavaThread* thread))
 728   // We avoid using the normal exception construction in this case because
 729   // it performs an upcall to Java, and we're already out of stack space.
 730   Klass* k = SystemDictionary::StackOverflowError_klass();
 731   oop exception_oop = InstanceKlass::cast(k)->allocate_instance(CHECK);
 732   Handle exception (thread, exception_oop);
 733   if (StackTraceInThrowable) {
 734     java_lang_Throwable::fill_in_stack_trace(exception);
 735   }
 736   // Increment counter for hs_err file reporting
 737   Atomic::inc(&Exceptions::_stack_overflow_errors);
 738   throw_and_post_jvmti_exception(thread, exception);
 739 JRT_END
 740 
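throw_StackOverflowError deliberately bypasses the normal exception-construction upcall because there is no stack left to run it. The same shape in a generic, purely illustrative sketch (not HotSpot code): build the minimal error object directly and do optional extras only behind a flag.

    struct ToyError {
      const char* name;
      bool has_trace;
    };

    ToyError make_stack_overflow_error(bool stack_trace_in_throwable) {
      ToyError e = { "java.lang.StackOverflowError", false };
      if (stack_trace_in_throwable) {
        e.has_trace = true;  // stands in for fill_in_stack_trace(exception)
      }
      return e;
    }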









 741 address SharedRuntime::continuation_for_implicit_exception(JavaThread* thread,
 742                                                            address pc,
 743                                                            SharedRuntime::ImplicitExceptionKind exception_kind)
 744 {
 745   address target_pc = NULL;
 746 
 747   if (Interpreter::contains(pc)) {
 748 #ifdef CC_INTERP
 749     // C++ interpreter doesn't throw implicit exceptions
 750     ShouldNotReachHere();
 751 #else
 752     switch (exception_kind) {
 753       case IMPLICIT_NULL:           return Interpreter::throw_NullPointerException_entry();
 754       case IMPLICIT_DIVIDE_BY_ZERO: return Interpreter::throw_ArithmeticException_entry();
 755       case STACK_OVERFLOW:          return Interpreter::throw_StackOverflowError_entry();
 756       default:                      ShouldNotReachHere();
 757     }
 758 #endif // !CC_INTERP
 759   } else {
 760     switch (exception_kind) {


 790             Events::log_exception(thread, "AbstractMethodError at " INTPTR_FORMAT, pc);
 791             return StubRoutines::throw_AbstractMethodError_entry();
 792           } else {
 793             Events::log_exception(thread, "NullPointerException at vtable entry " INTPTR_FORMAT, pc);
 794             return StubRoutines::throw_NullPointerException_at_call_entry();
 795           }
 796         } else {
 797           CodeBlob* cb = CodeCache::find_blob(pc);
 798 
 799           // If code blob is NULL, then return NULL to signal handler to report the SEGV error.
 800           if (cb == NULL) return NULL;
 801 
 802           // Exception happened in CodeCache. Must be either:
 803           // 1. Inline-cache check in C2I handler blob,
 804           // 2. Inline-cache check in nmethod, or
 805           // 3. Implicit null exception in nmethod
 806 
 807           if (!cb->is_nmethod()) {
 808             bool is_in_blob = cb->is_adapter_blob() || cb->is_method_handles_adapter_blob();
 809             if (!is_in_blob) {
 810               cb->print();
 811               fatal(err_msg("exception happened outside interpreter, nmethods and vtable stubs at pc " INTPTR_FORMAT, pc));
 812             }
 813             Events::log_exception(thread, "NullPointerException in code blob at " INTPTR_FORMAT, pc);
 814             // There is no handler here, so we will simply unwind.
 815             return StubRoutines::throw_NullPointerException_at_call_entry();
 816           }
 817 
 818           // Otherwise, it's an nmethod.  Consult its exception handlers.
 819           nmethod* nm = (nmethod*)cb;
 820           if (nm->inlinecache_check_contains(pc)) {
 821             // exception happened inside inline-cache check code
 822             // => the nmethod is not yet active (i.e., the frame
 823             // is not set up yet) => use return address pushed by
 824             // caller => don't push another return address
 825             Events::log_exception(thread, "NullPointerException in IC check " INTPTR_FORMAT, pc);
 826             return StubRoutines::throw_NullPointerException_at_call_entry();
 827           }
 828 
 829           if (nm->method()->is_method_handle_intrinsic()) {
 830             // exception happened inside MH dispatch code, similar to a vtable stub
 831             Events::log_exception(thread, "NullPointerException in MH adapter " INTPTR_FORMAT, pc);
 832             return StubRoutines::throw_NullPointerException_at_call_entry();
 833           }
 834 
 835 #ifndef PRODUCT
 836           _implicit_null_throws++;
 837 #endif









 838           target_pc = nm->continuation_for_implicit_exception(pc);



 839           // If there's an unexpected fault, target_pc might be NULL,
 840           // in which case we want to fall through into the normal
 841           // error handling code.
 842         }
 843 
 844         break; // fall through
 845       }
 846 
 847 
 848       case IMPLICIT_DIVIDE_BY_ZERO: {
 849         nmethod* nm = CodeCache::find_nmethod(pc);
 850         guarantee(nm != NULL, "must have containing nmethod for implicit division-by-zero exceptions");
 851 #ifndef PRODUCT
 852         _implicit_div0_throws++;
 853 #endif





 854         target_pc = nm->continuation_for_implicit_exception(pc);



 855         // If there's an unexpected fault, target_pc might be NULL,
 856         // in which case we want to fall through into the normal
 857         // error handling code.
 858         break; // fall through
 859       }
 860 
 861       default: ShouldNotReachHere();
 862     }
 863 
 864     assert(exception_kind == IMPLICIT_NULL || exception_kind == IMPLICIT_DIVIDE_BY_ZERO, "wrong implicit exception kind");
 865 
 866     // for AbortVMOnException flag
 867     NOT_PRODUCT(Exceptions::debug_check_abort("java.lang.NullPointerException"));
 868     if (exception_kind == IMPLICIT_NULL) {




 869       Events::log_exception(thread, "Implicit null exception at " INTPTR_FORMAT " to " INTPTR_FORMAT, pc, target_pc);
 870     } else {




 871       Events::log_exception(thread, "Implicit division by zero exception at " INTPTR_FORMAT " to " INTPTR_FORMAT, pc, target_pc);
 872     }
 873     return target_pc;
 874   }
 875 
 876   ShouldNotReachHere();
 877   return NULL;
 878 }
 879 
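A NULL result from continuation_for_implicit_exception tells the platform signal handler to fall through to ordinary error reporting; a non-NULL result is where execution resumes. Schematically (everything here is a hypothetical stand-in for the platform fault handler):

    typedef unsigned char* address;

    // Either redirect the resume pc or decline, letting SEGV reporting run.
    bool handle_implicit_fault(address* resume_pc_slot, address target_pc) {
      if (target_pc == 0) {
        return false;               // not ours: fall through to crash reporting
      }
      *resume_pc_slot = target_pc;  // resume execution at the continuation
      return true;                  // fault consumed
    }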
 880 
 881 /**
 882  * Throws a java/lang/UnsatisfiedLinkError.  The address of this method is
 883  * installed in the native function entry of all native Java methods before
 884  * they get linked to their actual native methods.
 885  *
 886  * \note
 887  * This method actually never gets called!  The reason is that
 888  * the interpreter's native entries call NativeLookup::lookup(), which
 889  * throws the exception when the lookup fails.  The exception is then
 890  * caught and forwarded on the return from NativeLookup::lookup() call


 900 
 901 address SharedRuntime::native_method_throw_unsatisfied_link_error_entry() {
 902   return CAST_FROM_FN_PTR(address, &throw_unsatisfied_link_error);
 903 }
 904 
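A sketch of the "poisoned default entry" pattern the comment above describes: every native method initially points at a thrower; successful linking replaces the pointer, so the thrower is normally never reached. All names are illustrative.

    typedef void (*native_entry_t)();

    static void throw_unsatisfied_link_error_stub() {
      // would raise java/lang/UnsatisfiedLinkError inside the VM
    }

    struct ToyMethod {
      native_entry_t native_function;
      ToyMethod() : native_function(throw_unsatisfied_link_error_stub) {}
      void link(native_entry_t real_entry) { native_function = real_entry; }
    };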
 905 
 906 #ifndef PRODUCT
 907 JRT_ENTRY(intptr_t, SharedRuntime::trace_bytecode(JavaThread* thread, intptr_t preserve_this_value, intptr_t tos, intptr_t tos2))
 908   const frame f = thread->last_frame();
 909   assert(f.is_interpreted_frame(), "must be an interpreted frame");
 910 #ifndef PRODUCT
 911   methodHandle mh(THREAD, f.interpreter_frame_method());
 912   BytecodeTracer::trace(mh, f.interpreter_frame_bcp(), tos, tos2);
 913 #endif // !PRODUCT
 914   return preserve_this_value;
 915 JRT_END
 916 #endif // !PRODUCT
 917 
 918 JRT_ENTRY_NO_ASYNC(void, SharedRuntime::register_finalizer(JavaThread* thread, oopDesc* obj))
 919   assert(obj->is_oop(), "must be a valid oop");










 920   assert(obj->klass()->has_finalizer(), "shouldn't be here otherwise");
 921   InstanceKlass::register_finalizer(instanceOop(obj), CHECK);
 922 JRT_END
 923 
 924 
 925 jlong SharedRuntime::get_java_tid(Thread* thread) {
 926   if (thread != NULL) {
 927     if (thread->is_Java_thread()) {
 928       oop obj = ((JavaThread*)thread)->threadObj();
 929       return (obj == NULL) ? 0 : java_lang_Thread::thread_id(obj);
 930     }
 931   }
 932   return 0;
 933 }
 934 
 935 /**
 936  * This function ought to be a void function, but cannot be because
 937  * it gets turned into a tail-call on sparc, which runs into dtrace bug
 938  * 6254741.  Once that is fixed we can remove the dummy return value.
 939  */


1141 
1142   CodeBlob* caller_cb = caller_frame.cb();
1143   guarantee(caller_cb != NULL && caller_cb->is_nmethod(), "must be called from nmethod");
1144   nmethod* caller_nm = caller_cb->as_nmethod_or_null();
1145 
1146   // make sure caller is not getting deoptimized
1147   // and removed before we are done with it.
1148   // CLEANUP - with lazy deopt shouldn't need this lock
1149   nmethodLocker caller_lock(caller_nm);
1150 
1151   // determine call info & receiver
1152   // note: a) receiver is NULL for static calls
1153   //       b) an exception is thrown if receiver is NULL for non-static calls
1154   CallInfo call_info;
1155   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1156   Handle receiver = find_callee_info(thread, invoke_code,
1157                                      call_info, CHECK_(methodHandle()));
1158   methodHandle callee_method = call_info.selected_method();
1159 
1160   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||

1161          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1162          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1163          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1164 
1165   assert(caller_nm->is_alive(), "It should be alive");
1166 
1167 #ifndef PRODUCT
1168   // tracing/debugging/statistics
1169   int *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1170                 (is_virtual) ? (&_resolve_virtual_ctr) :
1171                                (&_resolve_static_ctr);
1172   Atomic::inc(addr);
1173 
1174   if (TraceCallFixup) {
1175     ResourceMark rm(thread);
1176     tty->print("resolving %s%s (%s) call to",
1177       (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1178       Bytecodes::name(invoke_code));
1179     callee_method->print_short_name(tty);
1180     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT, caller_frame.pc(), callee_method->code());


1351   // return compiled code entry point after potential safepoints
1352   assert(callee_method->verified_code_entry() != NULL, " Jump to zero!");
1353   return callee_method->verified_code_entry();
1354 JRT_END
1355 
1356 
1357 // Resolve a virtual call that can be statically bound (e.g., always
1358 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1359 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread *thread))
1360   methodHandle callee_method;
1361   JRT_BLOCK
1362     callee_method = SharedRuntime::resolve_helper(thread, true, true, CHECK_NULL);
1363     thread->set_vm_result_2(callee_method());
1364   JRT_BLOCK_END
1365   // return compiled code entry point after potential safepoints
1366   assert(callee_method->verified_code_entry() != NULL, " Jump to zero!");
1367   return callee_method->verified_code_entry();
1368 JRT_END
1369 
1370 
1371 
1372 
1373 
1374 methodHandle SharedRuntime::handle_ic_miss_helper(JavaThread *thread, TRAPS) {
1375   ResourceMark rm(thread);
1376   CallInfo call_info;
1377   Bytecodes::Code bc;
1378 
1379   // receiver is NULL for static calls. An exception is thrown for NULL
1380   // receivers for non-static calls
1381   Handle receiver = find_callee_info(thread, bc, call_info,
1382                                      CHECK_(methodHandle()));
1383   // Compiler1 can produce virtual call sites that can actually be statically bound.
1384   // If we fell thru to below we would think that the site was going megamorphic
1385   // when in fact the site can never miss. Worse, because we'd think it was megamorphic
1386   // we'd try and do a vtable dispatch; however, methods that can be statically bound
1387   // don't have vtable entries (vtable_index < 0) and we'd blow up. So we force a
1388   // reresolution of the call site (as if we did a handle_wrong_method and not a
1389   // plain ic_miss) and the site will be converted to an optimized virtual call site
1390   // never to miss again. I don't believe C2 will produce code like this, but if it
1391   // did, this would still be the correct thing to do for it too, hence no ifdef.
1392   //
1393   if (call_info.resolved_method()->can_be_statically_bound()) {


1477         // We have a path that was monomorphic but was going interpreted
1478         // and now we have (or had) a compiled entry. We correct the IC
1479         // by using a new icBuffer.
1480         CompiledICInfo info;
1481         KlassHandle receiver_klass(THREAD, receiver()->klass());
1482         inline_cache->compute_monomorphic_entry(callee_method,
1483                                                 receiver_klass,
1484                                                 inline_cache->is_optimized(),
1485                                                 false,
1486                                                 info, CHECK_(methodHandle()));
1487         inline_cache->set_to_monomorphic(info);
1488       } else if (!inline_cache->is_megamorphic() && !inline_cache->is_clean()) {
1489         // Potential change to megamorphic
1490         bool successful = inline_cache->set_to_megamorphic(&call_info, bc, CHECK_(methodHandle()));
1491         if (!successful) {
1492           inline_cache->set_to_clean();
1493         }
1494       } else {
1495         // Either clean or megamorphic
1496       }


1497     }
1498   } // Release CompiledIC_lock
1499 
1500   return callee_method;
1501 }
1502 
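A toy version of the inline-cache transitions handled in the tail of handle_ic_miss_helper above: a miss on a clean cache binds it monomorphically; a miss on a monomorphic cache either goes megamorphic or, if that transition fails, resets to clean for a later retry (set_to_clean() in the real code). Purely illustrative:

    enum ICState { ic_clean, ic_monomorphic, ic_megamorphic };

    ICState on_ic_miss(ICState state, bool megamorphic_transition_ok) {
      switch (state) {
        case ic_clean:       return ic_monomorphic;
        case ic_monomorphic: return megamorphic_transition_ok ? ic_megamorphic
                                                              : ic_clean;
        case ic_megamorphic: return ic_megamorphic;  // terminal state
      }
      return state;  // unreachable
    }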
1503 //
1504 // Resets a call-site in compiled code so it will get resolved again.
1505 // This routine handles virtual call sites, optimized virtual call
1506 // sites, and static call sites. Typically used to change a call site's
1507 // destination from compiled to interpreted.
1508 //
1509 methodHandle SharedRuntime::reresolve_call_site(JavaThread *thread, TRAPS) {
1510   ResourceMark rm(thread);
1511   RegisterMap reg_map(thread, false);
1512   frame stub_frame = thread->last_frame();
1513   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1514   frame caller = stub_frame.sender(&reg_map);
1515 
1516   // Do nothing if the frame isn't a live compiled frame.
1517   // nmethod could be deoptimized by the time we get here
1518   // so no update to the caller is needed.
1519 
1520   if (caller.is_compiled_frame() && !caller.is_deoptimized_frame()) {
1521 
1522     address pc = caller.pc();
1523 




1524     // Default call_addr is the location of the "basic" call.
1525     // Determine the address of the call we are reresolving. With
1526     // Inline Caches we will always find a recognizable call.
1527     // With Inline Caches disabled we may or may not find a
1528     // recognizable call. We will always find a call for static
1529     // calls and for optimized virtual calls. For vanilla virtual
1530     // calls it depends on the state of the UseInlineCaches switch.
1531     //
1532     // With Inline Caches disabled we can get here for a virtual call
1533     // for two reasons:
1534     //   1 - calling an abstract method. The vtable for abstract methods
1535     //       will run us thru handle_wrong_method and we will eventually
1536     //       end up in the interpreter to throw the AbstractMethodError.
1537     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1538     //       call and between the time we fetch the entry address and
1539     //       we jump to it the target gets deoptimized. Similar to 1
1540     //       we will wind up in the interpreter (thru a c2i with c2).
1541     //
1542     address call_addr = NULL;
1543     {
1544       // Get call instruction under lock because another thread may be
1545       // busy patching it.
1546       MutexLockerEx ml_patch(Patching_lock, Mutex::_no_safepoint_check_flag);
1547       // Location of call instruction
1548       if (NativeCall::is_call_before(pc)) {
1549         NativeCall *ncall = nativeCall_before(pc);
1550         call_addr = ncall->instruction_address();
1551       }
1552     }
1553 
1554     // Check for static or virtual call
1555     bool is_static_call = false;
1556     nmethod* caller_nm = CodeCache::find_nmethod(pc);
1557     // Make sure nmethod doesn't get deoptimized and removed until
1558     // this is done with it.
1559     // CLEANUP - with lazy deopt shouldn't need this lock
1560     nmethodLocker nmlock(caller_nm);
1561 
1562     if (call_addr != NULL) {
1563       RelocIterator iter(caller_nm, call_addr, call_addr+1);
1564       int ret = iter.next(); // Get item
1565       if (ret) {
1566         assert(iter.addr() == call_addr, "must find call");
1567         if (iter.type() == relocInfo::static_call_type) {
1568           is_static_call = true;
1569         } else {
1570           assert(iter.type() == relocInfo::virtual_call_type ||
1571                  iter.type() == relocInfo::opt_virtual_call_type
1572                 , "unexpected relocInfo. type");
1573         }
1574       } else {
1575         assert(!UseInlineCaches, "relocation info. must exist for this address");
1576       }
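A condensed model of the RelocIterator walk above: find the relocation covering call_addr and classify the call site, with "no relocation found" only expected when inline caches are disabled. The record type and linear scan are stand-ins for the real relocInfo machinery:

    typedef unsigned char* address;

    enum ToyRelocType { static_call_type, virtual_call_type, opt_virtual_call_type };

    struct ToyReloc {
      address      addr;
      ToyRelocType type;
    };

    bool classify_call_site(const ToyReloc* relocs, int count, address call_addr,
                            bool* is_static_call) {
      for (int i = 0; i < count; i++) {
        if (relocs[i].addr == call_addr) {
          *is_static_call = (relocs[i].type == static_call_type);
          return true;
        }
      }
      return false;  // no reloc info: only expected with inline caches disabled
    }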


2551 
2552 
2553 /**
2554  * Create a native wrapper for this native method.  The wrapper converts the
2555  * Java-compiled calling convention to the native convention, handles
2556  * arguments, and transitions to native.  On return from the native we transition
2557  * back to Java, blocking if a safepoint is in progress.
2558  */
2559 void AdapterHandlerLibrary::create_native_wrapper(methodHandle method) {
2560   ResourceMark rm;
2561   nmethod* nm = NULL;
2562 
2563   assert(method->is_native(), "must be native");
2564   assert(method->is_method_handle_intrinsic() ||
2565          method->has_native_function(), "must have something valid to call!");
2566 
2567   {
2568     // Perform the work while holding the lock, but perform any printing outside the lock
2569     MutexLocker mu(AdapterHandlerLibrary_lock);
2570     // See if somebody beat us to it
2571     nm = method->code();
2572     if (nm != NULL) {
2573       return;
2574     }
2575 
2576     const int compile_id = CompileBroker::assign_compile_id(method, CompileBroker::standard_entry_bci);
2577     assert(compile_id > 0, "Must generate native wrapper");
2578 
2579 
2580     ResourceMark rm;
2581     BufferBlob*  buf = buffer_blob(); // the temporary code buffer in CodeCache
2582     if (buf != NULL) {
2583       CodeBuffer buffer(buf);
2584       double locs_buf[20];
2585       buffer.insts()->initialize_shared_locs((relocInfo*)locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
2586       MacroAssembler _masm(&buffer);
2587 
2588       // Fill in the signature array, for the calling-convention call.
2589       const int total_args_passed = method->size_of_parameters();
2590 
2591       BasicType* sig_bt = NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
2592       VMRegPair*   regs = NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);


2794        kptr2 = fr.next_monitor_in_interpreter_frame(kptr2) ) {
2795     if (kptr2->obj() != NULL) {         // Avoid 'holes' in the monitor array
2796       BasicLock *lock = kptr2->lock();
2797       // Inflate so the displaced header becomes position-independent
2798       if (lock->displaced_header()->is_unlocked())
2799         ObjectSynchronizer::inflate_helper(kptr2->obj());
2800       // Now the displaced header is free to move
2801       buf[i++] = (intptr_t)lock->displaced_header();
2802       buf[i++] = cast_from_oop<intptr_t>(kptr2->obj());
2803     }
2804   }
2805   assert(i - max_locals == active_monitor_count*2, "found the expected number of monitors");
2806 
2807   return buf;
2808 JRT_END
2809 
2810 JRT_LEAF(void, SharedRuntime::OSR_migration_end( intptr_t* buf) )
2811   FREE_C_HEAP_ARRAY(intptr_t, buf);
2812 JRT_END
2813 
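The OSR migration buffer assembled above has a simple layout: the locals first, then one (displaced header, object) pair per held monitor, with headers inflated beforehand so they are position-independent. A sketch of that packing with stand-in types (the real buffer is a raw C-heap array freed by OSR_migration_end):

    #include <stdint.h>
    #include <utility>
    #include <vector>

    std::vector<intptr_t> pack_osr_buffer(
        const std::vector<intptr_t>& locals,
        const std::vector<std::pair<intptr_t, intptr_t> >& monitors) {
      std::vector<intptr_t> buf(locals.begin(), locals.end());
      for (size_t i = 0; i < monitors.size(); i++) {
        buf.push_back(monitors[i].first);   // displaced header (inflated first)
        buf.push_back(monitors[i].second);  // the owned object, as intptr_t
      }
      return buf;
    }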
2814 bool AdapterHandlerLibrary::contains(CodeBlob* b) {
2815   AdapterHandlerTableIterator iter(_adapters);
2816   while (iter.has_next()) {
2817     AdapterHandlerEntry* a = iter.next();
2818     if (b == CodeCache::find_blob(a->get_i2c_entry())) return true;
2819   }
2820   return false;
2821 }
2822 
2823 void AdapterHandlerLibrary::print_handler_on(outputStream* st, CodeBlob* b) {
2824   AdapterHandlerTableIterator iter(_adapters);
2825   while (iter.has_next()) {
2826     AdapterHandlerEntry* a = iter.next();
2827     if (b == CodeCache::find_blob(a->get_i2c_entry())) {
2828       st->print("Adapter for signature: ");
2829       a->print_adapter_on(st);
2830       return;
2831     }
2832   }
2833   assert(false, "Should have found handler");
2834 }
2835 
2836 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
2837   st->print_cr("AHE@" INTPTR_FORMAT ": %s i2c: " INTPTR_FORMAT " c2i: " INTPTR_FORMAT " c2iUV: " INTPTR_FORMAT,
2838                (intptr_t) this, fingerprint()->as_string(),
2839                get_i2c_entry(), get_c2i_entry(), get_c2i_unverified_entry());
2840 
2841 }
2842 
2843 #ifndef PRODUCT


   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/stringTable.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "classfile/vmSymbols.hpp"
  29 #include "code/codeCache.hpp"
  30 #include "code/compiledIC.hpp"
  31 #include "code/codeCacheExtensions.hpp"
  32 #include "code/scopeDesc.hpp"
  33 #include "code/vtableStubs.hpp"
  34 #include "compiler/abstractCompiler.hpp"
  35 #include "compiler/compileBroker.hpp"
  36 #include "compiler/compilerOracle.hpp"
  37 #include "compiler/disassembler.hpp"
  38 #include "gc/shared/gcLocker.inline.hpp"
  39 #include "interpreter/interpreter.hpp"
  40 #include "interpreter/interpreterRuntime.hpp"
  41 #include "memory/universe.inline.hpp"
  42 #include "oops/oop.inline.hpp"
  43 #include "prims/forte.hpp"
  44 #include "prims/jvmtiExport.hpp"
  45 #include "prims/jvmtiRedefineClassesTrace.hpp"
  46 #include "prims/methodHandles.hpp"
  47 #include "prims/nativeLookup.hpp"
  48 #include "runtime/arguments.hpp"
  49 #include "runtime/atomic.inline.hpp"
  50 #include "runtime/biasedLocking.hpp"
  51 #include "runtime/compilationPolicy.hpp"
  52 #include "runtime/handles.inline.hpp"
  53 #include "runtime/init.hpp"
  54 #include "runtime/interfaceSupport.hpp"
  55 #include "runtime/javaCalls.hpp"
  56 #include "runtime/sharedRuntime.hpp"
  57 #include "runtime/stubRoutines.hpp"
  58 #include "runtime/vframe.hpp"
  59 #include "runtime/vframeArray.hpp"
  60 #include "utilities/copy.hpp"
  61 #include "utilities/dtrace.hpp"
  62 #include "utilities/events.hpp"
  63 #include "utilities/hashtable.inline.hpp"
  64 #include "utilities/macros.hpp"
  65 #include "utilities/xmlstream.hpp"
  66 #ifdef COMPILER1
  67 #include "c1/c1_Runtime1.hpp"
  68 #endif
  69 
  70 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  71 


  79 
  80 DeoptimizationBlob* SharedRuntime::_deopt_blob;
  81 SafepointBlob*      SharedRuntime::_polling_page_vectors_safepoint_handler_blob;
  82 SafepointBlob*      SharedRuntime::_polling_page_safepoint_handler_blob;
  83 SafepointBlob*      SharedRuntime::_polling_page_return_handler_blob;
  84 
  85 #ifdef COMPILER2
  86 UncommonTrapBlob*   SharedRuntime::_uncommon_trap_blob;
  87 #endif // COMPILER2
  88 
  89 
  90 //----------------------------generate_stubs-----------------------------------
  91 void SharedRuntime::generate_stubs() {
  92   _wrong_method_blob                   = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method),          "wrong_method_stub");
  93   _wrong_method_abstract_blob          = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract), "wrong_method_abstract_stub");
  94   _ic_miss_blob                        = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss),  "ic_miss_stub");
  95   _resolve_opt_virtual_call_blob       = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C),   "resolve_opt_virtual_call");
  96   _resolve_virtual_call_blob           = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C),       "resolve_virtual_call");
  97   _resolve_static_call_blob            = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C),        "resolve_static_call");
  98 
  99 #if defined(COMPILER2) || INCLUDE_JVMCI
 100   // Vectors are generated only by C2 and JVMCI.
 101   bool support_wide = is_wide_vector(MaxVectorSize);
 102   if (support_wide) {
 103     _polling_page_vectors_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_VECTOR_LOOP);
 104   }
 105 #endif // COMPILER2 || INCLUDE_JVMCI
 106   _polling_page_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_LOOP);
 107   _polling_page_return_handler_blob    = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_RETURN);
 108 
 109   generate_deopt_blob();
 110 
 111 #ifdef COMPILER2
 112   generate_uncommon_trap_blob();
 113 #endif // COMPILER2
 114 }
 115 
 116 #include <math.h>
 117 
 118 // Implementation of SharedRuntime
 119 
 120 #ifndef PRODUCT
 121 // For statistics
 122 int SharedRuntime::_ic_miss_ctr = 0;


 448 JRT_END
 449 
 450 
 451 JRT_LEAF(jdouble, SharedRuntime::l2d(jlong x))
 452   return (jdouble)x;
 453 JRT_END
 454 
 455 // Exception handling across interpreter/compiler boundaries
 456 //
 457 // exception_handler_for_return_address(...) returns the continuation address.
 458 // The continuation address is the entry point of the exception handler of the
 459 // previous frame, determined by the return address.
 460 
 461 address SharedRuntime::raw_exception_handler_for_return_address(JavaThread* thread, address return_address) {
 462   assert(frame::verify_return_pc(return_address), err_msg("must be a return address: " INTPTR_FORMAT, return_address));
 463   assert(thread->frames_to_pop_failed_realloc() == 0 || Interpreter::contains(return_address), "missed frames to pop?");
 464 
 465   // Reset method handle flag.
 466   thread->set_is_method_handle_return(false);
 467 
 468 #if INCLUDE_JVMCI
 469   // JVMCI's ExceptionHandlerStub expects the thread-local exception PC to be clear,
 470   // and other exception handler continuations do not read it
 471   thread->set_exception_pc(NULL);
 472 #endif
 473 
 474   // The fastest case first
 475   CodeBlob* blob = CodeCache::find_blob(return_address);
 476   nmethod* nm = (blob != NULL) ? blob->as_nmethod_or_null() : NULL;
 477   if (nm != NULL) {
 478     // Set flag if return address is a method handle call site.
 479     thread->set_is_method_handle_return(nm->is_method_handle_return(return_address));
 480     // native nmethods don't have exception handlers
 481     assert(!nm->is_native_method(), "no exception handler");
 482     assert(nm->header_begin() != nm->exception_begin(), "no exception handler");
 483     if (nm->is_deopt_pc(return_address)) {
 484       // If we come here because of a stack overflow, the stack may be
 485       // unguarded. Reguard the stack; otherwise, if we return to the
 486       // deopt blob and the stack bang causes a stack overflow, we
 487       // crash.
 488       bool guard_pages_enabled = thread->stack_yellow_zone_enabled();
 489       if (!guard_pages_enabled) guard_pages_enabled = thread->reguard_stack();
 490       assert(guard_pages_enabled, "stack banging in deopt blob may cause crash");
 491       return SharedRuntime::deopt_blob()->unpack_with_exception();
 492     } else {
 493       return nm->exception_begin();


 519 }
 520 
 521 
 522 JRT_LEAF(address, SharedRuntime::exception_handler_for_return_address(JavaThread* thread, address return_address))
 523   return raw_exception_handler_for_return_address(thread, return_address);
 524 JRT_END
 525 
 526 
 527 address SharedRuntime::get_poll_stub(address pc) {
 528   address stub;
 529   // Look up the code blob
 530   CodeBlob *cb = CodeCache::find_blob(pc);
 531 
 532   // Should be an nmethod
 533   assert(cb && cb->is_nmethod(), "safepoint polling: pc must refer to an nmethod");
 534 
 535   // Look up the relocation information
 536   assert(((nmethod*)cb)->is_at_poll_or_poll_return(pc),
 537     "safepoint polling: type must be poll");
 538 
 539   if (!((NativeInstruction*)pc)->is_safepoint_poll()) {
 540     tty->print_cr("bad pc: %p", pc);
 541     Disassembler::decode(cb);
 542     assert(false, "Only polling locations are used for safepoint");
 543   }
 544 
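The new version trades the old bare assert for "print context, then assert", so a failing debug run leaves the bad pc and a disassembly in the log before dying. The generic shape of that diagnostic pattern (the decode call is a stand-in for Disassembler::decode(cb)):

    #include <cassert>
    #include <cstdio>

    void check_is_safepoint_poll(bool is_poll, const void* pc) {
      if (!is_poll) {
        std::fprintf(stderr, "bad pc: %p\n", pc);
        // decode_surrounding_code(pc);  // stands in for Disassembler::decode(cb)
        assert(false && "Only polling locations are used for safepoint");
      }
    }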
 545   bool at_poll_return = ((nmethod*)cb)->is_at_poll_return(pc);
 546   bool has_wide_vectors = ((nmethod*)cb)->has_wide_vectors();
 547   if (at_poll_return) {
 548     assert(SharedRuntime::polling_page_return_handler_blob() != NULL,
 549            "polling page return stub not created yet");
 550     stub = SharedRuntime::polling_page_return_handler_blob()->entry_point();
 551   } else if (has_wide_vectors) {
 552     assert(SharedRuntime::polling_page_vectors_safepoint_handler_blob() != NULL,
 553            "polling page vectors safepoint stub not created yet");
 554     stub = SharedRuntime::polling_page_vectors_safepoint_handler_blob()->entry_point();
 555   } else {
 556     assert(SharedRuntime::polling_page_safepoint_handler_blob() != NULL,
 557            "polling page safepoint stub not created yet");
 558     stub = SharedRuntime::polling_page_safepoint_handler_blob()->entry_point();
 559   }
 560 #ifndef PRODUCT
 561   if (TraceSafepoint) {
 562     char buf[256];
 563     jio_snprintf(buf, sizeof(buf),


 613 
 614     // RC_TRACE macro has an embedded ResourceMark
 615     RC_TRACE_WITH_THREAD(0x00001000, thread,
 616                          ("calling obsolete method '%s'",
 617                           method->name_and_sig_as_C_string()));
 618     if (RC_TRACE_ENABLED(0x00002000)) {
 619       // this option is provided to debug calls to obsolete methods
 620       guarantee(false, "faulting at call to an obsolete method.");
 621     }
 622   }
 623   return 0;
 624 JRT_END
 625 
 626 // ret_pc points into caller; we are returning caller's exception handler
 627 // for the given exception
 628 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
 629                                                     bool force_unwind, bool top_frame_only) {
 630   assert(nm != NULL, "must exist");
 631   ResourceMark rm;
 632 
 633 #if INCLUDE_JVMCI
 634   if (nm->is_compiled_by_jvmci()) {
 635     // lookup exception handler for this pc
 636     int catch_pco = ret_pc - nm->code_begin();
 637     ExceptionHandlerTable table(nm);
 638     HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
 639     if (t != NULL) {
 640       return nm->code_begin() + t->pco();
 641     } else {
 642       // there is no exception handler for this pc => deoptimize
 643       nm->make_not_entrant();
 644 
 645       // Use Deoptimization::deoptimize for all of its side-effects:
 646       // revoking biases of monitors, gathering trap statistics, logging...
 647       // it also patches the return pc, but we do not care about that
 648       // since we return a continuation to the deopt_blob below.
 649       JavaThread* thread = JavaThread::current();
 650       RegisterMap reg_map(thread, UseBiasedLocking);
 651       frame runtime_frame = thread->last_frame();
 652       frame caller_frame = runtime_frame.sender(&reg_map);
 653       Deoptimization::deoptimize(thread, caller_frame, &reg_map, Deoptimization::Reason_not_compiled_exception_handler);
 654 
 655       return SharedRuntime::deopt_blob()->unpack_with_exception_in_tls();
 656     }
 657   }
 658 #endif
 659 
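For JVMCI-compiled methods, the lookup above is keyed by pc offset: each table entry maps a catch pc-offset (pco) to its handler's pc-offset, and a miss means "no handler here, deoptimize". A minimal stand-in for ExceptionHandlerTable::entry_for:

    struct ToyHandlerEntry {
      int catch_pco;
      int handler_pco;
    };

    const ToyHandlerEntry* entry_for(const ToyHandlerEntry* table, int count,
                                     int catch_pco) {
      for (int i = 0; i < count; i++) {
        if (table[i].catch_pco == catch_pco) {
          return &table[i];
        }
      }
      return 0;  // caller falls back to make_not_entrant + deoptimize
    }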
 660   ScopeDesc* sd = nm->scope_desc_at(ret_pc);
 661   // determine handler bci, if any
 662   EXCEPTION_MARK;
 663 
 664   int handler_bci = -1;
 665   int scope_depth = 0;
 666   if (!force_unwind) {
 667     int bci = sd->bci();
 668     bool recursive_exception = false;
 669     do {
 670       bool skip_scope_increment = false;
 671       // exception handler lookup
 672       KlassHandle ek (THREAD, exception->klass());
 673       methodHandle mh(THREAD, sd->method());
 674       handler_bci = Method::fast_exception_handler_bci_for(mh, ek, bci, THREAD);
 675       if (HAS_PENDING_EXCEPTION) {
 676         recursive_exception = true;
 677         // We threw an exception while trying to find the exception handler.
 678         // Transfer the new exception to the exception handle which will
 679         // be set into thread local storage, and do another lookup for an


 760 JRT_ENTRY(void, SharedRuntime::throw_NullPointerException_at_call(JavaThread* thread))
 761   // This entry point is effectively only used for NullPointerExceptions which occur at inline
 762   // cache sites (when the callee activation is not yet set up), so we are at a call site.
 763   throw_and_post_jvmti_exception(thread, vmSymbols::java_lang_NullPointerException());
 764 JRT_END
 765 
 766 JRT_ENTRY(void, SharedRuntime::throw_StackOverflowError(JavaThread* thread))
 767   // We avoid using the normal exception construction in this case because
 768   // it performs an upcall to Java, and we're already out of stack space.
 769   Klass* k = SystemDictionary::StackOverflowError_klass();
 770   oop exception_oop = InstanceKlass::cast(k)->allocate_instance(CHECK);
 771   Handle exception (thread, exception_oop);
 772   if (StackTraceInThrowable) {
 773     java_lang_Throwable::fill_in_stack_trace(exception);
 774   }
 775   // Increment counter for hs_err file reporting
 776   Atomic::inc(&Exceptions::_stack_overflow_errors);
 777   throw_and_post_jvmti_exception(thread, exception);
 778 JRT_END
 779 
 780 #if INCLUDE_JVMCI
 781 address SharedRuntime::deoptimize_for_implicit_exception(JavaThread* thread, address pc, nmethod* nm, int deopt_reason) {
 782   assert(deopt_reason > Deoptimization::Reason_none && deopt_reason < Deoptimization::Reason_LIMIT, "invalid deopt reason");
 783   thread->set_jvmci_implicit_exception_pc(pc);
 784   thread->set_pending_deoptimization(Deoptimization::make_trap_request((Deoptimization::DeoptReason)deopt_reason, Deoptimization::Action_reinterpret));
 785   return (SharedRuntime::deopt_blob()->implicit_exception_uncommon_trap());
 786 }
 787 #endif
 788 
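deoptimize_for_implicit_exception parks a "trap request" built from a deopt reason and action in the thread before jumping to the blob. One plausible way to pack two small enums into a single int, purely as a sketch; the actual encoding in Deoptimization::make_trap_request differs in detail:

    inline int make_trap_request_sketch(int reason, int action) {
      return (reason << 8) | (action & 0xff);
    }
    inline int trap_request_reason_sketch(int request) { return request >> 8; }
    inline int trap_request_action_sketch(int request) { return request & 0xff; }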
 789 address SharedRuntime::continuation_for_implicit_exception(JavaThread* thread,
 790                                                            address pc,
 791                                                            SharedRuntime::ImplicitExceptionKind exception_kind)
 792 {
 793   address target_pc = NULL;
 794 
 795   if (Interpreter::contains(pc)) {
 796 #ifdef CC_INTERP
 797     // C++ interpreter doesn't throw implicit exceptions
 798     ShouldNotReachHere();
 799 #else
 800     switch (exception_kind) {
 801       case IMPLICIT_NULL:           return Interpreter::throw_NullPointerException_entry();
 802       case IMPLICIT_DIVIDE_BY_ZERO: return Interpreter::throw_ArithmeticException_entry();
 803       case STACK_OVERFLOW:          return Interpreter::throw_StackOverflowError_entry();
 804       default:                      ShouldNotReachHere();
 805     }
 806 #endif // !CC_INTERP
 807   } else {
 808     switch (exception_kind) {


 838             Events::log_exception(thread, "AbstractMethodError at " INTPTR_FORMAT, pc);
 839             return StubRoutines::throw_AbstractMethodError_entry();
 840           } else {
 841             Events::log_exception(thread, "NullPointerException at vtable entry " INTPTR_FORMAT, pc);
 842             return StubRoutines::throw_NullPointerException_at_call_entry();
 843           }
 844         } else {
 845           CodeBlob* cb = CodeCache::find_blob(pc);
 846 
 847           // If code blob is NULL, then return NULL to signal handler to report the SEGV error.
 848           if (cb == NULL) return NULL;
 849 
 850           // Exception happened in CodeCache. Must be either:
 851           // 1. Inline-cache check in C2I handler blob,
 852           // 2. Inline-cache check in nmethod, or
 853           // 3. Implicit null exception in nmethod
 854 
 855           if (!cb->is_nmethod()) {
 856             bool is_in_blob = cb->is_adapter_blob() || cb->is_method_handles_adapter_blob();
 857             if (!is_in_blob) {
 858               // Allow normal crash reporting to handle this
 859               return NULL;
 860             }
 861             Events::log_exception(thread, "NullPointerException in code blob at " INTPTR_FORMAT, pc);
 862             // There is no handler here, so we will simply unwind.
 863             return StubRoutines::throw_NullPointerException_at_call_entry();
 864           }
 865 
 866           // Otherwise, it's an nmethod.  Consult its exception handlers.
 867           nmethod* nm = (nmethod*)cb;
 868           if (nm->inlinecache_check_contains(pc)) {
 869             // exception happened inside inline-cache check code
 870             // => the nmethod is not yet active (i.e., the frame
 871             // is not set up yet) => use return address pushed by
 872             // caller => don't push another return address
 873             Events::log_exception(thread, "NullPointerException in IC check " INTPTR_FORMAT, pc);
 874             return StubRoutines::throw_NullPointerException_at_call_entry();
 875           }
 876 
 877           if (nm->method()->is_method_handle_intrinsic()) {
 878             // exception happened inside MH dispatch code, similar to a vtable stub
 879             Events::log_exception(thread, "NullPointerException in MH adapter " INTPTR_FORMAT, pc);
 880             return StubRoutines::throw_NullPointerException_at_call_entry();
 881           }
 882 
 883 #ifndef PRODUCT
 884           _implicit_null_throws++;
 885 #endif
 886 #if INCLUDE_JVMCI
 887           if (nm->is_compiled_by_jvmci() && nm->pc_desc_at(pc) != NULL) {
 888             // If there's no PcDesc then we'll die way down inside of
 889             // deopt instead of just getting normal error reporting,
 890             // so only go there if it will succeed.
 891             return deoptimize_for_implicit_exception(thread, pc, nm, Deoptimization::Reason_null_check);
 892           } else {
 893 #endif
 894           assert (nm->is_nmethod(), "Expect nmethod");
 895           target_pc = nm->continuation_for_implicit_exception(pc);
 896 #if INCLUDE_JVMCI
 897           }
 898 #endif
 899           // If there's an unexpected fault, target_pc might be NULL,
 900           // in which case we want to fall through into the normal
 901           // error handling code.
 902         }
 903 
 904         break; // fall through
 905       }
 906 
 907 
 908       case IMPLICIT_DIVIDE_BY_ZERO: {
 909         nmethod* nm = CodeCache::find_nmethod(pc);
 910         guarantee(nm != NULL, "must have containing compiled method for implicit division-by-zero exceptions");
 911 #ifndef PRODUCT
 912         _implicit_div0_throws++;
 913 #endif
 914 #if INCLUDE_JVMCI
 915         if (nm->is_compiled_by_jvmci() && nm->pc_desc_at(pc) != NULL) {
 916           return deoptimize_for_implicit_exception(thread, pc, nm, Deoptimization::Reason_div0_check);
 917         } else {
 918 #endif
 919         target_pc = nm->continuation_for_implicit_exception(pc);
 920 #if INCLUDE_JVMCI
 921         }
 922 #endif
 923         // If there's an unexpected fault, target_pc might be NULL,
 924         // in which case we want to fall through into the normal
 925         // error handling code.
 926         break; // fall through
 927       }
 928 
 929       default: ShouldNotReachHere();
 930     }
 931 
 932     assert(exception_kind == IMPLICIT_NULL || exception_kind == IMPLICIT_DIVIDE_BY_ZERO, "wrong implicit exception kind");
 933 


 934     if (exception_kind == IMPLICIT_NULL) {
 935 #ifndef PRODUCT
 936       // for AbortVMOnException flag
 937       Exceptions::debug_check_abort("java.lang.NullPointerException");
 938 #endif //PRODUCT
 939       Events::log_exception(thread, "Implicit null exception at " INTPTR_FORMAT " to " INTPTR_FORMAT, pc, target_pc);
 940     } else {
 941 #ifndef PRODUCT
 942       // for AbortVMOnException flag
 943       Exceptions::debug_check_abort("java.lang.ArithmeticException");
 944 #endif //PRODUCT
 945       Events::log_exception(thread, "Implicit division by zero exception at " INTPTR_FORMAT " to " INTPTR_FORMAT, pc, target_pc);
 946     }
 947     return target_pc;
 948   }
 949 
 950   ShouldNotReachHere();
 951   return NULL;
 952 }
 953 
 954 
 955 /**
 956  * Throws a java/lang/UnsatisfiedLinkError.  The address of this method is
 957  * installed in the native function entry of all native Java methods before
 958  * they get linked to their actual native methods.
 959  *
 960  * \note
 961  * This method actually never gets called!  The reason is that
 962  * the interpreter's native entries call NativeLookup::lookup(), which
 963  * throws the exception when the lookup fails.  The exception is then
 964  * caught and forwarded on the return from NativeLookup::lookup() call


 974 
 975 address SharedRuntime::native_method_throw_unsatisfied_link_error_entry() {
 976   return CAST_FROM_FN_PTR(address, &throw_unsatisfied_link_error);
 977 }
 978 
 979 
 980 #ifndef PRODUCT
 981 JRT_ENTRY(intptr_t, SharedRuntime::trace_bytecode(JavaThread* thread, intptr_t preserve_this_value, intptr_t tos, intptr_t tos2))
 982   const frame f = thread->last_frame();
 983   assert(f.is_interpreted_frame(), "must be an interpreted frame");
 984 #ifndef PRODUCT
 985   methodHandle mh(THREAD, f.interpreter_frame_method());
 986   BytecodeTracer::trace(mh, f.interpreter_frame_bcp(), tos, tos2);
 987 #endif // !PRODUCT
 988   return preserve_this_value;
 989 JRT_END
 990 #endif // !PRODUCT
 991 
 992 JRT_ENTRY_NO_ASYNC(void, SharedRuntime::register_finalizer(JavaThread* thread, oopDesc* obj))
 993   assert(obj->is_oop(), "must be a valid oop");
 994 #if INCLUDE_JVMCI
 995   // This removes the requirement for JVMCI compilers to emit code
 996   // performing a dynamic check that obj has a finalizer before
 997   // calling this routine. There should be no performance impact
 998   // for C1 since it emits a dynamic check. C2 and the interpreter
 999   // use other runtime routines for registering finalizers.
1000   if (!obj->klass()->has_finalizer()) {
1001     return;
1002   }
1003 #endif
1004   assert(obj->klass()->has_finalizer(), "shouldn't be here otherwise");
1005   InstanceKlass::register_finalizer(instanceOop(obj), CHECK);
1006 JRT_END
1007 
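The JVMCI comment above is about where the "has a finalizer?" test runs. Schematically (all names hypothetical): C1-style compiled code tests at the call site before calling into the runtime; JVMCI-style code may call unconditionally because the runtime entry now filters with an early return.

    struct ToyObj {
      bool has_finalizer;
    };

    void runtime_register_finalizer(ToyObj* obj) {
      if (!obj->has_finalizer) return;  // the early-out JVMCI relies on
      // ... enqueue obj with the finalizer machinery ...
    }

    void call_site_with_inline_check(ToyObj* obj) {  // C1-style
      if (obj->has_finalizer) runtime_register_finalizer(obj);
    }

    void call_site_unconditional(ToyObj* obj) {      // JVMCI-style
      runtime_register_finalizer(obj);
    }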
1008 
1009 jlong SharedRuntime::get_java_tid(Thread* thread) {
1010   if (thread != NULL) {
1011     if (thread->is_Java_thread()) {
1012       oop obj = ((JavaThread*)thread)->threadObj();
1013       return (obj == NULL) ? 0 : java_lang_Thread::thread_id(obj);
1014     }
1015   }
1016   return 0;
1017 }
1018 
1019 /**
1020  * This function ought to be a void function, but cannot be because
1021  * it gets turned into a tail-call on sparc, which runs into dtrace bug
1022  * 6254741.  Once that is fixed we can remove the dummy return value.
1023  */


1225 
1226   CodeBlob* caller_cb = caller_frame.cb();
1227   guarantee(caller_cb != NULL && caller_cb->is_nmethod(), "must be called from nmethod");
1228   nmethod* caller_nm = caller_cb->as_nmethod_or_null();
1229 
1230   // make sure caller is not getting deoptimized
1231   // and removed before we are done with it.
1232   // CLEANUP - with lazy deopt shouldn't need this lock
1233   nmethodLocker caller_lock(caller_nm);
1234 
1235   // determine call info & receiver
1236   // note: a) receiver is NULL for static calls
1237   //       b) an exception is thrown if receiver is NULL for non-static calls
1238   CallInfo call_info;
1239   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1240   Handle receiver = find_callee_info(thread, invoke_code,
1241                                      call_info, CHECK_(methodHandle()));
1242   methodHandle callee_method = call_info.selected_method();
1243 
1244   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1245          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1246          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1247          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1248          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1249 
1250   assert(caller_nm->is_alive(), "It should be alive");
1251 
1252 #ifndef PRODUCT
1253   // tracing/debugging/statistics
1254   int *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1255                 (is_virtual) ? (&_resolve_virtual_ctr) :
1256                                (&_resolve_static_ctr);
1257   Atomic::inc(addr);
1258 
1259   if (TraceCallFixup) {
1260     ResourceMark rm(thread);
1261     tty->print("resolving %s%s (%s) call to",
1262       (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1263       Bytecodes::name(invoke_code));
1264     callee_method->print_short_name(tty);
1265     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT, caller_frame.pc(), callee_method->code());


1436   // return compiled code entry point after potential safepoints
1437   assert(callee_method->verified_code_entry() != NULL, "Jump to zero!");
1438   return callee_method->verified_code_entry();
1439 JRT_END
1440 
1441 
1442 // Resolve a virtual call that can be statically bound (e.g., always
1443 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1444 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread *thread))
1445   methodHandle callee_method;
1446   JRT_BLOCK
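         // The two boolean arguments to resolve_helper select is_virtual
         // and is_optimized; both are true here, which distinguishes this
         // entry point from the plain virtual and static resolve entries.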
1447     callee_method = SharedRuntime::resolve_helper(thread, true, true, CHECK_NULL);
1448     thread->set_vm_result_2(callee_method());
1449   JRT_BLOCK_END
1450   // return compiled code entry point after potential safepoints
1451   assert(callee_method->verified_code_entry() != NULL, "Jump to zero!");
1452   return callee_method->verified_code_entry();
1453 JRT_END
1454 
1455 



1456 methodHandle SharedRuntime::handle_ic_miss_helper(JavaThread *thread, TRAPS) {
1457   ResourceMark rm(thread);
1458   CallInfo call_info;
1459   Bytecodes::Code bc;
1460 
1461   // receiver is NULL for static calls. An exception is thrown for NULL
1462   // receivers for non-static calls
1463   Handle receiver = find_callee_info(thread, bc, call_info,
1464                                      CHECK_(methodHandle()));
1465   // Compiler1 can produce virtual call sites that can actually be statically bound.
1466   // If we fell through to the code below we would think that the site was going
1467   // megamorphic when in fact it can never miss. Worse, because we'd think it was
1468   // megamorphic we'd try to do a vtable dispatch; however, methods that can be
1469   // statically bound don't have vtable entries (vtable_index < 0) and we'd blow up.
1470   // So we force a re-resolution of the call site (as if we did a handle_wrong_method
1471   // and not a plain ic_miss) and the site will be converted to an optimized virtual
1472   // call site, never to miss again. I don't believe C2 will produce code like this,
1473   // but if it did this would still be the correct thing to do for it too, hence no
1474   // ifdef.
1475   if (call_info.resolved_method()->can_be_statically_bound()) {


1559         // We have a path that was monomorphic but was going interpreted
1560         // and now we have (or had) a compiled entry. We correct the IC
1561         // by using a new icBuffer.
1562         CompiledICInfo info;
1563         KlassHandle receiver_klass(THREAD, receiver()->klass());
1564         inline_cache->compute_monomorphic_entry(callee_method,
1565                                                 receiver_klass,
1566                                                 inline_cache->is_optimized(),
1567                                                 false,
1568                                                 info, CHECK_(methodHandle()));
1569         inline_cache->set_to_monomorphic(info);
1570       } else if (!inline_cache->is_megamorphic() && !inline_cache->is_clean()) {
1571         // Potential change to megamorphic
1572         bool successful = inline_cache->set_to_megamorphic(&call_info, bc, CHECK_(methodHandle()));
1573         if (!successful) {
1574           inline_cache->set_to_clean();
1575         }
1576       } else {
1577         // Either clean or megamorphic
1578       }
1579     } else {
1580       fatal("Unimplemented");
1581     }
1582   } // Release CompiledIC_lock
1583 
1584   return callee_method;
1585 }
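     // Rough inline-cache state machine applied above (a summary of the
     // handled cases, not an exhaustive list):
     //   clean       -> monomorphic   (first resolved receiver)
     //   monomorphic -> monomorphic   (IC corrected via a new icBuffer)
     //   monomorphic -> megamorphic   (miss with a different receiver)
     //   megamorphic -> clean         (if set_to_megamorphic fails; re-resolve)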
1586 
1587 //
1588 // Resets a call-site in compiled code so it will get resolved again.
1589 // This routine handles virtual call sites, optimized virtual call
1590 // sites, and static call sites. Typically used to change a call site's
1591 // destination from compiled to interpreted.
1592 //
1593 methodHandle SharedRuntime::reresolve_call_site(JavaThread *thread, TRAPS) {
1594   ResourceMark rm(thread);
1595   RegisterMap reg_map(thread, false);
1596   frame stub_frame = thread->last_frame();
1597   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1598   frame caller = stub_frame.sender(&reg_map);
1599 
1600   // Do nothing if the frame isn't a live compiled frame.
1601   // nmethod could be deoptimized by the time we get here
1602   // so no update to the caller is needed.
1603 
1604   if (caller.is_compiled_frame() && !caller.is_deoptimized_frame()) {
1605 
1606     address pc = caller.pc();
1607 
1608     // Check for static or virtual call
1609     bool is_static_call = false;
1610     nmethod* caller_nm = CodeCache::find_nmethod(pc);
1611 
1612     // Default call_addr is the location of the "basic" call.
1613     // Determine the address of the call we are re-resolving. With
1614     // Inline Caches we will always find a recognizable call.
1615     // With Inline Caches disabled we may or may not find a
1616     // recognizable call. We will always find a call for static
1617     // calls and for optimized virtual calls. For vanilla virtual
1618     // calls it depends on the state of the UseInlineCaches switch.
1619     //
1620     // With Inline Caches disabled we can get here for a virtual call
1621     // for two reasons:
1622     //   1 - calling an abstract method. The vtable for abstract methods
1623     //       will run us through handle_wrong_method and we will eventually
1624     //       end up in the interpreter to throw the AbstractMethodError.
1625     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1626     //       call and between the time we fetch the entry address and
1627     //       we jump to it the target gets deoptimized. As in case 1,
1628     //       we will wind up in the interpreter (through a c2i adapter with C2).
1629     //
1630     address call_addr = NULL;
1631     {
1632       // Get call instruction under lock because another thread may be
1633       // busy patching it.
1634       MutexLockerEx ml_patch(Patching_lock, Mutex::_no_safepoint_check_flag);
1635       // Location of call instruction
1636       if (NativeCall::is_call_before(pc)) {
1637         NativeCall *ncall = nativeCall_before(pc);
1638         call_addr = ncall->instruction_address();
1639       }
1640     }




1641     // Make sure nmethod doesn't get deoptimized and removed until
1642     // this is done with it.
1643     // CLEANUP - with lazy deopt shouldn't need this lock
1644     nmethodLocker nmlock(caller_nm);
1645 
1646     if (call_addr != NULL) {
1647       RelocIterator iter(caller_nm, call_addr, call_addr+1);
1648       int ret = iter.next(); // Get item
1649       if (ret) {
1650         assert(iter.addr() == call_addr, "must find call");
1651         if (iter.type() == relocInfo::static_call_type) {
1652           is_static_call = true;
1653         } else {
1654           assert(iter.type() == relocInfo::virtual_call_type ||
1655                  iter.type() == relocInfo::opt_virtual_call_type,
1656                  "unexpected relocInfo type");
1657         }
1658       } else {
1659         assert(!UseInlineCaches, "relocation info must exist for this address");
1660       }


2635 
2636 
2637 /**
2638  * Create a native wrapper for this native method.  The wrapper converts the
2639  * Java-compiled calling convention to the native convention, handles
2640  * arguments, and transitions to native.  On return from the native code we
2641  * transition back to Java, blocking if a safepoint is in progress.
2642  */
2643 void AdapterHandlerLibrary::create_native_wrapper(methodHandle method) {
2644   ResourceMark rm;
2645   nmethod* nm = NULL;
2646 
2647   assert(method->is_native(), "must be native");
2648   assert(method->is_method_handle_intrinsic() ||
2649          method->has_native_function(), "must have something valid to call!");
2650 
2651   {
2652     // Perform the work while holding the lock, but perform any printing outside the lock
2653     MutexLocker mu(AdapterHandlerLibrary_lock);
2654     // See if somebody beat us to it
2655     if (method->code() != NULL) {

2656       return;
2657     }
2658 
2659     const int compile_id = CompileBroker::assign_compile_id(method, CompileBroker::standard_entry_bci);
2660     assert(compile_id > 0, "Must generate native wrapper");
2661 
2662 
2663     ResourceMark rm;
2664     BufferBlob*  buf = buffer_blob(); // the temporary code buffer in CodeCache
2665     if (buf != NULL) {
2666       CodeBuffer buffer(buf);
2667       double locs_buf[20];
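           // Note: a double array is used only to get backing storage with
           // alignment suitable for relocInfo records; the cast below treats
           // it as raw relocInfo space.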
2668       buffer.insts()->initialize_shared_locs((relocInfo*)locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
2669       MacroAssembler _masm(&buffer);
2670 
2671       // Fill in the signature array, for the calling-convention call.
2672       const int total_args_passed = method->size_of_parameters();
2673 
2674       BasicType* sig_bt = NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
2675       VMRegPair*   regs = NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);


2877        kptr2 = fr.next_monitor_in_interpreter_frame(kptr2) ) {
2878     if (kptr2->obj() != NULL) {         // Avoid 'holes' in the monitor array
2879       BasicLock *lock = kptr2->lock();
2880       // Inflate so the displaced header becomes position-independent
2881       if (lock->displaced_header()->is_unlocked())
2882         ObjectSynchronizer::inflate_helper(kptr2->obj());
2883       // Now the displaced header is free to move
2884       buf[i++] = (intptr_t)lock->displaced_header();
2885       buf[i++] = cast_from_oop<intptr_t>(kptr2->obj());
2886     }
2887   }
2888   assert(i - max_locals == active_monitor_count*2, "found the expected number of monitors");
2889 
2890   return buf;
2891 JRT_END
2892 
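     // Frees the C-heap buffer returned by OSR_migration_begin above once
     // the OSR frame has copied the locals and monitors out of it.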
2893 JRT_LEAF(void, SharedRuntime::OSR_migration_end( intptr_t* buf) )
2894   FREE_C_HEAP_ARRAY(intptr_t, buf);
2895 JRT_END
2896 
2897 bool AdapterHandlerLibrary::contains(const CodeBlob* b) {
2898   AdapterHandlerTableIterator iter(_adapters);
2899   while (iter.has_next()) {
2900     AdapterHandlerEntry* a = iter.next();
2901     if (b == CodeCache::find_blob(a->get_i2c_entry())) return true;
2902   }
2903   return false;
2904 }
2905 
2906 void AdapterHandlerLibrary::print_handler_on(outputStream* st, const CodeBlob* b) {
2907   AdapterHandlerTableIterator iter(_adapters);
2908   while (iter.has_next()) {
2909     AdapterHandlerEntry* a = iter.next();
2910     if (b == CodeCache::find_blob(a->get_i2c_entry())) {
2911       st->print("Adapter for signature: ");
2912       a->print_adapter_on(st);
2913       return;
2914     }
2915   }
2916   assert(false, "Should have found handler");
2917 }
2918 
2919 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
2920   st->print_cr("AHE@" INTPTR_FORMAT ": %s i2c: " INTPTR_FORMAT " c2i: " INTPTR_FORMAT " c2iUV: " INTPTR_FORMAT,
2921                (intptr_t) this, fingerprint()->as_string(),
2922                get_i2c_entry(), get_c2i_entry(), get_c2i_unverified_entry());
2923 
2924 }
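     // Shape of the line printed above (values illustrative):
     //   AHE@<address>: <fingerprint> i2c: <entry> c2i: <entry> c2iUV: <entry>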
2925 
2926 #ifndef PRODUCT