438 return (jfloat)x;
439 JRT_END
440
441
// Leaf runtime entry for the l2f bytecode conversion: casts a jlong to a
// jfloat and returns it.
442 JRT_LEAF(jfloat, SharedRuntime::l2f(jlong x))
443 return (jfloat)x;
444 JRT_END
445
446
// Leaf runtime entry for the l2d bytecode conversion: casts a jlong to a
// jdouble and returns it.
447 JRT_LEAF(jdouble, SharedRuntime::l2d(jlong x))
448 return (jdouble)x;
449 JRT_END
450
451 // Exception handling across interpreter/compiler boundaries
452 //
453 // exception_handler_for_return_address(...) returns the continuation address.
454 // The continuation address is the entry point of the exception handler of the
455 // previous frame, selected according to the given return address.
456
// Returns the "raw" continuation address for dispatching a pending exception
// past the frame that would return to 'return_address' (see the comment block
// above).  As a side effect, the thread's is_method_handle_return flag is
// recomputed for this return address.
457 address SharedRuntime::raw_exception_handler_for_return_address(JavaThread* thread, address return_address) {
// NOTE(review): the raw pointer is passed for INTPTR_FORMAT here; later
// HotSpot code wraps pointers with p2i() -- confirm this builds cleanly on
// all supported toolchains.
458 assert(frame::verify_return_pc(return_address), err_msg("must be a return address: " INTPTR_FORMAT, return_address));
// If frames remain to be popped after a failed reallocation, the return
// address must lie in the interpreter (restates the assert condition).
459 assert(thread->frames_to_pop_failed_realloc() == 0 || Interpreter::contains(return_address), "missed frames to pop?");
460
461 // Reset method handle flag.
462 thread->set_is_method_handle_return(false);
463
464 // The fastest case first
465 CodeBlob* blob = CodeCache::find_blob(return_address);
// Only nmethods are handled in this branch; any other blob kind yields NULL.
466 nmethod* nm = (blob != NULL) ? blob->as_nmethod_or_null() : NULL;
467 if (nm != NULL) {
468 // Set flag if return address is a method handle call site.
469 thread->set_is_method_handle_return(nm->is_method_handle_return(return_address));
470 // native nmethods don't have exception handlers
471 assert(!nm->is_native_method(), "no exception handler");
472 assert(nm->header_begin() != nm->exception_begin(), "no exception handler");
473 if (nm->is_deopt_pc(return_address)) {
474 // If we come here because of a stack overflow, the stack may be
475 // unguarded. Reguard the stack otherwise if we return to the
476 // deopt blob and the stack bang causes a stack overflow we
477 // crash.
478 bool guard_pages_enabled = thread->stack_yellow_zone_enabled();
// NOTE(review): the function continues past this chunk boundary.
// NOTE(review): this span is the interior of a larger function whose head
// and tail lie outside this chunk; from the stub entries returned it appears
// to be the implicit-exception continuation lookup (vtable stub / IC check /
// implicit NPE cases) -- confirm against the complete file.
790 return StubRoutines::throw_AbstractMethodError_entry();
791 } else {
792 Events::log_exception(thread, "NullPointerException at vtable entry " INTPTR_FORMAT, pc);
793 return StubRoutines::throw_NullPointerException_at_call_entry();
794 }
795 } else {
796 CodeBlob* cb = CodeCache::find_blob(pc);
797
798 // If code blob is NULL, then return NULL to signal handler to report the SEGV error.
799 if (cb == NULL) return NULL;
800
801 // Exception happened in CodeCache. Must be either:
802 // 1. Inline-cache check in C2I handler blob,
803 // 2. Inline-cache check in nmethod, or
804 // 3. Implicit null exception in nmethod
805
806 if (!cb->is_nmethod()) {
807 bool is_in_blob = cb->is_adapter_blob() || cb->is_method_handles_adapter_blob();
808 if (!is_in_blob) {
// Unexpected blob kind: dump it for the hs_err file, then abort the VM.
809 cb->print();
810 fatal(err_msg("exception happened outside interpreter, nmethods and vtable stubs at pc " INTPTR_FORMAT, pc));
811 }
812 Events::log_exception(thread, "NullPointerException in code blob at " INTPTR_FORMAT, pc);
813 // There is no handler here, so we will simply unwind.
814 return StubRoutines::throw_NullPointerException_at_call_entry();
815 }
816
817 // Otherwise, it's an nmethod. Consult its exception handlers.
// The C-style downcast is safe: the !cb->is_nmethod() case returned above.
818 nmethod* nm = (nmethod*)cb;
819 if (nm->inlinecache_check_contains(pc)) {
820 // exception happened inside inline-cache check code
821 // => the nmethod is not yet active (i.e., the frame
822 // is not set up yet) => use return address pushed by
823 // caller => don't push another return address
824 Events::log_exception(thread, "NullPointerException in IC check " INTPTR_FORMAT, pc);
825 return StubRoutines::throw_NullPointerException_at_call_entry();
826 }
827
828 if (nm->method()->is_method_handle_intrinsic()) {
829 // exception happened inside MH dispatch code, similar to a vtable stub
830 Events::log_exception(thread, "NullPointerException in MH adapter " INTPTR_FORMAT, pc);
// NOTE(review): the function continues past this chunk boundary.
1613
#ifdef ASSERT
// Debug-only check that the MemberName dispatch argument really is the last
// argument: recomputing the calling convention without the trailing argument
// must assign the same registers to all leading arguments.
//
//   method - the method-handle intrinsic method being checked
//   sig_bt - basic types for all size_of_parameters() arguments
//   regs   - register assignment for the full signature (incl. MemberName)
void SharedRuntime::check_member_name_argument_is_last_argument(methodHandle method,
                                                                const BasicType* sig_bt,
                                                                const VMRegPair* regs) {
  ResourceMark rm;
  const int total_args_passed = method->size_of_parameters();
  const int member_arg_pos    = total_args_passed - 1;
  // Validate the position *before* it is used to size the scratch array
  // below (the original allocated first and asserted only afterwards).
  assert(member_arg_pos >= 0 && member_arg_pos < total_args_passed, "oob");
  assert(sig_bt[member_arg_pos] == T_OBJECT, "dispatch argument must be an object");

  const VMRegPair* regs_with_member_name = regs;
  VMRegPair* regs_without_member_name = NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed - 1);

  const bool is_outgoing = method->is_method_handle_intrinsic();
  // Recompute the convention without the trailing MemberName.  The returned
  // stack-slot count is irrelevant here, so it is deliberately ignored
  // (previously stored in an unused local).
  java_calling_convention(sig_bt, regs_without_member_name, total_args_passed - 1, is_outgoing);

  // Every leading argument must land in the same register either way.
  for (int i = 0; i < member_arg_pos; i++) {
    VMReg a = regs_with_member_name[i].first();
    VMReg b = regs_without_member_name[i].first();
    assert(a->value() == b->value(), err_msg_res("register allocation mismatch: a=%d, b=%d", a->value(), b->value()));
  }
  assert(regs_with_member_name[member_arg_pos].first()->is_valid(), "bad member arg");
}
#endif
1638
1639 // ---------------------------------------------------------------------------
1640 // We are calling the interpreter via a c2i. Normally this would mean that
1641 // we were called by a compiled method. However we could have lost a race
1642 // where we went int -> i2c -> c2i and so the caller could in fact be
1643 // interpreted. If the caller is compiled we attempt to patch the caller
1644 // so he no longer calls into the interpreter.
1645 IRT_LEAF(void, SharedRuntime::fixup_callers_callsite(Method* method, address caller_pc))
// Local alias for the incoming Method*.
1646 Method* moop(method);
1647
// The entry point a compiled caller should be patched to call directly
// (see the comment block above this function).
1648 address entry_point = moop->from_compiled_entry();
1649
1650 // It's possible that deoptimization can occur at a call site which hasn't
1651 // been resolved yet, in which case this function will be called from
1652 // an nmethod that has been patched for deopt and we can ignore the
1653 // request for a fixup.
// NOTE(review): the function body continues past this chunk boundary.
|
438 return (jfloat)x;
439 JRT_END
440
441
// Leaf runtime entry for the l2f bytecode conversion: casts a jlong to a
// jfloat and returns it.
442 JRT_LEAF(jfloat, SharedRuntime::l2f(jlong x))
443 return (jfloat)x;
444 JRT_END
445
446
// Leaf runtime entry for the l2d bytecode conversion: casts a jlong to a
// jdouble and returns it.
447 JRT_LEAF(jdouble, SharedRuntime::l2d(jlong x))
448 return (jdouble)x;
449 JRT_END
450
451 // Exception handling across interpreter/compiler boundaries
452 //
453 // exception_handler_for_return_address(...) returns the continuation address.
454 // The continuation address is the entry point of the exception handler of the
455 // previous frame, selected according to the given return address.
456
// Returns the "raw" continuation address for dispatching a pending exception
// past the frame that would return to 'return_address' (see the comment block
// above).  As a side effect, the thread's is_method_handle_return flag is
// recomputed for this return address.
457 address SharedRuntime::raw_exception_handler_for_return_address(JavaThread* thread, address return_address) {
// NOTE(review): the raw pointer is passed for INTPTR_FORMAT here; variadic
// assert usually requires p2i() for pointer arguments -- confirm this builds
// cleanly on all supported toolchains.
458 assert(frame::verify_return_pc(return_address), "must be a return address: " INTPTR_FORMAT, return_address);
// If frames remain to be popped after a failed reallocation, the return
// address must lie in the interpreter (restates the assert condition).
459 assert(thread->frames_to_pop_failed_realloc() == 0 || Interpreter::contains(return_address), "missed frames to pop?");
460
461 // Reset method handle flag.
462 thread->set_is_method_handle_return(false);
463
464 // The fastest case first
465 CodeBlob* blob = CodeCache::find_blob(return_address);
// Only nmethods are handled in this branch; any other blob kind yields NULL.
466 nmethod* nm = (blob != NULL) ? blob->as_nmethod_or_null() : NULL;
467 if (nm != NULL) {
468 // Set flag if return address is a method handle call site.
469 thread->set_is_method_handle_return(nm->is_method_handle_return(return_address));
470 // native nmethods don't have exception handlers
471 assert(!nm->is_native_method(), "no exception handler");
472 assert(nm->header_begin() != nm->exception_begin(), "no exception handler");
473 if (nm->is_deopt_pc(return_address)) {
474 // If we come here because of a stack overflow, the stack may be
475 // unguarded. Reguard the stack otherwise if we return to the
476 // deopt blob and the stack bang causes a stack overflow we
477 // crash.
478 bool guard_pages_enabled = thread->stack_yellow_zone_enabled();
// NOTE(review): the function continues past this chunk boundary.
// NOTE(review): this span is the interior of a larger function whose head
// and tail lie outside this chunk; from the stub entries returned it appears
// to be the implicit-exception continuation lookup (vtable stub / IC check /
// implicit NPE cases) -- confirm against the complete file.
790 return StubRoutines::throw_AbstractMethodError_entry();
791 } else {
792 Events::log_exception(thread, "NullPointerException at vtable entry " INTPTR_FORMAT, pc);
793 return StubRoutines::throw_NullPointerException_at_call_entry();
794 }
795 } else {
796 CodeBlob* cb = CodeCache::find_blob(pc);
797
798 // If code blob is NULL, then return NULL to signal handler to report the SEGV error.
799 if (cb == NULL) return NULL;
800
801 // Exception happened in CodeCache. Must be either:
802 // 1. Inline-cache check in C2I handler blob,
803 // 2. Inline-cache check in nmethod, or
804 // 3. Implicit null exception in nmethod
805
806 if (!cb->is_nmethod()) {
807 bool is_in_blob = cb->is_adapter_blob() || cb->is_method_handles_adapter_blob();
808 if (!is_in_blob) {
// Unexpected blob kind: dump it for the hs_err file, then abort the VM.
809 cb->print();
810 fatal("exception happened outside interpreter, nmethods and vtable stubs at pc " INTPTR_FORMAT, pc);
811 }
812 Events::log_exception(thread, "NullPointerException in code blob at " INTPTR_FORMAT, pc);
813 // There is no handler here, so we will simply unwind.
814 return StubRoutines::throw_NullPointerException_at_call_entry();
815 }
816
817 // Otherwise, it's an nmethod. Consult its exception handlers.
// The C-style downcast is safe: the !cb->is_nmethod() case returned above.
818 nmethod* nm = (nmethod*)cb;
819 if (nm->inlinecache_check_contains(pc)) {
820 // exception happened inside inline-cache check code
821 // => the nmethod is not yet active (i.e., the frame
822 // is not set up yet) => use return address pushed by
823 // caller => don't push another return address
824 Events::log_exception(thread, "NullPointerException in IC check " INTPTR_FORMAT, pc);
825 return StubRoutines::throw_NullPointerException_at_call_entry();
826 }
827
828 if (nm->method()->is_method_handle_intrinsic()) {
829 // exception happened inside MH dispatch code, similar to a vtable stub
830 Events::log_exception(thread, "NullPointerException in MH adapter " INTPTR_FORMAT, pc);
// NOTE(review): the function continues past this chunk boundary.
1613
#ifdef ASSERT
// Debug-only check that the MemberName dispatch argument really is the last
// argument: recomputing the calling convention without the trailing argument
// must assign the same registers to all leading arguments.
//
//   method - the method-handle intrinsic method being checked
//   sig_bt - basic types for all size_of_parameters() arguments
//   regs   - register assignment for the full signature (incl. MemberName)
void SharedRuntime::check_member_name_argument_is_last_argument(methodHandle method,
                                                                const BasicType* sig_bt,
                                                                const VMRegPair* regs) {
  ResourceMark rm;
  const int total_args_passed = method->size_of_parameters();
  const int member_arg_pos    = total_args_passed - 1;
  // Validate the position *before* it is used to size the scratch array
  // below (the original allocated first and asserted only afterwards).
  assert(member_arg_pos >= 0 && member_arg_pos < total_args_passed, "oob");
  assert(sig_bt[member_arg_pos] == T_OBJECT, "dispatch argument must be an object");

  const VMRegPair* regs_with_member_name = regs;
  VMRegPair* regs_without_member_name = NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed - 1);

  const bool is_outgoing = method->is_method_handle_intrinsic();
  // Recompute the convention without the trailing MemberName.  The returned
  // stack-slot count is irrelevant here, so it is deliberately ignored
  // (previously stored in an unused local).
  java_calling_convention(sig_bt, regs_without_member_name, total_args_passed - 1, is_outgoing);

  // Every leading argument must land in the same register either way.
  for (int i = 0; i < member_arg_pos; i++) {
    VMReg a = regs_with_member_name[i].first();
    VMReg b = regs_without_member_name[i].first();
    assert(a->value() == b->value(), "register allocation mismatch: a=%d, b=%d", a->value(), b->value());
  }
  assert(regs_with_member_name[member_arg_pos].first()->is_valid(), "bad member arg");
}
#endif
1638
1639 // ---------------------------------------------------------------------------
1640 // We are calling the interpreter via a c2i. Normally this would mean that
1641 // we were called by a compiled method. However we could have lost a race
1642 // where we went int -> i2c -> c2i and so the caller could in fact be
1643 // interpreted. If the caller is compiled we attempt to patch the caller
1644 // so he no longer calls into the interpreter.
1645 IRT_LEAF(void, SharedRuntime::fixup_callers_callsite(Method* method, address caller_pc))
// Local alias for the incoming Method*.
1646 Method* moop(method);
1647
// The entry point a compiled caller should be patched to call directly
// (see the comment block above this function).
1648 address entry_point = moop->from_compiled_entry();
1649
1650 // It's possible that deoptimization can occur at a call site which hasn't
1651 // been resolved yet, in which case this function will be called from
1652 // an nmethod that has been patched for deopt and we can ignore the
1653 // request for a fixup.
// NOTE(review): the function body continues past this chunk boundary.
|