455 if (x <= (jdouble) min_jlong)
456 return min_jlong;
457 return (jlong) x;
458 JRT_END
459
460
461 JRT_LEAF(jfloat, SharedRuntime::d2f(jdouble x))
  // Slow-path runtime stub for the Java d2f bytecode: narrow a double to a
  // float with a plain C++ cast. Unlike d2l above, no explicit range clamping
  // is done here -- presumably because the C++ double->float conversion
  // already matches Java narrowing semantics on all supported platforms
  // (TODO confirm against JLS 5.1.3 for each target).
462 return (jfloat)x;
463 JRT_END
464
465
466 JRT_LEAF(jfloat, SharedRuntime::l2f(jlong x))
  // Slow-path runtime stub for the Java l2f bytecode: convert a 64-bit long
  // to a float via a C++ cast. Large magnitudes are rounded, since a jfloat
  // mantissa cannot represent all jlong values exactly; the cast is assumed
  // to match Java l2f rounding on all supported platforms.
467 return (jfloat)x;
468 JRT_END
469
470
471 JRT_LEAF(jdouble, SharedRuntime::l2d(jlong x))
  // Slow-path runtime stub for the Java l2d bytecode: convert a 64-bit long
  // to a double via a C++ cast. Values with magnitude above 2^53 may be
  // rounded (double has a 53-bit mantissa), which matches Java l2d semantics.
472 return (jdouble)x;
473 JRT_END
474
475 // Exception handling across interpreter/compiler boundaries
476 //
477 // exception_handler_for_return_address(...) returns the continuation address.
478 // The continuation address is the entry point of the exception handler of the
479 // previous frame depending on the return address.
480
481 address SharedRuntime::raw_exception_handler_for_return_address(JavaThread* thread, address return_address) {
482 assert(frame::verify_return_pc(return_address), err_msg("must be a return address: " INTPTR_FORMAT, return_address));
483
484 // Reset method handle flag.
485 thread->set_is_method_handle_return(false);
486
487 // The fastest case first
488 CodeBlob* blob = CodeCache::find_blob(return_address);
489 nmethod* nm = (blob != NULL) ? blob->as_nmethod_or_null() : NULL;
490 if (nm != NULL) {
491 // Set flag if return address is a method handle call site.
492 thread->set_is_method_handle_return(nm->is_method_handle_return(return_address));
493 // native nmethods don't have exception handlers
494 assert(!nm->is_native_method(), "no exception handler");
495 assert(nm->header_begin() != nm->exception_begin(), "no exception handler");
670 }
671 if (!top_frame_only && handler_bci < 0 && !skip_scope_increment) {
672 sd = sd->sender();
673 if (sd != NULL) {
674 bci = sd->bci();
675 }
676 ++scope_depth;
677 }
678 } while (recursive_exception || (!top_frame_only && handler_bci < 0 && sd != NULL));
679 }
680
681 // found handling method => lookup exception handler
682 int catch_pco = ret_pc - nm->code_begin();
683
684 ExceptionHandlerTable table(nm);
685 HandlerTableEntry *t = table.entry_for(catch_pco, handler_bci, scope_depth);
686 if (t == NULL && (nm->is_compiled_by_c1() || handler_bci != -1)) {
687 // Allow abbreviated catch tables. The idea is to allow a method
688 // to materialize its exceptions without committing to the exact
689 // routing of exceptions. In particular this is needed for adding
690 // a synthethic handler to unlock monitors when inlining
691 // synchonized methods since the unlock path isn't represented in
692 // the bytecodes.
693 t = table.entry_for(catch_pco, -1, 0);
694 }
695
696 #ifdef COMPILER1
697 if (t == NULL && nm->is_compiled_by_c1()) {
698 assert(nm->unwind_handler_begin() != NULL, "");
699 return nm->unwind_handler_begin();
700 }
701 #endif
702
703 if (t == NULL) {
704 tty->print_cr("MISSING EXCEPTION HANDLER for pc " INTPTR_FORMAT " and handler bci %d", ret_pc, handler_bci);
705 tty->print_cr(" Exception:");
706 exception->print();
707 tty->cr();
708 tty->print_cr(" Compiled exception table :");
709 table.print();
710 nm->print_code();
711 guarantee(false, "missing exception handler");
795 // If vt_stub is NULL, then return NULL to signal handler to report the SEGV error.
796 if (vt_stub == NULL) return NULL;
797
798 if (vt_stub->is_abstract_method_error(pc)) {
799 assert(!vt_stub->is_vtable_stub(), "should never see AbstractMethodErrors from vtable-type VtableStubs");
800 Events::log_exception(thread, "AbstractMethodError at " INTPTR_FORMAT, pc);
801 return StubRoutines::throw_AbstractMethodError_entry();
802 } else {
803 Events::log_exception(thread, "NullPointerException at vtable entry " INTPTR_FORMAT, pc);
804 return StubRoutines::throw_NullPointerException_at_call_entry();
805 }
806 } else {
807 CodeBlob* cb = CodeCache::find_blob(pc);
808
809 // If code blob is NULL, then return NULL to signal handler to report the SEGV error.
810 if (cb == NULL) return NULL;
811
812 // Exception happened in CodeCache. Must be either:
813 // 1. Inline-cache check in C2I handler blob,
814 // 2. Inline-cache check in nmethod, or
815 // 3. Implict null exception in nmethod
816
817 if (!cb->is_nmethod()) {
818 bool is_in_blob = cb->is_adapter_blob() || cb->is_method_handles_adapter_blob();
819 if (!is_in_blob) {
820 cb->print();
821 fatal(err_msg("exception happened outside interpreter, nmethods and vtable stubs at pc " INTPTR_FORMAT, pc));
822 }
823 Events::log_exception(thread, "NullPointerException in code blob at " INTPTR_FORMAT, pc);
824 // There is no handler here, so we will simply unwind.
825 return StubRoutines::throw_NullPointerException_at_call_entry();
826 }
827
828 // Otherwise, it's an nmethod. Consult its exception handlers.
829 nmethod* nm = (nmethod*)cb;
830 if (nm->inlinecache_check_contains(pc)) {
831 // exception happened inside inline-cache check code
832 // => the nmethod is not yet active (i.e., the frame
833 // is not set up yet) => use return address pushed by
834 // caller => don't push another return address
835 Events::log_exception(thread, "NullPointerException in IC check " INTPTR_FORMAT, pc);
2808 VMReg reg2 = regs[i].second();
2809 if( reg2->is_stack()) {
2810 // Yuck
2811 reg2 = reg2->bias(out_preserve_stack_slots());
2812 }
2813 regs[i].set_pair(reg2, reg1);
2814 }
2815 }
2816
2817 // results
2818 *arg_size = cnt;
2819 return regs;
2820 }
2821
2822 // OSR Migration Code
2823 //
2824 // This code is used to convert interpreter frames into compiled frames. It is
2825 // called from the very start of a compiled OSR nmethod. A temp array is
2826 // allocated to hold the interesting bits of the interpreter frame. All
2827 // active locks are inflated to allow them to move. The displaced headers and
2828 // active interpreter locals are copied into the temp buffer. Then we return
2829 // back to the compiled code. The compiled code then pops the current
2830 // interpreter frame off the stack and pushes a new compiled frame. Then it
2831 // copies the interpreter locals and displaced headers where it wants.
2832 // Finally it calls back to free the temp buffer.
2833 //
2834 // All of this is done NOT at any Safepoint, nor is any safepoint or GC allowed.
2835
2836 JRT_LEAF(intptr_t*, SharedRuntime::OSR_migration_begin( JavaThread *thread) )
2837
2838 //
2839 // This code is dependent on the memory layout of the interpreter local
2840 // array and the monitors. On all of our platforms the layout is identical
2841 // so this code is shared. If some platform lays the their arrays out
2842 // differently then this code could move to platform specific code or
2843 // the code here could be modified to copy items one at a time using
2844 // frame accessor methods and be platform independent.
2845
2846 frame fr = thread->last_frame();
2847 assert( fr.is_interpreted_frame(), "" );
2848 assert( fr.interpreter_frame_expression_stack_size()==0, "only handle empty stacks" );
|
455 if (x <= (jdouble) min_jlong)
456 return min_jlong;
457 return (jlong) x;
458 JRT_END
459
460
461 JRT_LEAF(jfloat, SharedRuntime::d2f(jdouble x))
  // Slow-path runtime stub for the Java d2f bytecode: narrow a double to a
  // float with a plain C++ cast. Unlike d2l above, no explicit range clamping
  // is done here -- presumably because the C++ double->float conversion
  // already matches Java narrowing semantics on all supported platforms
  // (TODO confirm against JLS 5.1.3 for each target).
462 return (jfloat)x;
463 JRT_END
464
465
466 JRT_LEAF(jfloat, SharedRuntime::l2f(jlong x))
  // Slow-path runtime stub for the Java l2f bytecode: convert a 64-bit long
  // to a float via a C++ cast. Large magnitudes are rounded, since a jfloat
  // mantissa cannot represent all jlong values exactly; the cast is assumed
  // to match Java l2f rounding on all supported platforms.
467 return (jfloat)x;
468 JRT_END
469
470
471 JRT_LEAF(jdouble, SharedRuntime::l2d(jlong x))
  // Slow-path runtime stub for the Java l2d bytecode: convert a 64-bit long
  // to a double via a C++ cast. Values with magnitude above 2^53 may be
  // rounded (double has a 53-bit mantissa), which matches Java l2d semantics.
472 return (jdouble)x;
473 JRT_END
474
475 // Exception handling across interpreter/compiler boundaries
476 //
477 // exception_handler_for_return_address(...) returns the continuation address.
478 // The continuation address is the entry point of the exception handler of the
479 // previous frame depending on the return address.
480
481 address SharedRuntime::raw_exception_handler_for_return_address(JavaThread* thread, address return_address) {
482 assert(frame::verify_return_pc(return_address), err_msg("must be a return address: " INTPTR_FORMAT, return_address));
483
484 // Reset method handle flag.
485 thread->set_is_method_handle_return(false);
486
487 // The fastest case first
488 CodeBlob* blob = CodeCache::find_blob(return_address);
489 nmethod* nm = (blob != NULL) ? blob->as_nmethod_or_null() : NULL;
490 if (nm != NULL) {
491 // Set flag if return address is a method handle call site.
492 thread->set_is_method_handle_return(nm->is_method_handle_return(return_address));
493 // native nmethods don't have exception handlers
494 assert(!nm->is_native_method(), "no exception handler");
495 assert(nm->header_begin() != nm->exception_begin(), "no exception handler");
670 }
671 if (!top_frame_only && handler_bci < 0 && !skip_scope_increment) {
672 sd = sd->sender();
673 if (sd != NULL) {
674 bci = sd->bci();
675 }
676 ++scope_depth;
677 }
678 } while (recursive_exception || (!top_frame_only && handler_bci < 0 && sd != NULL));
679 }
680
681 // found handling method => lookup exception handler
682 int catch_pco = ret_pc - nm->code_begin();
683
684 ExceptionHandlerTable table(nm);
685 HandlerTableEntry *t = table.entry_for(catch_pco, handler_bci, scope_depth);
686 if (t == NULL && (nm->is_compiled_by_c1() || handler_bci != -1)) {
687 // Allow abbreviated catch tables. The idea is to allow a method
688 // to materialize its exceptions without committing to the exact
689 // routing of exceptions. In particular this is needed for adding
690 // a synthetic handler to unlock monitors when inlining
691 // synchronized methods since the unlock path isn't represented in
692 // the bytecodes.
693 t = table.entry_for(catch_pco, -1, 0);
694 }
695
696 #ifdef COMPILER1
697 if (t == NULL && nm->is_compiled_by_c1()) {
698 assert(nm->unwind_handler_begin() != NULL, "");
699 return nm->unwind_handler_begin();
700 }
701 #endif
702
703 if (t == NULL) {
704 tty->print_cr("MISSING EXCEPTION HANDLER for pc " INTPTR_FORMAT " and handler bci %d", ret_pc, handler_bci);
705 tty->print_cr(" Exception:");
706 exception->print();
707 tty->cr();
708 tty->print_cr(" Compiled exception table :");
709 table.print();
710 nm->print_code();
711 guarantee(false, "missing exception handler");
795 // If vt_stub is NULL, then return NULL to signal handler to report the SEGV error.
796 if (vt_stub == NULL) return NULL;
797
798 if (vt_stub->is_abstract_method_error(pc)) {
799 assert(!vt_stub->is_vtable_stub(), "should never see AbstractMethodErrors from vtable-type VtableStubs");
800 Events::log_exception(thread, "AbstractMethodError at " INTPTR_FORMAT, pc);
801 return StubRoutines::throw_AbstractMethodError_entry();
802 } else {
803 Events::log_exception(thread, "NullPointerException at vtable entry " INTPTR_FORMAT, pc);
804 return StubRoutines::throw_NullPointerException_at_call_entry();
805 }
806 } else {
807 CodeBlob* cb = CodeCache::find_blob(pc);
808
809 // If code blob is NULL, then return NULL to signal handler to report the SEGV error.
810 if (cb == NULL) return NULL;
811
812 // Exception happened in CodeCache. Must be either:
813 // 1. Inline-cache check in C2I handler blob,
814 // 2. Inline-cache check in nmethod, or
815 // 3. Implicit null exception in nmethod
816
817 if (!cb->is_nmethod()) {
818 bool is_in_blob = cb->is_adapter_blob() || cb->is_method_handles_adapter_blob();
819 if (!is_in_blob) {
820 cb->print();
821 fatal(err_msg("exception happened outside interpreter, nmethods and vtable stubs at pc " INTPTR_FORMAT, pc));
822 }
823 Events::log_exception(thread, "NullPointerException in code blob at " INTPTR_FORMAT, pc);
824 // There is no handler here, so we will simply unwind.
825 return StubRoutines::throw_NullPointerException_at_call_entry();
826 }
827
828 // Otherwise, it's an nmethod. Consult its exception handlers.
829 nmethod* nm = (nmethod*)cb;
830 if (nm->inlinecache_check_contains(pc)) {
831 // exception happened inside inline-cache check code
832 // => the nmethod is not yet active (i.e., the frame
833 // is not set up yet) => use return address pushed by
834 // caller => don't push another return address
835 Events::log_exception(thread, "NullPointerException in IC check " INTPTR_FORMAT, pc);
2808 VMReg reg2 = regs[i].second();
2809 if( reg2->is_stack()) {
2810 // Yuck
2811 reg2 = reg2->bias(out_preserve_stack_slots());
2812 }
2813 regs[i].set_pair(reg2, reg1);
2814 }
2815 }
2816
2817 // results
2818 *arg_size = cnt;
2819 return regs;
2820 }
2821
2822 // OSR Migration Code
2823 //
2824 // This code is used to convert interpreter frames into compiled frames. It is
2825 // called from the very start of a compiled OSR nmethod. A temp array is
2826 // allocated to hold the interesting bits of the interpreter frame. All
2827 // active locks are inflated to allow them to move. The displaced headers and
2828 // active interpreter locals are copied into the temp buffer. Then we return
2829 // back to the compiled code. The compiled code then pops the current
2830 // interpreter frame off the stack and pushes a new compiled frame. Then it
2831 // copies the interpreter locals and displaced headers where it wants.
2832 // Finally it calls back to free the temp buffer.
2833 //
2834 // All of this is done NOT at any Safepoint, nor is any safepoint or GC allowed.
2835
2836 JRT_LEAF(intptr_t*, SharedRuntime::OSR_migration_begin( JavaThread *thread) )
2837
2838 //
2839 // This code is dependent on the memory layout of the interpreter local
2840 // array and the monitors. On all of our platforms the layout is identical
2841 // so this code is shared. If some platform lays the their arrays out
2842 // differently then this code could move to platform specific code or
2843 // the code here could be modified to copy items one at a time using
2844 // frame accessor methods and be platform independent.
2845
2846 frame fr = thread->last_frame();
2847 assert( fr.is_interpreted_frame(), "" );
2848 assert( fr.interpreter_frame_expression_stack_size()==0, "only handle empty stacks" );
|