// NOTE(review): tail of the counter-overflow stub generator; the enclosing
// function's signature and opening lines precede this excerpt and are not
// visible here. Lines keep their original listing numbers.
407 // rdi - locals
408 // rsi - bcp
409 // rbx, - method
410 // rdx - cpool
411 // rbp, - interpreter frame
412
413 // C++ interpreter on entry
414 // rsi - new interpreter state pointer
415 // rbp - interpreter frame pointer
416 // rbx - method
417
418 // On return (i.e. jump to entry_point) [ back to invocation of interpreter ]
419 // rbx, - method
420 // rcx - rcvr (assuming there is one)
421 // top of stack return address of interpreter caller
422 // rsp - sender_sp
423
424 // C++ interpreter only
425 // rsi - previous interpreter state pointer
426
427 const Address size_of_parameters(rbx, Method::size_of_parameters_offset());
// NOTE(review): size_of_parameters is declared above but never used in the
// visible lines — presumably referenced in the elided portion; confirm against
// the full file (the later revision of this chunk drops the declaration).
428
429 // InterpreterRuntime::frequency_counter_overflow takes one argument
430 // indicating if the counter overflow occurs at a backwards branch (non-NULL bcp).
431 // The call returns the address of the verified entry point for the method or NULL
432 // if the compilation did not complete (either went background or bailed out).
// Pass 'false' as the single argument: per the comment above, this overflow is
// not at a backward branch (method-entry case, NULL bcp).
433 __ movptr(rax, (intptr_t)false);
434 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), rax);
435
436 __ movptr(rbx, Address(rbp, method_offset)); // restore Method*
437
438 // Preserve invariant that rsi/rdi contain bcp/locals of sender frame
439 // and jump to the interpreted entry.
440 __ jmp(*do_continue, relocInfo::none);
441
442 }
443
444 void InterpreterGenerator::generate_stack_overflow_check(void) {
445 // see if we've got enough room on the stack for locals plus overhead.
446 // the expression stack grows down incrementally, so the normal guard
447 // page mechanism will work for that.
448 //
// NOTE(review): original lines 449-850 are elided here. The lines below do NOT
// belong to generate_stack_overflow_check — they are the tail of a different,
// mostly-elided entry generator (a Reference.get intrinsic entry, judging by
// the comment); its closing brace follows the return.
851 // If G1 is not enabled then attempt to go through the accessor entry point
852 // Reference.get is an accessor
853 return generate_accessor_entry();
854 }
855
856 //
857 // Interpreter stub for calling a native method. (asm interpreter)
858 // This sets up a somewhat different looking stack for calling the native method
859 // than the typical interpreter frame setup.
860 //
861
// Generates the interpreter entry stub for native methods (old revision of this
// chunk: parameter size is read directly off Method*). Two interior runs of the
// original listing (897-970 and 1012-1279) are elided, so this fragment cannot
// be treated as a complete function body.
862 address InterpreterGenerator::generate_native_entry(bool synchronized) {
863 // determine code generation flags
864 bool inc_counter = UseCompiler || CountCompiledCalls;
865
866 // rbx,: Method*
867 // rsi: sender sp
868 // rsi: previous interpreter state (C++ interpreter) must preserve
869 address entry_point = __ pc();
870
871
872 const Address size_of_parameters(rbx, Method::size_of_parameters_offset());
873 const Address invocation_counter(rbx, Method::invocation_counter_offset() + InvocationCounter::counter_offset());
874 const Address access_flags (rbx, Method::access_flags_offset());
875
876 // get parameter size (always needed)
// rcx := number of parameter slots (unsigned 16-bit field in the method metadata).
877 __ load_unsigned_short(rcx, size_of_parameters);
878
879 // native calls don't need the stack size check since they have no expression stack
880 // and the arguments are already on the stack and we only add a handful of words
881 // to the stack
882
883 // rbx,: Method*
884 // rcx: size of parameters
885 // rsi: sender sp
886
887 __ pop(rax); // get return address
888 // for natives the size of locals is zero
889
890 // compute beginning of parameters (rdi)
// rdi := rsp + rcx*stackElementScale - wordSize, i.e. the first parameter slot.
891 __ lea(rdi, Address(rsp, rcx, Interpreter::stackElementScale(), -wordSize));
892
893
894 // add 2 zero-initialized slots for native calls
895 // NULL result handler
896 __ push((int32_t)NULL_WORD);
// NOTE(review): original lines 897-970 elided. The debug block below is the
// tail of a (presumably #ifdef ASSERT) stack-frame sanity check; both the
// opening #if and the declaration of Label L are in the elided portion.
971 const Address monitor_block_top (rbp,
972 frame::interpreter_frame_monitor_block_top_offset * wordSize);
// Assert that no monitors were pushed yet: monitor_block_top must equal rsp.
973 __ movptr(rax, monitor_block_top);
974 __ cmpptr(rax, rsp);
975 __ jcc(Assembler::equal, L);
976 __ stop("broken stack frame setup in interpreter");
977 __ bind(L);
978 }
979 #endif
980
981 // jvmti/dtrace support
982 __ notify_method_entry();
983
984 // work registers
985 const Register method = rbx;
986 const Register thread = rdi;
987 const Register t = rcx;
988
989 // allocate space for parameters
990 __ get_method(method);
991 __ load_unsigned_short(t, Address(method, Method::size_of_parameters_offset()));
// Convert slot count to bytes, add 2 words (JNIEnv + possible class mirror for
// statics), carve the space off rsp, then realign the native stack.
992 __ shlptr(t, Interpreter::logStackElementSize);
993 __ addptr(t, 2*wordSize); // allocate two more slots for JNIEnv and possible mirror
994 __ subptr(rsp, t);
995 __ andptr(rsp, -(StackAlignmentInBytes)); // gcc needs 16 byte aligned stacks to do XMM intrinsics
996
997 // get signature handler
// Lazily install the signature handler: if Method's cached handler is NULL,
// call into the VM (prepare_native_call) and re-fetch. method is reloaded via
// get_method because the VM call may clobber rbx.
998 { Label L;
999 __ movptr(t, Address(method, Method::signature_handler_offset()));
1000 __ testptr(t, t);
1001 __ jcc(Assembler::notZero, L);
1002 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), method);
1003 __ get_method(method);
1004 __ movptr(t, Address(method, Method::signature_handler_offset()));
1005 __ bind(L);
1006 }
1007
1008 // call signature handler
1009 assert(InterpreterRuntime::SignatureHandlerGenerator::from() == rdi, "adjust this code");
1010 assert(InterpreterRuntime::SignatureHandlerGenerator::to () == rsp, "adjust this code");
1011 assert(InterpreterRuntime::SignatureHandlerGenerator::temp() == t , "adjust this code");
// NOTE(review): original lines 1013-1279 elided (the actual native call,
// result handling, unlock, etc.). The tail below binds the counter-overflow
// label declared in the elided portion and returns the entry address.
1280 if (inc_counter) {
1281 // Handle overflow of counter and compile method
1282 __ bind(invocation_counter_overflow);
1283 generate_counter_overflow(&continue_after_compile);
1284 }
1285
1286 return entry_point;
1287 }
1288
1289 //
1290 // Generic interpreted method entry to (asm) interpreter
1291 //
// Generates the generic (bytecode) interpreter entry (old revision of this
// chunk: parameter/local sizes read directly off Method*). The function
// continues past original line 1326; the remainder is elided from this excerpt.
1292 address InterpreterGenerator::generate_normal_entry(bool synchronized) {
1293 // determine code generation flags
1294 bool inc_counter = UseCompiler || CountCompiledCalls;
1295
1296 // rbx,: Method*
1297 // rsi: sender sp
1298 address entry_point = __ pc();
1299
1300
1301 const Address size_of_parameters(rbx, Method::size_of_parameters_offset());
1302 const Address size_of_locals (rbx, Method::size_of_locals_offset());
1303 const Address invocation_counter(rbx, Method::invocation_counter_offset() + InvocationCounter::counter_offset());
1304 const Address access_flags (rbx, Method::access_flags_offset());
1305
1306 // get parameter size (always needed)
1307 __ load_unsigned_short(rcx, size_of_parameters);
1308
1309 // rbx,: Method*
1310 // rcx: size of parameters
1311
1312 // rsi: sender_sp (could differ from sp+wordSize if we were called via c2i )
1313
// rdx := max_locals - parameter slots, i.e. how many extra local slots must be
// zero-allocated beyond the incoming arguments.
1314 __ load_unsigned_short(rdx, size_of_locals); // get size of locals in words
1315 __ subl(rdx, rcx); // rdx = no. of additional locals
1316
1317 // see if we've got enough room on the stack for locals plus overhead.
1318 generate_stack_overflow_check();
1319
1320 // get return address
1321 __ pop(rax);
1322
1323 // compute beginning of parameters (rdi)
1324 __ lea(rdi, Address(rsp, rcx, Interpreter::stackElementScale(), -wordSize));
1325
1326 // rdx - # of additional locals
1717
// NOTE(review): fragment from the middle of an elided function (PopFrame
// handling, by the comments). 'thread' and the binding site of
// caller_not_deoptimized live outside this excerpt.
1718 {
1719 // Check to see whether we are returning to a deoptimized frame.
1720 // (The PopFrame call ensures that the caller of the popped frame is
1721 // either interpreted or compiled and deoptimizes it if compiled.)
1722 // In this case, we can't call dispatch_next() after the frame is
1723 // popped, but instead must save the incoming arguments and restore
1724 // them after deoptimization has occurred.
1725 //
1726 // Note that we don't compare the return PC against the
1727 // deoptimization blob's unpack entry because of the presence of
1728 // adapter frames in C2.
1729 Label caller_not_deoptimized;
// rdx := caller's return PC; interpreter_contains(rdx) returns nonzero when
// that PC lies inside the interpreter, i.e. the caller is interpreted and
// therefore not a deoptimized compiled frame.
1730 __ movptr(rdx, Address(rbp, frame::return_addr_offset * wordSize));
1731 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::interpreter_contains), rdx);
1732 __ testl(rax, rax);
1733 __ jcc(Assembler::notZero, caller_not_deoptimized);
1734
1735 // Compute size of arguments for saving when returning to deoptimized caller
// rax := parameter slot count (from Method*) scaled to bytes; rdi is then
// adjusted from the locals base to the start of the argument area.
1736 __ get_method(rax);
1737 __ load_unsigned_short(rax, Address(rax, in_bytes(Method::size_of_parameters_offset())));
1738 __ shlptr(rax, Interpreter::logStackElementSize);
1739 __ restore_locals();
1740 __ subptr(rdi, rax);
1741 __ addptr(rdi, wordSize);
1742 // Save these arguments
1743 __ get_thread(thread);
1744 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, Deoptimization::popframe_preserve_args), thread, rax, rdi);
1745
1746 __ remove_activation(vtos, rdx,
1747 /* throw_monitor_exception */ false,
1748 /* install_monitor_exception */ false,
1749 /* notify_jvmdi */ false);
1750
1751 // Inform deoptimization that it is responsible for restoring these arguments
1752 __ get_thread(thread);
1753 __ movl(Address(thread, JavaThread::popframe_condition_offset()), JavaThread::popframe_force_deopt_reexecution_bit);
1754
1755 // Continue in deoptimization handler
// rdx still holds the caller's return PC (remove_activation was asked to load
// it there); jump resumes in the deopt blob via that address.
1756 __ jmp(rdx);
1757
|
// NOTE(review): second revision of the counter-overflow stub tail (this half of
// the listing reflects the ConstMethod refactoring; the unused
// size_of_parameters Address declaration present in the earlier revision is
// gone). The enclosing function's opening lines precede this excerpt.
407 // rdi - locals
408 // rsi - bcp
409 // rbx, - method
410 // rdx - cpool
411 // rbp, - interpreter frame
412
413 // C++ interpreter on entry
414 // rsi - new interpreter state pointer
415 // rbp - interpreter frame pointer
416 // rbx - method
417
418 // On return (i.e. jump to entry_point) [ back to invocation of interpreter ]
419 // rbx, - method
420 // rcx - rcvr (assuming there is one)
421 // top of stack return address of interpreter caller
422 // rsp - sender_sp
423
424 // C++ interpreter only
425 // rsi - previous interpreter state pointer
426
427 // InterpreterRuntime::frequency_counter_overflow takes one argument
428 // indicating if the counter overflow occurs at a backwards branch (non-NULL bcp).
429 // The call returns the address of the verified entry point for the method or NULL
430 // if the compilation did not complete (either went background or bailed out).
// Pass 'false': this overflow is at method entry, not a backward branch.
431 __ movptr(rax, (intptr_t)false);
432 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), rax);
433
434 __ movptr(rbx, Address(rbp, method_offset)); // restore Method*
435
436 // Preserve invariant that rsi/rdi contain bcp/locals of sender frame
437 // and jump to the interpreted entry.
438 __ jmp(*do_continue, relocInfo::none);
439
440 }
441
442 void InterpreterGenerator::generate_stack_overflow_check(void) {
443 // see if we've got enough room on the stack for locals plus overhead.
444 // the expression stack grows down incrementally, so the normal guard
445 // page mechanism will work for that.
446 //
// NOTE(review): original lines 447-848 are elided here. The lines below do NOT
// belong to generate_stack_overflow_check — they are the tail of a different,
// mostly-elided entry generator (a Reference.get intrinsic entry, judging by
// the comment); its closing brace follows the return.
849 // If G1 is not enabled then attempt to go through the accessor entry point
850 // Reference.get is an accessor
851 return generate_accessor_entry();
852 }
853
854 //
855 // Interpreter stub for calling a native method. (asm interpreter)
856 // This sets up a somewhat different looking stack for calling the native method
857 // than the typical interpreter frame setup.
858 //
859
// Generates the interpreter entry stub for native methods (second revision of
// this chunk: parameter size now lives on ConstMethod, reached via
// Method::const_offset(), so reading it takes an extra indirection). Two
// interior runs of the original listing (896-969 and 1013-1280) are elided.
860 address InterpreterGenerator::generate_native_entry(bool synchronized) {
861 // determine code generation flags
862 bool inc_counter = UseCompiler || CountCompiledCalls;
863
864 // rbx,: Method*
865 // rsi: sender sp
866 // rsi: previous interpreter state (C++ interpreter) must preserve
867 address entry_point = __ pc();
868
869 const Address constMethod (rbx, Method::const_offset());
870 const Address invocation_counter(rbx, Method::invocation_counter_offset() + InvocationCounter::counter_offset());
871 const Address access_flags (rbx, Method::access_flags_offset());
// size_of_parameters is based on rcx: rcx must hold the ConstMethod* first.
872 const Address size_of_parameters(rcx, ConstMethod::size_of_parameters_offset());
873
874 // get parameter size (always needed)
// rcx := ConstMethod*, then rcx := parameter slot count (u2 field).
875 __ movptr(rcx, constMethod);
876 __ load_unsigned_short(rcx, size_of_parameters);
877
878 // native calls don't need the stack size check since they have no expression stack
879 // and the arguments are already on the stack and we only add a handful of words
880 // to the stack
881
882 // rbx,: Method*
883 // rcx: size of parameters
884 // rsi: sender sp
885
886 __ pop(rax); // get return address
887 // for natives the size of locals is zero
888
889 // compute beginning of parameters (rdi)
890 __ lea(rdi, Address(rsp, rcx, Interpreter::stackElementScale(), -wordSize));
891
892
893 // add 2 zero-initialized slots for native calls
894 // NULL result handler
895 __ push((int32_t)NULL_WORD);
// NOTE(review): original lines 896-969 elided. The debug block below is the
// tail of a (presumably #ifdef ASSERT) stack-frame sanity check; both the
// opening #if and the declaration of Label L are in the elided portion.
970 const Address monitor_block_top (rbp,
971 frame::interpreter_frame_monitor_block_top_offset * wordSize);
// Assert that no monitors were pushed yet: monitor_block_top must equal rsp.
972 __ movptr(rax, monitor_block_top);
973 __ cmpptr(rax, rsp);
974 __ jcc(Assembler::equal, L);
975 __ stop("broken stack frame setup in interpreter");
976 __ bind(L);
977 }
978 #endif
979
980 // jvmti/dtrace support
981 __ notify_method_entry();
982
983 // work registers
984 const Register method = rbx;
985 const Register thread = rdi;
986 const Register t = rcx;
987
988 // allocate space for parameters
989 __ get_method(method);
// t := ConstMethod*, then t := parameter slot count (the extra hop vs. the
// earlier revision, matching the field's new home on ConstMethod).
990 __ movptr(t, Address(method, Method::const_offset()));
991 __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
992
// Convert slot count to bytes, add 2 words (JNIEnv + possible class mirror for
// statics), carve the space off rsp, then realign the native stack.
993 __ shlptr(t, Interpreter::logStackElementSize);
994 __ addptr(t, 2*wordSize); // allocate two more slots for JNIEnv and possible mirror
995 __ subptr(rsp, t);
996 __ andptr(rsp, -(StackAlignmentInBytes)); // gcc needs 16 byte aligned stacks to do XMM intrinsics
997
998 // get signature handler
// Lazily install the signature handler: if Method's cached handler is NULL,
// call into the VM (prepare_native_call) and re-fetch; method is reloaded via
// get_method because the VM call may clobber rbx.
999 { Label L;
1000 __ movptr(t, Address(method, Method::signature_handler_offset()));
1001 __ testptr(t, t);
1002 __ jcc(Assembler::notZero, L);
1003 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), method);
1004 __ get_method(method);
1005 __ movptr(t, Address(method, Method::signature_handler_offset()));
1006 __ bind(L);
1007 }
1008
1009 // call signature handler
1010 assert(InterpreterRuntime::SignatureHandlerGenerator::from() == rdi, "adjust this code");
1011 assert(InterpreterRuntime::SignatureHandlerGenerator::to () == rsp, "adjust this code");
1012 assert(InterpreterRuntime::SignatureHandlerGenerator::temp() == t , "adjust this code");
// NOTE(review): original lines 1014-1280 elided (the actual native call,
// result handling, unlock, etc.). The tail below binds the counter-overflow
// label declared in the elided portion and returns the entry address.
1281 if (inc_counter) {
1282 // Handle overflow of counter and compile method
1283 __ bind(invocation_counter_overflow);
1284 generate_counter_overflow(&continue_after_compile);
1285 }
1286
1287 return entry_point;
1288 }
1289
1290 //
1291 // Generic interpreted method entry to (asm) interpreter
1292 //
// Generates the generic (bytecode) interpreter entry (second revision:
// parameter/local sizes read off ConstMethod via rdx). The function continues
// past original line 1328; the remainder is elided from this excerpt.
1293 address InterpreterGenerator::generate_normal_entry(bool synchronized) {
1294 // determine code generation flags
1295 bool inc_counter = UseCompiler || CountCompiledCalls;
1296
1297 // rbx,: Method*
1298 // rsi: sender sp
1299 address entry_point = __ pc();
1300
1301 const Address constMethod (rbx, Method::const_offset());
1302 const Address invocation_counter(rbx, Method::invocation_counter_offset() + InvocationCounter::counter_offset());
1303 const Address access_flags (rbx, Method::access_flags_offset());
// Both size Addresses are based on rdx: rdx must hold the ConstMethod* when
// they are used (loaded just below).
1304 const Address size_of_parameters(rdx, ConstMethod::size_of_parameters_offset());
1305 const Address size_of_locals (rdx, ConstMethod::size_of_locals_offset());
1306
1307 // get parameter size (always needed)
// rdx := ConstMethod*; rcx := parameter slot count.
1308 __ movptr(rdx, constMethod);
1309 __ load_unsigned_short(rcx, size_of_parameters);
1310
1311 // rbx,: Method*
1312 // rcx: size of parameters
1313
1314 // rsi: sender_sp (could differ from sp+wordSize if we were called via c2i )
1315
// size_of_locals is read while rdx still holds ConstMethod*, then rdx is
// overwritten with max_locals and reduced to the count of additional locals.
1316 __ load_unsigned_short(rdx, size_of_locals); // get size of locals in words
1317 __ subl(rdx, rcx); // rdx = no. of additional locals
1318
1319 // see if we've got enough room on the stack for locals plus overhead.
1320 generate_stack_overflow_check();
1321
1322 // get return address
1323 __ pop(rax);
1324
1325 // compute beginning of parameters (rdi)
1326 __ lea(rdi, Address(rsp, rcx, Interpreter::stackElementScale(), -wordSize));
1327
1328 // rdx - # of additional locals
1719
// NOTE(review): fragment from the middle of an elided function (PopFrame
// handling; second revision — the parameter-size read now goes through
// ConstMethod). 'thread' and the binding site of caller_not_deoptimized live
// outside this excerpt.
1720 {
1721 // Check to see whether we are returning to a deoptimized frame.
1722 // (The PopFrame call ensures that the caller of the popped frame is
1723 // either interpreted or compiled and deoptimizes it if compiled.)
1724 // In this case, we can't call dispatch_next() after the frame is
1725 // popped, but instead must save the incoming arguments and restore
1726 // them after deoptimization has occurred.
1727 //
1728 // Note that we don't compare the return PC against the
1729 // deoptimization blob's unpack entry because of the presence of
1730 // adapter frames in C2.
1731 Label caller_not_deoptimized;
// rdx := caller's return PC; interpreter_contains(rdx) returns nonzero when
// that PC lies inside the interpreter, i.e. the caller is interpreted and
// therefore not a deoptimized compiled frame.
1732 __ movptr(rdx, Address(rbp, frame::return_addr_offset * wordSize));
1733 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::interpreter_contains), rdx);
1734 __ testl(rax, rax);
1735 __ jcc(Assembler::notZero, caller_not_deoptimized);
1736
1737 // Compute size of arguments for saving when returning to deoptimized caller
// rax := Method* -> ConstMethod* -> parameter slot count, scaled to bytes;
// rdi is then adjusted from the locals base to the start of the argument area.
1738 __ get_method(rax);
1739 __ movptr(rax, Address(rax, Method::const_offset()));
1740 __ load_unsigned_short(rax, Address(rax, ConstMethod::size_of_parameters_offset()));
1741 __ shlptr(rax, Interpreter::logStackElementSize);
1742 __ restore_locals();
1743 __ subptr(rdi, rax);
1744 __ addptr(rdi, wordSize);
1745 // Save these arguments
1746 __ get_thread(thread);
1747 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, Deoptimization::popframe_preserve_args), thread, rax, rdi);
1748
1749 __ remove_activation(vtos, rdx,
1750 /* throw_monitor_exception */ false,
1751 /* install_monitor_exception */ false,
1752 /* notify_jvmdi */ false);
1753
1754 // Inform deoptimization that it is responsible for restoring these arguments
1755 __ get_thread(thread);
1756 __ movl(Address(thread, JavaThread::popframe_condition_offset()), JavaThread::popframe_force_deopt_reexecution_bit);
1757
1758 // Continue in deoptimization handler
// rdx still holds the caller's return PC (remove_activation was asked to load
// it there); jump resumes in the deopt blob via that address.
1759 __ jmp(rdx);
1760
|