407 // rdi - locals
408 // rsi - bcp
409 // rbx, - method
410 // rdx - cpool
411 // rbp, - interpreter frame
412
413 // C++ interpreter on entry
414 // rsi - new interpreter state pointer
415 // rbp - interpreter frame pointer
416 // rbx - method
417
418 // On return (i.e. jump to entry_point) [ back to invocation of interpreter ]
419 // rbx, - method
420 // rcx - rcvr (assuming there is one)
421 // top of stack return address of interpreter caller
422 // rsp - sender_sp
423
424 // C++ interpreter only
425 // rsi - previous interpreter state pointer
426
427 const Address size_of_parameters(rbx, Method::size_of_parameters_offset());
428
429 // InterpreterRuntime::frequency_counter_overflow takes one argument
430 // indicating if the counter overflow occurs at a backwards branch (non-NULL bcp).
431 // The call returns the address of the verified entry point for the method or NULL
432 // if the compilation did not complete (either went background or bailed out).
433 __ movptr(rax, (intptr_t)false);
434 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), rax);
435
436 __ movptr(rbx, Address(rbp, method_offset)); // restore Method*
437
438 // Preserve invariant that rsi/rdi contain bcp/locals of sender frame
439 // and jump to the interpreted entry.
440 __ jmp(*do_continue, relocInfo::none);
441
442 }
443
444 void InterpreterGenerator::generate_stack_overflow_check(void) {
445 // see if we've got enough room on the stack for locals plus overhead.
446 // the expression stack grows down incrementally, so the normal guard
447 // page mechanism will work for that.
448 //
851 // If G1 is not enabled then attempt to go through the accessor entry point
852 // Reference.get is an accessor
853 return generate_accessor_entry();
854 }
855
856 //
857 // Interpreter stub for calling a native method. (asm interpreter)
858 // This sets up a somewhat different-looking stack for calling the native method
859 // than the typical interpreter frame setup.
860 //
861
862 address InterpreterGenerator::generate_native_entry(bool synchronized) {
863 // determine code generation flags
864 bool inc_counter = UseCompiler || CountCompiledCalls;
865
866 // rbx,: Method*
867 // rsi: sender sp
868 // rsi: previous interpreter state (C++ interpreter) must preserve
869 address entry_point = __ pc();
870
871
872 const Address size_of_parameters(rbx, Method::size_of_parameters_offset());
873 const Address invocation_counter(rbx, Method::invocation_counter_offset() + InvocationCounter::counter_offset());
874 const Address access_flags (rbx, Method::access_flags_offset());
875
876 // get parameter size (always needed)
877 __ load_unsigned_short(rcx, size_of_parameters);
878
879 // native calls don't need the stack size check since they have no expression stack
880 // and the arguments are already on the stack and we only add a handful of words
881 // to the stack
882
883 // rbx,: Method*
884 // rcx: size of parameters
885 // rsi: sender sp
886
887 __ pop(rax); // get return address
888 // for natives the size of locals is zero
889
890 // compute beginning of parameters (rdi)
891 __ lea(rdi, Address(rsp, rcx, Interpreter::stackElementScale(), -wordSize));
892
893
894 // add 2 zero-initialized slots for native calls
895 // NULL result handler
896 __ push((int32_t)NULL_WORD);
897 // NULL oop temp (mirror or jni oop result)
971 const Address monitor_block_top (rbp,
972 frame::interpreter_frame_monitor_block_top_offset * wordSize);
973 __ movptr(rax, monitor_block_top);
974 __ cmpptr(rax, rsp);
975 __ jcc(Assembler::equal, L);
976 __ stop("broken stack frame setup in interpreter");
977 __ bind(L);
978 }
979 #endif
980
981 // jvmti/dtrace support
982 __ notify_method_entry();
983
984 // work registers
985 const Register method = rbx;
986 const Register thread = rdi;
987 const Register t = rcx;
988
989 // allocate space for parameters
990 __ get_method(method);
991 __ load_unsigned_short(t, Address(method, Method::size_of_parameters_offset()));
992 __ shlptr(t, Interpreter::logStackElementSize);
993 __ addptr(t, 2*wordSize); // allocate two more slots for JNIEnv and possible mirror
994 __ subptr(rsp, t);
995 __ andptr(rsp, -(StackAlignmentInBytes)); // gcc needs 16 byte aligned stacks to do XMM intrinsics
996
997 // get signature handler
998 { Label L;
999 __ movptr(t, Address(method, Method::signature_handler_offset()));
1000 __ testptr(t, t);
1001 __ jcc(Assembler::notZero, L);
1002 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), method);
1003 __ get_method(method);
1004 __ movptr(t, Address(method, Method::signature_handler_offset()));
1005 __ bind(L);
1006 }
1007
1008 // call signature handler
1009 assert(InterpreterRuntime::SignatureHandlerGenerator::from() == rdi, "adjust this code");
1010 assert(InterpreterRuntime::SignatureHandlerGenerator::to () == rsp, "adjust this code");
1011 assert(InterpreterRuntime::SignatureHandlerGenerator::temp() == t , "adjust this code");
1280 if (inc_counter) {
1281 // Handle overflow of counter and compile method
1282 __ bind(invocation_counter_overflow);
1283 generate_counter_overflow(&continue_after_compile);
1284 }
1285
1286 return entry_point;
1287 }
1288
1289 //
1290 // Generic interpreted method entry to (asm) interpreter
1291 //
1292 address InterpreterGenerator::generate_normal_entry(bool synchronized) {
1293 // determine code generation flags
1294 bool inc_counter = UseCompiler || CountCompiledCalls;
1295
1296 // rbx,: Method*
1297 // rsi: sender sp
1298 address entry_point = __ pc();
1299
1300
1301 const Address size_of_parameters(rbx, Method::size_of_parameters_offset());
1302 const Address size_of_locals (rbx, Method::size_of_locals_offset());
1303 const Address invocation_counter(rbx, Method::invocation_counter_offset() + InvocationCounter::counter_offset());
1304 const Address access_flags (rbx, Method::access_flags_offset());
1305
1306 // get parameter size (always needed)
1307 __ load_unsigned_short(rcx, size_of_parameters);
1308
1309 // rbx,: Method*
1310 // rcx: size of parameters
1311
1312 // rsi: sender_sp (could differ from sp+wordSize if we were called via c2i )
1313
1314 __ load_unsigned_short(rdx, size_of_locals); // get size of locals in words
1315 __ subl(rdx, rcx); // rdx = no. of additional locals
1316
1317 // see if we've got enough room on the stack for locals plus overhead.
1318 generate_stack_overflow_check();
1319
1320 // get return address
1321 __ pop(rax);
1322
1323 // compute beginning of parameters (rdi)
1324 __ lea(rdi, Address(rsp, rcx, Interpreter::stackElementScale(), -wordSize));
1325
1326 // rdx - # of additional locals
1327 // allocate space for locals
1328 // explicitly initialize locals
1329 {
1330 Label exit, loop;
1331 __ testl(rdx, rdx);
1332 __ jcc(Assembler::lessEqual, exit); // do nothing if rdx <= 0
1333 __ bind(loop);
1334 __ push((int32_t)NULL_WORD); // initialize local variables
1717
1718 {
1719 // Check to see whether we are returning to a deoptimized frame.
1720 // (The PopFrame call ensures that the caller of the popped frame is
1721 // either interpreted or compiled and deoptimizes it if compiled.)
1722 // In this case, we can't call dispatch_next() after the frame is
1723 // popped, but instead must save the incoming arguments and restore
1724 // them after deoptimization has occurred.
1725 //
1726 // Note that we don't compare the return PC against the
1727 // deoptimization blob's unpack entry because of the presence of
1728 // adapter frames in C2.
1729 Label caller_not_deoptimized;
1730 __ movptr(rdx, Address(rbp, frame::return_addr_offset * wordSize));
1731 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::interpreter_contains), rdx);
1732 __ testl(rax, rax);
1733 __ jcc(Assembler::notZero, caller_not_deoptimized);
1734
1735 // Compute size of arguments for saving when returning to deoptimized caller
1736 __ get_method(rax);
1737 __ load_unsigned_short(rax, Address(rax, in_bytes(Method::size_of_parameters_offset())));
1738 __ shlptr(rax, Interpreter::logStackElementSize);
1739 __ restore_locals();
1740 __ subptr(rdi, rax);
1741 __ addptr(rdi, wordSize);
1742 // Save these arguments
1743 __ get_thread(thread);
1744 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, Deoptimization::popframe_preserve_args), thread, rax, rdi);
1745
1746 __ remove_activation(vtos, rdx,
1747 /* throw_monitor_exception */ false,
1748 /* install_monitor_exception */ false,
1749 /* notify_jvmdi */ false);
1750
1751 // Inform deoptimization that it is responsible for restoring these arguments
1752 __ get_thread(thread);
1753 __ movl(Address(thread, JavaThread::popframe_condition_offset()), JavaThread::popframe_force_deopt_reexecution_bit);
1754
1755 // Continue in deoptimization handler
1756 __ jmp(rdx);
1757
|
407 // rdi - locals
408 // rsi - bcp
409 // rbx, - method
410 // rdx - cpool
411 // rbp, - interpreter frame
412
413 // C++ interpreter on entry
414 // rsi - new interpreter state pointer
415 // rbp - interpreter frame pointer
416 // rbx - method
417
418 // On return (i.e. jump to entry_point) [ back to invocation of interpreter ]
419 // rbx, - method
420 // rcx - rcvr (assuming there is one)
421 // top of stack return address of interpreter caller
422 // rsp - sender_sp
423
424 // C++ interpreter only
425 // rsi - previous interpreter state pointer
426
427 // InterpreterRuntime::frequency_counter_overflow takes one argument
428 // indicating if the counter overflow occurs at a backwards branch (non-NULL bcp).
429 // The call returns the address of the verified entry point for the method or NULL
430 // if the compilation did not complete (either went background or bailed out).
431 __ movptr(rax, (intptr_t)false);
432 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), rax);
433
434 __ movptr(rbx, Address(rbp, method_offset)); // restore Method*
435
436 // Preserve invariant that rsi/rdi contain bcp/locals of sender frame
437 // and jump to the interpreted entry.
438 __ jmp(*do_continue, relocInfo::none);
439
440 }
441
442 void InterpreterGenerator::generate_stack_overflow_check(void) {
443 // see if we've got enough room on the stack for locals plus overhead.
444 // the expression stack grows down incrementally, so the normal guard
445 // page mechanism will work for that.
446 //
849 // If G1 is not enabled then attempt to go through the accessor entry point
850 // Reference.get is an accessor
851 return generate_accessor_entry();
852 }
853
854 //
855 // Interpreter stub for calling a native method. (asm interpreter)
856 // This sets up a somewhat different-looking stack for calling the native method
857 // than the typical interpreter frame setup.
858 //
859
860 address InterpreterGenerator::generate_native_entry(bool synchronized) {
861 // determine code generation flags
862 bool inc_counter = UseCompiler || CountCompiledCalls;
863
864 // rbx,: Method*
865 // rsi: sender sp
866 // rsi: previous interpreter state (C++ interpreter) must preserve
867 address entry_point = __ pc();
868
869 const Address constMethod (rbx, Method::const_offset());
870 const Address invocation_counter(rbx, Method::invocation_counter_offset() + InvocationCounter::counter_offset());
871 const Address access_flags (rbx, Method::access_flags_offset());
872
873 // get parameter size (always needed)
874 __ movptr(rcx, constMethod);
875 __ load_unsigned_short(rcx, Address(rcx, ConstMethod::size_of_parameters_offset()));
876
877 // native calls don't need the stack size check since they have no expression stack
878 // and the arguments are already on the stack and we only add a handful of words
879 // to the stack
880
881 // rbx,: Method*
882 // rcx: size of parameters
883 // rsi: sender sp
884
885 __ pop(rax); // get return address
886 // for natives the size of locals is zero
887
888 // compute beginning of parameters (rdi)
889 __ lea(rdi, Address(rsp, rcx, Interpreter::stackElementScale(), -wordSize));
890
891
892 // add 2 zero-initialized slots for native calls
893 // NULL result handler
894 __ push((int32_t)NULL_WORD);
895 // NULL oop temp (mirror or jni oop result)
969 const Address monitor_block_top (rbp,
970 frame::interpreter_frame_monitor_block_top_offset * wordSize);
971 __ movptr(rax, monitor_block_top);
972 __ cmpptr(rax, rsp);
973 __ jcc(Assembler::equal, L);
974 __ stop("broken stack frame setup in interpreter");
975 __ bind(L);
976 }
977 #endif
978
979 // jvmti/dtrace support
980 __ notify_method_entry();
981
982 // work registers
983 const Register method = rbx;
984 const Register thread = rdi;
985 const Register t = rcx;
986
987 // allocate space for parameters
988 __ get_method(method);
989 __ movptr(t, Address(method, Method::const_offset()));
990 __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
991
992 __ shlptr(t, Interpreter::logStackElementSize);
993 __ addptr(t, 2*wordSize); // allocate two more slots for JNIEnv and possible mirror
994 __ subptr(rsp, t);
995 __ andptr(rsp, -(StackAlignmentInBytes)); // gcc needs 16 byte aligned stacks to do XMM intrinsics
996
997 // get signature handler
998 { Label L;
999 __ movptr(t, Address(method, Method::signature_handler_offset()));
1000 __ testptr(t, t);
1001 __ jcc(Assembler::notZero, L);
1002 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), method);
1003 __ get_method(method);
1004 __ movptr(t, Address(method, Method::signature_handler_offset()));
1005 __ bind(L);
1006 }
1007
1008 // call signature handler
1009 assert(InterpreterRuntime::SignatureHandlerGenerator::from() == rdi, "adjust this code");
1010 assert(InterpreterRuntime::SignatureHandlerGenerator::to () == rsp, "adjust this code");
1011 assert(InterpreterRuntime::SignatureHandlerGenerator::temp() == t , "adjust this code");
1280 if (inc_counter) {
1281 // Handle overflow of counter and compile method
1282 __ bind(invocation_counter_overflow);
1283 generate_counter_overflow(&continue_after_compile);
1284 }
1285
1286 return entry_point;
1287 }
1288
1289 //
1290 // Generic interpreted method entry to (asm) interpreter
1291 //
1292 address InterpreterGenerator::generate_normal_entry(bool synchronized) {
1293 // determine code generation flags
1294 bool inc_counter = UseCompiler || CountCompiledCalls;
1295
1296 // rbx,: Method*
1297 // rsi: sender sp
1298 address entry_point = __ pc();
1299
1300 const Address constMethod (rbx, Method::const_offset());
1301 const Address invocation_counter(rbx, Method::invocation_counter_offset() + InvocationCounter::counter_offset());
1302 const Address access_flags (rbx, Method::access_flags_offset());
1303
1304 // get parameter size (always needed)
1305 __ movptr(rdx, constMethod);
1306 __ load_unsigned_short(rcx, Address(rdx, ConstMethod::size_of_parameters_offset()));
1307
1308 // rbx,: Method*
1309 // rcx: size of parameters
1310
1311 // rsi: sender_sp (could differ from sp+wordSize if we were called via c2i )
1312
1313 // get size of locals in words
1314 __ load_unsigned_short(rdx, Address(rdx, ConstMethod::size_of_locals_offset()));
1315 __ subl(rdx, rcx); // rdx = no. of additional locals
1316
1317 // see if we've got enough room on the stack for locals plus overhead.
1318 generate_stack_overflow_check();
1319
1320 // get return address
1321 __ pop(rax);
1322
1323 // compute beginning of parameters (rdi)
1324 __ lea(rdi, Address(rsp, rcx, Interpreter::stackElementScale(), -wordSize));
1325
1326 // rdx - # of additional locals
1327 // allocate space for locals
1328 // explicitly initialize locals
1329 {
1330 Label exit, loop;
1331 __ testl(rdx, rdx);
1332 __ jcc(Assembler::lessEqual, exit); // do nothing if rdx <= 0
1333 __ bind(loop);
1334 __ push((int32_t)NULL_WORD); // initialize local variables
1717
1718 {
1719 // Check to see whether we are returning to a deoptimized frame.
1720 // (The PopFrame call ensures that the caller of the popped frame is
1721 // either interpreted or compiled and deoptimizes it if compiled.)
1722 // In this case, we can't call dispatch_next() after the frame is
1723 // popped, but instead must save the incoming arguments and restore
1724 // them after deoptimization has occurred.
1725 //
1726 // Note that we don't compare the return PC against the
1727 // deoptimization blob's unpack entry because of the presence of
1728 // adapter frames in C2.
1729 Label caller_not_deoptimized;
1730 __ movptr(rdx, Address(rbp, frame::return_addr_offset * wordSize));
1731 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::interpreter_contains), rdx);
1732 __ testl(rax, rax);
1733 __ jcc(Assembler::notZero, caller_not_deoptimized);
1734
1735 // Compute size of arguments for saving when returning to deoptimized caller
1736 __ get_method(rax);
1737 __ movptr(rax, Address(rax, Method::const_offset()));
1738 __ load_unsigned_short(rax, Address(rax, ConstMethod::size_of_parameters_offset()));
1739 __ shlptr(rax, Interpreter::logStackElementSize);
1740 __ restore_locals();
1741 __ subptr(rdi, rax);
1742 __ addptr(rdi, wordSize);
1743 // Save these arguments
1744 __ get_thread(thread);
1745 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, Deoptimization::popframe_preserve_args), thread, rax, rdi);
1746
1747 __ remove_activation(vtos, rdx,
1748 /* throw_monitor_exception */ false,
1749 /* install_monitor_exception */ false,
1750 /* notify_jvmdi */ false);
1751
1752 // Inform deoptimization that it is responsible for restoring these arguments
1753 __ get_thread(thread);
1754 __ movl(Address(thread, JavaThread::popframe_condition_offset()), JavaThread::popframe_force_deopt_reexecution_bit);
1755
1756 // Continue in deoptimization handler
1757 __ jmp(rdx);
1758
|