352 }
353
354 __ cmp32(rcx, ExternalAddress((address)&InvocationCounter::InterpreterInvocationLimit));
355 __ jcc(Assembler::aboveEqual, *overflow);
356 }
357 }
358
// Handle overflow of the per-method invocation counter: call into the
// runtime (InterpreterRuntime::frequency_counter_overflow) so it can decide
// whether to (re)compile the method, restore Method* into rbx (call_VM may
// have clobbered it), and jump back to *do_continue to resume interpreting.
359 void InterpreterGenerator::generate_counter_overflow(Label* do_continue) {
360
361 // Asm interpreter on entry
362 // r14 - locals
363 // r13 - bcp
364 // rbx - method
365 // edx - cpool --- DOES NOT APPEAR TO BE TRUE
366 // rbp - interpreter frame
367
368 // On return (i.e. jump to entry_point) [ back to invocation of interpreter ]
369 // Everything as it was on entry
370 // rdx is not restored. Doesn't appear to really be set.
371
// NOTE(review): the former local
//   const Address size_of_parameters(rbx, Method::size_of_parameters_offset());
// was never referenced in this function and has been removed (dead code).
372
375 // InterpreterRuntime::frequency_counter_overflow takes two
376 // arguments, the first (thread) is passed by call_VM, the second
377 // indicates if the counter overflow occurs at a backwards branch
378 // (NULL bcp). We pass zero for it. The call returns the address
379 // of the verified entry point for the method or NULL if the
380 // compilation did not complete (either went background or bailed
381 // out).
382 __ movl(c_rarg1, 0);  // second argument: zero bcp => not at a backward branch
383 __ call_VM(noreg,
384 CAST_FROM_FN_PTR(address,
385 InterpreterRuntime::frequency_counter_overflow),
386 c_rarg1);
387
388 __ movptr(rbx, Address(rbp, method_offset)); // restore Method*
389 // Preserve invariant that r13/r14 contain bcp/locals of sender frame
390 // and jump to the interpreted entry.
391 __ jmp(*do_continue, relocInfo::none);
392 }
393
394 // See if we've got enough room on the stack for locals plus overhead.
827 #endif // SERIALGC
828
829 // If G1 is not enabled then attempt to go through the accessor entry point
830 // Reference.get is an accessor
831 return generate_accessor_entry();
832 }
833
834
835 // Interpreter stub for calling a native method. (asm interpreter)
836 // This sets up a somewhat different looking stack for calling the
837 // native method than the typical interpreter frame setup.
838 address InterpreterGenerator::generate_native_entry(bool synchronized) {
839 // determine code generation flags
840 bool inc_counter = UseCompiler || CountCompiledCalls;
841
842 // rbx: Method*
843 // r13: sender sp
844
845 address entry_point = __ pc();
846
847 const Address size_of_parameters(rbx, Method::
848 size_of_parameters_offset());
849 const Address invocation_counter(rbx, Method::
850 invocation_counter_offset() +
851 InvocationCounter::counter_offset());
852 const Address access_flags (rbx, Method::access_flags_offset());
853
854 // get parameter size (always needed)
855 __ load_unsigned_short(rcx, size_of_parameters);
856
857 // native calls don't need the stack size check since they have no
858 // expression stack and the arguments are already on the stack and
859 // we only add a handful of words to the stack
860
861 // rbx: Method*
862 // rcx: size of parameters
863 // r13: sender sp
864 __ pop(rax); // get return address
865
866 // for natives the size of locals is zero
867
868 // compute beginning of parameters (r14)
869 __ lea(r14, Address(rsp, rcx, Address::times_8, -wordSize));
870
871 // add 2 zero-initialized slots for native calls
872 // initialize result_handler slot
873 __ push((int) NULL_WORD);
874 // slot for oop temp
950 Label L;
951 const Address monitor_block_top(rbp,
952 frame::interpreter_frame_monitor_block_top_offset * wordSize);
953 __ movptr(rax, monitor_block_top);
954 __ cmpptr(rax, rsp);
955 __ jcc(Assembler::equal, L);
956 __ stop("broken stack frame setup in interpreter");
957 __ bind(L);
958 }
959 #endif
960
961 // jvmti support
962 __ notify_method_entry();
963
964 // work registers
965 const Register method = rbx;
966 const Register t = r11;
967
968 // allocate space for parameters
969 __ get_method(method);
970 __ load_unsigned_short(t,
971 Address(method,
972 Method::size_of_parameters_offset()));
973 __ shll(t, Interpreter::logStackElementSize);
974
975 __ subptr(rsp, t);
976 __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
977 __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
978
979 // get signature handler
980 {
981 Label L;
982 __ movptr(t, Address(method, Method::signature_handler_offset()));
983 __ testptr(t, t);
984 __ jcc(Assembler::notZero, L);
985 __ call_VM(noreg,
986 CAST_FROM_FN_PTR(address,
987 InterpreterRuntime::prepare_native_call),
988 method);
989 __ get_method(method);
990 __ movptr(t, Address(method, Method::signature_handler_offset()));
991 __ bind(L);
992 }
1285 if (inc_counter) {
1286 // Handle overflow of counter and compile method
1287 __ bind(invocation_counter_overflow);
1288 generate_counter_overflow(&continue_after_compile);
1289 }
1290
1291 return entry_point;
1292 }
1293
1294 //
1295 // Generic interpreted method entry to (asm) interpreter
1296 //
1297 address InterpreterGenerator::generate_normal_entry(bool synchronized) {
1298 // determine code generation flags
1299 bool inc_counter = UseCompiler || CountCompiledCalls;
1300
1301 // ebx: Method*
1302 // r13: sender sp
1303 address entry_point = __ pc();
1304
1305 const Address size_of_parameters(rbx,
1306 Method::size_of_parameters_offset());
1307 const Address size_of_locals(rbx, Method::size_of_locals_offset());
1308 const Address invocation_counter(rbx,
1309 Method::invocation_counter_offset() +
1310 InvocationCounter::counter_offset());
1311 const Address access_flags(rbx, Method::access_flags_offset());
1312
1313 // get parameter size (always needed)
1314 __ load_unsigned_short(rcx, size_of_parameters);
1315
1316 // rbx: Method*
1317 // rcx: size of parameters
1318 // r13: sender_sp (could differ from sp+wordSize if we were called via c2i )
1319
1320 __ load_unsigned_short(rdx, size_of_locals); // get size of locals in words
1321 __ subl(rdx, rcx); // rdx = no. of additional locals
1322
1323 // YYY
1324 // __ incrementl(rdx);
1325 // __ andl(rdx, -2);
1326
1327 // see if we've got enough room on the stack for locals plus overhead.
1328 generate_stack_overflow_check();
1329
1330 // get return address
1331 __ pop(rax);
1332
1333 // compute beginning of parameters (r14)
1735 // Check to see whether we are returning to a deoptimized frame.
1736 // (The PopFrame call ensures that the caller of the popped frame is
1737 // either interpreted or compiled and deoptimizes it if compiled.)
1738 // In this case, we can't call dispatch_next() after the frame is
1739 // popped, but instead must save the incoming arguments and restore
1740 // them after deoptimization has occurred.
1741 //
1742 // Note that we don't compare the return PC against the
1743 // deoptimization blob's unpack entry because of the presence of
1744 // adapter frames in C2.
1745 Label caller_not_deoptimized;
1746 __ movptr(c_rarg1, Address(rbp, frame::return_addr_offset * wordSize));
1747 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address,
1748 InterpreterRuntime::interpreter_contains), c_rarg1);
1749 __ testl(rax, rax);
1750 __ jcc(Assembler::notZero, caller_not_deoptimized);
1751
1752 // Compute size of arguments for saving when returning to
1753 // deoptimized caller
1754 __ get_method(rax);
1755 __ load_unsigned_short(rax, Address(rax, in_bytes(Method::
1756 size_of_parameters_offset())));
1757 __ shll(rax, Interpreter::logStackElementSize);
1758 __ restore_locals(); // XXX do we need this?
1759 __ subptr(r14, rax);
1760 __ addptr(r14, wordSize);
1761 // Save these arguments
1762 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address,
1763 Deoptimization::
1764 popframe_preserve_args),
1765 r15_thread, rax, r14);
1766
1767 __ remove_activation(vtos, rdx,
1768 /* throw_monitor_exception */ false,
1769 /* install_monitor_exception */ false,
1770 /* notify_jvmdi */ false);
1771
1772 // Inform deoptimization that it is responsible for restoring
1773 // these arguments
1774 __ movl(Address(r15_thread, JavaThread::popframe_condition_offset()),
1775 JavaThread::popframe_force_deopt_reexecution_bit);
|
352 }
353
354 __ cmp32(rcx, ExternalAddress((address)&InvocationCounter::InterpreterInvocationLimit));
355 __ jcc(Assembler::aboveEqual, *overflow);
356 }
357 }
358
// Handle overflow of the per-method invocation counter: call into the
// runtime (InterpreterRuntime::frequency_counter_overflow) so it can decide
// whether to (re)compile the method, restore Method* into rbx (call_VM may
// have clobbered it), and jump back to *do_continue to resume interpreting.
359 void InterpreterGenerator::generate_counter_overflow(Label* do_continue) {
360
361 // Asm interpreter on entry
362 // r14 - locals
363 // r13 - bcp
364 // rbx - method
365 // edx - cpool --- DOES NOT APPEAR TO BE TRUE
366 // rbp - interpreter frame
367
368 // On return (i.e. jump to entry_point) [ back to invocation of interpreter ]
369 // Everything as it was on entry
370 // rdx is not restored. Doesn't appear to really be set.
371
372 // InterpreterRuntime::frequency_counter_overflow takes two
373 // arguments, the first (thread) is passed by call_VM, the second
374 // indicates if the counter overflow occurs at a backwards branch
375 // (NULL bcp). We pass zero for it. The call returns the address
376 // of the verified entry point for the method or NULL if the
377 // compilation did not complete (either went background or bailed
378 // out).
// Pass zero as the second argument: not at a backward branch.
379 __ movl(c_rarg1, 0);
380 __ call_VM(noreg,
381 CAST_FROM_FN_PTR(address,
382 InterpreterRuntime::frequency_counter_overflow),
383 c_rarg1);
384
385 __ movptr(rbx, Address(rbp, method_offset)); // restore Method*
386 // Preserve invariant that r13/r14 contain bcp/locals of sender frame
387 // and jump to the interpreted entry.
388 __ jmp(*do_continue, relocInfo::none);
389 }
390
391 // See if we've got enough room on the stack for locals plus overhead.
824 #endif // SERIALGC
825
826 // If G1 is not enabled then attempt to go through the accessor entry point
827 // Reference.get is an accessor
828 return generate_accessor_entry();
829 }
830
831
832 // Interpreter stub for calling a native method. (asm interpreter)
833 // This sets up a somewhat different looking stack for calling the
834 // native method than the typical interpreter frame setup.
835 address InterpreterGenerator::generate_native_entry(bool synchronized) {
836 // determine code generation flags
837 bool inc_counter = UseCompiler || CountCompiledCalls;
838
839 // rbx: Method*
840 // r13: sender sp
841
842 address entry_point = __ pc();
843
844 const Address constMethod (rbx, Method::const_offset());
845 const Address invocation_counter(rbx, Method::
846 invocation_counter_offset() +
847 InvocationCounter::counter_offset());
848 const Address access_flags (rbx, Method::access_flags_offset());
849 const Address size_of_parameters(rcx, ConstMethod::
850 size_of_parameters_offset());
851
852
853 // get parameter size (always needed)
854 __ movptr(rcx, constMethod);
855 __ load_unsigned_short(rcx, size_of_parameters);
856
857 // native calls don't need the stack size check since they have no
858 // expression stack and the arguments are already on the stack and
859 // we only add a handful of words to the stack
860
861 // rbx: Method*
862 // rcx: size of parameters
863 // r13: sender sp
864 __ pop(rax); // get return address
865
866 // for natives the size of locals is zero
867
868 // compute beginning of parameters (r14)
869 __ lea(r14, Address(rsp, rcx, Address::times_8, -wordSize));
870
871 // add 2 zero-initialized slots for native calls
872 // initialize result_handler slot
873 __ push((int) NULL_WORD);
874 // slot for oop temp
950 Label L;
951 const Address monitor_block_top(rbp,
952 frame::interpreter_frame_monitor_block_top_offset * wordSize);
953 __ movptr(rax, monitor_block_top);
954 __ cmpptr(rax, rsp);
955 __ jcc(Assembler::equal, L);
956 __ stop("broken stack frame setup in interpreter");
957 __ bind(L);
958 }
959 #endif
960
961 // jvmti support
962 __ notify_method_entry();
963
964 // work registers
965 const Register method = rbx;
966 const Register t = r11;
967
968 // allocate space for parameters
969 __ get_method(method);
970 __ movptr(t, Address(method, Method::const_offset()));
971 __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
972 __ shll(t, Interpreter::logStackElementSize);
973
974 __ subptr(rsp, t);
975 __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
976 __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
977
978 // get signature handler
979 {
980 Label L;
981 __ movptr(t, Address(method, Method::signature_handler_offset()));
982 __ testptr(t, t);
983 __ jcc(Assembler::notZero, L);
984 __ call_VM(noreg,
985 CAST_FROM_FN_PTR(address,
986 InterpreterRuntime::prepare_native_call),
987 method);
988 __ get_method(method);
989 __ movptr(t, Address(method, Method::signature_handler_offset()));
990 __ bind(L);
991 }
1284 if (inc_counter) {
1285 // Handle overflow of counter and compile method
1286 __ bind(invocation_counter_overflow);
1287 generate_counter_overflow(&continue_after_compile);
1288 }
1289
1290 return entry_point;
1291 }
1292
1293 //
1294 // Generic interpreted method entry to (asm) interpreter
1295 //
1296 address InterpreterGenerator::generate_normal_entry(bool synchronized) {
1297 // determine code generation flags
1298 bool inc_counter = UseCompiler || CountCompiledCalls;
1299
1300 // ebx: Method*
1301 // r13: sender sp
1302 address entry_point = __ pc();
1303
1304 const Address constMethod(rbx, Method::const_offset());
1305 const Address invocation_counter(rbx,
1306 Method::invocation_counter_offset() +
1307 InvocationCounter::counter_offset());
1308 const Address access_flags(rbx, Method::access_flags_offset());
1309 const Address size_of_parameters(rdx,
1310 ConstMethod::size_of_parameters_offset());
1311 const Address size_of_locals(rdx, ConstMethod::size_of_locals_offset());
1312
1313
1314 // get parameter size (always needed)
1315 __ movptr(rdx, constMethod);
1316 __ load_unsigned_short(rcx, size_of_parameters);
1317
1318 // rbx: Method*
1319 // rcx: size of parameters
1320 // r13: sender_sp (could differ from sp+wordSize if we were called via c2i )
1321
1322 __ load_unsigned_short(rdx, size_of_locals); // get size of locals in words
1323 __ subl(rdx, rcx); // rdx = no. of additional locals
1324
1325 // YYY
1326 // __ incrementl(rdx);
1327 // __ andl(rdx, -2);
1328
1329 // see if we've got enough room on the stack for locals plus overhead.
1330 generate_stack_overflow_check();
1331
1332 // get return address
1333 __ pop(rax);
1334
1335 // compute beginning of parameters (r14)
1737 // Check to see whether we are returning to a deoptimized frame.
1738 // (The PopFrame call ensures that the caller of the popped frame is
1739 // either interpreted or compiled and deoptimizes it if compiled.)
1740 // In this case, we can't call dispatch_next() after the frame is
1741 // popped, but instead must save the incoming arguments and restore
1742 // them after deoptimization has occurred.
1743 //
1744 // Note that we don't compare the return PC against the
1745 // deoptimization blob's unpack entry because of the presence of
1746 // adapter frames in C2.
1747 Label caller_not_deoptimized;
1748 __ movptr(c_rarg1, Address(rbp, frame::return_addr_offset * wordSize));
1749 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address,
1750 InterpreterRuntime::interpreter_contains), c_rarg1);
1751 __ testl(rax, rax);
1752 __ jcc(Assembler::notZero, caller_not_deoptimized);
1753
1754 // Compute size of arguments for saving when returning to
1755 // deoptimized caller
1756 __ get_method(rax);
1757 __ movptr(rax, Address(rax, Method::const_offset()));
1758 __ load_unsigned_short(rax, Address(rax, in_bytes(ConstMethod::
1759 size_of_parameters_offset())));
1760 __ shll(rax, Interpreter::logStackElementSize);
1761 __ restore_locals(); // XXX do we need this?
1762 __ subptr(r14, rax);
1763 __ addptr(r14, wordSize);
1764 // Save these arguments
1765 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address,
1766 Deoptimization::
1767 popframe_preserve_args),
1768 r15_thread, rax, r14);
1769
1770 __ remove_activation(vtos, rdx,
1771 /* throw_monitor_exception */ false,
1772 /* install_monitor_exception */ false,
1773 /* notify_jvmdi */ false);
1774
1775 // Inform deoptimization that it is responsible for restoring
1776 // these arguments
1777 __ movl(Address(r15_thread, JavaThread::popframe_condition_offset()),
1778 JavaThread::popframe_force_deopt_reexecution_bit);
|