src/cpu/x86/vm/cppInterpreter_x86.cpp

 594   __ jcc(Assembler::aboveEqual, *overflow);
 595 
 596 }
 597 
 598 void InterpreterGenerator::generate_counter_overflow(Label* do_continue) {
 599 
 600   // C++ interpreter on entry
 601   // rsi/r13 - new interpreter state pointer
 602   // rbp - interpreter frame pointer
 603   // rbx - method
 604 
 605   // On return (i.e. jump to entry_point) [ back to invocation of interpreter ]
 606   // rbx, - method
 607   // rcx - rcvr (assuming there is one)
 608   // top of stack return address of interpreter caller
 609   // rsp - sender_sp
 610 
 611   // C++ interpreter only
 612   // rsi/r13 - previous interpreter state pointer
 613 
 614   const Address size_of_parameters(rbx, Method::size_of_parameters_offset());
 615 
 616   // InterpreterRuntime::frequency_counter_overflow takes one argument
 617   // indicating if the counter overflow occurs at a backwards branch (non-NULL bcp).
 618   // The call returns the address of the verified entry point for the method or NULL
 619   // if the compilation did not complete (either went background or bailed out).
 620   __ movptr(rax, (int32_t)false);
 621   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), rax);
 622 
 623   // for c++ interpreter can rsi really be munged?
 624   __ lea(state, Address(rbp, -(int)sizeof(BytecodeInterpreter)));                               // restore state
 625   __ movptr(rbx, Address(state, byte_offset_of(BytecodeInterpreter, _method)));            // restore method
 626   __ movptr(rdi, Address(state, byte_offset_of(BytecodeInterpreter, _locals)));            // get locals pointer
 627 
 628   __ jmp(*do_continue, relocInfo::none);
 629 
 630 }
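
For orientation, the call_VM above is in effect asking the runtime whether compiled code is now available for this method. Below is a minimal stand-alone sketch of that decision, with request_compilation as an illustrative stand-in for the real InterpreterRuntime::frequency_counter_overflow entry point (the true signature and return handling may differ):

    // Hedged sketch of the upcall the stub makes: a method-entry overflow passes a
    // null bcp (this is not a backedge), and a null result means no compiled code
    // is ready yet, so the interpreter simply continues.
    #include <cstddef>

    typedef unsigned char* address;

    static address request_compilation(void* /*thread*/, address /*branch_bcp*/) {
      return NULL;   // pretend the compile went to the background or bailed out
    }

    static address handle_counter_overflow(void* thread) {
      address verified_entry = request_compilation(thread, NULL /* not a backedge */);
      // NULL => keep interpreting; non-NULL => a verified entry point exists.
      return verified_entry;
    }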
 631 
 632 void InterpreterGenerator::generate_stack_overflow_check(void) {
 633   // see if we've got enough room on the stack for locals plus overhead.
 634   // the expression stack grows down incrementally, so the normal guard
 635   // page mechanism will work for that.


 960 }
 961 
 962 //
 963 // C++ Interpreter stub for calling a native method.
 964 // This sets up a somewhat different looking stack for calling the native method
 965 // than the typical interpreter frame setup but still has the pointer to
 966 // an interpreter state.
 967 //
 968 
 969 address InterpreterGenerator::generate_native_entry(bool synchronized) {
 970   // determine code generation flags
 971   bool inc_counter  = UseCompiler || CountCompiledCalls;
 972 
 973   // rbx: Method*
 974   // rcx: receiver (unused)
 975   // rsi/r13: previous interpreter state (if called from C++ interpreter) must preserve
 976   //      in any case. If called via c1/c2/call_stub rsi/r13 is junk (to use) but harmless
 977   //      to save/restore.
 978   address entry_point = __ pc();
 979 
 980   const Address size_of_parameters(rbx, Method::size_of_parameters_offset());
 981   const Address size_of_locals    (rbx, Method::size_of_locals_offset());
 982   const Address invocation_counter(rbx, Method::invocation_counter_offset() + InvocationCounter::counter_offset());
 983   const Address access_flags      (rbx, Method::access_flags_offset());
 984 
 985   // rsi/r13 == state/locals rdi == prevstate
 986   const Register locals = rdi;
 987 
 988   // get parameter size (always needed)
 989   __ load_unsigned_short(rcx, size_of_parameters);
 990 
 991   // rbx: Method*
 992   // rcx: size of parameters
 993   __ pop(rax);                                       // get return address
 994   // for natives the size of locals is zero
 995 
 996   // compute beginning of parameters /locals
 997   __ lea(locals, Address(rsp, rcx, Address::times_ptr, -wordSize));
 998 
 999   // initialize fixed part of activation frame
1000 
1001   // Assumes rax = return address
1002 
1003   // allocate and initialize new interpreterState and method expression stack
1004   // IN(locals) ->  locals
1005   // IN(state) -> previous frame manager state (NULL from stub/c1/c2)
1006   // destroys rax, rcx, rdx
1007   // OUT (state) -> new interpreterState
 1008   // OUT(rsp) -> bottom of method's expression stack
1009 


1094         __ jcc(Assembler::zero, L);
1095         __ stop("method needs synchronization");
1096         __ bind(L);
1097       }
1098 #endif
1099   }
1100 
1101   // start execution
1102 
1103   // jvmti support
1104   __ notify_method_entry();
1105 
1106   // work registers
1107   const Register method = rbx;
1108   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rdi);
1109   const Register t      = InterpreterRuntime::SignatureHandlerGenerator::temp();    // rcx|rscratch1
1110 
1111   // allocate space for parameters
1112   __ movptr(method, STATE(_method));
1113   __ verify_method_ptr(method);
1114   __ load_unsigned_short(t, Address(method, Method::size_of_parameters_offset()));
1115   __ shll(t, 2);
1116 #ifdef _LP64
1117   __ subptr(rsp, t);
1118   __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
1119   __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
1120 #else
1121   __ addptr(t, 2*wordSize);     // allocate two more slots for JNIEnv and possible mirror
1122   __ subptr(rsp, t);
1123   __ andptr(rsp, -(StackAlignmentInBytes)); // gcc needs 16 byte aligned stacks to do XMM intrinsics
1124 #endif // _LP64
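
The rsp adjustment above reserves parameter space (shll(t, 2) scales the parameter count to bytes here), on LP64 additionally reserves the Win64 register-argument save area, and then rounds the stack pointer down to a 16-byte boundary. A small self-contained sketch of that arithmetic, with a 32-byte save area assumed in place of frame::arg_reg_save_area_bytes:

    #include <cstdint>
    #include <cassert>

    static uintptr_t align_down_16(uintptr_t sp) { return sp & ~(uintptr_t)15; }  // same effect as andptr(rsp, -16)

    int main() {
      uintptr_t sp = 0x7fff0038;      // pretend incoming stack pointer
      unsigned nparams = 3;           // from size_of_parameters
      sp -= nparams << 2;             // shll(t, 2): four bytes per parameter slot in this sketch
      sp -= 32;                       // frame::arg_reg_save_area_bytes, assumed 32 (Win64 shadow space)
      sp = align_down_16(sp);         // andptr(rsp, -16)
      assert(sp % 16 == 0);
      return 0;
    }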
1125 
1126   // get signature handler
1127     Label pending_exception_present;
1128 
1129   { Label L;
1130     __ movptr(t, Address(method, Method::signature_handler_offset()));
1131     __ testptr(t, t);
1132     __ jcc(Assembler::notZero, L);
1133     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), method, false);
1134     __ movptr(method, STATE(_method));


1683   Label dispatch_entry_2;
1684   __ movptr(rcx, sender_sp_on_entry);
1685   __ movptr(state, (int32_t)NULL_WORD);                              // no current activation
1686 
1687   __ jmp(dispatch_entry_2);
1688 
1689   const Register locals  = rdi;
1690 
1691   Label re_dispatch;
1692 
1693   __ bind(re_dispatch);
1694 
 1695   // save sender sp (doesn't include return address)
1696   __ lea(rcx, Address(rsp, wordSize));
1697 
1698   __ bind(dispatch_entry_2);
1699 
1700   // save sender sp
1701   __ push(rcx);
1702 
1703   const Address size_of_parameters(rbx, Method::size_of_parameters_offset());
1704   const Address size_of_locals    (rbx, Method::size_of_locals_offset());
1705   const Address access_flags      (rbx, Method::access_flags_offset());
1706 
1707   // const Address monitor_block_top (rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
1708   // const Address monitor_block_bot (rbp, frame::interpreter_frame_initial_sp_offset        * wordSize);
1709   // const Address monitor(rbp, frame::interpreter_frame_initial_sp_offset * wordSize - (int)sizeof(BasicObjectLock));
1710 
1711   // get parameter size (always needed)
1712   __ load_unsigned_short(rcx, size_of_parameters);
1713 
1714   // rbx: Method*
1715   // rcx: size of parameters
1716   __ load_unsigned_short(rdx, size_of_locals);                     // get size of locals in words
1717 
1718   __ subptr(rdx, rcx);                                             // rdx = no. of additional locals
1719 
1720   // see if we've got enough room on the stack for locals plus overhead.
1721   generate_stack_overflow_check();                                 // C++
1722 
1723   // c++ interpreter does not use stack banging or any implicit exceptions
1724   // leave for now to verify that check is proper.
1725   bang_stack_shadow_pages(false);
1726 
1727 
1728 
1729   // compute beginning of parameters (rdi)
1730   __ lea(locals, Address(rsp, rcx, Address::times_ptr, wordSize));
1731 
1732   // save sender's sp
1733   // __ movl(rcx, rsp);
1734 
1735   // get sender's sp
1736   __ pop(rcx);
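
At this point rcx holds the parameter count, rdx the number of additional local slots, and locals (rdi) points at the first parameter on the caller's expression stack. A small worked example of that arithmetic, assuming a 32-bit word size purely for illustration:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const unsigned wordSize = 4;    // assumption for this illustration
      uintptr_t rsp = 0x1000;         // stack pointer at the lea, after the sender sp was pushed
      unsigned nparams = 2;           // size_of_parameters
      unsigned nlocals = 5;           // size_of_locals (parameters plus extra slots)

      // lea(locals, Address(rsp, rcx, Address::times_ptr, wordSize))
      uintptr_t locals = rsp + nparams * wordSize + wordSize;

      // subptr(rdx, rcx): how many additional local slots still need stack room
      unsigned extra_locals = nlocals - nparams;

      printf("locals = %#lx, extra local slots = %u\n", (unsigned long)locals, extra_locals);
      return 0;
    }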


1972 
1973   // restore stack to what we had when we left (in case i2c extended it)
1974 
1975   __ movptr(rsp, STATE(_stack));
1976   __ lea(rsp, Address(rsp, wordSize));
1977 
1978   // If there is a pending exception then we don't really have a result to process
1979 
1980 #ifdef _LP64
1981   __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
1982 #else
1983   __ movptr(rcx, STATE(_thread));                       // get thread
1984   __ cmpptr(Address(rcx, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
1985 #endif // _LP64
1986   __ jcc(Assembler::notZero, return_with_exception);
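
The #ifdef above only changes where the thread pointer comes from: on LP64 it is permanently in r15_thread, while on 32-bit it is reloaded from STATE(_thread). The check itself is just a null test on the pending-exception slot, sketched here with a stand-in thread type:

    struct ThreadSketch { void* pending_exception; };   // illustrative, not the HotSpot Thread

    static bool has_pending_exception(const ThreadSketch* t) {
      // cmpptr(Address(thread, Thread::pending_exception_offset()), NULL_WORD); jcc(notZero, return_with_exception)
      return t->pending_exception != 0;
    }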
1987 
1988   // get method just executed
1989   __ movptr(rbx, STATE(_result._to_call._callee));
1990 
1991   // callee left args on top of expression stack, remove them
1992   __ load_unsigned_short(rcx, Address(rbx, Method::size_of_parameters_offset()));
1993   __ lea(rsp, Address(rsp, rcx, Address::times_ptr));
1994 
1995   __ movl(rcx, Address(rbx, Method::result_index_offset()));
1996   ExternalAddress tosca_to_stack((address)CppInterpreter::_tosca_to_stack);
1997   // Address index(noreg, rax, Address::times_ptr);
1998   __ movptr(rcx, ArrayAddress(tosca_to_stack, Address(noreg, rcx, Address::times_ptr)));
1999   // __ movl(rcx, Address(noreg, rcx, Address::times_ptr, int(AbstractInterpreter::_tosca_to_stack)));
2000   __ call(rcx);                                               // call result converter
2001   __ jmp(resume_interpreter);
2002 
2003   // An exception is being caught on return to a vanilla interpreter frame.
2004   // Empty the stack and resume interpreter
2005 
2006   __ bind(return_with_exception);
2007 
2008   // Exception present, empty stack
2009   __ movptr(rsp, STATE(_stack_base));
2010   __ jmp(resume_interpreter);
2011 
2012   // Return from interpreted method we return result appropriate to the caller (i.e. "recursive"


2142 
2143 
2144   // Call a new method. All we do is (temporarily) trim the expression stack
2145   // push a return address to bring us back to here and leap to the new entry.
2146 
2147   __ bind(call_method);
2148 
2149   // stack points to next free location and not top element on expression stack
2150   // method expects sp to be pointing to topmost element
2151 
2152   __ movptr(rsp, STATE(_stack));                                     // pop args to c++ interpreter, set sp to java stack top
2153   __ lea(rsp, Address(rsp, wordSize));
2154 
2155   __ movptr(rbx, STATE(_result._to_call._callee));                   // get method to execute
2156 
2157   // don't need a return address if reinvoking interpreter
2158 
2159   // Make it look like call_stub calling conventions
2160 
2161   // Get (potential) receiver
2162   __ load_unsigned_short(rcx, size_of_parameters);                   // get size of parameters in words
2163 
2164   ExternalAddress recursive(CAST_FROM_FN_PTR(address, RecursiveInterpreterActivation));
2165   __ pushptr(recursive.addr());                                      // make it look good in the debugger
2166 
2167   InternalAddress entry(entry_point);
2168   __ cmpptr(STATE(_result._to_call._callee_entry_point), entry.addr()); // returning to interpreter?
2169   __ jcc(Assembler::equal, re_dispatch);                             // yes
2170 
2171   __ pop(rax);                                                       // pop dummy address
2172 
2173 
2174   // get specialized entry
2175   __ movptr(rax, STATE(_result._to_call._callee_entry_point));
2176   // set sender SP
2177   __ mov(sender_sp_on_entry, rsp);
2178 
2179   // method uses specialized entry, push a return so we look like call stub setup
2180   // this path will handle fact that result is returned in registers and not
2181   // on the java stack.
2182 




 594   __ jcc(Assembler::aboveEqual, *overflow);
 595 
 596 }
 597 
 598 void InterpreterGenerator::generate_counter_overflow(Label* do_continue) {
 599 
 600   // C++ interpreter on entry
 601   // rsi/r13 - new interpreter state pointer
 602   // rbp - interpreter frame pointer
 603   // rbx - method
 604 
 605   // On return (i.e. jump to entry_point) [ back to invocation of interpreter ]
 606   // rbx, - method
 607   // rcx - rcvr (assuming there is one)
 608   // top of stack return address of interpreter caller
 609   // rsp - sender_sp
 610 
 611   // C++ interpreter only
 612   // rsi/r13 - previous interpreter state pointer
 613 
 614   // InterpreterRuntime::frequency_counter_overflow takes one argument
 615   // indicating if the counter overflow occurs at a backwards branch (non-NULL bcp).
 616   // The call returns the address of the verified entry point for the method or NULL
 617   // if the compilation did not complete (either went background or bailed out).
 618   __ movptr(rax, (int32_t)false);
 619   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), rax);
 620 
 621   // for c++ interpreter can rsi really be munged?
 622   __ lea(state, Address(rbp, -(int)sizeof(BytecodeInterpreter)));                               // restore state
 623   __ movptr(rbx, Address(state, byte_offset_of(BytecodeInterpreter, _method)));            // restore method
 624   __ movptr(rdi, Address(state, byte_offset_of(BytecodeInterpreter, _locals)));            // get locals pointer
 625 
 626   __ jmp(*do_continue, relocInfo::none);
 627 
 628 }
 629 
 630 void InterpreterGenerator::generate_stack_overflow_check(void) {
 631   // see if we've got enough room on the stack for locals plus overhead.
 632   // the expression stack grows down incrementally, so the normal guard
 633   // page mechanism will work for that.


 958 }
 959 
 960 //
 961 // C++ Interpreter stub for calling a native method.
 962 // This sets up a somewhat different looking stack for calling the native method
 963 // than the typical interpreter frame setup but still has the pointer to
 964 // an interpreter state.
 965 //
 966 
 967 address InterpreterGenerator::generate_native_entry(bool synchronized) {
 968   // determine code generation flags
 969   bool inc_counter  = UseCompiler || CountCompiledCalls;
 970 
 971   // rbx: Method*
 972   // rcx: receiver (unused)
 973   // rsi/r13: previous interpreter state (if called from C++ interpreter) must preserve
 974   //      in any case. If called via c1/c2/call_stub rsi/r13 is junk (to use) but harmless
 975   //      to save/restore.
 976   address entry_point = __ pc();
 977 
 978   const Address constMethod       (rbx, Method::const_offset());
 979   const Address invocation_counter(rbx, Method::invocation_counter_offset() + InvocationCounter::counter_offset());
 980   const Address access_flags      (rbx, Method::access_flags_offset());
 981 
 982   // rsi/r13 == state/locals rdi == prevstate
 983   const Register locals = rdi;
 984 
 985   // get parameter size (always needed)
 986   __ movptr(rcx, constMethod);
 987   __ load_unsigned_short(rcx, Address(rcx, ConstMethod::size_of_parameters_offset()));
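
This is the substance of the change: size_of_parameters is no longer read directly off the Method*, so each former single load becomes a load of the ConstMethod* via Method::const_offset() followed by the unsigned-short load from ConstMethod::size_of_parameters_offset(). In plain C++ terms, using illustrative struct layouts rather than the real ones:

    #include <cstdint>

    struct ConstMethodSketch { uint16_t size_of_parameters; };
    struct MethodSketch      { ConstMethodSketch* const_method; };

    static unsigned parameter_size(const MethodSketch* m) {
      // movptr(rcx, constMethod); load_unsigned_short(rcx, Address(rcx, ConstMethod::size_of_parameters_offset()))
      return m->const_method->size_of_parameters;   // one extra dependent load versus the old Method-resident field
    }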
 988 
 989   // rbx: Method*
 990   // rcx: size of parameters
 991   __ pop(rax);                                       // get return address
 992   // for natives the size of locals is zero
 993 
 994   // compute beginning of parameters /locals
 995   __ lea(locals, Address(rsp, rcx, Address::times_ptr, -wordSize));
 996 
 997   // initialize fixed part of activation frame
 998 
 999   // Assumes rax = return address
1000 
1001   // allocate and initialize new interpreterState and method expression stack
1002   // IN(locals) ->  locals
1003   // IN(state) -> previous frame manager state (NULL from stub/c1/c2)
1004   // destroys rax, rcx, rdx
1005   // OUT (state) -> new interpreterState
 1006   // OUT(rsp) -> bottom of method's expression stack
1007 


1092         __ jcc(Assembler::zero, L);
1093         __ stop("method needs synchronization");
1094         __ bind(L);
1095       }
1096 #endif
1097   }
1098 
1099   // start execution
1100 
1101   // jvmti support
1102   __ notify_method_entry();
1103 
1104   // work registers
1105   const Register method = rbx;
1106   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rdi);
1107   const Register t      = InterpreterRuntime::SignatureHandlerGenerator::temp();    // rcx|rscratch1
1108 
1109   // allocate space for parameters
1110   __ movptr(method, STATE(_method));
1111   __ verify_method_ptr(method);
1112   __ movptr(t, Address(method, Method::const_offset()));
1113   __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
1114   __ shll(t, 2);
1115 #ifdef _LP64
1116   __ subptr(rsp, t);
1117   __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
1118   __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
1119 #else
1120   __ addptr(t, 2*wordSize);     // allocate two more slots for JNIEnv and possible mirror
1121   __ subptr(rsp, t);
1122   __ andptr(rsp, -(StackAlignmentInBytes)); // gcc needs 16 byte aligned stacks to do XMM intrinsics
1123 #endif // _LP64
1124 
1125   // get signature handler
1126     Label pending_exception_present;
1127 
1128   { Label L;
1129     __ movptr(t, Address(method, Method::signature_handler_offset()));
1130     __ testptr(t, t);
1131     __ jcc(Assembler::notZero, L);
1132     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), method, false);
1133     __ movptr(method, STATE(_method));


1682   Label dispatch_entry_2;
1683   __ movptr(rcx, sender_sp_on_entry);
1684   __ movptr(state, (int32_t)NULL_WORD);                              // no current activation
1685 
1686   __ jmp(dispatch_entry_2);
1687 
1688   const Register locals  = rdi;
1689 
1690   Label re_dispatch;
1691 
1692   __ bind(re_dispatch);
1693 
 1694   // save sender sp (doesn't include return address)
1695   __ lea(rcx, Address(rsp, wordSize));
1696 
1697   __ bind(dispatch_entry_2);
1698 
1699   // save sender sp
1700   __ push(rcx);
1701 
1702   const Address constMethod       (rbx, Method::const_offset());
1703   const Address access_flags      (rbx, Method::access_flags_offset());
1704 
1705   // const Address monitor_block_top (rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
1706   // const Address monitor_block_bot (rbp, frame::interpreter_frame_initial_sp_offset        * wordSize);
1707   // const Address monitor(rbp, frame::interpreter_frame_initial_sp_offset * wordSize - (int)sizeof(BasicObjectLock));
1708 
1709   // get parameter size (always needed)
1710   __ movptr(rdx, constMethod);
1711   __ load_unsigned_short(rcx, Address(rdx, ConstMethod::size_of_parameters_offset()));
1712 
1713   // rbx: Method*
1714   // rcx: size of parameters
1715   // get size of locals in words
1716   __ load_unsigned_short(rdx, Address(rdx, ConstMethod::size_of_locals_offset()));
1717 
1718   __ subptr(rdx, rcx);                                             // rdx = no. of additional locals
1719 
1720   // see if we've got enough room on the stack for locals plus overhead.
1721   generate_stack_overflow_check();                                 // C++
1722 
1723   // c++ interpreter does not use stack banging or any implicit exceptions
1724   // leave for now to verify that check is proper.
1725   bang_stack_shadow_pages(false);
1726 
1727 
1728 
1729   // compute beginning of parameters (rdi)
1730   __ lea(locals, Address(rsp, rcx, Address::times_ptr, wordSize));
1731 
1732   // save sender's sp
1733   // __ movl(rcx, rsp);
1734 
1735   // get sender's sp
1736   __ pop(rcx);


1972 
1973   // restore stack to what we had when we left (in case i2c extended it)
1974 
1975   __ movptr(rsp, STATE(_stack));
1976   __ lea(rsp, Address(rsp, wordSize));
1977 
1978   // If there is a pending exception then we don't really have a result to process
1979 
1980 #ifdef _LP64
1981   __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
1982 #else
1983   __ movptr(rcx, STATE(_thread));                       // get thread
1984   __ cmpptr(Address(rcx, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
1985 #endif // _LP64
1986   __ jcc(Assembler::notZero, return_with_exception);
1987 
1988   // get method just executed
1989   __ movptr(rbx, STATE(_result._to_call._callee));
1990 
1991   // callee left args on top of expression stack, remove them
1992   __ movptr(rcx, Address(rbx, Method::const_offset()));
1993   __ load_unsigned_short(rcx, Address(rcx, ConstMethod::size_of_parameters_offset()));
1994 
1995   __ lea(rsp, Address(rsp, rcx, Address::times_ptr));
1996 
1997   __ movl(rcx, Address(rbx, Method::result_index_offset()));
1998   ExternalAddress tosca_to_stack((address)CppInterpreter::_tosca_to_stack);
1999   // Address index(noreg, rax, Address::times_ptr);
2000   __ movptr(rcx, ArrayAddress(tosca_to_stack, Address(noreg, rcx, Address::times_ptr)));
2001   // __ movl(rcx, Address(noreg, rcx, Address::times_ptr, int(AbstractInterpreter::_tosca_to_stack)));
2002   __ call(rcx);                                               // call result converter
2003   __ jmp(resume_interpreter);
2004 
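
The converter call above is a table-indexed dispatch: result_index selects a per-type stub from CppInterpreter::_tosca_to_stack, which moves the register (TOSCA, top-of-stack cached) result onto the Java expression stack before the interpreter resumes. A minimal stand-alone sketch of that pattern, with dummy converters in place of the real stubs:

    typedef void (*ResultConverter)();

    static void convert_int_result()    {}
    static void convert_long_result()   {}
    static void convert_object_result() {}

    static ResultConverter converters[] = {
      convert_int_result, convert_long_result, convert_object_result
    };

    static void store_result_on_java_stack(int result_index) {
      ResultConverter conv = converters[result_index];   // movptr(rcx, ArrayAddress(tosca_to_stack, ...))
      conv();                                            // call(rcx): push the register result onto the expression stack
    }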
2005   // An exception is being caught on return to a vanilla interpreter frame.
2006   // Empty the stack and resume interpreter
2007 
2008   __ bind(return_with_exception);
2009 
2010   // Exception present, empty stack
2011   __ movptr(rsp, STATE(_stack_base));
2012   __ jmp(resume_interpreter);
2013 
2014   // Return from interpreted method we return result appropriate to the caller (i.e. "recursive"


2144 
2145 
2146   // Call a new method. All we do is (temporarily) trim the expression stack
2147   // push a return address to bring us back to here and leap to the new entry.
2148 
2149   __ bind(call_method);
2150 
2151   // stack points to next free location and not top element on expression stack
2152   // method expects sp to be pointing to topmost element
2153 
2154   __ movptr(rsp, STATE(_stack));                                     // pop args to c++ interpreter, set sp to java stack top
2155   __ lea(rsp, Address(rsp, wordSize));
2156 
2157   __ movptr(rbx, STATE(_result._to_call._callee));                   // get method to execute
2158 
2159   // don't need a return address if reinvoking interpreter
2160 
2161   // Make it look like call_stub calling conventions
2162 
2163   // Get (potential) receiver
2164   // get size of parameters in words
2165   __ movptr(rcx, constMethod);
2166   __ load_unsigned_short(rcx, Address(rcx, ConstMethod::size_of_parameters_offset()));
2167 
2168   ExternalAddress recursive(CAST_FROM_FN_PTR(address, RecursiveInterpreterActivation));
2169   __ pushptr(recursive.addr());                                      // make it look good in the debugger
2170 
2171   InternalAddress entry(entry_point);
2172   __ cmpptr(STATE(_result._to_call._callee_entry_point), entry.addr()); // returning to interpreter?
2173   __ jcc(Assembler::equal, re_dispatch);                             // yes
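
The comparison above decides between re-entering this interpreter activation (re_dispatch) and jumping to a specialized entry point; the RecursiveInterpreterActivation address pushed just before is there so stack walks and debuggers see a plausible return address. Sketched as ordinary control flow with stand-in names:

    typedef void (*EntryPoint)();

    static void interpret_again() {}   // stands in for jumping back to re_dispatch

    static void dispatch_callee(EntryPoint callee_entry, EntryPoint interpreter_entry) {
      if (callee_entry == interpreter_entry) {
        interpret_again();             // jcc(equal, re_dispatch): reuse this frame manager activation
      } else {
        callee_entry();                // pop the dummy address and jump to the specialized entry
      }
    }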
2174 
2175   __ pop(rax);                                                       // pop dummy address
2176 
2177 
2178   // get specialized entry
2179   __ movptr(rax, STATE(_result._to_call._callee_entry_point));
2180   // set sender SP
2181   __ mov(sender_sp_on_entry, rsp);
2182 
2183   // method uses specialized entry, push a return so we look like call stub setup
2184   // this path will handle fact that result is returned in registers and not
2185   // on the java stack.
2186