src/cpu/x86/vm/cppInterpreter_x86.cpp

Print this page




 594   __ jcc(Assembler::aboveEqual, *overflow);
 595 
 596 }
 597 
 598 void InterpreterGenerator::generate_counter_overflow(Label* do_continue) {
 599 
      // Invocation-counter overflow path: call into the VM so it can trigger
      // (re)compilation of the method, then restore the interpreter registers
      // and resume at *do_continue.
 600   // C++ interpreter on entry
 601   // rsi/r13 - new interpreter state pointer
 602   // rbp - interpreter frame pointer
 603   // rbx - method
 604 
 605   // On return (i.e. jump to entry_point) [ back to invocation of interpreter ]
 606   // rbx, - method
 607   // rcx - rcvr (assuming there is one)
 608   // top of stack return address of interpreter caller
 609   // rsp - sender_sp
 610 
 611   // C++ interpreter only
 612   // rsi/r13 - previous interpreter state pointer
 613 
      // NOTE(review): size_of_parameters is not referenced anywhere below in
      // this function — looks like a dead declaration; confirm and remove.
 614   const Address size_of_parameters(rbx, Method::size_of_parameters_offset());
 615 
 616   // InterpreterRuntime::frequency_counter_overflow takes one argument
 617   // indicating if the counter overflow occurs at a backwards branch (non-NULL bcp).
 618   // The call returns the address of the verified entry point for the method or NULL
 619   // if the compilation did not complete (either went background or bailed out).
      // Passing false here => overflow occurred at method invocation, not at a backedge.
 620   __ movptr(rax, (int32_t)false);
 621   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), rax);
 622 
 623   // for c++ interpreter can rsi really be munged?
      // Re-derive state from rbp (the interpreterState sits immediately below the
      // frame pointer) in case the VM call clobbered it, then reload method/locals.
 624   __ lea(state, Address(rbp, -(int)sizeof(BytecodeInterpreter)));                               // restore state
 625   __ movptr(rbx, Address(state, byte_offset_of(BytecodeInterpreter, _method)));            // restore method
 626   __ movptr(rdi, Address(state, byte_offset_of(BytecodeInterpreter, _locals)));            // get locals pointer
 627 
 628   __ jmp(*do_continue, relocInfo::none);
 629 
 630 }
 631 
 632 void InterpreterGenerator::generate_stack_overflow_check(void) {
 633   // see if we've got enough room on the stack for locals plus overhead.
 634   // the expression stack grows down incrementally, so the normal guard
 635   // page mechanism will work for that.


 960 }
 961 
 962 //
 963 // C++ Interpreter stub for calling a native method.
 964 // This sets up a somewhat different looking stack for calling the native method
 965 // than the typical interpreter frame setup but still has the pointer to
 966 // an interpreter state.
 967 //
 968 
 969 address InterpreterGenerator::generate_native_entry(bool synchronized) {
 970   // determine code generation flags
 971   bool inc_counter  = UseCompiler || CountCompiledCalls;
 972 
 973   // rbx: Method*
 974   // rcx: receiver (unused)
 975   // rsi/r13: previous interpreter state (if called from C++ interpreter) must preserve
 976   //      in any case. If called via c1/c2/call_stub rsi/r13 is junk (to use) but harmless
 977   //      to save/restore.
 978   address entry_point = __ pc();
 979 
 980   const Address size_of_parameters(rbx, Method::size_of_parameters_offset());
 981   const Address size_of_locals    (rbx, Method::size_of_locals_offset());
 982   const Address invocation_counter(rbx, Method::invocation_counter_offset() + InvocationCounter::counter_offset());
 983   const Address access_flags      (rbx, Method::access_flags_offset());

 984 
 985   // rsi/r13 == state/locals rdi == prevstate
 986   const Register locals = rdi;
 987 
 988   // get parameter size (always needed)

 989   __ load_unsigned_short(rcx, size_of_parameters);
 990 
 991   // rbx: Method*
 992   // rcx: size of parameters
 993   __ pop(rax);                                       // get return address
 994   // for natives the size of locals is zero
 995 
 996   // compute beginning of parameters /locals

 997   __ lea(locals, Address(rsp, rcx, Address::times_ptr, -wordSize));
 998 
 999   // initialize fixed part of activation frame
1000 
1001   // Assumes rax = return address
1002 
1003   // allocate and initialize new interpreterState and method expression stack
1004   // IN(locals) ->  locals
1005   // IN(state) -> previous frame manager state (NULL from stub/c1/c2)
1006   // destroys rax, rcx, rdx
1007   // OUT (state) -> new interpreterState
1008   // OUT(rsp) -> bottom of methods expression stack
1009 
1010   // save sender_sp
1011   __ mov(rcx, sender_sp_on_entry);
1012   // start with NULL previous state
1013   __ movptr(state, (int32_t)NULL_WORD);
1014   generate_compute_interpreter_state(state, locals, rcx, true);
1015 
1016 #ifdef ASSERT


1090 #ifdef ASSERT
1091       { Label L;
1092         __ movl(rax, access_flags);
1093         __ testl(rax, JVM_ACC_SYNCHRONIZED);
1094         __ jcc(Assembler::zero, L);
1095         __ stop("method needs synchronization");
1096         __ bind(L);
1097       }
1098 #endif
1099   }
1100 
1101   // start execution
1102 
1103   // jvmti support
1104   __ notify_method_entry();
1105 
1106   // work registers
1107   const Register method = rbx;
1108   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rdi);
1109   const Register t      = InterpreterRuntime::SignatureHandlerGenerator::temp();    // rcx|rscratch1


1110 
1111   // allocate space for parameters
1112   __ movptr(method, STATE(_method));
1113   __ verify_method_ptr(method);
1114   __ load_unsigned_short(t, Address(method, Method::size_of_parameters_offset()));

1115   __ shll(t, 2);
1116 #ifdef _LP64
1117   __ subptr(rsp, t);
1118   __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
1119   __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
1120 #else
1121   __ addptr(t, 2*wordSize);     // allocate two more slots for JNIEnv and possible mirror
1122   __ subptr(rsp, t);
1123   __ andptr(rsp, -(StackAlignmentInBytes)); // gcc needs 16 byte aligned stacks to do XMM intrinsics
1124 #endif // _LP64
1125 
1126   // get signature handler
1127     Label pending_exception_present;
1128 
1129   { Label L;
1130     __ movptr(t, Address(method, Method::signature_handler_offset()));
1131     __ testptr(t, t);
1132     __ jcc(Assembler::notZero, L);
1133     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), method, false);
1134     __ movptr(method, STATE(_method));


1683   Label dispatch_entry_2;
1684   __ movptr(rcx, sender_sp_on_entry);
1685   __ movptr(state, (int32_t)NULL_WORD);                              // no current activation
1686 
1687   __ jmp(dispatch_entry_2);
1688 
1689   const Register locals  = rdi;
1690 
1691   Label re_dispatch;
1692 
1693   __ bind(re_dispatch);
1694 
 1695   // save sender sp (doesn't include return address)
1696   __ lea(rcx, Address(rsp, wordSize));
1697 
1698   __ bind(dispatch_entry_2);
1699 
1700   // save sender sp
1701   __ push(rcx);
1702 
1703   const Address size_of_parameters(rbx, Method::size_of_parameters_offset());
1704   const Address size_of_locals    (rbx, Method::size_of_locals_offset());
1705   const Address access_flags      (rbx, Method::access_flags_offset());


1706 
1707   // const Address monitor_block_top (rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
1708   // const Address monitor_block_bot (rbp, frame::interpreter_frame_initial_sp_offset        * wordSize);
1709   // const Address monitor(rbp, frame::interpreter_frame_initial_sp_offset * wordSize - (int)sizeof(BasicObjectLock));
1710 
1711   // get parameter size (always needed)

1712   __ load_unsigned_short(rcx, size_of_parameters);
1713 
1714   // rbx: Method*
1715   // rcx: size of parameters
1716   __ load_unsigned_short(rdx, size_of_locals);                     // get size of locals in words
1717 
1718   __ subptr(rdx, rcx);                                             // rdx = no. of additional locals
1719 
1720   // see if we've got enough room on the stack for locals plus overhead.
1721   generate_stack_overflow_check();                                 // C++
1722 
1723   // c++ interpreter does not use stack banging or any implicit exceptions
1724   // leave for now to verify that check is proper.
1725   bang_stack_shadow_pages(false);
1726 
1727 
1728 
1729   // compute beginning of parameters (rdi)
1730   __ lea(locals, Address(rsp, rcx, Address::times_ptr, wordSize));
1731 


1972 
1973   // restore stack to what we had when we left (in case i2c extended it)
1974 
1975   __ movptr(rsp, STATE(_stack));
1976   __ lea(rsp, Address(rsp, wordSize));
1977 
1978   // If there is a pending exception then we don't really have a result to process
1979 
1980 #ifdef _LP64
1981   __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
1982 #else
1983   __ movptr(rcx, STATE(_thread));                       // get thread
1984   __ cmpptr(Address(rcx, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
1985 #endif // _LP64
1986   __ jcc(Assembler::notZero, return_with_exception);
1987 
1988   // get method just executed
1989   __ movptr(rbx, STATE(_result._to_call._callee));
1990 
1991   // callee left args on top of expression stack, remove them
1992   __ load_unsigned_short(rcx, Address(rbx, Method::size_of_parameters_offset()));


1993   __ lea(rsp, Address(rsp, rcx, Address::times_ptr));
1994 
1995   __ movl(rcx, Address(rbx, Method::result_index_offset()));
1996   ExternalAddress tosca_to_stack((address)CppInterpreter::_tosca_to_stack);
1997   // Address index(noreg, rax, Address::times_ptr);
1998   __ movptr(rcx, ArrayAddress(tosca_to_stack, Address(noreg, rcx, Address::times_ptr)));
1999   // __ movl(rcx, Address(noreg, rcx, Address::times_ptr, int(AbstractInterpreter::_tosca_to_stack)));
2000   __ call(rcx);                                               // call result converter
2001   __ jmp(resume_interpreter);
2002 
2003   // An exception is being caught on return to a vanilla interpreter frame.
2004   // Empty the stack and resume interpreter
2005 
2006   __ bind(return_with_exception);
2007 
2008   // Exception present, empty stack
2009   __ movptr(rsp, STATE(_stack_base));
2010   __ jmp(resume_interpreter);
2011 
2012   // Return from interpreted method we return result appropriate to the caller (i.e. "recursive"


2142 
2143 
2144   // Call a new method. All we do is (temporarily) trim the expression stack
2145   // push a return address to bring us back to here and leap to the new entry.
2146 
2147   __ bind(call_method);
2148 
2149   // stack points to next free location and not top element on expression stack
2150   // method expects sp to be pointing to topmost element
2151 
2152   __ movptr(rsp, STATE(_stack));                                     // pop args to c++ interpreter, set sp to java stack top
2153   __ lea(rsp, Address(rsp, wordSize));
2154 
2155   __ movptr(rbx, STATE(_result._to_call._callee));                   // get method to execute
2156 
2157   // don't need a return address if reinvoking interpreter
2158 
2159   // Make it look like call_stub calling conventions
2160 
2161   // Get (potential) receiver
2162   __ load_unsigned_short(rcx, size_of_parameters);                   // get size of parameters in words


2163 
2164   ExternalAddress recursive(CAST_FROM_FN_PTR(address, RecursiveInterpreterActivation));
2165   __ pushptr(recursive.addr());                                      // make it look good in the debugger
2166 
2167   InternalAddress entry(entry_point);
2168   __ cmpptr(STATE(_result._to_call._callee_entry_point), entry.addr()); // returning to interpreter?
2169   __ jcc(Assembler::equal, re_dispatch);                             // yes
2170 
2171   __ pop(rax);                                                       // pop dummy address
2172 
2173 
2174   // get specialized entry
2175   __ movptr(rax, STATE(_result._to_call._callee_entry_point));
2176   // set sender SP
2177   __ mov(sender_sp_on_entry, rsp);
2178 
2179   // method uses specialized entry, push a return so we look like call stub setup
2180   // this path will handle fact that result is returned in registers and not
2181   // on the java stack.
2182 




 594   __ jcc(Assembler::aboveEqual, *overflow);
 595 
 596 }
 597 
 598 void InterpreterGenerator::generate_counter_overflow(Label* do_continue) {
 599 
      // Invocation-counter overflow path: call into the VM so it can trigger
      // (re)compilation of the method, then restore the interpreter registers
      // and resume at *do_continue.
 600   // C++ interpreter on entry
 601   // rsi/r13 - new interpreter state pointer
 602   // rbp - interpreter frame pointer
 603   // rbx - method
 604 
 605   // On return (i.e. jump to entry_point) [ back to invocation of interpreter ]
 606   // rbx, - method
 607   // rcx - rcvr (assuming there is one)
 608   // top of stack return address of interpreter caller
 609   // rsp - sender_sp
 610 
 611   // C++ interpreter only
 612   // rsi/r13 - previous interpreter state pointer
 613 


 614   // InterpreterRuntime::frequency_counter_overflow takes one argument
 615   // indicating if the counter overflow occurs at a backwards branch (non-NULL bcp).
 616   // The call returns the address of the verified entry point for the method or NULL
 617   // if the compilation did not complete (either went background or bailed out).
      // Passing false here => overflow occurred at method invocation, not at a backedge.
 618   __ movptr(rax, (int32_t)false);
 619   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), rax);
 620 
 621   // for c++ interpreter can rsi really be munged?
      // Re-derive state from rbp (the interpreterState sits immediately below the
      // frame pointer) in case the VM call clobbered it, then reload method/locals.
 622   __ lea(state, Address(rbp, -(int)sizeof(BytecodeInterpreter)));                               // restore state
 623   __ movptr(rbx, Address(state, byte_offset_of(BytecodeInterpreter, _method)));            // restore method
 624   __ movptr(rdi, Address(state, byte_offset_of(BytecodeInterpreter, _locals)));            // get locals pointer
 625 
 626   __ jmp(*do_continue, relocInfo::none);
 627 
 628 }
 629 
 630 void InterpreterGenerator::generate_stack_overflow_check(void) {
 631   // see if we've got enough room on the stack for locals plus overhead.
 632   // the expression stack grows down incrementally, so the normal guard
 633   // page mechanism will work for that.


 958 }
 959 
 960 //
 961 // C++ Interpreter stub for calling a native method.
 962 // This sets up a somewhat different looking stack for calling the native method
 963 // than the typical interpreter frame setup but still has the pointer to
 964 // an interpreter state.
 965 //
 966 
 967 address InterpreterGenerator::generate_native_entry(bool synchronized) {
 968   // determine code generation flags
 969   bool inc_counter  = UseCompiler || CountCompiledCalls;
 970 
 971   // rbx: Method*
 972   // rcx: receiver (unused)
 973   // rsi/r13: previous interpreter state (if called from C++ interpreter) must preserve
 974   //      in any case. If called via c1/c2/call_stub rsi/r13 is junk (to use) but harmless
 975   //      to save/restore.
 976   address entry_point = __ pc();
 977 
 978   const Address constMethod       (rbx, Method::const_offset());

 979   const Address invocation_counter(rbx, Method::invocation_counter_offset() + InvocationCounter::counter_offset());
 980   const Address access_flags      (rbx, Method::access_flags_offset());
 981   const Address size_of_parameters(rcx, ConstMethod::size_of_parameters_offset());
 982 
 983   // rsi/r13 == state/locals rdi == prevstate
 984   const Register locals = rdi;
 985 
 986   // get parameter size (always needed)
 987   __ movptr(rcx, constMethod);
 988   __ load_unsigned_short(rcx, size_of_parameters);
 989 
 990   // rbx: Method*
 991   // rcx: size of parameters
 992   __ pop(rax);                                       // get return address
 993   // for natives the size of locals is zero
 994 
 995   // compute beginning of parameters /locals
 996 
 997   __ lea(locals, Address(rsp, rcx, Address::times_ptr, -wordSize));
 998 
 999   // initialize fixed part of activation frame
1000 
1001   // Assumes rax = return address
1002 
1003   // allocate and initialize new interpreterState and method expression stack
1004   // IN(locals) ->  locals
1005   // IN(state) -> previous frame manager state (NULL from stub/c1/c2)
1006   // destroys rax, rcx, rdx
1007   // OUT (state) -> new interpreterState
1008   // OUT(rsp) -> bottom of methods expression stack
1009 
1010   // save sender_sp
1011   __ mov(rcx, sender_sp_on_entry);
1012   // start with NULL previous state
1013   __ movptr(state, (int32_t)NULL_WORD);
1014   generate_compute_interpreter_state(state, locals, rcx, true);
1015 
1016 #ifdef ASSERT


1090 #ifdef ASSERT
1091       { Label L;
1092         __ movl(rax, access_flags);
1093         __ testl(rax, JVM_ACC_SYNCHRONIZED);
1094         __ jcc(Assembler::zero, L);
1095         __ stop("method needs synchronization");
1096         __ bind(L);
1097       }
1098 #endif
1099   }
1100 
1101   // start execution
1102 
1103   // jvmti support
1104   __ notify_method_entry();
1105 
1106   // work registers
1107   const Register method = rbx;
1108   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rdi);
1109   const Register t      = InterpreterRuntime::SignatureHandlerGenerator::temp();    // rcx|rscratch1
1110   const Address constMethod       (method, Method::const_offset());
1111   const Address size_of_parameters(t, ConstMethod::size_of_parameters_offset());
1112 
1113   // allocate space for parameters
1114   __ movptr(method, STATE(_method));
1115   __ verify_method_ptr(method);
1116   __ movptr(t, constMethod);
1117   __ load_unsigned_short(t, size_of_parameters);
1118   __ shll(t, 2);
1119 #ifdef _LP64
1120   __ subptr(rsp, t);
1121   __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
1122   __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
1123 #else
1124   __ addptr(t, 2*wordSize);     // allocate two more slots for JNIEnv and possible mirror
1125   __ subptr(rsp, t);
1126   __ andptr(rsp, -(StackAlignmentInBytes)); // gcc needs 16 byte aligned stacks to do XMM intrinsics
1127 #endif // _LP64
1128 
1129   // get signature handler
1130     Label pending_exception_present;
1131 
1132   { Label L;
1133     __ movptr(t, Address(method, Method::signature_handler_offset()));
1134     __ testptr(t, t);
1135     __ jcc(Assembler::notZero, L);
1136     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), method, false);
1137     __ movptr(method, STATE(_method));


1686   Label dispatch_entry_2;
1687   __ movptr(rcx, sender_sp_on_entry);
1688   __ movptr(state, (int32_t)NULL_WORD);                              // no current activation
1689 
1690   __ jmp(dispatch_entry_2);
1691 
1692   const Register locals  = rdi;
1693 
1694   Label re_dispatch;
1695 
1696   __ bind(re_dispatch);
1697 
 1698   // save sender sp (doesn't include return address)
1699   __ lea(rcx, Address(rsp, wordSize));
1700 
1701   __ bind(dispatch_entry_2);
1702 
1703   // save sender sp
1704   __ push(rcx);
1705 
1706   const Address constMethod       (rbx, Method::const_offset());

1707   const Address access_flags      (rbx, Method::access_flags_offset());
1708   const Address size_of_parameters(rdx, ConstMethod::size_of_parameters_offset());
1709   const Address size_of_locals    (rdx, ConstMethod::size_of_locals_offset());
1710 
1711   // const Address monitor_block_top (rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
1712   // const Address monitor_block_bot (rbp, frame::interpreter_frame_initial_sp_offset        * wordSize);
1713   // const Address monitor(rbp, frame::interpreter_frame_initial_sp_offset * wordSize - (int)sizeof(BasicObjectLock));
1714 
1715   // get parameter size (always needed)
1716   __ movptr(rdx, constMethod);
1717   __ load_unsigned_short(rcx, size_of_parameters);
1718 
1719   // rbx: Method*
1720   // rcx: size of parameters
1721   __ load_unsigned_short(rdx, size_of_locals);                     // get size of locals in words
1722 
1723   __ subptr(rdx, rcx);                                             // rdx = no. of additional locals
1724 
1725   // see if we've got enough room on the stack for locals plus overhead.
1726   generate_stack_overflow_check();                                 // C++
1727 
1728   // c++ interpreter does not use stack banging or any implicit exceptions
1729   // leave for now to verify that check is proper.
1730   bang_stack_shadow_pages(false);
1731 
1732 
1733 
1734   // compute beginning of parameters (rdi)
1735   __ lea(locals, Address(rsp, rcx, Address::times_ptr, wordSize));
1736 


1977 
1978   // restore stack to what we had when we left (in case i2c extended it)
1979 
1980   __ movptr(rsp, STATE(_stack));
1981   __ lea(rsp, Address(rsp, wordSize));
1982 
1983   // If there is a pending exception then we don't really have a result to process
1984 
1985 #ifdef _LP64
1986   __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
1987 #else
1988   __ movptr(rcx, STATE(_thread));                       // get thread
1989   __ cmpptr(Address(rcx, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
1990 #endif // _LP64
1991   __ jcc(Assembler::notZero, return_with_exception);
1992 
1993   // get method just executed
1994   __ movptr(rbx, STATE(_result._to_call._callee));
1995 
1996   // callee left args on top of expression stack, remove them
1997   __ movptr(rcx, constMethod);
1998   __ load_unsigned_short(rcx, Address(rcx, ConstMethod::size_of_parameters_offset()));
1999 
2000   __ lea(rsp, Address(rsp, rcx, Address::times_ptr));
2001 
2002   __ movl(rcx, Address(rbx, Method::result_index_offset()));
2003   ExternalAddress tosca_to_stack((address)CppInterpreter::_tosca_to_stack);
2004   // Address index(noreg, rax, Address::times_ptr);
2005   __ movptr(rcx, ArrayAddress(tosca_to_stack, Address(noreg, rcx, Address::times_ptr)));
2006   // __ movl(rcx, Address(noreg, rcx, Address::times_ptr, int(AbstractInterpreter::_tosca_to_stack)));
2007   __ call(rcx);                                               // call result converter
2008   __ jmp(resume_interpreter);
2009 
2010   // An exception is being caught on return to a vanilla interpreter frame.
2011   // Empty the stack and resume interpreter
2012 
2013   __ bind(return_with_exception);
2014 
2015   // Exception present, empty stack
2016   __ movptr(rsp, STATE(_stack_base));
2017   __ jmp(resume_interpreter);
2018 
2019   // Return from interpreted method we return result appropriate to the caller (i.e. "recursive"


2149 
2150 
2151   // Call a new method. All we do is (temporarily) trim the expression stack
2152   // push a return address to bring us back to here and leap to the new entry.
2153 
2154   __ bind(call_method);
2155 
2156   // stack points to next free location and not top element on expression stack
2157   // method expects sp to be pointing to topmost element
2158 
2159   __ movptr(rsp, STATE(_stack));                                     // pop args to c++ interpreter, set sp to java stack top
2160   __ lea(rsp, Address(rsp, wordSize));
2161 
2162   __ movptr(rbx, STATE(_result._to_call._callee));                   // get method to execute
2163 
2164   // don't need a return address if reinvoking interpreter
2165 
2166   // Make it look like call_stub calling conventions
2167 
2168   // Get (potential) receiver
2169   // get size of parameters in words
2170   __ movptr(rcx, constMethod);
2171   __ load_unsigned_short(rcx, Address(rcx, ConstMethod::size_of_parameters_offset()));
2172 
2173   ExternalAddress recursive(CAST_FROM_FN_PTR(address, RecursiveInterpreterActivation));
2174   __ pushptr(recursive.addr());                                      // make it look good in the debugger
2175 
2176   InternalAddress entry(entry_point);
2177   __ cmpptr(STATE(_result._to_call._callee_entry_point), entry.addr()); // returning to interpreter?
2178   __ jcc(Assembler::equal, re_dispatch);                             // yes
2179 
2180   __ pop(rax);                                                       // pop dummy address
2181 
2182 
2183   // get specialized entry
2184   __ movptr(rax, STATE(_result._to_call._callee_entry_point));
2185   // set sender SP
2186   __ mov(sender_sp_on_entry, rsp);
2187 
2188   // method uses specialized entry, push a return so we look like call stub setup
2189   // this path will handle fact that result is returned in registers and not
2190   // on the java stack.
2191