src/cpu/x86/vm/cppInterpreter_x86.cpp

rev 6068 : 8036146: make CPP interpreter build again
Summary: fix build of CPP interpreter on x86 and sparc
Reviewed-by:


 557 
 558 }
 559 
 560 // Helpers for commoning out cases in the various types of method entries.
 561 //
 562 
 563 // increment invocation count & check for overflow
 564 //
 565 // Note: checking for negative value instead of overflow
 566 //       so we have a 'sticky' overflow test
 567 //
 568 // rbx,: method
 569 // rcx: invocation counter
 570 //
 571 void InterpreterGenerator::generate_counter_incr(Label* overflow, Label* profile_method, Label* profile_method_continue) {
 572   Label done;
 573   const Address invocation_counter(rax,
 574                 MethodCounters::invocation_counter_offset() +
 575                 InvocationCounter::counter_offset());
 576   const Address backedge_counter  (rax,
 577                 MethodCounter::backedge_counter_offset() +
 578                 InvocationCounter::counter_offset());
 579 
 580   __ get_method_counters(rbx, rax, done);
 581 
 582   if (ProfileInterpreter) {
 583     __ incrementl(Address(rax,
 584             MethodCounters::interpreter_invocation_counter_offset()));
 585   }
 586   // Update standard invocation counters
 587   __ movl(rcx, invocation_counter);
 588   __ increment(rcx, InvocationCounter::count_increment);
 589   __ movl(invocation_counter, rcx);             // save invocation count
 590 
 591   __ movl(rax, backedge_counter);               // load backedge counter
 592   __ andl(rax, InvocationCounter::count_mask_value);  // mask out the status bits
 593 
 594   __ addl(rcx, rax);                            // add both counters
 595 
 596   // profile_method is non-null only for interpreted methods, so
 597   // profile_method != NULL == !native_call
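
The arithmetic above relies on the InvocationCounter layout: the count lives in the upper bits of the counter word and the status bits sit at the bottom, so adding count_increment bumps the count without disturbing the flags, and masking with count_mask_value strips the flags before the invocation and backedge counts are summed. A minimal standalone sketch of that arithmetic, with illustrative constants rather than the real HotSpot definitions:

#include <stdint.h>
#include <stdio.h>

// Illustrative stand-ins for InvocationCounter's constants (assumed values).
static const int      kNonCountBits   = 3;                            // flag bits at the bottom
static const uint32_t kCountIncrement = 1u << kNonCountBits;          // bumps only the count field
static const uint32_t kCountMask      = ~((1u << kNonCountBits) - 1); // strips the flag bits

int main() {
  uint32_t invocation_counter = 0;
  uint32_t backedge_counter   = 5u << kNonCountBits;     // pretend five backedges were taken

  invocation_counter += kCountIncrement;                 // __ increment(rcx, count_increment)
  uint32_t backedges  = backedge_counter & kCountMask;   // __ andl(rax, count_mask_value)
  uint32_t combined   = invocation_counter + backedges;  // __ addl(rcx, rax)

  printf("combined count = %u\n", combined >> kNonCountBits);   // prints 6
  return 0;
}

The later overflow check tests the summed value itself rather than a one-shot overflow flag, so once the threshold is crossed the test keeps firing on every subsequent invocation; that is what the 'sticky' remark above refers to.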


 965 }
 966 
 967 //
 968 // C++ Interpreter stub for calling a native method.
 969 // This sets up a somewhat different looking stack for calling the native method
 970 // than the typical interpreter frame setup but still has the pointer to
 971 // an interpreter state.
 972 //
 973 
 974 address InterpreterGenerator::generate_native_entry(bool synchronized) {
 975   // determine code generation flags
 976   bool inc_counter  = UseCompiler || CountCompiledCalls;
 977 
 978   // rbx: Method*
 979   // rcx: receiver (unused)
 980   // rsi/r13: previous interpreter state (if called from C++ interpreter) must preserve
 981   //      in any case. If called via c1/c2/call_stub rsi/r13 is junk (to use) but harmless
 982   //      to save/restore.
 983   address entry_point = __ pc();
 984 
 985   const Address constMethod       (rbx, Method::const_offset());
 986   const Address access_flags      (rbx, Method::access_flags_offset());
 987   const Address size_of_parameters(rcx, ConstMethod::size_of_parameters_offset());
 988 
 989   // rsi/r13 == state/locals rdi == prevstate
 990   const Register locals = rdi;
 991 
 992   // get parameter size (always needed)



 993   __ movptr(rcx, constMethod);
 994   __ load_unsigned_short(rcx, size_of_parameters);

 995 
 996   // rbx: Method*
 997   // rcx: size of parameters
 998   __ pop(rax);                                       // get return address
 999   // for natives the size of locals is zero
1000 
1001   // compute beginning of parameters / locals
1002 
1003   __ lea(locals, Address(rsp, rcx, Address::times_ptr, -wordSize));
1004 
1005   // initialize fixed part of activation frame
1006 
1007   // Assumes rax = return address
1008 
1009   // allocate and initialize new interpreterState and method expression stack
1010   // IN(locals) ->  locals
1011   // IN(state) -> previous frame manager state (NULL from stub/c1/c2)
1012   // destroys rax, rcx, rdx
1013   // OUT (state) -> new interpreterState
1014   // OUT(rsp) -> bottom of the method's expression stack
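
For a native method there are no non-parameter locals, so the locals pointer can be derived directly from rsp and the parameter count: once the return address has been popped, the last pushed parameter sits at rsp and the first one sits (size_of_parameters - 1) words above it, which is what the lea above computes. A standalone sketch of that pointer arithmetic (not HotSpot code; the stack is modeled as a small array):

#include <stdint.h>
#include <stdio.h>

int main() {
  const int wordSize = (int) sizeof(intptr_t);
  intptr_t slots[8] = {0};

  // Simulate a caller pushing three parameters; the stack grows toward lower addresses.
  intptr_t* rsp = &slots[8];
  *--rsp = 111;   // parameter 0, pushed first, ends up farthest from rsp
  *--rsp = 222;   // parameter 1
  *--rsp = 333;   // parameter 2, pushed last, now at rsp

  int rcx = 3;    // size of parameters in words
  // lea(locals, Address(rsp, rcx, times_ptr, -wordSize))
  intptr_t* locals = (intptr_t*) ((char*) rsp + rcx * wordSize - wordSize);

  printf("locals[0] = %ld\n", (long) locals[0]);   // prints 111, i.e. parameter 0
  return 0;
}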


1094 #ifdef ASSERT
1095       { Label L;
1096         __ movl(rax, access_flags);
1097         __ testl(rax, JVM_ACC_SYNCHRONIZED);
1098         __ jcc(Assembler::zero, L);
1099         __ stop("method needs synchronization");
1100         __ bind(L);
1101       }
1102 #endif
1103   }
1104 
1105   // start execution
1106 
1107   // jvmti support
1108   __ notify_method_entry();
1109 
1110   // work registers
1111   const Register method = rbx;
1112   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rdi);
1113   const Register t      = InterpreterRuntime::SignatureHandlerGenerator::temp();    // rcx|rscratch1
1114   const Address constMethod       (method, Method::const_offset());
1115   const Address size_of_parameters(t, ConstMethod::size_of_parameters_offset());
1116 
1117   // allocate space for parameters
1118   __ movptr(method, STATE(_method));
1119   __ verify_method_ptr(method);



1120   __ movptr(t, constMethod);
1121   __ load_unsigned_short(t, size_of_parameters);

1122   __ shll(t, 2);
1123 #ifdef _LP64
1124   __ subptr(rsp, t);
1125   __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
1126   __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
1127 #else
1128   __ addptr(t, 2*wordSize);     // allocate two more slots for JNIEnv and possible mirror
1129   __ subptr(rsp, t);
1130   __ andptr(rsp, -(StackAlignmentInBytes)); // gcc needs 16 byte aligned stacks to do XMM intrinsics
1131 #endif // _LP64
1132 
1133   // get signature handler
1134     Label pending_exception_present;
1135 
1136   { Label L;
1137     __ movptr(t, Address(method, Method::signature_handler_offset()));
1138     __ testptr(t, t);
1139     __ jcc(Assembler::notZero, L);
1140     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), method, false);
1141     __ movptr(method, STATE(_method));
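
On LP64 the frame handed to the native code must be 16-byte aligned (and, on Windows, include the register-argument save area), which the subptr/andptr sequence above achieves by reserving the space and then rounding rsp down. A small self-contained sketch of that adjustment; the incoming rsp value and sizes below are made up for illustration:

#include <stdint.h>
#include <stdio.h>

int main() {
  const int arg_reg_save_area_bytes = 32;   // assumed Windows x64 shadow area size
  uintptr_t rsp = 0xffffe6c8u;              // assumed incoming stack pointer value
  uintptr_t t   = 5 * sizeof(void*);        // space for five parameter words

  rsp -= t;                                 // __ subptr(rsp, t)
  rsp -= arg_reg_save_area_bytes;           // __ subptr(rsp, frame::arg_reg_save_area_bytes)
  rsp &= ~(uintptr_t) 15;                   // __ andptr(rsp, -16)

  printf("aligned rsp = 0x%lx, low 4 bits = %lu\n",
         (unsigned long) rsp, (unsigned long) (rsp & 15));
  return 0;
}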


2204 
2205   __ jmp(unwind_and_forward);
2206 
2207   interpreter_frame_manager = entry_point;
2208   return entry_point;
2209 }
2210 
2211 address AbstractInterpreterGenerator::generate_method_entry(AbstractInterpreter::MethodKind kind) {
2212   // determine code generation flags
2213   bool synchronized = false;
2214   address entry_point = NULL;
2215 
2216   switch (kind) {
2217     case Interpreter::zerolocals             :                                                                             break;
2218     case Interpreter::zerolocals_synchronized: synchronized = true;                                                        break;
2219     case Interpreter::native                 : entry_point = ((InterpreterGenerator*)this)->generate_native_entry(false);  break;
2220     case Interpreter::native_synchronized    : entry_point = ((InterpreterGenerator*)this)->generate_native_entry(true);   break;
2221     case Interpreter::empty                  : entry_point = ((InterpreterGenerator*)this)->generate_empty_entry();        break;
2222     case Interpreter::accessor               : entry_point = ((InterpreterGenerator*)this)->generate_accessor_entry();     break;
2223     case Interpreter::abstract               : entry_point = ((InterpreterGenerator*)this)->generate_abstract_entry();     break;
2224     case Interpreter::method_handle          : entry_point = ((InterpreterGenerator*)this)->generate_method_handle_entry(); break;
2225 
2226     case Interpreter::java_lang_math_sin     : // fall thru
2227     case Interpreter::java_lang_math_cos     : // fall thru
2228     case Interpreter::java_lang_math_tan     : // fall thru
2229     case Interpreter::java_lang_math_abs     : // fall thru
2230     case Interpreter::java_lang_math_log     : // fall thru
2231     case Interpreter::java_lang_math_log10   : // fall thru
2232     case Interpreter::java_lang_math_sqrt    : entry_point = ((InterpreterGenerator*)this)->generate_math_entry(kind);     break;



2233     case Interpreter::java_lang_ref_reference_get
2234                                              : entry_point = ((InterpreterGenerator*)this)->generate_Reference_get_entry(); break;
2235     default                                  : ShouldNotReachHere();                                                       break;
2236   }
2237 
2238   if (entry_point) return entry_point;
2239 
2240   return ((InterpreterGenerator*)this)->generate_normal_entry(synchronized);
2241 
2242 }
2243 
2244 InterpreterGenerator::InterpreterGenerator(StubQueue* code)
2245  : CppInterpreterGenerator(code) {
2246    generate_all(); // down here so it can be "virtual"
2247 }
2248 
2249 // Deoptimization helpers for C++ interpreter
2250 
2251 // How much stack a method activation needs in words.
2252 int AbstractInterpreter::size_top_interpreter_activation(Method* method) {


2433     intptr_t* stack_base = (intptr_t*) ((intptr_t) monitor_base - monitor_size);
2434     /* +1 because stack is always prepushed */
2435     intptr_t* stack = (intptr_t*) ((intptr_t) stack_base - (tempcount + 1) * BytesPerWord);
2436 
2437 
2438     BytecodeInterpreter::layout_interpreterState(cur_state,
2439                                           caller,
2440                                           interpreter_frame,
2441                                           method,
2442                                           locals,
2443                                           stack,
2444                                           stack_base,
2445                                           monitor_base,
2446                                           frame_bottom,
2447                                           is_top_frame);
2448 
2449     // BytecodeInterpreter::pd_layout_interpreterState(cur_state, interpreter_return_address, interpreter_frame->fp());
2450   }
2451   return frame_size/BytesPerWord;
2452 }
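
The layout code stacks the frame pieces downward: the expression stack base sits monitor_size bytes below the monitor area, and the initial stack pointer lands (tempcount + 1) words lower still, one slot per live expression-stack temp plus the always-prepushed slot. A standalone sketch of the same arithmetic with made-up sizes:

#include <stdint.h>
#include <stdio.h>

int main() {
  const int BytesPerWord = (int) sizeof(intptr_t);
  intptr_t frame_words[64] = {0};

  intptr_t* monitor_base = &frame_words[64];   // assume the monitor area starts at the frame top
  int monitor_size = 2 * 3 * BytesPerWord;     // assume two monitors of three words each
  int tempcount    = 4;                        // assume four expression-stack temps

  intptr_t* stack_base = (intptr_t*) ((intptr_t) monitor_base - monitor_size);
  intptr_t* stack      = (intptr_t*) ((intptr_t) stack_base - (tempcount + 1) * BytesPerWord);

  printf("monitor area words: %ld, expression stack slots below the base: %ld\n",
         (long) (monitor_base - stack_base), (long) (stack_base - stack));
  return 0;
}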
2453 
2454 #endif // CC_INTERP (all)


 557 
 558 }
 559 
 560 // Helpers for commoning out cases in the various types of method entries.
 561 //
 562 
 563 // increment invocation count & check for overflow
 564 //
 565 // Note: checking for negative value instead of overflow
 566 //       so we have a 'sticky' overflow test
 567 //
 568 // rbx,: method
 569 // rcx: invocation counter
 570 //
 571 void InterpreterGenerator::generate_counter_incr(Label* overflow, Label* profile_method, Label* profile_method_continue) {
 572   Label done;
 573   const Address invocation_counter(rax,
 574                 MethodCounters::invocation_counter_offset() +
 575                 InvocationCounter::counter_offset());
 576   const Address backedge_counter  (rax,
 577                 MethodCounters::backedge_counter_offset() +
 578                 InvocationCounter::counter_offset());
 579 
 580   __ get_method_counters(rbx, rax, done);
 581 
 582   if (ProfileInterpreter) {
 583     __ incrementl(Address(rax,
 584             MethodCounters::interpreter_invocation_counter_offset()));
 585   }
 586   // Update standard invocation counters
 587   __ movl(rcx, invocation_counter);
 588   __ increment(rcx, InvocationCounter::count_increment);
 589   __ movl(invocation_counter, rcx);             // save invocation count
 590 
 591   __ movl(rax, backedge_counter);               // load backedge counter
 592   __ andl(rax, InvocationCounter::count_mask_value);  // mask out the status bits
 593 
 594   __ addl(rcx, rax);                            // add both counters
 595 
 596   // profile_method is non-null only for interpreted methods, so
 597   // profile_method != NULL == !native_call


 965 }
 966 
 967 //
 968 // C++ Interpreter stub for calling a native method.
 969 // This sets up a somewhat different looking stack for calling the native method
 970 // than the typical interpreter frame setup but still has the pointer to
 971 // an interpreter state.
 972 //
 973 
 974 address InterpreterGenerator::generate_native_entry(bool synchronized) {
 975   // determine code generation flags
 976   bool inc_counter  = UseCompiler || CountCompiledCalls;
 977 
 978   // rbx: Method*
 979   // rcx: receiver (unused)
 980   // rsi/r13: previous interpreter state (if called from C++ interpreter) must preserve
 981   //      in any case. If called via c1/c2/call_stub rsi/r13 is junk (to use) but harmless
 982   //      to save/restore.
 983   address entry_point = __ pc();
 984 

 985   const Address access_flags      (rbx, Method::access_flags_offset());

 986 
 987   // rsi/r13 == state/locals rdi == prevstate
 988   const Register locals = rdi;
 989 
 990   // get parameter size (always needed)
 991   {
 992     const Address constMethod       (rbx, Method::const_offset());
 993     const Address size_of_parameters(rcx, ConstMethod::size_of_parameters_offset());
 994     __ movptr(rcx, constMethod);
 995     __ load_unsigned_short(rcx, size_of_parameters);
 996   }
 997 
 998   // rbx: Method*
 999   // rcx: size of parameters
1000   __ pop(rax);                                       // get return address
1001   // for natives the size of locals is zero
1002 
1003   // compute beginning of parameters / locals
1004 
1005   __ lea(locals, Address(rsp, rcx, Address::times_ptr, -wordSize));
1006 
1007   // initialize fixed part of activation frame
1008 
1009   // Assumes rax = return address
1010 
1011   // allocate and initialize new interpreterState and method expression stack
1012   // IN(locals) ->  locals
1013   // IN(state) -> previous frame manager state (NULL from stub/c1/c2)
1014   // destroys rax, rcx, rdx
1015   // OUT (state) -> new interpreterState
1016   // OUT(rsp) -> bottom of the method's expression stack


1096 #ifdef ASSERT
1097       { Label L;
1098         __ movl(rax, access_flags);
1099         __ testl(rax, JVM_ACC_SYNCHRONIZED);
1100         __ jcc(Assembler::zero, L);
1101         __ stop("method needs synchronization");
1102         __ bind(L);
1103       }
1104 #endif
1105   }
1106 
1107   // start execution
1108 
1109   // jvmti support
1110   __ notify_method_entry();
1111 
1112   // work registers
1113   const Register method = rbx;
1114   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rdi);
1115   const Register t      = InterpreterRuntime::SignatureHandlerGenerator::temp();    // rcx|rscratch1


1116
1117   // allocate space for parameters
1118   __ movptr(method, STATE(_method));
1119   __ verify_method_ptr(method);
1120   {
1121     const Address constMethod       (method, Method::const_offset());
1122     const Address size_of_parameters(t, ConstMethod::size_of_parameters_offset());
1123     __ movptr(t, constMethod);
1124     __ load_unsigned_short(t, size_of_parameters);
1125   }
1126   __ shll(t, 2);
1127 #ifdef _LP64
1128   __ subptr(rsp, t);
1129   __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
1130   __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
1131 #else
1132   __ addptr(t, 2*wordSize);     // allocate two more slots for JNIEnv and possible mirror
1133   __ subptr(rsp, t);
1134   __ andptr(rsp, -(StackAlignmentInBytes)); // gcc needs 16 byte aligned stacks to do XMM intrinsics
1135 #endif // _LP64
1136 
1137   // get signature handler
1138     Label pending_exception_present;
1139 
1140   { Label L;
1141     __ movptr(t, Address(method, Method::signature_handler_offset()));
1142     __ testptr(t, t);
1143     __ jcc(Assembler::notZero, L);
1144     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), method, false);
1145     __ movptr(method, STATE(_method));
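
The signature handler is installed lazily: the stub loads the cached handler from the Method, and only when it is still null calls into the VM (InterpreterRuntime::prepare_native_call) to compute and install one, re-reading the Method* from the interpreter state after the call. A minimal sketch of that check-then-install pattern; the types and names below are hypothetical stand-ins, not HotSpot classes:

#include <stdio.h>

typedef void (*SignatureHandler)();

static void installed_handler() { printf("handling native signature\n"); }

struct FakeMethod {                                // hypothetical stand-in for Method*
  SignatureHandler signature_handler;
};

static void prepare_native_call(FakeMethod* m) {   // stands in for the VM slow path
  m->signature_handler = installed_handler;
}

int main() {
  FakeMethod method = { 0 };

  SignatureHandler t = method.signature_handler;   // __ movptr(t, ...signature_handler_offset())
  if (t == 0) {                                    // __ testptr(t, t); jcc(notZero, L)
    prepare_native_call(&method);                  // install the handler on first use
    t = method.signature_handler;                  // reload after the runtime call
  }
  t();
  return 0;
}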


2208 
2209   __ jmp(unwind_and_forward);
2210 
2211   interpreter_frame_manager = entry_point;
2212   return entry_point;
2213 }
2214 
2215 address AbstractInterpreterGenerator::generate_method_entry(AbstractInterpreter::MethodKind kind) {
2216   // determine code generation flags
2217   bool synchronized = false;
2218   address entry_point = NULL;
2219 
2220   switch (kind) {
2221     case Interpreter::zerolocals             :                                                                             break;
2222     case Interpreter::zerolocals_synchronized: synchronized = true;                                                        break;
2223     case Interpreter::native                 : entry_point = ((InterpreterGenerator*)this)->generate_native_entry(false);  break;
2224     case Interpreter::native_synchronized    : entry_point = ((InterpreterGenerator*)this)->generate_native_entry(true);   break;
2225     case Interpreter::empty                  : entry_point = ((InterpreterGenerator*)this)->generate_empty_entry();        break;
2226     case Interpreter::accessor               : entry_point = ((InterpreterGenerator*)this)->generate_accessor_entry();     break;
2227     case Interpreter::abstract               : entry_point = ((InterpreterGenerator*)this)->generate_abstract_entry();     break;

2228 
2229     case Interpreter::java_lang_math_sin     : // fall thru
2230     case Interpreter::java_lang_math_cos     : // fall thru
2231     case Interpreter::java_lang_math_tan     : // fall thru
2232     case Interpreter::java_lang_math_abs     : // fall thru
2233     case Interpreter::java_lang_math_log     : // fall thru
2234     case Interpreter::java_lang_math_log10   : // fall thru
2235     case Interpreter::java_lang_math_sqrt    : // fall thru
2236     case Interpreter::java_lang_math_pow     : // fall thru
2237     case Interpreter::java_lang_math_exp     : // fall thru
2238       entry_point = ((InterpreterGenerator*)this)->generate_math_entry(kind);     break;
2239     case Interpreter::java_lang_ref_reference_get
2240                                              : entry_point = ((InterpreterGenerator*)this)->generate_Reference_get_entry(); break;
2241     default                                  : ShouldNotReachHere();                                                       break;
2242   }
2243 
2244   if (entry_point) return entry_point;
2245 
2246   return ((InterpreterGenerator*)this)->generate_normal_entry(synchronized);
2247 
2248 }
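
generate_method_entry() follows a simple dispatch pattern: kinds with a specialized stub return that entry directly, while zerolocals and zerolocals_synchronized leave entry_point NULL, record only the synchronized flag, and fall through to the shared normal entry. A stripped-down, self-contained sketch of that dispatch, with hypothetical names rather than the HotSpot types:

#include <stdio.h>

typedef const char* address;   // illustrative stand-in for a generated code address

enum Kind { zerolocals, zerolocals_synchronized, native_kind, native_kind_synchronized };

static address generate_native_entry(bool sync) { return sync ? "native(sync)" : "native"; }
static address generate_normal_entry(bool sync) { return sync ? "normal(sync)" : "normal"; }

static address generate_method_entry(Kind kind) {
  bool synchronized = false;
  address entry_point = 0;
  switch (kind) {
    case zerolocals               :                       break;
    case zerolocals_synchronized  : synchronized = true;  break;
    case native_kind              : entry_point = generate_native_entry(false); break;
    case native_kind_synchronized : entry_point = generate_native_entry(true);  break;
  }
  if (entry_point) return entry_point;           // a specialized stub was generated
  return generate_normal_entry(synchronized);    // everything else shares the normal entry
}

int main() {
  printf("%s\n", generate_method_entry(zerolocals_synchronized));   // prints normal(sync)
  printf("%s\n", generate_method_entry(native_kind));               // prints native
  return 0;
}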
2249 
2250 InterpreterGenerator::InterpreterGenerator(StubQueue* code)
2251  : CppInterpreterGenerator(code) {
2252    generate_all(); // down here so it can be "virtual"
2253 }
2254 
2255 // Deoptimization helpers for C++ interpreter
2256 
2257 // How much stack a method activation needs in words.
2258 int AbstractInterpreter::size_top_interpreter_activation(Method* method) {


2439     intptr_t* stack_base = (intptr_t*) ((intptr_t) monitor_base - monitor_size);
2440     /* +1 because stack is always prepushed */
2441     intptr_t* stack = (intptr_t*) ((intptr_t) stack_base - (tempcount + 1) * BytesPerWord);
2442 
2443 
2444     BytecodeInterpreter::layout_interpreterState(cur_state,
2445                                           caller,
2446                                           interpreter_frame,
2447                                           method,
2448                                           locals,
2449                                           stack,
2450                                           stack_base,
2451                                           monitor_base,
2452                                           frame_bottom,
2453                                           is_top_frame);
2454 
2455     // BytecodeInterpreter::pd_layout_interpreterState(cur_state, interpreter_return_address, interpreter_frame->fp());
2456   }
2457   return frame_size/BytesPerWord;
2458 }
2459 
2460 bool AbstractInterpreter::can_be_compiled(methodHandle m) {
2461   switch (method_kind(m)) {
2462     case Interpreter::java_lang_math_sin     : // fall thru
2463     case Interpreter::java_lang_math_cos     : // fall thru
2464     case Interpreter::java_lang_math_tan     : // fall thru
2465     case Interpreter::java_lang_math_abs     : // fall thru
2466     case Interpreter::java_lang_math_log     : // fall thru
2467     case Interpreter::java_lang_math_log10   : // fall thru
2468     case Interpreter::java_lang_math_sqrt    : // fall thru
2469     case Interpreter::java_lang_math_pow     : // fall thru
2470     case Interpreter::java_lang_math_exp     :
2471       return false;
2472     default:
2473       return true;
2474   }
2475 }
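
The newly added can_be_compiled() tells the rest of the VM that the math-intrinsic kinds are served by the interpreter's own entries and should not be queued for compilation. A hypothetical caller sketch, illustrative only and not actual HotSpot compilation-policy code:

#include <stdio.h>

enum MethodKind { kind_zerolocals, kind_math_sin, kind_math_exp };

static bool can_be_compiled(MethodKind kind) {
  switch (kind) {
    case kind_math_sin:   // fall thru
    case kind_math_exp:
      return false;       // served by the interpreter's math entry
    default:
      return true;
  }
}

static void maybe_queue_for_compilation(const char* name, MethodKind kind) {
  if (!can_be_compiled(kind)) {
    printf("%s: left to the C++ interpreter\n", name);
    return;
  }
  printf("%s: queued for JIT compilation\n", name);
}

int main() {
  maybe_queue_for_compilation("java.lang.Math.sin", kind_math_sin);
  maybe_queue_for_compilation("Foo.bar",            kind_zerolocals);
  return 0;
}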
2476 
2477 
2478 #endif // CC_INTERP (all)