src/cpu/x86/vm/templateInterpreter_x86_64.cpp

 352     }
 353 
 354     __ cmp32(rcx, ExternalAddress((address)&InvocationCounter::InterpreterInvocationLimit));
 355     __ jcc(Assembler::aboveEqual, *overflow);
 356   }
 357 }
 358 
 359 void InterpreterGenerator::generate_counter_overflow(Label* do_continue) {
 360 
 361   // Asm interpreter on entry
 362   // r14 - locals
 363   // r13 - bcp
 364   // rbx - method
 365   // edx - cpool --- DOES NOT APPEAR TO BE TRUE
 366   // rbp - interpreter frame
 367 
 368   // On return (i.e. jump to entry_point) [ back to invocation of interpreter ]
 369   // Everything as it was on entry
 370   // rdx is not restored. Doesn't appear to really be set.
 371 
 372   const Address size_of_parameters(rbx,
 373                                    Method::size_of_parameters_offset());
 374 
 375   // InterpreterRuntime::frequency_counter_overflow takes two
 376   // arguments, the first (thread) is passed by call_VM, the second
 377   // indicates if the counter overflow occurs at a backwards branch
 378   // (NULL bcp).  We pass zero for it.  The call returns the address
 379   // of the verified entry point for the method or NULL if the
 380   // compilation did not complete (either went background or bailed
 381   // out).
 382   __ movl(c_rarg1, 0);
 383   __ call_VM(noreg,
 384              CAST_FROM_FN_PTR(address,
 385                               InterpreterRuntime::frequency_counter_overflow),
 386              c_rarg1);
 387 
 388   __ movptr(rbx, Address(rbp, method_offset));   // restore Method*
 389   // Preserve invariant that r13/r14 contain bcp/locals of sender frame
 390   // and jump to the interpreted entry.
 391   __ jmp(*do_continue, relocInfo::none);
 392 }
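
As a reading aid, a minimal standalone C++ sketch of the control flow implemented by the two stubs above: the invocation-counter check in the method entry branches to the overflow path, which calls into the runtime and then resumes interpretation regardless of whether compiled code was produced. All names and the limit value here are illustrative, not HotSpot API.

    #include <cstdint>

    // Illustrative stand-in for InterpreterRuntime::frequency_counter_overflow(thread, NULL):
    // returns the verified entry point of compiled code, or NULL if the compilation
    // did not complete (went to the background or bailed out).
    static void* frequency_counter_overflow() { return nullptr; }

    static const uint32_t kInvocationLimit = 10000;  // hypothetical stand-in for InterpreterInvocationLimit

    void interpret_method(uint32_t& invocation_counter) {
      if (++invocation_counter >= kInvocationLimit) {  // cmp32 / jcc aboveEqual in the stub
        // Overflow path (generate_counter_overflow): ask the runtime to compile,
        // then continue with the interpreted entry no matter what it returned.
        (void) frequency_counter_overflow();
      }
      // ... interpret the method ...
    }
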
 393 
 394 // See if we've got enough room on the stack for locals plus overhead.


 827 #endif // SERIALGC
 828 
 829   // If G1 is not enabled then attempt to go through the accessor entry point
 830   // Reference.get is an accessor
 831   return generate_accessor_entry();
 832 }
 833 
 834 
 835 // Interpreter stub for calling a native method. (asm interpreter)
 836 // This sets up a somewhat different looking stack for calling the
 837 // native method than the typical interpreter frame setup.
 838 address InterpreterGenerator::generate_native_entry(bool synchronized) {
 839   // determine code generation flags
 840   bool inc_counter  = UseCompiler || CountCompiledCalls;
 841 
 842   // rbx: Method*
 843   // r13: sender sp
 844 
 845   address entry_point = __ pc();
 846 
 847   const Address size_of_parameters(rbx, Method::
 848                                         size_of_parameters_offset());
 849   const Address invocation_counter(rbx, Method::
 850                                         invocation_counter_offset() +
 851                                         InvocationCounter::counter_offset());
 852   const Address access_flags      (rbx, Method::access_flags_offset());
 853 
 854   // get parameter size (always needed)
 855   __ load_unsigned_short(rcx, size_of_parameters);
 856 
 857   // native calls don't need the stack size check since they have no
 858   // expression stack and the arguments are already on the stack and
 859   // we only add a handful of words to the stack
 860 
 861   // rbx: Method*
 862   // rcx: size of parameters
 863   // r13: sender sp
 864   __ pop(rax);                                       // get return address
 865 
 866   // for natives the size of locals is zero
 867 
 868   // compute beginning of parameters (r14)
 869   __ lea(r14, Address(rsp, rcx, Address::times_8, -wordSize));
 870 
 871   // add 2 zero-initialized slots for native calls
 872   // initialize result_handler slot
 873   __ push((int) NULL_WORD);
 874   // slot for oop temp
 875   // (static native method holder mirror/jni oop result)
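
A small standalone check of the address arithmetic in the lea at line 869 above, assuming the usual layout where the caller pushed the parameters left to right and the return address has just been popped; the register values are hypothetical.

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint64_t wordSize = 8;
      uint64_t rsp = 0x7000;   // hypothetical stack pointer, pointing at the last parameter
      uint64_t rcx = 3;        // hypothetical size of parameters, in words
      // lea r14, [rsp + rcx*8 - wordSize]: with rcx parameter words on the stack,
      // this is the address of the first (leftmost) parameter, i.e. local 0.
      uint64_t r14 = rsp + rcx * wordSize - wordSize;
      assert(r14 == rsp + (rcx - 1) * wordSize);
      return 0;
    }
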


 950     Label L;
 951     const Address monitor_block_top(rbp,
 952                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
 953     __ movptr(rax, monitor_block_top);
 954     __ cmpptr(rax, rsp);
 955     __ jcc(Assembler::equal, L);
 956     __ stop("broken stack frame setup in interpreter");
 957     __ bind(L);
 958   }
 959 #endif
 960 
 961   // jvmti support
 962   __ notify_method_entry();
 963 
 964   // work registers
 965   const Register method = rbx;
 966   const Register t      = r11;
 967 
 968   // allocate space for parameters
 969   __ get_method(method);
 970   __ load_unsigned_short(t,
 971                          Address(method,
 972                                  Method::size_of_parameters_offset()));
 973   __ shll(t, Interpreter::logStackElementSize);
 974 
 975   __ subptr(rsp, t);
 976   __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
 977   __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
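
A standalone model of the rsp adjustment at lines 975-977 above: reserve the outgoing parameter space and the register-argument shadow area, then mask with -16 to round the stack pointer down to the 16-byte boundary the AMD64 ABI expects at a call. The shadow-area size and the register values are hypothetical.

    #include <cassert>
    #include <cstdint>

    int main() {
      const int64_t arg_reg_save_area_bytes = 32;  // hypothetical; only the Windows ABI needs this area
      int64_t rsp = 0x7fffe968;                    // hypothetical incoming stack pointer
      int64_t t   = 3 * 8;                         // 3 parameter words, already shifted to bytes
      rsp -= t;                                    // __ subptr(rsp, t);
      rsp -= arg_reg_save_area_bytes;              // __ subptr(rsp, frame::arg_reg_save_area_bytes);
      rsp &= -16;                                  // __ andptr(rsp, -16);
      assert(rsp % 16 == 0);                       // 16-byte aligned before the native call
      return 0;
    }
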
 978 
 979   // get signature handler
 980   {
 981     Label L;
 982     __ movptr(t, Address(method, Method::signature_handler_offset()));
 983     __ testptr(t, t);
 984     __ jcc(Assembler::notZero, L);
 985     __ call_VM(noreg,
 986                CAST_FROM_FN_PTR(address,
 987                                 InterpreterRuntime::prepare_native_call),
 988                method);
 989     __ get_method(method);
 990     __ movptr(t, Address(method, Method::signature_handler_offset()));
 991     __ bind(L);
 992   }
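
The block above (lines 980-992) is a lazy-initialization idiom: read the cached signature handler, and only if it is still null call into the runtime to create one, then re-read it because the runtime call may have installed it. A standalone C++ sketch of the same idiom, with illustrative types standing in for Method and InterpreterRuntime:

    #include <cstddef>

    typedef void (*SignatureHandler)();      // stand-in for the generated handler stub

    struct FakeMethod {                      // illustrative; models Method's cached handler field
      SignatureHandler signature_handler;
    };

    static void default_handler() {}

    // Stand-in for InterpreterRuntime::prepare_native_call: creates and caches a handler.
    static void prepare_native_call(FakeMethod* m) {
      m->signature_handler = default_handler;
    }

    SignatureHandler get_signature_handler(FakeMethod* method) {
      SignatureHandler t = method->signature_handler;  // __ movptr(t, ...)
      if (t == NULL) {                                 // __ testptr(t, t); __ jcc(notZero, L)
        prepare_native_call(method);                   // __ call_VM(..., prepare_native_call, method)
        t = method->signature_handler;                 // reload after the runtime call
      }
      return t;
    }
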


1285   if (inc_counter) {
1286     // Handle overflow of counter and compile method
1287     __ bind(invocation_counter_overflow);
1288     generate_counter_overflow(&continue_after_compile);
1289   }
1290 
1291   return entry_point;
1292 }
1293 
1294 //
1295 // Generic interpreted method entry to (asm) interpreter
1296 //
1297 address InterpreterGenerator::generate_normal_entry(bool synchronized) {
1298   // determine code generation flags
1299   bool inc_counter  = UseCompiler || CountCompiledCalls;
1300 
1301   // ebx: Method*
1302   // r13: sender sp
1303   address entry_point = __ pc();
1304 
1305   const Address size_of_parameters(rbx,
1306                                    Method::size_of_parameters_offset());
1307   const Address size_of_locals(rbx, Method::size_of_locals_offset());
1308   const Address invocation_counter(rbx,
1309                                    Method::invocation_counter_offset() +
1310                                    InvocationCounter::counter_offset());
1311   const Address access_flags(rbx, Method::access_flags_offset());
1312 
1313   // get parameter size (always needed)
1314   __ load_unsigned_short(rcx, size_of_parameters);
1315 
1316   // rbx: Method*
1317   // rcx: size of parameters
1318   // r13: sender_sp (could differ from sp+wordSize if we were called via c2i )
1319 
1320   __ load_unsigned_short(rdx, size_of_locals); // get size of locals in words
1321   __ subl(rdx, rcx); // rdx = no. of additional locals
1322 
1323   // YYY
1324 //   __ incrementl(rdx);
1325 //   __ andl(rdx, -2);
1326 
1327   // see if we've got enough room on the stack for locals plus overhead.
1328   generate_stack_overflow_check();
1329 
1330   // get return address
1331   __ pop(rax);
1332 
1333   // compute beginning of parameters (r14)
1334   __ lea(r14, Address(rsp, rcx, Address::times_8, -wordSize));
1335 
1336   // rdx - # of additional locals
1337   // allocate space for locals
1338   // explicitly initialize locals
1339   {
1340     Label exit, loop;
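
A worked example of the "additional locals" computation at line 1321 above: these are the local slots beyond the incoming parameters, which the loop that follows (elided here) zero-initializes on the stack. The numbers are hypothetical.

    #include <cassert>

    int main() {
      int size_of_parameters = 2;  // hypothetical parameter slot count
      int size_of_locals     = 5;  // hypothetical total local slot count
      int additional_locals  = size_of_locals - size_of_parameters;  // __ subl(rdx, rcx)
      assert(additional_locals == 3);  // slots to allocate and zero-initialize
      return 0;
    }
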


1735     // Check to see whether we are returning to a deoptimized frame.
1736     // (The PopFrame call ensures that the caller of the popped frame is
1737     // either interpreted or compiled and deoptimizes it if compiled.)
1738     // In this case, we can't call dispatch_next() after the frame is
1739     // popped, but instead must save the incoming arguments and restore
1740     // them after deoptimization has occurred.
1741     //
1742     // Note that we don't compare the return PC against the
1743     // deoptimization blob's unpack entry because of the presence of
1744     // adapter frames in C2.
1745     Label caller_not_deoptimized;
1746     __ movptr(c_rarg1, Address(rbp, frame::return_addr_offset * wordSize));
1747     __ super_call_VM_leaf(CAST_FROM_FN_PTR(address,
1748                                InterpreterRuntime::interpreter_contains), c_rarg1);
1749     __ testl(rax, rax);
1750     __ jcc(Assembler::notZero, caller_not_deoptimized);
1751 
1752     // Compute size of arguments for saving when returning to
1753     // deoptimized caller
1754     __ get_method(rax);
1755     __ load_unsigned_short(rax, Address(rax, in_bytes(Method::
1756                                                 size_of_parameters_offset())));
1757     __ shll(rax, Interpreter::logStackElementSize);
1758     __ restore_locals(); // XXX do we need this?
1759     __ subptr(r14, rax);
1760     __ addptr(r14, wordSize);
1761     // Save these arguments
1762     __ super_call_VM_leaf(CAST_FROM_FN_PTR(address,
1763                                            Deoptimization::
1764                                            popframe_preserve_args),
1765                           r15_thread, rax, r14);
1766 
1767     __ remove_activation(vtos, rdx,
1768                          /* throw_monitor_exception */ false,
1769                          /* install_monitor_exception */ false,
1770                          /* notify_jvmdi */ false);
1771 
1772     // Inform deoptimization that it is responsible for restoring
1773     // these arguments
1774     __ movl(Address(r15_thread, JavaThread::popframe_condition_offset()),
1775             JavaThread::popframe_force_deopt_reexecution_bit);
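
A standalone check of the pointer arithmetic at lines 1757-1760 above: with r14 pointing at the first (leftmost) parameter and rax holding the parameter area size in bytes, subtracting rax and adding one word yields the lowest address of that area, so rax bytes starting there cover every incoming argument handed to Deoptimization::popframe_preserve_args. Values are hypothetical.

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint64_t wordSize = 8;
      uint64_t first_param = 0x7010;     // hypothetical address of parameter 0
      uint64_t rax = 3 * wordSize;       // hypothetical size of parameters, in bytes
      uint64_t r14 = first_param;
      r14 -= rax;                        // __ subptr(r14, rax)
      r14 += wordSize;                   // __ addptr(r14, wordSize)
      // The preserved region [r14, r14 + rax) ends one word past parameter 0.
      assert(r14 + rax == first_param + wordSize);
      return 0;
    }
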




 352     }
 353 
 354     __ cmp32(rcx, ExternalAddress((address)&InvocationCounter::InterpreterInvocationLimit));
 355     __ jcc(Assembler::aboveEqual, *overflow);
 356   }
 357 }
 358 
 359 void InterpreterGenerator::generate_counter_overflow(Label* do_continue) {
 360 
 361   // Asm interpreter on entry
 362   // r14 - locals
 363   // r13 - bcp
 364   // rbx - method
 365   // edx - cpool --- DOES NOT APPEAR TO BE TRUE
 366   // rbp - interpreter frame
 367 
 368   // On return (i.e. jump to entry_point) [ back to invocation of interpreter ]
 369   // Everything as it was on entry
 370   // rdx is not restored. Doesn't appear to really be set.
 371 
 372   // InterpreterRuntime::frequency_counter_overflow takes two
 373   // arguments, the first (thread) is passed by call_VM, the second
 374   // indicates if the counter overflow occurs at a backwards branch
 375   // (NULL bcp).  We pass zero for it.  The call returns the address
 376   // of the verified entry point for the method or NULL if the
 377   // compilation did not complete (either went background or bailed
 378   // out).
 379   __ movl(c_rarg1, 0);
 380   __ call_VM(noreg,
 381              CAST_FROM_FN_PTR(address,
 382                               InterpreterRuntime::frequency_counter_overflow),
 383              c_rarg1);
 384 
 385   __ movptr(rbx, Address(rbp, method_offset));   // restore Method*
 386   // Preserve invariant that r13/r14 contain bcp/locals of sender frame
 387   // and jump to the interpreted entry.
 388   __ jmp(*do_continue, relocInfo::none);
 389 }
 390 
 391 // See if we've got enough room on the stack for locals plus overhead.


 824 #endif // SERIALGC
 825 
 826   // If G1 is not enabled then attempt to go through the accessor entry point
 827   // Reference.get is an accessor
 828   return generate_accessor_entry();
 829 }
 830 
 831 
 832 // Interpreter stub for calling a native method. (asm interpreter)
 833 // This sets up a somewhat different looking stack for calling the
 834 // native method than the typical interpreter frame setup.
 835 address InterpreterGenerator::generate_native_entry(bool synchronized) {
 836   // determine code generation flags
 837   bool inc_counter  = UseCompiler || CountCompiledCalls;
 838 
 839   // rbx: Method*
 840   // r13: sender sp
 841 
 842   address entry_point = __ pc();
 843 
 844   const Address constMethod       (rbx, Method::const_offset());
 845   const Address invocation_counter(rbx, Method::
 846                                         invocation_counter_offset() +
 847                                         InvocationCounter::counter_offset());
 848   const Address access_flags      (rbx, Method::access_flags_offset());
 849 
 850   // get parameter size (always needed)
 851   __ movptr(rcx, constMethod);
 852   __ load_unsigned_short(rcx, Address(rcx, ConstMethod::size_of_parameters_offset()));
 853 
 854   // native calls don't need the stack size check since they have no
 855   // expression stack and the arguments are already on the stack and
 856   // we only add a handful of words to the stack
 857 
 858   // rbx: Method*
 859   // rcx: size of parameters
 860   // r13: sender sp
 861   __ pop(rax);                                       // get return address
 862 
 863   // for natives the size of locals is zero
 864 
 865   // compute beginning of parameters (r14)
 866   __ lea(r14, Address(rsp, rcx, Address::times_8, -wordSize));
 867 
 868   // add 2 zero-initialized slots for native calls
 869   // initialize result_handler slot
 870   __ push((int) NULL_WORD);
 871   // slot for oop temp
 872   // (static native method holder mirror/jni oop result)
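
The substantive change in this hunk is that the parameter count is now read through ConstMethod instead of directly from Method (lines 844 and 851-852). A minimal sketch of the extra indirection, with illustrative struct layouts standing in for the real classes:

    #include <cstdint>

    struct FakeConstMethod {           // illustrative stand-in for ConstMethod
      uint16_t size_of_parameters;     // read via ConstMethod::size_of_parameters_offset()
    };

    struct FakeMethod {                // illustrative stand-in for Method
      FakeConstMethod* const_method;   // read via Method::const_offset()
    };

    uint32_t load_parameter_count(FakeMethod* rbx) {
      FakeConstMethod* rcx = rbx->const_method;  // __ movptr(rcx, constMethod);
      return rcx->size_of_parameters;            // __ load_unsigned_short(rcx, ...);
    }
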


 947     Label L;
 948     const Address monitor_block_top(rbp,
 949                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
 950     __ movptr(rax, monitor_block_top);
 951     __ cmpptr(rax, rsp);
 952     __ jcc(Assembler::equal, L);
 953     __ stop("broken stack frame setup in interpreter");
 954     __ bind(L);
 955   }
 956 #endif
 957 
 958   // jvmti support
 959   __ notify_method_entry();
 960 
 961   // work registers
 962   const Register method = rbx;
 963   const Register t      = r11;
 964 
 965   // allocate space for parameters
 966   __ get_method(method);
 967   __ movptr(t, Address(method, Method::const_offset()));
 968   __ load_unsigned_short(t,
 969                          Address(t,
 970                                  ConstMethod::size_of_parameters_offset()));
 971   __ shll(t, Interpreter::logStackElementSize);
 972 
 973   __ subptr(rsp, t);
 974   __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
 975   __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
 976 
 977   // get signature handler
 978   {
 979     Label L;
 980     __ movptr(t, Address(method, Method::signature_handler_offset()));
 981     __ testptr(t, t);
 982     __ jcc(Assembler::notZero, L);
 983     __ call_VM(noreg,
 984                CAST_FROM_FN_PTR(address,
 985                                 InterpreterRuntime::prepare_native_call),
 986                method);
 987     __ get_method(method);
 988     __ movptr(t, Address(method, Method::signature_handler_offset()));
 989     __ bind(L);
 990   }


1283   if (inc_counter) {
1284     // Handle overflow of counter and compile method
1285     __ bind(invocation_counter_overflow);
1286     generate_counter_overflow(&continue_after_compile);
1287   }
1288 
1289   return entry_point;
1290 }
1291 
1292 //
1293 // Generic interpreted method entry to (asm) interpreter
1294 //
1295 address InterpreterGenerator::generate_normal_entry(bool synchronized) {
1296   // determine code generation flags
1297   bool inc_counter  = UseCompiler || CountCompiledCalls;
1298 
1299   // ebx: Method*
1300   // r13: sender sp
1301   address entry_point = __ pc();
1302 
1303   const Address constMethod(rbx, Method::const_offset());
1304   const Address invocation_counter(rbx,
1305                                    Method::invocation_counter_offset() +
1306                                    InvocationCounter::counter_offset());
1307   const Address access_flags(rbx, Method::access_flags_offset());
1308 
1309   // get parameter size (always needed)
1310   __ movptr(rdx, constMethod);
1311   __ load_unsigned_short(rcx, Address(rdx, ConstMethod::size_of_parameters_offset()));
1312 
1313   // rbx: Method*
1314   // rcx: size of parameters
1315   // r13: sender_sp (could differ from sp+wordSize if we were called via c2i )
1316 
1317   // get size of locals in words
1318   // constMethod is already in rdx
1319   __ load_unsigned_short(rdx, Address(rdx, ConstMethod::size_of_locals_offset()));
1320   __ subl(rdx, rcx); // rdx = no. of additional locals
1321 
1322   // YYY
1323 //   __ incrementl(rdx);
1324 //   __ andl(rdx, -2);
1325 
1326   // see if we've got enough room on the stack for locals plus overhead.
1327   generate_stack_overflow_check();
1328 
1329   // get return address
1330   __ pop(rax);
1331 
1332   // compute beginning of parameters (r14)
1333   __ lea(r14, Address(rsp, rcx, Address::times_8, -wordSize));
1334 
1335   // rdx - # of additional locals
1336   // allocate space for locals
1337   // explicitly initialize locals
1338   {
1339     Label exit, loop;


1734     // Check to see whether we are returning to a deoptimized frame.
1735     // (The PopFrame call ensures that the caller of the popped frame is
1736     // either interpreted or compiled and deoptimizes it if compiled.)
1737     // In this case, we can't call dispatch_next() after the frame is
1738     // popped, but instead must save the incoming arguments and restore
1739     // them after deoptimization has occurred.
1740     //
1741     // Note that we don't compare the return PC against the
1742     // deoptimization blob's unpack entry because of the presence of
1743     // adapter frames in C2.
1744     Label caller_not_deoptimized;
1745     __ movptr(c_rarg1, Address(rbp, frame::return_addr_offset * wordSize));
1746     __ super_call_VM_leaf(CAST_FROM_FN_PTR(address,
1747                                InterpreterRuntime::interpreter_contains), c_rarg1);
1748     __ testl(rax, rax);
1749     __ jcc(Assembler::notZero, caller_not_deoptimized);
1750 
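
The calls at lines 1745-1749 above ask the runtime whether the caller's return PC lies inside the interpreter's generated code; if it does not, the caller is compiled code and may have been deoptimized. A hedged standalone sketch of that kind of containment check (the actual InterpreterRuntime::interpreter_contains implementation is not part of this diff):

    #include <cassert>
    #include <cstdint>

    struct CodeRange { uintptr_t lo, hi; };   // hypothetical bounds of the interpreter's code

    bool interpreter_contains(const CodeRange& interp, uintptr_t return_pc) {
      return return_pc >= interp.lo && return_pc < interp.hi;
    }

    int main() {
      CodeRange interp = {0x10000, 0x20000};           // hypothetical
      assert(interpreter_contains(interp, 0x10abc));   // interpreted caller
      assert(!interpreter_contains(interp, 0x30000));  // compiled (possibly deoptimized) caller
      return 0;
    }
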
1751     // Compute size of arguments for saving when returning to
1752     // deoptimized caller
1753     __ get_method(rax);
1754     __ movptr(rax, Address(rax, Method::const_offset()));
1755     __ load_unsigned_short(rax, Address(rax, in_bytes(ConstMethod::
1756                                                 size_of_parameters_offset())));
1757     __ shll(rax, Interpreter::logStackElementSize);
1758     __ restore_locals(); // XXX do we need this?
1759     __ subptr(r14, rax);
1760     __ addptr(r14, wordSize);
1761     // Save these arguments
1762     __ super_call_VM_leaf(CAST_FROM_FN_PTR(address,
1763                                            Deoptimization::
1764                                            popframe_preserve_args),
1765                           r15_thread, rax, r14);
1766 
1767     __ remove_activation(vtos, rdx,
1768                          /* throw_monitor_exception */ false,
1769                          /* install_monitor_exception */ false,
1770                          /* notify_jvmdi */ false);
1771 
1772     // Inform deoptimization that it is responsible for restoring
1773     // these arguments
1774     __ movl(Address(r15_thread, JavaThread::popframe_condition_offset()),
1775             JavaThread::popframe_force_deopt_reexecution_bit);