src/cpu/sparc/vm/cppInterpreter_sparc.cpp
7063628_1 Sdiff src/cpu/sparc/vm

 527     __ br(Assembler::equal, true, Assembler::pt, xreturn_path);
 528     __ delayed()->ldsh(Otos_i, G3_scratch, Otos_i);
 529     __ cmp(G1_scratch, ctos);
 530     __ br(Assembler::equal, true, Assembler::pt, xreturn_path);
 531     __ delayed()->lduh(Otos_i, G3_scratch, Otos_i);
 532 #ifdef ASSERT
 533     __ cmp(G1_scratch, btos);
 534     __ br(Assembler::equal, true, Assembler::pt, xreturn_path);
 535     __ delayed()->ldsb(Otos_i, G3_scratch, Otos_i);
 536     __ should_not_reach_here();
 537 #endif
 538     __ ldsb(Otos_i, G3_scratch, Otos_i);
 539     __ bind(xreturn_path);
 540 
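Note: the branch chain above (together with the itos/atos cases handled just before this excerpt) amounts to a type-dispatched field load for the fast accessor; each tos type selects the matching sign- or zero-extending load, placed in an annulled delay slot so it only executes when the branch is taken. A minimal C++ sketch of the selection, with hypothetical tag values:

    // Sketch only: stos -> ldsh (signed 16-bit), ctos -> lduh (unsigned
    // 16-bit), btos -> ldsb (signed 8-bit); itos/atos are handled earlier.
    #include <cstdint>
    int32_t accessor_load_sketch(const char* obj, int offset, int tos_type) {
      enum { btos_tag, ctos_tag, stos_tag };                    // hypothetical tags
      switch (tos_type) {
        case stos_tag: return *(const int16_t*) (obj + offset); // ldsh
        case ctos_tag: return *(const uint16_t*)(obj + offset); // lduh
        default:       return *(const int8_t*)  (obj + offset); // ldsb
      }
    }
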
 541     // _ireturn/_areturn
 542     __ retl();                      // return from leaf routine
 543     __ delayed()->mov(O5_savedSP, SP);
 544 
 545     // Generate regular method entry
 546     __ bind(slow_path);
 547     __ ba(fast_accessor_slow_entry_path, false);
 548     __ delayed()->nop();
 549     return entry;
 550   }
 551   return NULL;
 552 }
 553 
 554 address InterpreterGenerator::generate_Reference_get_entry(void) {
 555 #ifndef SERIALGC
 556   if (UseG1GC) {
 557     // We need to have a routine that generates code to:
 558     //   * load the value in the referent field
 559     //   * pass that value to the pre-barrier.
 560     //
 561     // In the case of G1 this will record the value of the
 562     // referent in an SATB buffer if marking is active.
 563     // This will cause concurrent marking to mark the referent
 564     // field as live.
 565     Unimplemented();
 566   }
 567 #endif // SERIALGC
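Note: since the routine is Unimplemented() here for G1, the comment above is the only specification. A minimal sketch of the intended behavior, assuming the usual SATB pre-barrier contract; satb_marking_active and satb_enqueue are hypothetical stand-ins for the real barrier interface:

    // Sketch only: load the referent, then tell the SATB pre-barrier about
    // it so concurrent marking treats the referent as live.
    #include <cstddef>
    typedef void* oop;                        // stands in for HotSpot's oop
    extern bool satb_marking_active();        // hypothetical
    extern void satb_enqueue(oop referent);   // hypothetical
    oop Reference_get_sketch(oop reference, int referent_offset) {
      oop referent = *(oop*)((char*)reference + referent_offset);
      if (satb_marking_active() && referent != NULL) {
        satb_enqueue(referent);  // record the referent in an SATB buffer
      }
      return referent;
    }
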


 702   // It is important not to smash any handles created by this call
 703   // until any oop handle in O0 is dereferenced.
 704 
 705   // (note that the space for outgoing params is preallocated)
 706 
 707   // get signature handler
 708 
 709   Label pending_exception_present;
 710 
 711   { Label L;
 712     __ ld_ptr(STATE(_method), G5_method);
 713     __ ld_ptr(Address(G5_method, 0, in_bytes(methodOopDesc::signature_handler_offset())), G3_scratch);
 714     __ tst(G3_scratch);
 715     __ brx(Assembler::notZero, false, Assembler::pt, L);
 716     __ delayed()->nop();
 717     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), G5_method, false);
 718     __ ld_ptr(STATE(_method), G5_method);
 719 
 720     Address exception_addr(G2_thread, 0, in_bytes(Thread::pending_exception_offset()));
 721     __ ld_ptr(exception_addr, G3_scratch);
 722     __ br_notnull(G3_scratch, false, Assembler::pn, pending_exception_present);
 723     __ ld_ptr(Address(G5_method, 0, in_bytes(methodOopDesc::signature_handler_offset())), G3_scratch);
 724     __ bind(L);
 725   }
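Note: the block above lazily installs the signature handler: if the methodOop has none yet, prepare_native_call is asked to create one, the pending-exception slot is checked, and only then is the handler field reloaded. A C++ sketch of that control flow; the types and accessors are stand-ins for the HotSpot ones:

    // Sketch only; the real code branches to pending_exception_present
    // instead of returning NULL.
    #include <cstddef>
    typedef unsigned char* address;
    struct Thread { void* pending_exception; };
    struct Method { address signature_handler; };
    extern void prepare_native_call(Thread* t, Method* m);  // runtime call
    address get_signature_handler_sketch(Method* m, Thread* t) {
      address handler = m->signature_handler;
      if (handler == NULL) {
        prepare_native_call(t, m);         // may install one, or throw
        if (t->pending_exception != NULL)
          return NULL;                     // -> pending_exception_present
        handler = m->signature_handler;    // reload after the runtime call
      }
      return handler;
    }
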
 726 
 727   // Push a new frame so that the args will really be stored in it.
 728   // Copy a few locals across so the new frame has the variables
 729   // we need; these values will be dead at the jni call and
 730   // therefore not gc volatile like the values in the current
 731   // frame (Lstate in particular).
 732 
 733   // Flush the state pointer to the register save area,
 734   // which is the only register we need for a stack walk.
 735   __ st_ptr(Lstate, SP, (Lstate->sp_offset_in_saved_window() * wordSize) + STACK_BIAS);
 736 
 737   __ mov(Lstate, O1);         // Need to pass the state pointer across the frame
 738 
 739   // Calculate current frame size
 740   __ sub(SP, FP, O3);         // Calculate negative of current frame size
 741   __ save(SP, O3, SP);        // Allocate an identical sized frame
 742 
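Note: the sub/save pair works because SPARC frames grow downward: O3 = SP - FP is the negative of the current frame size, and save computes the new SP as the old SP plus O3, so the new register window gets a frame of exactly the current size. A small sketch of the arithmetic:

    // Sketch only: SP < FP on a downward-growing stack.
    #include <cassert>
    #include <cstdint>
    void identical_frame_sketch(intptr_t sp, intptr_t fp) {
      intptr_t o3     = sp - fp;       // negative of current frame size
      intptr_t new_sp = sp + o3;       // what 'save SP, O3, SP' computes
      assert(sp - new_sp == fp - sp);  // new frame matches the old size
    }
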


1274   }
1275 #endif // ASSERT
1276 
1277   // monitor is already allocated at stack base
1278   // and the lockee is already present
1279   __ ld_ptr(STATE(_stack_base), L2_scratch);
1280   __ ld_ptr(L2_scratch, BasicObjectLock::obj_offset_in_bytes(), O0);   // get object
1281   __ lock_object(L2_scratch, O0);
1282 
1283 }
1284 
1285 //  Generate code to handle resuming a deopted method
1286 void CppInterpreterGenerator::generate_deopt_handling() {
1287 
1288   Label return_from_deopt_common;
1289 
1290   // deopt needs to jump to here to enter the interpreter (return a result)
1291   deopt_frame_manager_return_atos  = __ pc();
1292 
1293   // O0/O1 live
1294   __ ba(return_from_deopt_common, false);
1295   __ delayed()->set(AbstractInterpreter::BasicType_as_index(T_OBJECT), L3_scratch);    // Result stub address array index
1296 
1297 
1298   // deopt needs to jump to here to enter the interpreter (return a result)
1299   deopt_frame_manager_return_btos  = __ pc();
1300 
1301   // O0/O1 live
1302   __ ba(return_from_deopt_common, false);
1303   __ delayed()->set(AbstractInterpreter::BasicType_as_index(T_BOOLEAN), L3_scratch);    // Result stub address array index
1304 
1305   // deopt needs to jump to here to enter the interpreter (return a result)
1306   deopt_frame_manager_return_itos  = __ pc();
1307 
1308   // O0/O1 live
1309   __ ba(return_from_deopt_common, false);
1310   __ delayed()->set(AbstractInterpreter::BasicType_as_index(T_INT), L3_scratch);    // Result stub address array index
1311 
1312   // deopt needs to jump to here to enter the interpreter (return a result)
1313 
1314   deopt_frame_manager_return_ltos  = __ pc();
1315 #if !defined(_LP64) && defined(COMPILER2)
1316   // All return values are where we want them, except for Longs.  C2 returns
1317   // longs in G1 in the 32-bit build whereas the interpreter wants them in O0/O1.
1318   // Since the interpreter will return longs in G1 and O0/O1 in the 32-bit
1319   // build even if we are returning from interpreted code, we just do a little
1320   // stupid shuffling.
1321   // Note: I tried to make c2 return longs in O0/O1 and G1 so we wouldn't have to
1322   // do this here. Unfortunately if we did a rethrow we'd see a MachEpilog node
1323   // first, which would move G1 -> O0/O1 and destroy the exception we were throwing.
1324 
1325   __ srl (G1,  0, O1);
1326   __ srlx(G1, 32, O0);
1327 #endif /* !_LP64 && COMPILER2 */
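Note: in C++ terms the two shifts just split the 64-bit value in G1 into the O0/O1 pair the interpreter expects, high word in O0 and low word in O1:

    // Equivalent of 'srl G1, 0, O1' / 'srlx G1, 32, O0' (sketch).
    #include <cstdint>
    void split_long_sketch(uint64_t g1, uint32_t& o0, uint32_t& o1) {
      o1 = (uint32_t)g1;          // srl  G1,  0, O1: low 32 bits
      o0 = (uint32_t)(g1 >> 32);  // srlx G1, 32, O0: high 32 bits
    }
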
1328   // O0/O1 live
1329   __ ba(return_from_deopt_common, false);
1330   __ delayed()->set(AbstractInterpreter::BasicType_as_index(T_LONG), L3_scratch);    // Result stub address array index
1331 
1332   // deopt needs to jump to here to enter the interpreter (return a result)
1333 
1334   deopt_frame_manager_return_ftos  = __ pc();
1335   // O0/O1 live
1336   __ ba(return_from_deopt_common, false);
1337   __ delayed()->set(AbstractInterpreter::BasicType_as_index(T_FLOAT), L3_scratch);    // Result stub address array index
1338 
1339   // deopt needs to jump to here to enter the interpreter (return a result)
1340   deopt_frame_manager_return_dtos  = __ pc();
1341 
1342   // O0/O1 live
1343   __ ba(return_from_deopt_common, false);
1344   __ delayed()->set(AbstractInterpreter::BasicType_as_index(T_DOUBLE), L3_scratch);    // Result stub address array index
1345 
1346   // deopt needs to jump to here to enter the interpreter (return a result)
1347   deopt_frame_manager_return_vtos  = __ pc();
1348 
1349   // O0/O1 live
1350   __ set(AbstractInterpreter::BasicType_as_index(T_VOID), L3_scratch);
1351 
1352   // Deopt return common
1353   // an index is present that lets us move any possible result being
1354   // returned to the interpreter's stack
1355   //
1356   __ bind(return_from_deopt_common);
1357 
1358   // The result, if any, is in the native abi result registers (O0..O1/F0..F1).
1359   // The java expression stack is in the state that the calling convention left it in.
1360   // Copy the result from the native abi registers and place it on the java expression stack.
1361 
1362   // Current interpreter state is present in Lstate
1363 


1380 // Generate the code to handle a more_monitors message from the c++ interpreter
1381 void CppInterpreterGenerator::generate_more_monitors() {
1382 
1383   Label entry, loop;
1384   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
1385   // 1. compute new pointers                                // esp: old expression stack top
1386   __ delayed()->ld_ptr(STATE(_stack_base), L4_scratch);            // current expression stack bottom
1387   __ sub(L4_scratch, entry_size, L4_scratch);
1388   __ st_ptr(L4_scratch, STATE(_stack_base));
1389 
1390   __ sub(SP, entry_size, SP);                  // Grow stack
1391   __ st_ptr(SP, STATE(_frame_bottom));
1392 
1393   __ ld_ptr(STATE(_stack_limit), L2_scratch);
1394   __ sub(L2_scratch, entry_size, L2_scratch);
1395   __ st_ptr(L2_scratch, STATE(_stack_limit));
1396 
1397   __ ld_ptr(STATE(_stack), L1_scratch);                // Get current stack top
1398   __ sub(L1_scratch, entry_size, L1_scratch);
1399   __ st_ptr(L1_scratch, STATE(_stack));
1400   __ ba(entry, false);
1401   __ delayed()->add(L1_scratch, wordSize, L1_scratch);        // first real entry (undo prepush)
1402 
1403   // 2. move expression stack
1404 
1405   __ bind(loop);
1406   __ st_ptr(L3_scratch, Address(L1_scratch, 0));
1407   __ add(L1_scratch, wordSize, L1_scratch);
1408   __ bind(entry);
1409   __ cmp(L1_scratch, L4_scratch);
1410   __ br(Assembler::notEqual, false, Assembler::pt, loop);
1411   __ delayed()->ld_ptr(L1_scratch, entry_size, L3_scratch);
1412 
1413   // now zero the slot so we can find it.
1414   __ st_ptr(G0, L4_scratch, BasicObjectLock::obj_offset_in_bytes());
1415 
1416 }
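Note: the ld_ptr at the top of this routine sits in a delayed() slot; it fills the delay slot of the branch the caller emits immediately before invoking generate_more_monitors(). Functionally, the routine moves every stack-related pointer down by one monitor entry and slides the expression-stack contents over the freed gap. A C++ sketch on a downward-growing stack; the parameters stand in for the BytecodeInterpreter state fields:

    // Sketch only: entry_words corresponds to
    // frame::interpreter_frame_monitor_size().
    #include <cstddef>
    #include <cstdint>
    void more_monitors_sketch(intptr_t*& stack, intptr_t*& stack_base,
                              intptr_t*& stack_limit, size_t entry_words) {
      stack_base  -= entry_words;    // new monitor lives at the old base
      stack_limit -= entry_words;
      stack       -= entry_words;    // SP/_frame_bottom move the same way
      for (intptr_t* p = stack + 1 /* undo prepush */; p != stack_base; p++)
        p[0] = p[entry_words];       // slide each slot down over the gap
      stack_base[0] = 0;             // zero the new monitor's obj slot
    }                                // (obj offset folded to 0 in this sketch)
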
1417 
1418 // Initial entry to C++ interpreter from the call_stub.
1419 // This entry point is called the frame manager since it handles the generation
1420 // of interpreter activation frames via requests directly from the vm (via call_stub)


1633 
1634   __ cmp(L1_scratch, (int)BytecodeInterpreter::call_method);
1635   __ br(Assembler::equal, false, Assembler::pt, call_method);
1636   __ delayed()->cmp(L1_scratch, (int)BytecodeInterpreter::return_from_method);
1637   __ br(Assembler::equal, false, Assembler::pt, return_from_interpreted_method);
1638   __ delayed()->cmp(L1_scratch, (int)BytecodeInterpreter::throwing_exception);
1639   __ br(Assembler::equal, false, Assembler::pt, throw_exception);
1640   __ delayed()->cmp(L1_scratch, (int)BytecodeInterpreter::do_osr);
1641   __ br(Assembler::equal, false, Assembler::pt, do_OSR);
1642   __ delayed()->cmp(L1_scratch, (int)BytecodeInterpreter::more_monitors);
1643   __ br(Assembler::notEqual, false, Assembler::pt, bad_msg);
1644 
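Note: the cmp/br chain above, with each comparison placed in the previous branch's delay slot, is effectively a switch on the interpreter's message word. A C++ sketch; the enum values mirror HotSpot's BytecodeInterpreter messages and the returned strings name the assembler Labels branched to:

    // Sketch only.
    const char* frame_manager_dispatch_sketch(int msg) {
      enum { call_method, return_from_method, throwing_exception,
             do_osr, more_monitors };            // stand-in values
      switch (msg) {
        case call_method:        return "call_method";
        case return_from_method: return "return_from_interpreted_method";
        case throwing_exception: return "throw_exception";
        case do_osr:             return "do_OSR";
        case more_monitors:      return "generate_more_monitors (below)";
        default:                 return "bad_msg";
      }
    }
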
1645   // Allocate more monitor space, shuffle expression stack....
1646 
1647   generate_more_monitors();
1648 
1649   // new monitor slot allocated, resume the interpreter.
1650 
1651   __ set((int)BytecodeInterpreter::got_monitors, L1_scratch);
1652   VALIDATE_STATE(G3_scratch, 5);
1653   __ ba(call_interpreter, false);
1654   __ delayed()->st(L1_scratch, STATE(_msg));
1655 
1656   // uncommon trap needs to jump to here to enter the interpreter (re-execute current bytecode)
1657   unctrap_frame_manager_entry  = __ pc();
1658 
1659   // QQQ what message do we send
1660 
1661   __ ba(call_interpreter, false);
1662   __ delayed()->ld_ptr(STATE(_frame_bottom), SP);                  // restore to full stack frame
1663 
1664   //=============================================================================
1665   // Returning from a compiled method into a deopted method. The bytecode at the
1666   // bcp has completed. The result of the bytecode is in the native abi (the tosca
1667   // for the template based interpreter). Any stack space that was used by the
1668   // bytecode that has completed has been removed (e.g. parameters for an invoke)
1669   // so all that we have to do is place any pending result on the expression stack
1670   // and resume execution on the next bytecode.
1671 
1672   generate_deopt_handling();
1673 
1674   // ready to resume the interpreter
1675 
1676   __ set((int)BytecodeInterpreter::deopt_resume, L1_scratch);
1677   __ ba(call_interpreter, false);
1678   __ delayed()->st(L1_scratch, STATE(_msg));
1679 
1680   // The current frame has caught an exception we need to dispatch to the
1681   // handler. We can get here because a native interpreter frame caught
1682   // an exception, in which case there is no handler and we must rethrow.
1683   // If it is a vanilla interpreted frame then we simply drop into the
1684   // interpreter and let it do the lookup.
1685 
1686   Interpreter::_rethrow_exception_entry = __ pc();
1687 
1688   Label return_with_exception;
1689   Label unwind_and_forward;
1690 
1691   // O0: exception
1692   // O7: throwing pc
1693 
1694   // We want exception in the thread no matter what we ultimately decide about frame type.
1695 
1696   Address exception_addr (G2_thread, 0, in_bytes(Thread::pending_exception_offset()));
1697   __ verify_thread();


1745   __ delayed()->nop();
1746 
1747   // Process the native abi result to java expression stack
1748 
1749   __ ld_ptr(STATE(_result._to_call._callee), L4_scratch);                        // called method
1750   __ ld_ptr(STATE(_stack), L1_scratch);                                          // get top of java expr stack
1751   __ lduh(L4_scratch, in_bytes(methodOopDesc::size_of_parameters_offset()), L2_scratch); // get parameter size
1752   __ sll(L2_scratch, LogBytesPerWord, L2_scratch);                                // parameter size in bytes
1753   __ add(L1_scratch, L2_scratch, L1_scratch);                                      // stack destination for result
1754   __ ld(L4_scratch, in_bytes(methodOopDesc::result_index_offset()), L3_scratch); // called method result type index
1755 
1756   // tosca is really just native abi
1757   __ set((intptr_t)CppInterpreter::_tosca_to_stack, L4_scratch);
1758   __ sll(L3_scratch, LogBytesPerWord, L3_scratch);
1759   __ ld_ptr(L4_scratch, L3_scratch, Lscratch);                                       // get typed result converter address
1760   __ jmpl(Lscratch, G0, O7);                                                   // and convert it
1761   __ delayed()->nop();
1762 
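Note: the set/sll/ld_ptr/jmpl sequence above is an indexed call through CppInterpreter's table of per-type result converters: the result-type index selects a stub that copies the native-abi result (O0/O1 or F0/F1) into the expression-stack slot computed above. A sketch of the dispatch; the types and table name are stand-ins:

    // Sketch only: 'tosca_to_stack' stands in for
    // CppInterpreter::_tosca_to_stack, a table of per-type stub addresses.
    typedef void (*converter_fn)();   // hypothetical stub signature
    extern void* tosca_to_stack[];
    void convert_result_sketch(int result_type_index) {
      converter_fn f = (converter_fn)tosca_to_stack[result_type_index];
      f();  // copies the native-abi result onto the Java expression stack
    }
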
1763   // L1_scratch points to top of stack (prepushed)
1764 
1765   __ ba(resume_interpreter, false);
1766   __ delayed()->mov(L1_scratch, O1);
1767 
1768   // An exception is being caught on return to a vanilla interpreter frame.
1769   // Empty the stack and resume interpreter
1770 
1771   __ bind(return_with_exception);
1772 
1773   __ ld_ptr(STATE(_frame_bottom), SP);                             // restore to full stack frame
1774   __ ld_ptr(STATE(_stack_base), O1);                               // empty java expression stack
1775   __ ba(resume_interpreter, false);
1776   __ delayed()->sub(O1, wordSize, O1);                             // account for prepush
1777 
1778   // Return from an interpreted method: we return a result appropriate to the caller (i.e. "recursive"
1779   // interpreter call, or native) and unwind this interpreter activation.
1780   // All monitors should be unlocked.
1781 
1782   __ bind(return_from_interpreted_method);
1783 
1784   VALIDATE_STATE(G3_scratch, 7);
1785 
1786   Label return_to_initial_caller;
1787 
1788   // The interpreted result is on the top of the completed activation's expression stack.
1789   // We must return it to the top of the caller's stack if the caller was interpreted;
1790   // otherwise we convert it to a native abi result and return to call_stub/c1/c2.
1791   // The caller's expression stack was truncated by the call; however, the current activation
1792   // has enough stuff on the stack that we have usable space there no matter what. The
1793   // other thing that makes it easy is that the top of the caller's stack is stored in
1794   // STATE(_locals) for the current activation.
1795 


1834 
1835 
1836   __ mov(O1, I1);                                                     // pass back new stack top across activation
1837   // POP FRAME HERE ==================================
1838   __ restore(FP, G0, SP);                                             // unwind interpreter state frame
1839   __ ld_ptr(STATE(_frame_bottom), SP);                                // restore to full stack frame
1840 
1841 
1842   // Resume the interpreter. The current frame contains the current interpreter
1843   // state object.
1844   //
1845   // O1 == new java stack pointer
1846 
1847   __ bind(resume_interpreter);
1848   VALIDATE_STATE(G3_scratch, 10);
1849 
1850   // A frame we have already used before, so there is no need to bang the stack; use the call_interpreter_2 entry.
1851 
1852   __ set((int)BytecodeInterpreter::method_resume, L1_scratch);
1853   __ st(L1_scratch, STATE(_msg));
1854   __ ba(call_interpreter_2, false);
1855   __ delayed()->st_ptr(O1, STATE(_stack));
1856 
1857 
1858   // Fast accessor methods share this entry point.
1859   // This works because the frame manager is in the same codelet.
1860   // This can either be an entry via call_stub/c1/c2 or a recursive interpreter call;
1861   // we need to do a little register fixup here once we distinguish the two of them.
1862   if (UseFastAccessorMethods && !synchronized) {
1863     // call_stub return address still in O7
1864     __ bind(fast_accessor_slow_entry_path);
1865     __ set((intptr_t)return_from_native_method - 8, Gtmp1);
1866     __ cmp(Gtmp1, O7);                                                // returning to interpreter?
1867     __ brx(Assembler::equal, true, Assembler::pt, re_dispatch);       // yep
1868     __ delayed()->nop();
1869     __ ba(re_dispatch, false);
1870     __ delayed()->mov(G0, prevState);                                   // initial entry
1871 
1872   }
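Note: the O7 test above distinguishes the two callers: a SPARC call leaves its own address in O7, and a return resumes at O7 + 8, so if execution will resume at return_from_native_method, the caller was this interpreter. A one-line sketch:

    // Sketch only: 'ret' resumes at O7 + 8.
    bool returning_to_interpreter_sketch(const char* o7,
                                         const char* return_from_native_method) {
      return o7 == return_from_native_method - 8;
    }
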
1873 
1874   // interpreter returning to native code (call_stub/c1/c2)
1875   // convert result and unwind initial activation
1876   // L2_scratch - scaled result type index
1877 
1878   __ bind(return_to_initial_caller);
1879 
1880   __ set((intptr_t)CppInterpreter::_stack_to_native_abi, L4_scratch);
1881   __ ld_ptr(L4_scratch, L2_scratch, Lscratch);                           // get typed result converter address
1882   __ ld_ptr(STATE(_stack), O0);                                        // current top (prepushed)
1883   __ jmpl(Lscratch, G0, O7);                                           // and convert it
1884   __ delayed()->add(O0, wordSize, O0);                                 // get source (top of current expr stack)
1885 
1886   Label unwind_initial_activation;
1887   __ bind(unwind_initial_activation);
1888 
1889   // RETURN TO CALL_STUB/C1/C2 code (result, if any, in I0..I1/F0..F1)


2014   __ jmpl(L2_scratch, G0, G0);                               // Do specialized entry
2015   __ delayed()->nop();
2016 
2017   //
2018   // Bad Message from interpreter
2019   //
2020   __ bind(bad_msg);
2021   __ stop("Bad message from interpreter");
2022 
2023   // Interpreted method "returned" with an exception; pass it on...
2024   // Pass the result, unwind the activation and continue/return to the interpreter/call_stub.
2025   // We handle the result (if any) differently based on whether we return to the interpreter or call_stub.
2026 
2027   __ bind(throw_exception);
2028   __ ld_ptr(STATE(_prev_link), L1_scratch);
2029   __ tst(L1_scratch);
2030   __ brx(Assembler::zero, false, Assembler::pt, unwind_and_forward);
2031   __ delayed()->nop();
2032 
2033   __ ld_ptr(STATE(_locals), O1);                                   // get result of popping callee's args
2034   __ ba(unwind_recursive_activation, false);
2035   __ delayed()->nop();
2036 
2037   interpreter_frame_manager = entry_point;
2038   return entry_point;
2039 }
2040 
2041 InterpreterGenerator::InterpreterGenerator(StubQueue* code)
2042  : CppInterpreterGenerator(code) {
2043    generate_all(); // down here so it can be "virtual"
2044 }
2045 
2046 
2047 static int size_activation_helper(int callee_extra_locals, int max_stack, int monitor_size) {
2048 
2049   // Figure out the size of an interpreter frame (in words) given that we have a fully allocated
2050   // expression stack, the callee will have callee_extra_locals (so we can account for
2051   // frame extension) and monitor_size for monitors. Basically we need to calculate
2052   // this exactly like generate_fixed_frame/generate_compute_interpreter_state.
2053   //
2054   //


--- New version of the same hunks follows (change 7063628); old version shown above ---

 527     __ br(Assembler::equal, true, Assembler::pt, xreturn_path);
 528     __ delayed()->ldsh(Otos_i, G3_scratch, Otos_i);
 529     __ cmp(G1_scratch, ctos);
 530     __ br(Assembler::equal, true, Assembler::pt, xreturn_path);
 531     __ delayed()->lduh(Otos_i, G3_scratch, Otos_i);
 532 #ifdef ASSERT
 533     __ cmp(G1_scratch, btos);
 534     __ br(Assembler::equal, true, Assembler::pt, xreturn_path);
 535     __ delayed()->ldsb(Otos_i, G3_scratch, Otos_i);
 536     __ should_not_reach_here();
 537 #endif
 538     __ ldsb(Otos_i, G3_scratch, Otos_i);
 539     __ bind(xreturn_path);
 540 
 541     // _ireturn/_areturn
 542     __ retl();                      // return from leaf routine
 543     __ delayed()->mov(O5_savedSP, SP);
 544 
 545     // Generate regular method entry
 546     __ bind(slow_path);
 547     __ ba(fast_accessor_slow_entry_path);
 548     __ delayed()->nop();
 549     return entry;
 550   }
 551   return NULL;
 552 }
 553 
 554 address InterpreterGenerator::generate_Reference_get_entry(void) {
 555 #ifndef SERIALGC
 556   if (UseG1GC) {
 557     // We need to have a routine that generates code to:
 558     //   * load the value in the referent field
 559     //   * pass that value to the pre-barrier.
 560     //
 561     // In the case of G1 this will record the value of the
 562     // referent in an SATB buffer if marking is active.
 563     // This will cause concurrent marking to mark the referent
 564     // field as live.
 565     Unimplemented();
 566   }
 567 #endif // SERIALGC


 702   // It is important not to smash any handles created by this call
 703   // until any oop handle in O0 is dereferenced.
 704 
 705   // (note that the space for outgoing params is preallocated)
 706 
 707   // get signature handler
 708 
 709   Label pending_exception_present;
 710 
 711   { Label L;
 712     __ ld_ptr(STATE(_method), G5_method);
 713     __ ld_ptr(Address(G5_method, 0, in_bytes(methodOopDesc::signature_handler_offset())), G3_scratch);
 714     __ tst(G3_scratch);
 715     __ brx(Assembler::notZero, false, Assembler::pt, L);
 716     __ delayed()->nop();
 717     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), G5_method, false);
 718     __ ld_ptr(STATE(_method), G5_method);
 719 
 720     Address exception_addr(G2_thread, 0, in_bytes(Thread::pending_exception_offset()));
 721     __ ld_ptr(exception_addr, G3_scratch);
 722     __ br_notnull_short(G3_scratch, Assembler::pn, pending_exception_present);
 723     __ ld_ptr(Address(G5_method, 0, in_bytes(methodOopDesc::signature_handler_offset())), G3_scratch);
 724     __ bind(L);
 725   }
 726 
 727   // Push a new frame so that the args will really be stored in it.
 728   // Copy a few locals across so the new frame has the variables
 729   // we need; these values will be dead at the jni call and
 730   // therefore not gc volatile like the values in the current
 731   // frame (Lstate in particular).
 732 
 733   // Flush the state pointer to the register save area,
 734   // which is the only register we need for a stack walk.
 735   __ st_ptr(Lstate, SP, (Lstate->sp_offset_in_saved_window() * wordSize) + STACK_BIAS);
 736 
 737   __ mov(Lstate, O1);         // Need to pass the state pointer across the frame
 738 
 739   // Calculate current frame size
 740   __ sub(SP, FP, O3);         // Calculate negative of current frame size
 741   __ save(SP, O3, SP);        // Allocate an identical sized frame
 742 


1274   }
1275 #endif // ASSERT
1276 
1277   // monitor is already allocated at stack base
1278   // and the lockee is already present
1279   __ ld_ptr(STATE(_stack_base), L2_scratch);
1280   __ ld_ptr(L2_scratch, BasicObjectLock::obj_offset_in_bytes(), O0);   // get object
1281   __ lock_object(L2_scratch, O0);
1282 
1283 }
1284 
1285 //  Generate code to handle resuming a deopted method
1286 void CppInterpreterGenerator::generate_deopt_handling() {
1287 
1288   Label return_from_deopt_common;
1289 
1290   // deopt needs to jump to here to enter the interpreter (return a result)
1291   deopt_frame_manager_return_atos  = __ pc();
1292 
1293   // O0/O1 live
1294   __ ba(return_from_deopt_common);
1295   __ delayed()->set(AbstractInterpreter::BasicType_as_index(T_OBJECT), L3_scratch);    // Result stub address array index
1296 
1297 
1298   // deopt needs to jump to here to enter the interpreter (return a result)
1299   deopt_frame_manager_return_btos  = __ pc();
1300 
1301   // O0/O1 live
1302   __ ba(return_from_deopt_common);
1303   __ delayed()->set(AbstractInterpreter::BasicType_as_index(T_BOOLEAN), L3_scratch);    // Result stub address array index
1304 
1305   // deopt needs to jump to here to enter the interpreter (return a result)
1306   deopt_frame_manager_return_itos  = __ pc();
1307 
1308   // O0/O1 live
1309   __ ba(return_from_deopt_common);
1310   __ delayed()->set(AbstractInterpreter::BasicType_as_index(T_INT), L3_scratch);    // Result stub address array index
1311 
1312   // deopt needs to jump to here to enter the interpreter (return a result)
1313 
1314   deopt_frame_manager_return_ltos  = __ pc();
1315 #if !defined(_LP64) && defined(COMPILER2)
1316   // All return values are where we want them, except for Longs.  C2 returns
1317   // longs in G1 in the 32-bit build whereas the interpreter wants them in O0/O1.
1318   // Since the interpreter will return longs in G1 and O0/O1 in the 32-bit
1319   // build even if we are returning from interpreted code, we just do a little
1320   // stupid shuffling.
1321   // Note: I tried to make c2 return longs in O0/O1 and G1 so we wouldn't have to
1322   // do this here. Unfortunately if we did a rethrow we'd see a MachEpilog node
1323   // first, which would move G1 -> O0/O1 and destroy the exception we were throwing.
1324 
1325   __ srl (G1,  0, O1);
1326   __ srlx(G1, 32, O0);
1327 #endif /* !_LP64 && COMPILER2 */
1328   // O0/O1 live
1329   __ ba(return_from_deopt_common);
1330   __ delayed()->set(AbstractInterpreter::BasicType_as_index(T_LONG), L3_scratch);    // Result stub address array index
1331 
1332   // deopt needs to jump to here to enter the interpreter (return a result)
1333 
1334   deopt_frame_manager_return_ftos  = __ pc();
1335   // O0/O1 live
1336   __ ba(return_from_deopt_common);
1337   __ delayed()->set(AbstractInterpreter::BasicType_as_index(T_FLOAT), L3_scratch);    // Result stub address array index
1338 
1339   // deopt needs to jump to here to enter the interpreter (return a result)
1340   deopt_frame_manager_return_dtos  = __ pc();
1341 
1342   // O0/O1 live
1343   __ ba(return_from_deopt_common);
1344   __ delayed()->set(AbstractInterpreter::BasicType_as_index(T_DOUBLE), L3_scratch);    // Result stub address array index
1345 
1346   // deopt needs to jump to here to enter the interpreter (return a result)
1347   deopt_frame_manager_return_vtos  = __ pc();
1348 
1349   // O0/O1 live
1350   __ set(AbstractInterpreter::BasicType_as_index(T_VOID), L3_scratch);
1351 
1352   // Deopt return common
1353   // an index is present that lets us move any possible result being
1354   // returned to the interpreter's stack
1355   //
1356   __ bind(return_from_deopt_common);
1357 
1358   // The result, if any, is in the native abi result registers (O0..O1/F0..F1).
1359   // The java expression stack is in the state that the calling convention left it in.
1360   // Copy the result from the native abi registers and place it on the java expression stack.
1361 
1362   // Current interpreter state is present in Lstate
1363 


1380 // Generate the code to handle a more_monitors message from the c++ interpreter
1381 void CppInterpreterGenerator::generate_more_monitors() {
1382 
1383   Label entry, loop;
1384   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
1385   // 1. compute new pointers                                // esp: old expression stack top
1386   __ delayed()->ld_ptr(STATE(_stack_base), L4_scratch);            // current expression stack bottom
1387   __ sub(L4_scratch, entry_size, L4_scratch);
1388   __ st_ptr(L4_scratch, STATE(_stack_base));
1389 
1390   __ sub(SP, entry_size, SP);                  // Grow stack
1391   __ st_ptr(SP, STATE(_frame_bottom));
1392 
1393   __ ld_ptr(STATE(_stack_limit), L2_scratch);
1394   __ sub(L2_scratch, entry_size, L2_scratch);
1395   __ st_ptr(L2_scratch, STATE(_stack_limit));
1396 
1397   __ ld_ptr(STATE(_stack), L1_scratch);                // Get current stack top
1398   __ sub(L1_scratch, entry_size, L1_scratch);
1399   __ st_ptr(L1_scratch, STATE(_stack));
1400   __ ba(entry);
1401   __ delayed()->add(L1_scratch, wordSize, L1_scratch);        // first real entry (undo prepush)
1402 
1403   // 2. move expression stack
1404 
1405   __ bind(loop);
1406   __ st_ptr(L3_scratch, Address(L1_scratch, 0));
1407   __ add(L1_scratch, wordSize, L1_scratch);
1408   __ bind(entry);
1409   __ cmp(L1_scratch, L4_scratch);
1410   __ br(Assembler::notEqual, false, Assembler::pt, loop);
1411   __ delayed()->ld_ptr(L1_scratch, entry_size, L3_scratch);
1412 
1413   // now zero the slot so we can find it.
1414   __ st_ptr(G0, L4_scratch, BasicObjectLock::obj_offset_in_bytes());
1415 
1416 }
1417 
1418 // Initial entry to C++ interpreter from the call_stub.
1419 // This entry point is called the frame manager since it handles the generation
1420 // of interpreter activation frames via requests directly from the vm (via call_stub)


1633 
1634   __ cmp(L1_scratch, (int)BytecodeInterpreter::call_method);
1635   __ br(Assembler::equal, false, Assembler::pt, call_method);
1636   __ delayed()->cmp(L1_scratch, (int)BytecodeInterpreter::return_from_method);
1637   __ br(Assembler::equal, false, Assembler::pt, return_from_interpreted_method);
1638   __ delayed()->cmp(L1_scratch, (int)BytecodeInterpreter::throwing_exception);
1639   __ br(Assembler::equal, false, Assembler::pt, throw_exception);
1640   __ delayed()->cmp(L1_scratch, (int)BytecodeInterpreter::do_osr);
1641   __ br(Assembler::equal, false, Assembler::pt, do_OSR);
1642   __ delayed()->cmp(L1_scratch, (int)BytecodeInterpreter::more_monitors);
1643   __ br(Assembler::notEqual, false, Assembler::pt, bad_msg);
1644 
1645   // Allocate more monitor space, shuffle expression stack....
1646 
1647   generate_more_monitors();
1648 
1649   // new monitor slot allocated, resume the interpreter.
1650 
1651   __ set((int)BytecodeInterpreter::got_monitors, L1_scratch);
1652   VALIDATE_STATE(G3_scratch, 5);
1653   __ ba(call_interpreter);
1654   __ delayed()->st(L1_scratch, STATE(_msg));
1655 
1656   // uncommon trap needs to jump to here to enter the interpreter (re-execute current bytecode)
1657   unctrap_frame_manager_entry  = __ pc();
1658 
1659   // QQQ what message do we send
1660 
1661   __ ba(call_interpreter);
1662   __ delayed()->ld_ptr(STATE(_frame_bottom), SP);                  // restore to full stack frame
1663 
1664   //=============================================================================
1665   // Returning from a compiled method into a deopted method. The bytecode at the
1666   // bcp has completed. The result of the bytecode is in the native abi (the tosca
1667   // for the template based interpreter). Any stack space that was used by the
1668   // bytecode that has completed has been removed (e.g. parameters for an invoke)
1669   // so all that we have to do is place any pending result on the expression stack
1670   // and resume execution on the next bytecode.
1671 
1672   generate_deopt_handling();
1673 
1674   // ready to resume the interpreter
1675 
1676   __ set((int)BytecodeInterpreter::deopt_resume, L1_scratch);
1677   __ ba(call_interpreter);
1678   __ delayed()->st(L1_scratch, STATE(_msg));
1679 
1680   // The current frame has caught an exception we need to dispatch to the
1681   // handler. We can get here because a native interpreter frame caught
1682   // an exception, in which case there is no handler and we must rethrow.
1683   // If it is a vanilla interpreted frame then we simply drop into the
1684   // interpreter and let it do the lookup.
1685 
1686   Interpreter::_rethrow_exception_entry = __ pc();
1687 
1688   Label return_with_exception;
1689   Label unwind_and_forward;
1690 
1691   // O0: exception
1692   // O7: throwing pc
1693 
1694   // We want exception in the thread no matter what we ultimately decide about frame type.
1695 
1696   Address exception_addr (G2_thread, 0, in_bytes(Thread::pending_exception_offset()));
1697   __ verify_thread();


1745   __ delayed()->nop();
1746 
1747   // Process the native abi result to java expression stack
1748 
1749   __ ld_ptr(STATE(_result._to_call._callee), L4_scratch);                        // called method
1750   __ ld_ptr(STATE(_stack), L1_scratch);                                          // get top of java expr stack
1751   __ lduh(L4_scratch, in_bytes(methodOopDesc::size_of_parameters_offset()), L2_scratch); // get parameter size
1752   __ sll(L2_scratch, LogBytesPerWord, L2_scratch);                                // parameter size in bytes
1753   __ add(L1_scratch, L2_scratch, L1_scratch);                                      // stack destination for result
1754   __ ld(L4_scratch, in_bytes(methodOopDesc::result_index_offset()), L3_scratch); // called method result type index
1755 
1756   // tosca is really just native abi
1757   __ set((intptr_t)CppInterpreter::_tosca_to_stack, L4_scratch);
1758   __ sll(L3_scratch, LogBytesPerWord, L3_scratch);
1759   __ ld_ptr(L4_scratch, L3_scratch, Lscratch);                                       // get typed result converter address
1760   __ jmpl(Lscratch, G0, O7);                                                   // and convert it
1761   __ delayed()->nop();
1762 
1763   // L1_scratch points to top of stack (prepushed)
1764 
1765   __ ba(resume_interpreter);
1766   __ delayed()->mov(L1_scratch, O1);
1767 
1768   // An exception is being caught on return to a vanilla interpreter frame.
1769   // Empty the stack and resume interpreter
1770 
1771   __ bind(return_with_exception);
1772 
1773   __ ld_ptr(STATE(_frame_bottom), SP);                             // restore to full stack frame
1774   __ ld_ptr(STATE(_stack_base), O1);                               // empty java expression stack
1775   __ ba(resume_interpreter);
1776   __ delayed()->sub(O1, wordSize, O1);                             // account for prepush
1777 
1778   // Return from an interpreted method: we return a result appropriate to the caller (i.e. "recursive"
1779   // interpreter call, or native) and unwind this interpreter activation.
1780   // All monitors should be unlocked.
1781 
1782   __ bind(return_from_interpreted_method);
1783 
1784   VALIDATE_STATE(G3_scratch, 7);
1785 
1786   Label return_to_initial_caller;
1787 
1788   // The interpreted result is on the top of the completed activation's expression stack.
1789   // We must return it to the top of the caller's stack if the caller was interpreted;
1790   // otherwise we convert it to a native abi result and return to call_stub/c1/c2.
1791   // The caller's expression stack was truncated by the call; however, the current activation
1792   // has enough stuff on the stack that we have usable space there no matter what. The
1793   // other thing that makes it easy is that the top of the caller's stack is stored in
1794   // STATE(_locals) for the current activation.
1795 


1834 
1835 
1836   __ mov(O1, I1);                                                     // pass back new stack top across activation
1837   // POP FRAME HERE ==================================
1838   __ restore(FP, G0, SP);                                             // unwind interpreter state frame
1839   __ ld_ptr(STATE(_frame_bottom), SP);                                // restore to full stack frame
1840 
1841 
1842   // Resume the interpreter. The current frame contains the current interpreter
1843   // state object.
1844   //
1845   // O1 == new java stack pointer
1846 
1847   __ bind(resume_interpreter);
1848   VALIDATE_STATE(G3_scratch, 10);
1849 
1850   // A frame we have already used before, so there is no need to bang the stack; use the call_interpreter_2 entry.
1851 
1852   __ set((int)BytecodeInterpreter::method_resume, L1_scratch);
1853   __ st(L1_scratch, STATE(_msg));
1854   __ ba(call_interpreter_2);
1855   __ delayed()->st_ptr(O1, STATE(_stack));
1856 
1857 
1858   // Fast accessor methods share this entry point.
1859   // This works because the frame manager is in the same codelet.
1860   // This can either be an entry via call_stub/c1/c2 or a recursive interpreter call;
1861   // we need to do a little register fixup here once we distinguish the two of them.
1862   if (UseFastAccessorMethods && !synchronized) {
1863     // call_stub return address still in O7
1864     __ bind(fast_accessor_slow_entry_path);
1865     __ set((intptr_t)return_from_native_method - 8, Gtmp1);
1866     __ cmp(Gtmp1, O7);                                                // returning to interpreter?
1867     __ brx(Assembler::equal, true, Assembler::pt, re_dispatch);       // yep
1868     __ delayed()->nop();
1869     __ ba(re_dispatch);
1870     __ delayed()->mov(G0, prevState);                                 // initial entry
1871 
1872   }
1873 
1874   // interpreter returning to native code (call_stub/c1/c2)
1875   // convert result and unwind initial activation
1876   // L2_scratch - scaled result type index
1877 
1878   __ bind(return_to_initial_caller);
1879 
1880   __ set((intptr_t)CppInterpreter::_stack_to_native_abi, L4_scratch);
1881   __ ld_ptr(L4_scratch, L2_scratch, Lscratch);                           // get typed result converter address
1882   __ ld_ptr(STATE(_stack), O0);                                        // current top (prepushed)
1883   __ jmpl(Lscratch, G0, O7);                                           // and convert it
1884   __ delayed()->add(O0, wordSize, O0);                                 // get source (top of current expr stack)
1885 
1886   Label unwind_initial_activation;
1887   __ bind(unwind_initial_activation);
1888 
1889   // RETURN TO CALL_STUB/C1/C2 code (result, if any, in I0..I1/F0..F1)


2014   __ jmpl(L2_scratch, G0, G0);                               // Do specialized entry
2015   __ delayed()->nop();
2016 
2017   //
2018   // Bad Message from interpreter
2019   //
2020   __ bind(bad_msg);
2021   __ stop("Bad message from interpreter");
2022 
2023   // Interpreted method "returned" with an exception; pass it on...
2024   // Pass the result, unwind the activation and continue/return to the interpreter/call_stub.
2025   // We handle the result (if any) differently based on whether we return to the interpreter or call_stub.
2026 
2027   __ bind(throw_exception);
2028   __ ld_ptr(STATE(_prev_link), L1_scratch);
2029   __ tst(L1_scratch);
2030   __ brx(Assembler::zero, false, Assembler::pt, unwind_and_forward);
2031   __ delayed()->nop();
2032 
2033   __ ld_ptr(STATE(_locals), O1); // get result of popping callee's args
2034   __ ba(unwind_recursive_activation);
2035   __ delayed()->nop();
2036 
2037   interpreter_frame_manager = entry_point;
2038   return entry_point;
2039 }
2040 
2041 InterpreterGenerator::InterpreterGenerator(StubQueue* code)
2042  : CppInterpreterGenerator(code) {
2043    generate_all(); // down here so it can be "virtual"
2044 }
2045 
2046 
2047 static int size_activation_helper(int callee_extra_locals, int max_stack, int monitor_size) {
2048 
2049   // Figure out the size of an interpreter frame (in words) given that we have a fully allocated
2050   // expression stack, the callee will have callee_extra_locals (so we can account for
2051   // frame extension) and monitor_size for monitors. Basically we need to calculate
2052   // this exactly like generate_fixed_frame/generate_compute_interpreter_state.
2053   //
2054   //

