src/cpu/sparc/vm/templateInterpreter_sparc.cpp


Old version:

 477   //    the monitor slots (0 slots initially), and some frame local
 478   //    scratch locations.
 479   //
 480   // 3) The following interpreter activation registers must be set up:
 481   //    Lesp       : expression stack pointer
 482   //    Lbcp       : bytecode pointer
 483   //    Lmethod    : method
 484   //    Llocals    : locals pointer
 485   //    Lmonitors  : monitor pointer
 486   //    LcpoolCache: constant pool cache
 487   //
 488   // 4) Initialize the non-argument locals if necessary:
 489   //    Non-argument locals may need to be initialized to NULL
 490   //    for GC to work. If the oop-map information is accurate
 491   //    (in the absence of the JSR problem), no initialization
 492   //    is necessary.
 493   //
 494   // (gri - 2/25/2000)
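As a reading aid, here is a minimal C++ sketch of the activation state these registers describe (an editor's illustration, not part of the file; the slot addressing assumes local i lives at Llocals - i words, which is what the locals-clearing loop further down implies):

    #include <cstdint>

    // Hypothetical model of the interpreter activation listed in the comment above.
    struct InterpreterActivation {
      intptr_t* Lesp;         // expression stack pointer (top of the operand stack)
      uint8_t*  Lbcp;         // bytecode pointer (next bytecode to execute)
      void*     Lmethod;      // the Method being interpreted
      intptr_t* Llocals;      // address of local slot 0 (the first parameter)
      void*     Lmonitors;    // start of the monitor block (0 slots initially)
      void*     LcpoolCache;  // constant pool cache of the method
    };

    // Local slot i, assuming slots are laid out at decreasing addresses.
    inline intptr_t* local_slot(const InterpreterActivation& a, int i) {
      return a.Llocals - i;   // pointer arithmetic in word-sized steps
    }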
 495 
 496 
 497   const Address size_of_parameters(G5_method, Method::size_of_parameters_offset());
 498   const Address size_of_locals    (G5_method, Method::size_of_locals_offset());
 499   const Address constMethod       (G5_method, Method::const_offset());
 500   int rounded_vm_local_words = round_to( frame::interpreter_frame_vm_local_words, WordsPerLong );
 501 
 502   const int extra_space =
 503     rounded_vm_local_words +                   // frame local scratch space
 504     //6815692//Method::extra_stack_words() +       // extra push slots for MH adapters
 505     frame::memory_parameter_word_sp_offset +   // register save area
 506     (native_call ? frame::interpreter_frame_extra_outgoing_argument_words : 0);
 507 
 508   const Register Glocals_size = G3;
 509   const Register Otmp1 = O3;
 510   const Register Otmp2 = O4;
 511   // Lscratch can't be used as a temporary because the call_stub uses
 512   // it to assert that the stack frame was set up correctly.
 513 
 514   __ lduh( size_of_parameters, Glocals_size);

 515 
 516   // Gargs points to first local + BytesPerWord
 517   // Set the saved SP after the register window save
 518   //
 519   assert_different_registers(Gargs, Glocals_size, Gframe_size, O5_savedSP);
 520   __ sll(Glocals_size, Interpreter::logStackElementSize, Otmp1);
 521   __ add(Gargs, Otmp1, Gargs);
 522 
 523   if (native_call) {
 524     __ calc_mem_param_words( Glocals_size, Gframe_size );
 525     __ add( Gframe_size,  extra_space, Gframe_size);
 526     __ round_to( Gframe_size, WordsPerLong );
 527     __ sll( Gframe_size, LogBytesPerWord, Gframe_size );
 528   } else {
 529 
 530     //
 531     // Compute number of locals in method apart from incoming parameters
 532     //
 533     __ lduh( size_of_locals, Otmp1 );

 534     __ sub( Otmp1, Glocals_size, Glocals_size );
 535     __ round_to( Glocals_size, WordsPerLong );
 536     __ sll( Glocals_size, Interpreter::logStackElementSize, Glocals_size );
 537 
 538     // See if the frame is greater than one page in size. If so,
 539     // we need to verify that there is enough stack space remaining.
 540     // Frame_size = (max_stack + extra_space) * BytesPerWord;
 541     __ ld_ptr( constMethod, Gframe_size );
 542     __ lduh( Gframe_size, in_bytes(ConstMethod::max_stack_offset()), Gframe_size );
 543     __ add( Gframe_size, extra_space, Gframe_size );
 544     __ round_to( Gframe_size, WordsPerLong );
 545     __ sll( Gframe_size, Interpreter::logStackElementSize, Gframe_size);
 546 
 547     // Add in java locals size for stack overflow check only
 548     __ add( Gframe_size, Glocals_size, Gframe_size );
 549 
 550     const Register Otmp2 = O4;
 551     assert_different_registers(Otmp1, Otmp2, O5_savedSP);
 552     generate_stack_overflow_check(Gframe_size, Otmp1, Otmp2);
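The block above is plain fixed-point arithmetic on the method's metadata. A minimal C++ sketch of the same frame-size formula (an editor's illustration; the constants are placeholders, not the real SPARC values):

    // Placeholder constants, for illustration only.
    const int WordsPerLong        = 2;   // assumed long alignment in words
    const int logStackElementSize = 3;   // assumed 8-byte stack elements
    const int vm_local_words      = 10;  // stands in for rounded_vm_local_words
    const int register_save_words = 16;  // stands in for memory_parameter_word_sp_offset

    // Round a word count up to a multiple of 'align' (align must be a power of two).
    static int round_to_words(int words, int align) {
      return (words + align - 1) & ~(align - 1);
    }

    // Frame size in bytes used for the stack-overflow check of a non-native method.
    int frame_check_size(int max_stack, int size_of_locals, int size_of_parameters) {
      int extra_space  = round_to_words(vm_local_words, WordsPerLong) + register_save_words;
      int frame_bytes  = round_to_words(max_stack + extra_space, WordsPerLong)
                         << logStackElementSize;
      // Non-parameter locals are added for the overflow check only.
      int locals_bytes = round_to_words(size_of_locals - size_of_parameters, WordsPerLong)
                         << logStackElementSize;
      return frame_bytes + locals_bytes;
    }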
 553 


1239 
1240   return entry;
1241 }
1242 
1243 
1244 // Generic method entry to (asm) interpreter
1245 //------------------------------------------------------------------------------------------------------------------------
1246 //
1247 address InterpreterGenerator::generate_normal_entry(bool synchronized) {
1248   address entry = __ pc();
1249 
1250   bool inc_counter  = UseCompiler || CountCompiledCalls;
1251 
1252   // the following temporary registers are used during frame creation
1253   const Register Gtmp1 = G3_scratch ;
1254   const Register Gtmp2 = G1_scratch;
1255 
1256   // make sure registers are different!
1257   assert_different_registers(G2_thread, G5_method, Gargs, Gtmp1, Gtmp2);
1258 
1259   const Address size_of_parameters(G5_method, Method::size_of_parameters_offset());
1260   const Address size_of_locals    (G5_method, Method::size_of_locals_offset());
1261   // Seems like G5_method is live at the point this is used, so we could make this look consistent
1262   // and use it in the asserts.
1263   const Address access_flags      (Lmethod,   Method::access_flags_offset());
1264 
1265   const Register Glocals_size = G3;
1266   assert_different_registers(Glocals_size, G4_scratch, Gframe_size);
1267 
1268   // make sure method is not native & not abstract
1269   // rethink these assertions - they can be simplified and shared (gri 2/25/2000)
1270 #ifdef ASSERT
1271   __ ld(G5_method, Method::access_flags_offset(), Gtmp1);
1272   {
1273     Label L;
1274     __ btst(JVM_ACC_NATIVE, Gtmp1);
1275     __ br(Assembler::zero, false, Assembler::pt, L);
1276     __ delayed()->nop();
1277     __ stop("tried to execute native method as non-native");
1278     __ bind(L);
1279   }
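The branch-on-zero above is easy to misread: it falls through to the stop only when the NATIVE bit is set. In C terms the assertion is simply (a sketch, not the HotSpot macro):

    #include <cassert>

    enum { JVM_ACC_NATIVE = 0x0100 };   // access-flag bit for native methods

    void check_not_native(int access_flags) {
      // Trap only when the NATIVE bit is set; otherwise fall through.
      assert((access_flags & JVM_ACC_NATIVE) == 0 &&
             "tried to execute native method as non-native");
    }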
1280   { Label L;


1292 
1293 #ifdef FAST_DISPATCH
1294   __ set((intptr_t)Interpreter::dispatch_table(), IdispatchTables);
1295                                           // set bytecode dispatch table base
1296 #endif
1297 
1298   //
1299   // Code to initialize the extra (i.e. non-parm) locals
1300   //
1301   Register init_value = noreg;    // will be G0 if we must clear locals
1302   // The way the code was set up before, zerolocals was always true for vanilla Java entries.
1303   // It could only be false for specialized entries such as accessor or empty methods, which have
1304   // no extra locals, so the test was a waste of time and the extra locals were always
1305   // initialized. We removed this extra complication from already over-complicated code.
1306 
1307   init_value = G0;
1308   Label clear_loop;
1309 
1310   // NOTE: If you change the frame layout, this code will need to
1311   // be updated!
1312   __ lduh( size_of_locals, O2 );
1313   __ lduh( size_of_parameters, O1 );

1314   __ sll( O2, Interpreter::logStackElementSize, O2);
1315   __ sll( O1, Interpreter::logStackElementSize, O1 );
1316   __ sub( Llocals, O2, O2 );
1317   __ sub( Llocals, O1, O1 );
1318 
1319   __ bind( clear_loop );
1320   __ inc( O2, wordSize );
1321 
1322   __ cmp( O2, O1 );
1323   __ brx( Assembler::lessEqualUnsigned, true, Assembler::pt, clear_loop );
1324   __ delayed()->st_ptr( init_value, O2, 0 );
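The loop above stores init_value (G0, i.e. zero) into exactly the non-parameter local slots. A minimal C++ sketch of the equivalent loop (an editor's illustration; it assumes local slot i lives at Llocals - i words, which is what the address arithmetic above encodes):

    #include <cstdint>

    // Clear locals[size_of_parameters .. size_of_locals-1]; the parameter slots
    // already hold the incoming arguments and must not be touched.
    void clear_non_parameter_locals(intptr_t* Llocals,
                                    int size_of_locals, int size_of_parameters) {
      for (int i = size_of_parameters; i < size_of_locals; i++) {
        *(Llocals - i) = 0;   // NULL/zero so the GC never sees a stale oop here
      }
    }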
1325 
1326   const Address do_not_unlock_if_synchronized(G2_thread,
1327     JavaThread::do_not_unlock_if_synchronized_offset());
1328   // Since at this point in the method invocation the exception handler
1329   // would try to exit the monitor of a synchronized method that hasn't
1330   // been entered yet, we set the thread-local variable
1331   // _do_not_unlock_if_synchronized to true. If any exception is thrown by
1332   // the runtime, exception handling (i.e. unlock_if_synchronized_method) will
1333   // check this thread-local flag.
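A sketch of the protocol this comment describes (an editor's illustration of the flag's semantics, not the actual HotSpot code paths):

    // Per-thread flag modelling JavaThread::_do_not_unlock_if_synchronized.
    struct ThreadModel {
      bool do_not_unlock_if_synchronized;
    };

    // Simplified entry sequence for a synchronized method.
    void enter_synchronized_method(ThreadModel& t) {
      t.do_not_unlock_if_synchronized = true;   // monitor not entered yet
      // ... allocate the frame, bang stack pages, run invocation counters ...
      //     (an exception raised here must NOT try to exit the monitor)
      // lock_object(...);                      // monitor actually entered here
      t.do_not_unlock_if_synchronized = false;  // from now on unlocking is required
    }

    // Simplified exception path (cf. unlock_if_synchronized_method).
    void unwind_on_exception(ThreadModel& t) {
      if (!t.do_not_unlock_if_synchronized) {
        // unlock_object(...);                  // only if the monitor was entered
      }
    }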


1808 
1809   {
1810     // Check to see whether we are returning to a deoptimized frame.
1811     // (The PopFrame call ensures that the caller of the popped frame is
1812     // either interpreted or compiled and deoptimizes it if compiled.)
1813     // In this case, we can't call dispatch_next() after the frame is
1814     // popped, but instead must save the incoming arguments and restore
1815     // them after deoptimization has occurred.
1816     //
1817     // Note that we don't compare the return PC against the
1818     // deoptimization blob's unpack entry because of the presence of
1819     // adapter frames in C2.
1820     Label caller_not_deoptimized;
1821     __ call_VM_leaf(L7_thread_cache, CAST_FROM_FN_PTR(address, InterpreterRuntime::interpreter_contains), I7);
1822     __ br_notnull_short(O0, Assembler::pt, caller_not_deoptimized);
1823 
1824     const Register Gtmp1 = G3_scratch;
1825     const Register Gtmp2 = G1_scratch;
1826 
1827     // Compute size of arguments for saving when returning to deoptimized caller
1828     __ lduh(Lmethod, in_bytes(Method::size_of_parameters_offset()), Gtmp1);

1829     __ sll(Gtmp1, Interpreter::logStackElementSize, Gtmp1);
1830     __ sub(Llocals, Gtmp1, Gtmp2);
1831     __ add(Gtmp2, wordSize, Gtmp2);
1832     // Save these arguments
1833     __ call_VM_leaf(L7_thread_cache, CAST_FROM_FN_PTR(address, Deoptimization::popframe_preserve_args), G2_thread, Gtmp1, Gtmp2);
1834     // Inform deoptimization that it is responsible for restoring these arguments
1835     __ set(JavaThread::popframe_force_deopt_reexecution_bit, Gtmp1);
1836     Address popframe_condition_addr(G2_thread, JavaThread::popframe_condition_offset());
1837     __ st(Gtmp1, popframe_condition_addr);
1838 
1839     // Return from the current method
1840     // The caller's SP was adjusted upon method entry to accommodate
1841     // the callee's non-argument locals. Undo that adjustment.
1842     __ ret();
1843     __ delayed()->restore(I5_savedSP, G0, SP);
1844 
1845     __ bind(caller_not_deoptimized);
1846   }
1847 
1848   // Clear the popframe condition flag

New version (size_of_parameters and size_of_locals are now read via ConstMethod):

 477   //    the monitor slots (0 slots initially), and some frame local
 478   //    scratch locations.
 479   //
 480   // 3) The following interpreter activation registers must be set up:
 481   //    Lesp       : expression stack pointer
 482   //    Lbcp       : bytecode pointer
 483   //    Lmethod    : method
 484   //    Llocals    : locals pointer
 485   //    Lmonitors  : monitor pointer
 486   //    LcpoolCache: constant pool cache
 487   //
 488   // 4) Initialize the non-argument locals if necessary:
 489   //    Non-argument locals may need to be initialized to NULL
 490   //    for GC to work. If the oop-map information is accurate
 491   //    (in the absence of the JSR problem), no initialization
 492   //    is necessary.
 493   //
 494   // (gri - 2/25/2000)
 495 
 496 


 497   const Address constMethod       (G5_method, Method::const_offset());
 498   int rounded_vm_local_words = round_to( frame::interpreter_frame_vm_local_words, WordsPerLong );
 499 
 500   const int extra_space =
 501     rounded_vm_local_words +                   // frame local scratch space
 502     //6815692//Method::extra_stack_words() +       // extra push slots for MH adapters
 503     frame::memory_parameter_word_sp_offset +   // register save area
 504     (native_call ? frame::interpreter_frame_extra_outgoing_argument_words : 0);
 505 
 506   const Register Glocals_size = G3;
 507   const Register Otmp1 = O3;
 508   const Register Otmp2 = O4;
 509   // Lscratch can't be used as a temporary because the call_stub uses
 510   // it to assert that the stack frame was set up correctly.
 511 
 512   __ ld_ptr( constMethod, Glocals_size );
 513   __ lduh( Glocals_size, in_bytes(ConstMethod::size_of_parameters_offset()), Glocals_size);
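Compared with the older code earlier on this page, which read the parameter count with a single lduh straight off Method, the new sequence takes one extra load through ConstMethod. A rough C++ sketch of the two access paths (skeleton classes for illustration only, not the real definitions):

    #include <cstdint>

    struct ConstMethod {
      uint16_t _size_of_parameters;                 // now lives in ConstMethod
      uint16_t size_of_parameters() const { return _size_of_parameters; }
    };

    struct Method {
      ConstMethod* _constMethod;
      ConstMethod* constMethod() const { return _constMethod; }
      // Before the change, Method itself carried a size-of-parameters field,
      // so a single lduh from G5_method was enough.
    };

    // New access path: one ld_ptr (Method -> ConstMethod) plus one lduh.
    uint16_t parameter_count(const Method* m) {
      return m->constMethod()->size_of_parameters();
    }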
 514 
 515   // Gargs points to first local + BytesPerWord
 516   // Set the saved SP after the register window save
 517   //
 518   assert_different_registers(Gargs, Glocals_size, Gframe_size, O5_savedSP);
 519   __ sll(Glocals_size, Interpreter::logStackElementSize, Otmp1);
 520   __ add(Gargs, Otmp1, Gargs);
 521 
 522   if (native_call) {
 523     __ calc_mem_param_words( Glocals_size, Gframe_size );
 524     __ add( Gframe_size,  extra_space, Gframe_size);
 525     __ round_to( Gframe_size, WordsPerLong );
 526     __ sll( Gframe_size, LogBytesPerWord, Gframe_size );
 527   } else {
 528 
 529     //
 530     // Compute number of locals in method apart from incoming parameters
 531     //
 532     __ ld_ptr( G5_method, Method::const_offset(), Otmp1 );
 533     __ lduh( Otmp1, in_bytes(ConstMethod::size_of_locals_offset()), Otmp1 );
 534     __ sub( Otmp1, Glocals_size, Glocals_size );
 535     __ round_to( Glocals_size, WordsPerLong );
 536     __ sll( Glocals_size, Interpreter::logStackElementSize, Glocals_size );
 537 
 538     // See if the frame is greater than one page in size. If so,
 539     // we need to verify that there is enough stack space remaining.
 540     // Frame_size = (max_stack + extra_space) * BytesPerWord;
 541     __ ld_ptr( constMethod, Gframe_size );
 542     __ lduh( Gframe_size, in_bytes(ConstMethod::max_stack_offset()), Gframe_size );
 543     __ add( Gframe_size, extra_space, Gframe_size );
 544     __ round_to( Gframe_size, WordsPerLong );
 545     __ sll( Gframe_size, Interpreter::logStackElementSize, Gframe_size);
 546 
 547     // Add in java locals size for stack overflow check only
 548     __ add( Gframe_size, Glocals_size, Gframe_size );
 549 
 550     const Register Otmp2 = O4;
 551     assert_different_registers(Otmp1, Otmp2, O5_savedSP);
 552     generate_stack_overflow_check(Gframe_size, Otmp1, Otmp2);
 553 


1239 
1240   return entry;
1241 }
1242 
1243 
1244 // Generic method entry to (asm) interpreter
1245 //------------------------------------------------------------------------------------------------------------------------
1246 //
1247 address InterpreterGenerator::generate_normal_entry(bool synchronized) {
1248   address entry = __ pc();
1249 
1250   bool inc_counter  = UseCompiler || CountCompiledCalls;
1251 
1252   // the following temporary registers are used during frame creation
1253   const Register Gtmp1 = G3_scratch ;
1254   const Register Gtmp2 = G1_scratch;
1255 
1256   // make sure registers are different!
1257   assert_different_registers(G2_thread, G5_method, Gargs, Gtmp1, Gtmp2);
1258 


1259   // Seems like G5_method is live at the point this is used, so we could make this look consistent
1260   // and use it in the asserts.
1261   const Address access_flags      (Lmethod,   Method::access_flags_offset());
1262 
1263   const Register Glocals_size = G3;
1264   assert_different_registers(Glocals_size, G4_scratch, Gframe_size);
1265 
1266   // make sure method is not native & not abstract
1267   // rethink these assertions - they can be simplified and shared (gri 2/25/2000)
1268 #ifdef ASSERT
1269   __ ld(G5_method, Method::access_flags_offset(), Gtmp1);
1270   {
1271     Label L;
1272     __ btst(JVM_ACC_NATIVE, Gtmp1);
1273     __ br(Assembler::zero, false, Assembler::pt, L);
1274     __ delayed()->nop();
1275     __ stop("tried to execute native method as non-native");
1276     __ bind(L);
1277   }
1278   { Label L;


1290 
1291 #ifdef FAST_DISPATCH
1292   __ set((intptr_t)Interpreter::dispatch_table(), IdispatchTables);
1293                                           // set bytecode dispatch table base
1294 #endif
1295 
1296   //
1297   // Code to initialize the extra (i.e. non-parm) locals
1298   //
1299   Register init_value = noreg;    // will be G0 if we must clear locals
1300   // The way the code was set up before, zerolocals was always true for vanilla Java entries.
1301   // It could only be false for specialized entries such as accessor or empty methods, which have
1302   // no extra locals, so the test was a waste of time and the extra locals were always
1303   // initialized. We removed this extra complication from already over-complicated code.
1304 
1305   init_value = G0;
1306   Label clear_loop;
1307 
1308   // NOTE: If you change the frame layout, this code will need to
1309   // be updated!
1310   __ ld_ptr( G5_method, Method::const_offset(), O1 ); 
1311   __ lduh( O1, in_bytes(ConstMethod::size_of_locals_offset()), O2 );
1312   __ lduh( O1, in_bytes(ConstMethod::size_of_parameters_offset()), O1 );
1313   __ sll( O2, Interpreter::logStackElementSize, O2);
1314   __ sll( O1, Interpreter::logStackElementSize, O1 );
1315   __ sub( Llocals, O2, O2 );
1316   __ sub( Llocals, O1, O1 );
1317 
1318   __ bind( clear_loop );
1319   __ inc( O2, wordSize );
1320 
1321   __ cmp( O2, O1 );
1322   __ brx( Assembler::lessEqualUnsigned, true, Assembler::pt, clear_loop );
1323   __ delayed()->st_ptr( init_value, O2, 0 );
1324 
1325   const Address do_not_unlock_if_synchronized(G2_thread,
1326     JavaThread::do_not_unlock_if_synchronized_offset());
1327   // Since at this point in the method invocation the exception handler
1328   // would try to exit the monitor of a synchronized method that hasn't
1329   // been entered yet, we set the thread-local variable
1330   // _do_not_unlock_if_synchronized to true. If any exception is thrown by
1331   // the runtime, exception handling (i.e. unlock_if_synchronized_method) will
1332   // check this thread-local flag.


1807 
1808   {
1809     // Check to see whether we are returning to a deoptimized frame.
1810     // (The PopFrame call ensures that the caller of the popped frame is
1811     // either interpreted or compiled and deoptimizes it if compiled.)
1812     // In this case, we can't call dispatch_next() after the frame is
1813     // popped, but instead must save the incoming arguments and restore
1814     // them after deoptimization has occurred.
1815     //
1816     // Note that we don't compare the return PC against the
1817     // deoptimization blob's unpack entry because of the presence of
1818     // adapter frames in C2.
1819     Label caller_not_deoptimized;
1820     __ call_VM_leaf(L7_thread_cache, CAST_FROM_FN_PTR(address, InterpreterRuntime::interpreter_contains), I7);
1821     __ br_notnull_short(O0, Assembler::pt, caller_not_deoptimized);
1822 
1823     const Register Gtmp1 = G3_scratch;
1824     const Register Gtmp2 = G1_scratch;
1825 
1826     // Compute size of arguments for saving when returning to deoptimized caller
1827     __ ld_ptr(Lmethod, in_bytes(Method::const_offset()), Gtmp1);
1828     __ lduh(Gtmp1, in_bytes(ConstMethod::size_of_parameters_offset()), Gtmp1);
1829     __ sll(Gtmp1, Interpreter::logStackElementSize, Gtmp1);
1830     __ sub(Llocals, Gtmp1, Gtmp2);
1831     __ add(Gtmp2, wordSize, Gtmp2);
1832     // Save these arguments
1833     __ call_VM_leaf(L7_thread_cache, CAST_FROM_FN_PTR(address, Deoptimization::popframe_preserve_args), G2_thread, Gtmp1, Gtmp2);
1834     // Inform deoptimization that it is responsible for restoring these arguments
1835     __ set(JavaThread::popframe_force_deopt_reexecution_bit, Gtmp1);
1836     Address popframe_condition_addr(G2_thread, JavaThread::popframe_condition_offset());
1837     __ st(Gtmp1, popframe_condition_addr);
1838 
1839     // Return from the current method
1840     // The caller's SP was adjusted upon method entry to accommodate
1841     // the callee's non-argument locals. Undo that adjustment.
1842     __ ret();
1843     __ delayed()->restore(I5_savedSP, G0, SP);
1844 
1845     __ bind(caller_not_deoptimized);
1846   }
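The address arithmetic above identifies the incoming-argument area inside the locals so it can be handed to Deoptimization::popframe_preserve_args as (size in bytes, lowest address). A minimal C++ sketch (an editor's illustration; it assumes word-sized local slots at decreasing addresses, as in the rest of this file):

    #include <cstddef>
    #include <cstdint>

    // Gtmp1 = size_of_parameters << logStackElementSize   (byte size of the arguments)
    // Gtmp2 = Llocals - Gtmp1 + wordSize                   (lowest argument address)
    void argument_region(intptr_t* Llocals, int size_of_parameters,
                         size_t* size_in_bytes, intptr_t** start) {
      *size_in_bytes = (size_t)size_of_parameters * sizeof(intptr_t);
      // Parameter i sits at Llocals - i, so the last parameter is the lowest address.
      *start = Llocals - size_of_parameters + 1;
    }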
1847 
1848   // Clear the popframe condition flag