src/cpu/sparc/vm/cppInterpreter_sparc.cpp

 565   }
 566 #endif // SERIALGC
 567 
 568   // If G1 is not enabled then attempt to go through the accessor entry point
 569   // Reference.get is an accessor
 570   return generate_accessor_entry();
 571 }
 572 
 573 //
 574 // Interpreter stub for calling a native method. (C++ interpreter)
 575 // This sets up a somewhat different looking stack for calling the native method
 576 // than the typical interpreter frame setup.
 577 //
 578 
 579 address InterpreterGenerator::generate_native_entry(bool synchronized) {
 580   address entry = __ pc();
 581 
 582   // the following temporary registers are used during frame creation
 583   const Register Gtmp1 = G3_scratch ;
 584   const Register Gtmp2 = G1_scratch;
 585   const Address size_of_parameters(G5_method, 0, in_bytes(Method::size_of_parameters_offset()));
 586 
 587   bool inc_counter  = UseCompiler || CountCompiledCalls;
 588 
 589   // make sure registers are different!
 590   assert_different_registers(G2_thread, G5_method, Gargs, Gtmp1, Gtmp2);
 591 
 592   const Address access_flags      (G5_method, 0, in_bytes(Method::access_flags_offset()));
 593 
 594   Label Lentry;
 595   __ bind(Lentry);
 596 
 597   const Register Glocals_size = G3;
 598   assert_different_registers(Glocals_size, G4_scratch, Gframe_size);
 599 
 600   // make sure method is native & not abstract
 601   // rethink these assertions - they can be simplified and shared (gri 2/25/2000)
 602 #ifdef ASSERT
 603   __ ld(access_flags, Gtmp1);
 604   {
 605     Label L;
 606     __ btst(JVM_ACC_NATIVE, Gtmp1);
 607     __ br(Assembler::notZero, false, Assembler::pt, L);
 608     __ delayed()->nop();
 609     __ stop("tried to execute non-native method as native");
 610     __ bind(L);
 611   }
 612   { Label L;
 613     __ btst(JVM_ACC_ABSTRACT, Gtmp1);
 614     __ br(Assembler::zero, false, Assembler::pt, L);
 615     __ delayed()->nop();
 616     __ stop("tried to execute abstract method as non-abstract");
 617     __ bind(L);
 618   }
 619 #endif // ASSERT
 620 
 621   __ lduh(size_of_parameters, Gtmp1);
 622   __ sll(Gtmp1, LogBytesPerWord, Gtmp2);       // parameter size in bytes
 623   __ add(Gargs, Gtmp2, Gargs);                 // points to first local + BytesPerWord
 624   // NEW
 625   __ add(Gargs, -wordSize, Gargs);             // points to first local[0]
 626   // generate the code to allocate the interpreter stack frame
 627   // NEW FRAME ALLOCATED HERE
 628   // save callers original sp
 629   // __ mov(SP, I5_savedSP->after_restore());
 630 
 631   generate_compute_interpreter_state(Lstate, G0, true);
 632 
 633   // At this point Lstate points to new interpreter state
 634   //
 635 
 636   const Address do_not_unlock_if_synchronized(G2_thread, 0,
 637       in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
 638   // Since at this point in the method invocation the exception handler
 639   // would try to exit the monitor of a synchronized method which hasn't
 640   // been entered yet, we set the thread local variable
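
The three-instruction Gargs adjustment above (scale the parameter count to bytes, add it to Gargs, back off one word) is what turns the incoming argument pointer into a pointer to local[0]. A minimal standalone sketch of that arithmetic, assuming word-sized slots at decreasing addresses with Gargs initially at the lowest-addressed (last) parameter slot; the names and counts are illustrative, not the VM's:

#include <cstdio>
#include <cstdint>

int main() {
  const int wordSize    = sizeof(intptr_t);   // 8 on a 64-bit VM, 4 on a 32-bit one
  const int param_words = 3;                  // hypothetical size_of_parameters

  intptr_t arg_area[8] = {0};                 // stand-in for the caller's outgoing argument area
  intptr_t* Gargs = &arg_area[0];             // assumed: lowest-addressed parameter slot on entry

  // Mirrors: sll(size, LogBytesPerWord, tmp); add(Gargs, tmp, Gargs); add(Gargs, -wordSize, Gargs);
  intptr_t* local0 = (intptr_t*)((char*)Gargs + (intptr_t)param_words * wordSize - wordSize);

  // With this layout local[i] sits at local0 - i, so the parameters double as the first locals.
  printf("local[0] is argument slot %td\n", local0 - &arg_area[0]);
  return 0;
}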


1030 
1031 void CppInterpreterGenerator::generate_compute_interpreter_state(const Register state,
1032                                                               const Register prev_state,
1033                                                               bool native) {
1034 
1035   // On entry
1036   // G5_method - caller's method
1037   // Gargs - points to initial parameters (i.e. locals[0])
1038   // G2_thread - valid? (C1 only??)
1039   // "prev_state" - contains any previous frame manager state to which we must save a link
1040   //
1041   // On return
1042   // "state" is a pointer to the newly allocated state object. We must allocate and initialize
1043   // a new interpreterState object and the method expression stack.
1044 
1045   assert_different_registers(state, prev_state);
1046   assert_different_registers(prev_state, G3_scratch);
1047   const Register Gtmp = G3_scratch;
1048   const Address constMethod       (G5_method, 0, in_bytes(Method::const_offset()));
1049   const Address access_flags      (G5_method, 0, in_bytes(Method::access_flags_offset()));
1050   const Address size_of_parameters(G5_method, 0, in_bytes(Method::size_of_parameters_offset()));
1051   const Address size_of_locals    (G5_method, 0, in_bytes(Method::size_of_locals_offset()));
1052 
1053   // slop factor is two extra slots on the expression stack so that
1054   // we always have room to store a result when returning from a call without parameters
1055   // that returns a result.
1056 
1057   const int slop_factor = 2*wordSize;
1058 
1059   const int fixed_size = ((sizeof(BytecodeInterpreter) + slop_factor) >> LogBytesPerWord) + // what is the slop factor?
1060                          //6815692//Method::extra_stack_words() +  // extra push slots for MH adapters
1061                          frame::memory_parameter_word_sp_offset +  // register save area + param window
1062                          (native ?  frame::interpreter_frame_extra_outgoing_argument_words : 0); // JNI, class
1063 
1064   // XXX G5_method valid
1065 
1066   // Now compute new frame size
1067 
1068   if (native) {
1069     __ lduh( size_of_parameters, Gtmp );
1070     __ calc_mem_param_words(Gtmp, Gtmp);     // space for native call parameters passed on the stack in words
1071   } else {
1072     // Full size expression stack
1073     __ ld_ptr(constMethod, Gtmp);
1074     __ lduh(Gtmp, in_bytes(ConstMethod::max_stack_offset()), Gtmp);
1075   }
1076   __ add(Gtmp, fixed_size, Gtmp);           // plus the fixed portion
1077 
1078   __ neg(Gtmp);                               // negative space for stack/parameters in words
1079   __ and3(Gtmp, -WordsPerLong, Gtmp);        // make multiple of 2 (SP must be 2-word aligned)
1080   __ sll(Gtmp, LogBytesPerWord, Gtmp);       // negative space for frame in bytes
1081 
1082   // Need to do stack size check here before we fault on large frames
1083 
1084   Label stack_ok;
1085 
1086   const int max_pages = StackShadowPages > (StackRedPages+StackYellowPages) ? StackShadowPages :
1087                                                                               (StackRedPages+StackYellowPages);
1088 


1219   __ st_ptr(O3, XXX_STATE(_stack_limit));
1220 
1221   if (!native) {
1222     //
1223     // Code to initialize locals
1224     //
1225     Register init_value = noreg;    // will be G0 if we must clear locals
1226     // Now zero locals
1227     if (true /* zerolocals */ || ClearInterpreterLocals) {
1228       // explicitly initialize locals
1229       init_value = G0;
1230     } else {
1231     #ifdef ASSERT
1232       // initialize locals to a garbage pattern for better debugging
1233       init_value = O3;
1234       __ set( 0x0F0F0F0F, init_value );
1235     #endif // ASSERT
1236     }
1237     if (init_value != noreg) {
1238       Label clear_loop;
1239 
1240       // NOTE: If you change the frame layout, this code will need to
1241       // be updated!
1242       __ lduh( size_of_locals, O2 );
1243       __ lduh( size_of_parameters, O1 );
1244       __ sll( O2, LogBytesPerWord, O2);
1245       __ sll( O1, LogBytesPerWord, O1 );
1246       __ ld_ptr(XXX_STATE(_locals), L2_scratch);
1247       __ sub( L2_scratch, O2, O2 );
1248       __ sub( L2_scratch, O1, O1 );
1249 
1250       __ bind( clear_loop );
1251       __ inc( O2, wordSize );
1252 
1253       __ cmp( O2, O1 );
1254       __ br( Assembler::lessEqualUnsigned, true, Assembler::pt, clear_loop );
1255       __ delayed()->st_ptr( init_value, O2, 0 );
1256     }
1257   }
1258 }
1259 // Find preallocated  monitor and lock method (C++ interpreter)
1260 //
1261 void InterpreterGenerator::lock_method(void) {


1466     __ delayed()->nop();                                          \
1467     __ breakpoint_trap();                                         \
1468     __ emit_long(marker);                                         \
1469     __ bind(skip);                                                \
1470   }
1471 #else
1472   #define VALIDATE_STATE(scratch, marker)
1473 #endif /* ASSERT */
1474 
1475 void CppInterpreterGenerator::adjust_callers_stack(Register args) {
1476 //
1477 // Adjust caller's stack so that all the locals can be contiguous with
1478 // the parameters.
1479 // Worries about stack overflow make this a pain.
1480 //
1481 // Destroys args, Gargs, G3_scratch, O2
1482 // In/Out O5_savedSP (sender's original SP)
1483 //
1484 //  assert_different_registers(state, prev_state);
1485   const Register Gtmp = G3_scratch;
1486   const Register tmp = O2;
1487   const Address size_of_parameters(G5_method, 0, in_bytes(Method::size_of_parameters_offset()));
1488   const Address size_of_locals    (G5_method, 0, in_bytes(Method::size_of_locals_offset()));
1489 
1490   __ lduh(size_of_parameters, tmp);
1491   __ sll(tmp, LogBytesPerWord, Gtmp);       // parameter size in bytes
1492   __ add(args, Gtmp, Gargs);                // points to first local + BytesPerWord
1493   // NEW
1494   __ add(Gargs, -wordSize, Gargs);             // points to first local[0]
1495   // determine extra space for non-argument locals & adjust caller's SP
1496   // Gtmp1: parameter size in words
1497   __ lduh(size_of_locals, Gtmp);
1498   __ compute_extra_locals_size_in_bytes(tmp, Gtmp, Gtmp);
1499 
1500 #if 1
1501   // c2i adapters place the final interpreter argument in the register save area for O0/I0
1502   // the call_stub will place the final interpreter argument at
1503   // frame::memory_parameter_word_sp_offset. This is mostly not noticeable for either asm
1504   // or c++ interpreter. However with the c++ interpreter when we do a recursive call
1505   // and try to make it look good in the debugger we will store the argument to
1506   // RecursiveInterpreterActivation in the register argument save area. Without allocating
1507   // extra space for the compiler this will overwrite locals in the local array of the
1508   // interpreter.
1509   // QQQ still needed with frameless adapters???
1510 
1511   const int c2i_adjust_words = frame::memory_parameter_word_sp_offset - frame::callee_register_argument_save_area_sp_offset;
1512 


1524   // Gargs:   bottom of args (sender_sp)
1525   // O5: sender's sp
1526 
1527   // A single frame manager is plenty as we don't specialize for synchronized. We could and
1528   // the code is pretty much ready. Would need to change the test below and for good measure
1529   // modify generate_interpreter_state to only do the (pre) sync stuff for synchronized
1530   // routines. Not clear this is worth it yet.
1531 
1532   if (interpreter_frame_manager) {
1533     return interpreter_frame_manager;
1534   }
1535 
1536   __ bind(frame_manager_entry);
1537 
1538   // the following temporary registers are used during frame creation
1539   const Register Gtmp1 = G3_scratch;
1540   // const Register Lmirror = L1;     // native mirror (native calls only)
1541 
1542   const Address constMethod       (G5_method, 0, in_bytes(Method::const_offset()));
1543   const Address access_flags      (G5_method, 0, in_bytes(Method::access_flags_offset()));
1544   const Address size_of_parameters(G5_method, 0, in_bytes(Method::size_of_parameters_offset()));
1545   const Address size_of_locals    (G5_method, 0, in_bytes(Method::size_of_locals_offset()));
1546 
1547   address entry_point = __ pc();
1548   __ mov(G0, prevState);                                                 // no current activation
1549 
1550 
1551   Label re_dispatch;
1552 
1553   __ bind(re_dispatch);
1554 
1555   // Interpreter needs to have locals completely contiguous. In order to do that,
1556   // we must adjust the caller's stack pointer for any locals beyond just the
1557   // parameters.
1558   adjust_callers_stack(Gargs);
1559 
1560   // O5_savedSP still contains sender's sp
1561 
1562   // NEW FRAME
1563 
1564   generate_compute_interpreter_state(Lstate, prevState, false);
1565 


1733   VALIDATE_STATE(G3_scratch, 6);
1734 
1735   // The result, if any, is in the native abi result registers (O0..O1/F0..F1). The java expression
1736   // stack is in the state that the calling convention left it.
1737   // Copy the result from the native abi registers and place it on the java expression stack.
1738 
1739   // Current interpreter state is present in Lstate
1740 
1741   // Exception pending?
1742 
1743   __ ld_ptr(STATE(_frame_bottom), SP);                             // restore to full stack frame
1744   __ ld_ptr(exception_addr, Lscratch);                                         // get any pending exception
1745   __ tst(Lscratch);                                                            // exception pending?
1746   __ brx(Assembler::notZero, false, Assembler::pt, return_with_exception);
1747   __ delayed()->nop();
1748 
1749   // Process the native abi result to java expression stack
1750 
1751   __ ld_ptr(STATE(_result._to_call._callee), L4_scratch);                        // called method
1752   __ ld_ptr(STATE(_stack), L1_scratch);                                          // get top of java expr stack
1753   __ lduh(L4_scratch, in_bytes(Method::size_of_parameters_offset()), L2_scratch); // get parameter size
1754   __ sll(L2_scratch, LogBytesPerWord, L2_scratch     );                           // parameter size in bytes
1755   __ add(L1_scratch, L2_scratch, L1_scratch);                                      // stack destination for result
1756   __ ld(L4_scratch, in_bytes(Method::result_index_offset()), L3_scratch); // called method result type index
1757 
1758   // tosca is really just native abi
1759   __ set((intptr_t)CppInterpreter::_tosca_to_stack, L4_scratch);
1760   __ sll(L3_scratch, LogBytesPerWord, L3_scratch);
1761   __ ld_ptr(L4_scratch, L3_scratch, Lscratch);                                       // get typed result converter address
1762   __ jmpl(Lscratch, G0, O7);                                                   // and convert it
1763   __ delayed()->nop();
1764 
1765   // L1_scratch points to top of stack (prepushed)
1766 
1767   __ ba(resume_interpreter);
1768   __ delayed()->mov(L1_scratch, O1);
1769 
1770   // An exception is being caught on return to a vanilla interpreter frame.
1771   // Empty the stack and resume interpreter
1772 
1773   __ bind(return_with_exception);




 565   }
 566 #endif // SERIALGC
 567 
 568   // If G1 is not enabled then attempt to go through the accessor entry point
 569   // Reference.get is an accessor
 570   return generate_accessor_entry();
 571 }
 572 
 573 //
 574 // Interpreter stub for calling a native method. (C++ interpreter)
 575 // This sets up a somewhat different looking stack for calling the native method
 576 // than the typical interpreter frame setup.
 577 //
 578 
 579 address InterpreterGenerator::generate_native_entry(bool synchronized) {
 580   address entry = __ pc();
 581 
 582   // the following temporary registers are used during frame creation
 583   const Register Gtmp1 = G3_scratch ;
 584   const Register Gtmp2 = G1_scratch;
 585   const Register RconstMethod = Gtmp1;
 586   const Address constMethod(G5_method, 0, in_bytes(Method::const_offset()));
 587   const Address size_of_parameters(RconstMethod, 0, in_bytes(ConstMethod::size_of_parameters_offset()));
 588 
 589   bool inc_counter  = UseCompiler || CountCompiledCalls;
 590 
 591   // make sure registers are different!
 592   assert_different_registers(G2_thread, G5_method, Gargs, Gtmp1, Gtmp2);
 593 
 594   const Address access_flags      (G5_method, 0, in_bytes(Method::access_flags_offset()));
 595 
 596   Label Lentry;
 597   __ bind(Lentry);
 598 
 599   const Register Glocals_size = G3;
 600   assert_different_registers(Glocals_size, G4_scratch, Gframe_size);
 601 
 602   // make sure method is native & not abstract
 603   // rethink these assertions - they can be simplified and shared (gri 2/25/2000)
 604 #ifdef ASSERT
 605   __ ld(access_flags, Gtmp1);
 606   {
 607     Label L;
 608     __ btst(JVM_ACC_NATIVE, Gtmp1);
 609     __ br(Assembler::notZero, false, Assembler::pt, L);
 610     __ delayed()->nop();
 611     __ stop("tried to execute non-native method as native");
 612     __ bind(L);
 613   }
 614   { Label L;
 615     __ btst(JVM_ACC_ABSTRACT, Gtmp1);
 616     __ br(Assembler::zero, false, Assembler::pt, L);
 617     __ delayed()->nop();
 618     __ stop("tried to execute abstract method as non-abstract");
 619     __ bind(L);
 620   }
 621 #endif // ASSERT
 622 
 623   __ ld_ptr(constMethod, RconstMethod);
 624   __ lduh(size_of_parameters, Gtmp1);
 625   __ sll(Gtmp1, LogBytesPerWord, Gtmp2);       // parameter size in bytes
 626   __ add(Gargs, Gtmp2, Gargs);                 // points to first local + BytesPerWord
 627   // NEW
 628   __ add(Gargs, -wordSize, Gargs);             // points to first local[0]
 629   // generate the code to allocate the interpreter stack frame
 630   // NEW FRAME ALLOCATED HERE
 631   // save callers original sp
 632   // __ mov(SP, I5_savedSP->after_restore());
 633 
 634   generate_compute_interpreter_state(Lstate, G0, true);
 635 
 636   // At this point Lstate points to new interpreter state
 637   //
 638 
 639   const Address do_not_unlock_if_synchronized(G2_thread, 0,
 640       in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
 641   // Since at this point in the method invocation the exception handler
 642   // would try to exit the monitor of a synchronized method which hasn't
 643   // been entered yet, we set the thread local variable
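
The functional change in this entry path is the extra ld_ptr: size_of_parameters is no longer read straight off the Method via Method::size_of_parameters_offset(), but off the ConstMethod reached through Method::const_offset(). A compilable sketch of that kind of field move, with illustrative struct names rather than the real VM metadata classes:

#include <cstdint>

// Illustrative stand-ins only; the real Method/ConstMethod classes carry much more state.
struct ConstMethodSketch {
  uint16_t _size_of_parameters;              // the field the interpreter now reads from here
};

struct MethodSketch {
  ConstMethodSketch* _constMethod;           // reached via Method::const_offset() in the stubs
  // previously the parameter size was a field read directly off this object
};

// Old pattern: one lduh from [G5_method + size_of_parameters_offset].
// New pattern: ld_ptr from [G5_method + const_offset], then lduh from
//              [RconstMethod + ConstMethod::size_of_parameters_offset].
static int size_of_parameters(const MethodSketch* m) {
  return m->_constMethod->_size_of_parameters;
}

int main() {
  ConstMethodSketch cm = { 5 };
  MethodSketch m = { &cm };
  return size_of_parameters(&m) == 5 ? 0 : 1;
}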


1033 
1034 void CppInterpreterGenerator::generate_compute_interpreter_state(const Register state,
1035                                                               const Register prev_state,
1036                                                               bool native) {
1037 
1038   // On entry
1039   // G5_method - caller's method
1040   // Gargs - points to initial parameters (i.e. locals[0])
1041   // G2_thread - valid? (C1 only??)
1042   // "prev_state" - contains any previous frame manager state to which we must save a link
1043   //
1044   // On return
1045   // "state" is a pointer to the newly allocated state object. We must allocate and initialize
1046   // a new interpreterState object and the method expression stack.
1047 
1048   assert_different_registers(state, prev_state);
1049   assert_different_registers(prev_state, G3_scratch);
1050   const Register Gtmp = G3_scratch;
1051   const Address constMethod       (G5_method, 0, in_bytes(Method::const_offset()));
1052   const Address access_flags      (G5_method, 0, in_bytes(Method::access_flags_offset()));
1053 
1054   // slop factor is two extra slots on the expression stack so that
1055   // we always have room to store a result when returning from a call without parameters
1056   // that returns a result.
1057 
1058   const int slop_factor = 2*wordSize;
1059 
1060   const int fixed_size = ((sizeof(BytecodeInterpreter) + slop_factor) >> LogBytesPerWord) + // what is the slop factor?
1061                          //6815692//Method::extra_stack_words() +  // extra push slots for MH adapters
1062                          frame::memory_parameter_word_sp_offset +  // register save area + param window
1063                          (native ?  frame::interpreter_frame_extra_outgoing_argument_words : 0); // JNI, class
1064 
1065   // XXX G5_method valid
1066 
1067   // Now compute new frame size
1068 
1069   if (native) {
1070     const Register RconstMethod = Gtmp;
1071     const Address size_of_parameters(RconstMethod, 0, in_bytes(ConstMethod::size_of_parameters_offset()));
1072     __ ld_ptr(constMethod, RconstMethod);
1073     __ lduh( size_of_parameters, Gtmp );
1074     __ calc_mem_param_words(Gtmp, Gtmp);     // space for native call parameters passed on the stack in words
1075   } else {
1076     // Full size expression stack
1077     __ ld_ptr(constMethod, Gtmp);
1078     __ lduh(Gtmp, in_bytes(ConstMethod::max_stack_offset()), Gtmp);
1079   }
1080   __ add(Gtmp, fixed_size, Gtmp);           // plus the fixed portion
1081 
1082   __ neg(Gtmp);                               // negative space for stack/parameters in words
1083   __ and3(Gtmp, -WordsPerLong, Gtmp);        // make multiple of 2 (SP must be 2-word aligned)
1084   __ sll(Gtmp, LogBytesPerWord, Gtmp);       // negative space for frame in bytes
1085 
1086   // Need to do stack size check here before we fault on large frames
1087 
1088   Label stack_ok;
1089 
1090   const int max_pages = StackShadowPages > (StackRedPages+StackYellowPages) ? StackShadowPages :
1091                                                                               (StackRedPages+StackYellowPages);
1092 
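
A note on the neg / and3(-WordsPerLong) / sll sequence above: negating first and then masking with -WordsPerLong rounds the frame size up to an even number of words, so the SP derived from it stays 2-word aligned. A small standalone check of that identity (the WordsPerLong and LogBytesPerWord values are assumed, matching a 64-bit word):

#include <cassert>

int main() {
  const long WordsPerLong    = 2;   // SP must stay 2-word aligned
  const long LogBytesPerWord = 3;   // assumed 64-bit words; 2 on a 32-bit VM

  for (long words = 0; words < 1000; words++) {
    // Mirrors: neg(Gtmp); and3(Gtmp, -WordsPerLong, Gtmp); sll(Gtmp, LogBytesPerWord, Gtmp);
    long neg_words = (-words) & -WordsPerLong;
    long neg_bytes = neg_words * (1L << LogBytesPerWord);   // the sll, written as a multiply

    // Same value computed the obvious way: round up to a multiple of 2 words, scale, negate.
    long rounded   = (words + (WordsPerLong - 1)) & ~(WordsPerLong - 1);
    assert(neg_bytes == -(rounded * (1L << LogBytesPerWord)));
  }
  return 0;
}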


1223   __ st_ptr(O3, XXX_STATE(_stack_limit));
1224 
1225   if (!native) {
1226     //
1227     // Code to initialize locals
1228     //
1229     Register init_value = noreg;    // will be G0 if we must clear locals
1230     // Now zero locals
1231     if (true /* zerolocals */ || ClearInterpreterLocals) {
1232       // explicitly initialize locals
1233       init_value = G0;
1234     } else {
1235     #ifdef ASSERT
1236       // initialize locals to a garbage pattern for better debugging
1237       init_value = O3;
1238       __ set( 0x0F0F0F0F, init_value );
1239     #endif // ASSERT
1240     }
1241     if (init_value != noreg) {
1242       Label clear_loop;
1243       const Register RconstMethod = O1;
1244       const Address size_of_parameters(RconstMethod, 0, in_bytes(ConstMethod::size_of_parameters_offset()));
1245       const Address size_of_locals    (RconstMethod, 0, in_bytes(ConstMethod::size_of_locals_offset()));
1246 
1247       // NOTE: If you change the frame layout, this code will need to
1248       // be updated!
1249       __ ld_ptr( constMethod, RconstMethod );
1250       __ lduh( size_of_locals, O2 );
1251       __ lduh( size_of_parameters, O1 );
1252       __ sll( O2, LogBytesPerWord, O2);
1253       __ sll( O1, LogBytesPerWord, O1 );
1254       __ ld_ptr(XXX_STATE(_locals), L2_scratch);
1255       __ sub( L2_scratch, O2, O2 );
1256       __ sub( L2_scratch, O1, O1 );
1257 
1258       __ bind( clear_loop );
1259       __ inc( O2, wordSize );
1260 
1261       __ cmp( O2, O1 );
1262       __ br( Assembler::lessEqualUnsigned, true, Assembler::pt, clear_loop );
1263       __ delayed()->st_ptr( init_value, O2, 0 );
1264     }
1265   }
1266 }
1267 // Find preallocated  monitor and lock method (C++ interpreter)
1268 //
1269 void InterpreterGenerator::lock_method(void) {
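
The clear_loop above touches only the non-parameter locals: it forms the two addresses locals - size_of_locals*wordSize and locals - size_of_parameters*wordSize and stores init_value into every word strictly above the first one, up to and including the second. In plain C++ terms, assuming local[i] lives at the base pointer minus i words (sizes here are illustrative):

#include <cstdio>
#include <cstdint>

int main() {
  const int nlocals = 6, nparams = 2;           // hypothetical u2 values from ConstMethod
  intptr_t storage[nlocals] = {11, 22, 33, 44, 55, 66};
  intptr_t* locals = &storage[nlocals - 1];     // assumed: local[0] at the highest address

  // Equivalent of clear_loop: initialize local[nparams] .. local[nlocals-1] and
  // leave the parameter slots local[0] .. local[nparams-1] untouched.
  const intptr_t init_value = 0;                // G0 in the default "zerolocals" path
  for (int i = nparams; i < nlocals; i++) {
    *(locals - i) = init_value;
  }

  for (int i = 0; i < nlocals; i++) {
    printf("local[%d] = %ld\n", i, (long)*(locals - i));
  }
  return 0;
}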


1474     __ delayed()->nop();                                          \
1475     __ breakpoint_trap();                                         \
1476     __ emit_long(marker);                                         \
1477     __ bind(skip);                                                \
1478   }
1479 #else
1480   #define VALIDATE_STATE(scratch, marker)
1481 #endif /* ASSERT */
1482 
1483 void CppInterpreterGenerator::adjust_callers_stack(Register args) {
1484 //
1485 // Adjust caller's stack so that all the locals can be contiguous with
1486 // the parameters.
1487 // Worries about stack overflow make this a pain.
1488 //
1489 // Destroys args, Gargs, G3_scratch, O2
1490 // In/Out O5_savedSP (sender's original SP)
1491 //
1492 //  assert_different_registers(state, prev_state);
1493   const Register Gtmp = G3_scratch;
1494   const Register RconstMethod = G3_scratch;   // note: shares G3_scratch with Gtmp
1495   const Register tmp = O2;
1496   const Address constMethod(G5_method, 0, in_bytes(Method::const_offset()));
1497   const Address size_of_parameters(RconstMethod, 0, in_bytes(ConstMethod::size_of_parameters_offset()));
1498   const Address size_of_locals    (RconstMethod, 0, in_bytes(ConstMethod::size_of_locals_offset()));
1499 
1500   __ ld_ptr(constMethod, RconstMethod);
1501   __ lduh(size_of_parameters, tmp);
1502   __ sll(tmp, LogBytesPerWord, tmp);           // parameter size in bytes (args may alias Gargs, so shift tmp in place)
1503   __ add(args, tmp, Gargs);                    // points to first local + BytesPerWord
1504   __ add(Gargs, -wordSize, Gargs);             // points to first local[0]
1505   __ srl(tmp, LogBytesPerWord, tmp);           // back to parameter size in words
1506   // determine extra space for non-argument locals & adjust caller's SP
1507   // tmp: parameter size in words
1508   __ lduh(size_of_locals, Gtmp);
1509   __ compute_extra_locals_size_in_bytes(tmp, Gtmp, Gtmp);
1510 
1511 #if 1
1512   // c2i adapters place the final interpreter argument in the register save area for O0/I0
1513   // the call_stub will place the final interpreter argument at
1514   // frame::memory_parameter_word_sp_offset. This is mostly not noticeable for either asm
1515   // or c++ interpreter. However with the c++ interpreter when we do a recursive call
1516   // and try to make it look good in the debugger we will store the argument to
1517   // RecursiveInterpreterActivation in the register argument save area. Without allocating
1518   // extra space for the compiler this will overwrite locals in the local array of the
1519   // interpreter.
1520   // QQQ still needed with frameless adapters???
1521 
1522   const int c2i_adjust_words = frame::memory_parameter_word_sp_offset - frame::callee_register_argument_save_area_sp_offset;
1523 
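
What adjust_callers_stack buys: the interpreter wants the locals to be one contiguous array, but the caller only pushed the parameters, so the caller's SP has to drop by roughly (size_of_locals - size_of_parameters) words, kept 2-word aligned, plus the c2i_adjust_words pad discussed above so a RecursiveInterpreterActivation argument cannot land on a local slot. A rough standalone sketch of that sizing; the exact rounding done by compute_extra_locals_size_in_bytes and the way the pad is folded in are assumptions here, since those lines fall outside this hunk:

#include <cstdio>

int main() {
  const int wordSize     = 8;                 // assumed 64-bit word
  const int WordsPerLong = 2;                 // keep the adjusted SP 2-word aligned
  const int nlocals = 7, nparams = 3;         // hypothetical method shape
  const int c2i_adjust_words = 2;             // stand-in for the frame-offset difference above

  // Assumed shape of compute_extra_locals_size_in_bytes: room for the
  // non-argument locals, rounded up to keep alignment.
  int extra_words = nlocals - nparams;
  extra_words     = (extra_words + (WordsPerLong - 1)) & ~(WordsPerLong - 1);

  int adjust_bytes = (extra_words + c2i_adjust_words) * wordSize;
  printf("caller SP moved down by %d bytes\n", adjust_bytes);
  return 0;
}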


1535   // Gargs:   bottom of args (sender_sp)
1536   // O5: sender's sp
1537 
1538   // A single frame manager is plenty as we don't specialize for synchronized. We could and
1539   // the code is pretty much ready. Would need to change the test below and for good measure
1540   // modify generate_interpreter_state to only do the (pre) sync stuff for synchronized
1541   // routines. Not clear this is worth it yet.
1542 
1543   if (interpreter_frame_manager) {
1544     return interpreter_frame_manager;
1545   }
1546 
1547   __ bind(frame_manager_entry);
1548 
1549   // the following temporary registers are used during frame creation
1550   const Register Gtmp1 = G3_scratch;
1551   // const Register Lmirror = L1;     // native mirror (native calls only)
1552 
1553   const Address constMethod       (G5_method, 0, in_bytes(Method::const_offset()));
1554   const Address access_flags      (G5_method, 0, in_bytes(Method::access_flags_offset()));
1555 
1556   address entry_point = __ pc();
1557   __ mov(G0, prevState);                                                 // no current activation
1558 
1559 
1560   Label re_dispatch;
1561 
1562   __ bind(re_dispatch);
1563 
1564   // Interpreter needs to have locals completely contiguous. In order to do that,
1565   // we must adjust the caller's stack pointer for any locals beyond just the
1566   // parameters.
1567   adjust_callers_stack(Gargs);
1568 
1569   // O5_savedSP still contains sender's sp
1570 
1571   // NEW FRAME
1572 
1573   generate_compute_interpreter_state(Lstate, prevState, false);
1574 
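
On the prevState handling above: the single frame manager is re-entered at re_dispatch for each nested Java call, and every new activation records a link to the previous one; on the very first entry prevState is G0 (null). A sketch of that chaining, with illustrative struct and field names (the real state record is the BytecodeInterpreter object whose size feeds fixed_size earlier):

#include <cstdio>

// Illustrative shape of the per-activation state chain the frame manager keeps.
struct InterpreterStateSketch {
  InterpreterStateSketch* _prev_link;   // what "prev_state" is saved into
  int                     _bci;         // the real record carries many more fields
};

int main() {
  InterpreterStateSketch outer = { nullptr, 0 };   // entry_point: prevState == G0
  InterpreterStateSketch inner = { &outer, 0 };    // re_dispatch for a nested Java call

  // Unwinding walks the links back toward the first activation.
  for (InterpreterStateSketch* s = &inner; s != nullptr; s = s->_prev_link) {
    printf("activation at %p\n", (void*)s);
  }
  return 0;
}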


1742   VALIDATE_STATE(G3_scratch, 6);
1743 
1744   // The result, if any, is in the native abi result registers (O0..O1/F0..F1). The java expression
1745   // stack is in the state that the calling convention left it.
1746   // Copy the result from the native abi registers and place it on the java expression stack.
1747 
1748   // Current interpreter state is present in Lstate
1749 
1750   // Exception pending?
1751 
1752   __ ld_ptr(STATE(_frame_bottom), SP);                             // restore to full stack frame
1753   __ ld_ptr(exception_addr, Lscratch);                                         // get any pending exception
1754   __ tst(Lscratch);                                                            // exception pending?
1755   __ brx(Assembler::notZero, false, Assembler::pt, return_with_exception);
1756   __ delayed()->nop();
1757 
1758   // Process the native abi result to java expression stack
1759 
1760   __ ld_ptr(STATE(_result._to_call._callee), L4_scratch);                        // called method
1761   __ ld_ptr(STATE(_stack), L1_scratch);                                          // get top of java expr stack
1762   // get parameter size
1763   __ ld_ptr(L4_scratch, in_bytes(Method::const_offset()), L2_scratch); 
1764   __ lduh(L2_scratch, in_bytes(ConstMethod::size_of_parameters_offset()), L2_scratch);
1765   __ sll(L2_scratch, LogBytesPerWord, L2_scratch     );                           // parameter size in bytes
1766   __ add(L1_scratch, L2_scratch, L1_scratch);                                      // stack destination for result
1767   __ ld(L4_scratch, in_bytes(Method::result_index_offset()), L3_scratch); // called method result type index
1768 
1769   // tosca is really just native abi
1770   __ set((intptr_t)CppInterpreter::_tosca_to_stack, L4_scratch);
1771   __ sll(L3_scratch, LogBytesPerWord, L3_scratch);
1772   __ ld_ptr(L4_scratch, L3_scratch, Lscratch);                                       // get typed result converter address
1773   __ jmpl(Lscratch, G0, O7);                                                   // and convert it
1774   __ delayed()->nop();
1775 
1776   // L1_scratch points to top of stack (prepushed)
1777 
1778   __ ba(resume_interpreter);
1779   __ delayed()->mov(L1_scratch, O1);
1780 
1781   // An exception is being caught on return to a vanilla interpreter frame.
1782   // Empty the stack and resume interpreter
1783 
1784   __ bind(return_with_exception);
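
The tail of this path is a jump-table dispatch: the result-type index from Method::result_index_offset() is scaled by wordSize, used to index the CppInterpreter::_tosca_to_stack table, and the selected converter stub copies the native-ABI result (O0/O1 or F0/F1) into the prepushed expression-stack slot. A minimal C++ model of that sort of typed converter table; the function names and types here are illustrative, not the VM's:

#include <cstdio>
#include <cstdint>

// One converter per result type, like the _tosca_to_stack array of stubs.
typedef void (*converter_fn)(intptr_t raw_result, intptr_t* stack_slot);

static void to_void(intptr_t, intptr_t*)            { /* nothing to push */ }
static void to_int (intptr_t raw, intptr_t* slot)   { *slot = (int32_t)raw; }
static void to_long(intptr_t raw, intptr_t* slot)   { *slot = raw; }

static converter_fn tosca_to_stack_sketch[] = { to_void, to_int, to_long };

int main() {
  intptr_t expr_stack_slot = 0;
  int result_index = 1;                       // pretend the callee returned an int

  // Mirrors: set(table, L4); sll(index, LogBytesPerWord, L3); ld_ptr(L4, L3, Lscratch); jmpl(Lscratch, G0, O7);
  tosca_to_stack_sketch[result_index](42, &expr_stack_slot);

  printf("expression stack slot now holds %ld\n", (long)expr_stack_slot);
  return 0;
}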