565 }
566 #endif // SERIALGC
567
568 // If G1 is not enabled then attempt to go through the accessor entry point
569 // Reference.get is an accessor
570 return generate_accessor_entry();
571 }
572
573 //
574 // Interpreter stub for calling a native method. (C++ interpreter)
575 // This sets up a somewhat different looking stack for calling the native method
576 // than the typical interpreter frame setup.
577 //
578
// NOTE(review): fragment of a line-numbered source listing; the body of
// generate_native_entry continues beyond the last line shown in this chunk,
// so the code below is left byte-identical and only review comments are added.
// Purpose: entry stub that builds a C++ interpreter frame for invoking a
// native method (see the header comment preceding this function).
579 address InterpreterGenerator::generate_native_entry(bool synchronized) {
580 address entry = __ pc();
581
582 // the following temporary registers are used during frame creation
583 const Register Gtmp1 = G3_scratch ;
584 const Register Gtmp2 = G1_scratch;
// This copy of the listing reads the parameter count directly from Method
// (Method::size_of_parameters_offset()); the second copy, after the '|'
// divider, loads it through ConstMethod instead.
585 const Address size_of_parameters(G5_method, 0, in_bytes(Method::size_of_parameters_offset()));
586
587 bool inc_counter = UseCompiler || CountCompiledCalls;
588
589 // make sure registers are different!
590 assert_different_registers(G2_thread, G5_method, Gargs, Gtmp1, Gtmp2);
591
592 const Address access_flags (G5_method, 0, in_bytes(Method::access_flags_offset()));
593
594 Label Lentry;
595 __ bind(Lentry);
596
597 const Register Glocals_size = G3;
598 assert_different_registers(Glocals_size, G4_scratch, Gframe_size);
599
600 // make sure method is native & not abstract
601 // rethink these assertions - they can be simplified and shared (gri 2/25/2000)
602 #ifdef ASSERT
603 __ ld(access_flags, Gtmp1);
604 {
605 Label L;
606 __ btst(JVM_ACC_NATIVE, Gtmp1);
607 __ br(Assembler::notZero, false, Assembler::pt, L);
608 __ delayed()->nop();
609 __ stop("tried to execute non-native method as native");
610 __ bind(L);
611 }
612 { Label L;
613 __ btst(JVM_ACC_ABSTRACT, Gtmp1);
614 __ br(Assembler::zero, false, Assembler::pt, L);
615 __ delayed()->nop();
616 __ stop("tried to execute abstract method as non-abstract");
617 __ bind(L);
618 }
619 #endif // ASSERT
620
// Advance Gargs by the parameter size in bytes, then back it off one word so
// that Gargs addresses local[0] (per the inline comments on the next lines).
621 __ lduh(size_of_parameters, Gtmp1);
622 __ sll(Gtmp1, LogBytesPerWord, Gtmp2); // parameter size in bytes
623 __ add(Gargs, Gtmp2, Gargs); // points to first local + BytesPerWord
624 // NEW
625 __ add(Gargs, -wordSize, Gargs); // points to first local[0]
626 // generate the code to allocate the interpreter stack frame
627 // NEW FRAME ALLOCATED HERE
628 // save callers original sp
629 // __ mov(SP, I5_savedSP->after_restore());
630
// G0 (the SPARC zero register) = no previous frame-manager state; 'true'
// selects the native frame layout in generate_compute_interpreter_state.
631 generate_compute_interpreter_state(Lstate, G0, true);
632
633 // At this point Lstate points to new interpreter state
634 //
635
636 const Address do_not_unlock_if_synchronized(G2_thread, 0,
637 in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
638 // Since at this point in the method invocation the exception handler
639 // would try to exit the monitor of synchronized methods which hasn't
640 // been entered yet, we set the thread local variable
641 // _do_not_unlock_if_synchronized to true. If any exception was thrown by
1030
// NOTE(review): head of generate_compute_interpreter_state from a numbered
// listing; the chunk is truncated right after the 'stack_ok'/'max_pages'
// declarations, so the code is left byte-identical and only comments added.
1031 void CppInterpreterGenerator::generate_compute_interpreter_state(const Register state,
1032 const Register prev_state,
1033 bool native) {
1034
1035 // On entry
1036 // G5_method - caller's method
1037 // Gargs - points to initial parameters (i.e. locals[0])
1038 // G2_thread - valid? (C1 only??)
1039 // "prev_state" - contains any previous frame manager state which we must save a link
1040 //
1041 // On return
1042 // "state" is a pointer to the newly allocated state object. We must allocate and initialize
1043 // a new interpretState object and the method expression stack.
1044
1045 assert_different_registers(state, prev_state);
1046 assert_different_registers(prev_state, G3_scratch);
1047 const Register Gtmp = G3_scratch;
1048 const Address constMethod (G5_method, 0, in_bytes(Method::const_offset()));
1049 const Address access_flags (G5_method, 0, in_bytes(Method::access_flags_offset()));
// These two Address objects read the sizes straight from Method; the second
// copy of this listing (after the '|' divider) drops them and reads the
// sizes through ConstMethod instead.
1050 const Address size_of_parameters(G5_method, 0, in_bytes(Method::size_of_parameters_offset()));
1051 const Address size_of_locals (G5_method, 0, in_bytes(Method::size_of_locals_offset()));
1052
1053 // slop factor is two extra slots on the expression stack so that
1054 // we always have room to store a result when returning from a call without parameters
1055 // that returns a result.
1056
// slop_factor is in bytes (2*wordSize); it is converted to words below,
// together with sizeof(BytecodeInterpreter), by the >> LogBytesPerWord.
1057 const int slop_factor = 2*wordSize;
1058
// fixed_size is a word count: the first term is shifted from bytes to words
// and the frame:: constants appear to be word offsets already (their names
// say "word"/"words").
1059 const int fixed_size = ((sizeof(BytecodeInterpreter) + slop_factor) >> LogBytesPerWord) + // what is the slop factor?
1060 //6815692//Method::extra_stack_words() + // extra push slots for MH adapters
1061 frame::memory_parameter_word_sp_offset + // register save area + param window
1062 (native ? frame::interpreter_frame_extra_outgoing_argument_words : 0); // JNI, class
1063
1064 // XXX G5_method valid
1065
1066 // Now compute new frame size
1067
// Native frames size the variable part from the outgoing native call
// parameters; Java frames use the method's maximum expression-stack depth.
1068 if (native) {
1069 __ lduh( size_of_parameters, Gtmp );
1070 __ calc_mem_param_words(Gtmp, Gtmp); // space for native call parameters passed on the stack in words
1071 } else {
1072 // Full size expression stack
1073 __ ld_ptr(constMethod, Gtmp);
1074 __ lduh(Gtmp, in_bytes(ConstMethod::max_stack_offset()), Gtmp);
1075 }
1076 __ add(Gtmp, fixed_size, Gtmp); // plus the fixed portion
1077
1078 __ neg(Gtmp); // negative space for stack/parameters in words
1079 __ and3(Gtmp, -WordsPerLong, Gtmp); // make multiple of 2 (SP must be 2-word aligned)
1080 __ sll(Gtmp, LogBytesPerWord, Gtmp); // negative space for frame in bytes
1081
1082 // Need to do stack size check here before we fault on large frames
1083
1084 Label stack_ok;
1085
1086 const int max_pages = StackShadowPages > (StackRedPages+StackYellowPages) ? StackShadowPages :
1087 (StackRedPages+StackYellowPages);
1088
1089
// NOTE(review): mid-function fragment (the enclosing function's opening lines
// appear earlier in this listing, before a gap); code left byte-identical.
1222 //
1223 // Code to initialize locals
1224 //
1225 Register init_value = noreg; // will be G0 if we must clear locals
1226 // Now zero locals
// The literal 'true' makes the else/ASSERT garbage-pattern branch below
// unreachable: locals are always zero-initialized on this path.
1227 if (true /* zerolocals */ || ClearInterpreterLocals) {
1228 // explicitly initialize locals
1229 init_value = G0;
1230 } else {
1231 #ifdef ASSERT
1232 // initialize locals to a garbage pattern for better debugging
1233 init_value = O3;
1234 __ set( 0x0F0F0F0F, init_value );
1235 #endif // ASSERT
1236 }
1237 if (init_value != noreg) {
1238 Label clear_loop;
1239
1240 // NOTE: If you change the frame layout, this code will need to
1241 // be updated!
// O2 = _locals - size_of_locals (bytes), O1 = _locals - size_of_parameters:
// the loop stores init_value into each word between those two addresses,
// i.e. only the non-parameter locals get initialized — the address
// arithmetic implies locals extend downward in memory from _locals.
1242 __ lduh( size_of_locals, O2 );
1243 __ lduh( size_of_parameters, O1 );
1244 __ sll( O2, LogBytesPerWord, O2);
1245 __ sll( O1, LogBytesPerWord, O1 );
1246 __ ld_ptr(XXX_STATE(_locals), L2_scratch);
1247 __ sub( L2_scratch, O2, O2 );
1248 __ sub( L2_scratch, O1, O1 );
1249
1250 __ bind( clear_loop );
1251 __ inc( O2, wordSize );
1252
// The store sits in the branch delay slot with the annul bit set (second
// argument 'true'), so it executes only when the loop branch is taken —
// no stray store happens on loop exit.
1253 __ cmp( O2, O1 );
1254 __ br( Assembler::lessEqualUnsigned, true, Assembler::pt, clear_loop );
1255 __ delayed()->st_ptr( init_value, O2, 0 );
1256 }
1257 }
1258 }
1259 // Find preallocated monitor and lock method (C++ interpreter)
1260 //
1261 void InterpreterGenerator::lock_method(void) {
1262 // Lock the current method.
1263 // Destroys registers L2_scratch, L3_scratch, O0
1467 __ breakpoint_trap(); \
1468 __ emit_long(marker); \
1469 __ bind(skip); \
1470 }
1471 #else
1472 #define VALIDATE_STATE(scratch, marker)
1473 #endif /* ASSERT */
1474
// NOTE(review): fragment of a numbered listing; the end of
// adjust_callers_stack lies outside this chunk, so the code is left
// byte-identical and only comments are touched.
1475 void CppInterpreterGenerator::adjust_callers_stack(Register args) {
1476 //
1477 // Adjust caller's stack so that all the locals can be contiguous with
1478 // the parameters.
1479 // Worries about stack overflow make this a pain.
1480 //
1481 // Destroys args, G3_scratch (Gtmp), O2 (tmp)
1482 // In/Out O5_savedSP (sender's original SP)
1483 //
1484 // assert_different_registers(state, prev_state);
1485 const Register Gtmp = G3_scratch;
1486 const Register tmp = O2;
// This copy reads the sizes straight from Method; the listing's second copy
// (after the '|' divider) reads them via ConstMethod.
1487 const Address size_of_parameters(G5_method, 0, in_bytes(Method::size_of_parameters_offset()));
1488 const Address size_of_locals (G5_method, 0, in_bytes(Method::size_of_locals_offset()));
1489
// Gargs = args + param_bytes - wordSize, i.e. the address of local[0].
// Shifting into Gtmp (not Gargs) keeps the incoming 'args' value intact even
// when the caller passes Gargs itself.
1490 __ lduh(size_of_parameters, tmp);
1491 __ sll(tmp, LogBytesPerWord, Gtmp); // parameter size in bytes
1492 __ add(args, Gtmp, Gargs); // points to first local + BytesPerWord
1493 // NEW
1494 __ add(Gargs, -wordSize, Gargs); // points to first local[0]
1495 // determine extra space for non-argument locals & adjust caller's SP
1496 // tmp: parameter size in words (the original "Gtmp1" comment was stale - no Gtmp1 is declared in this function)
1497 __ lduh(size_of_locals, Gtmp);
1498 __ compute_extra_locals_size_in_bytes(tmp, Gtmp, Gtmp);
1499
1500 #if 1
1501 // c2i adapters place the final interpreter argument in the register save area for O0/I0
1502 // the call_stub will place the final interpreter argument at
1503 // frame::memory_parameter_word_sp_offset. This is mostly not noticeable for either asm
1504 // or c++ interpreter. However with the c++ interpreter when we do a recursive call
1505 // and try to make it look good in the debugger we will store the argument to
1506 // RecursiveInterpreterActivation in the register argument save area. Without allocating
1507 // extra space for the compiler this will overwrite locals in the local array of the
1508 // interpreter.
1509 // QQQ still needed with frameless adapters???
1510
1511 const int c2i_adjust_words = frame::memory_parameter_word_sp_offset - frame::callee_register_argument_save_area_sp_offset;
1512
1513 __ add(Gtmp, c2i_adjust_words*wordSize, Gtmp);
1514 #endif // 1
1515
1516
1517 __ sub(SP, Gtmp, SP); // just caller's frame for the additional space we need.
1524 // Gargs: bottom of args (sender_sp)
1525 // O5: sender's sp
1526
1527 // A single frame manager is plenty as we don't specialize for synchronized. We could and
1528 // the code is pretty much ready. Would need to change the test below and for good measure
1529 // modify generate_interpreter_state to only do the (pre) sync stuff for synchronized
1530 // routines. Not clear this is worth it yet.
1531
1532 if (interpreter_frame_manager) {
1533 return interpreter_frame_manager;
1534 }
1535
1536 __ bind(frame_manager_entry);
1537
1538 // the following temporary registers are used during frame creation
1539 const Register Gtmp1 = G3_scratch;
1540 // const Register Lmirror = L1; // native mirror (native calls only)
1541
1542 const Address constMethod (G5_method, 0, in_bytes(Method::const_offset()));
1543 const Address access_flags (G5_method, 0, in_bytes(Method::access_flags_offset()));
1544 const Address size_of_parameters(G5_method, 0, in_bytes(Method::size_of_parameters_offset()));
1545 const Address size_of_locals (G5_method, 0, in_bytes(Method::size_of_locals_offset()));
1546
1547 address entry_point = __ pc();
1548 __ mov(G0, prevState); // no current activation
1549
1550
1551 Label re_dispatch;
1552
1553 __ bind(re_dispatch);
1554
1555 // Interpreter needs to have locals completely contiguous. In order to do that
1556 // We must adjust the caller's stack pointer for any locals beyond just the
1557 // parameters
1558 adjust_callers_stack(Gargs);
1559
1560 // O5_savedSP still contains sender's sp
1561
1562 // NEW FRAME
1563
1564 generate_compute_interpreter_state(Lstate, prevState, false);
1565
1733 VALIDATE_STATE(G3_scratch, 6);
1734
1735 // Result if any is in native abi result (O0..O1/F0..F1). The java expression
1736 // stack is in the state that the calling convention left it.
1737 // Copy the result from native abi result and place it on java expression stack.
1738
1739 // Current interpreter state is present in Lstate
1740
1741 // Exception pending?
1742
1743 __ ld_ptr(STATE(_frame_bottom), SP); // restore to full stack frame
1744 __ ld_ptr(exception_addr, Lscratch); // get any pending exception
1745 __ tst(Lscratch); // exception pending?
1746 __ brx(Assembler::notZero, false, Assembler::pt, return_with_exception);
1747 __ delayed()->nop();
1748
1749 // Process the native abi result to java expression stack
1750
1751 __ ld_ptr(STATE(_result._to_call._callee), L4_scratch); // called method
1752 __ ld_ptr(STATE(_stack), L1_scratch); // get top of java expr stack
1753 __ lduh(L4_scratch, in_bytes(Method::size_of_parameters_offset()), L2_scratch); // get parameter size
1754 __ sll(L2_scratch, LogBytesPerWord, L2_scratch ); // parameter size in bytes
1755 __ add(L1_scratch, L2_scratch, L1_scratch); // stack destination for result
1756 __ ld(L4_scratch, in_bytes(Method::result_index_offset()), L3_scratch); // called method result type index
1757
1758 // tosca is really just native abi
1759 __ set((intptr_t)CppInterpreter::_tosca_to_stack, L4_scratch);
1760 __ sll(L3_scratch, LogBytesPerWord, L3_scratch);
1761 __ ld_ptr(L4_scratch, L3_scratch, Lscratch); // get typed result converter address
1762 __ jmpl(Lscratch, G0, O7); // and convert it
1763 __ delayed()->nop();
1764
1765 // L1_scratch points to top of stack (prepushed)
1766
1767 __ ba(resume_interpreter);
1768 __ delayed()->mov(L1_scratch, O1);
1769
1770 // An exception is being caught on return to a vanilla interpreter frame.
1771 // Empty the stack and resume interpreter
1772
1773 __ bind(return_with_exception);
|
565 }
566 #endif // SERIALGC
567
568 // If G1 is not enabled then attempt to go through the accessor entry point
569 // Reference.get is an accessor
570 return generate_accessor_entry();
571 }
572
573 //
574 // Interpreter stub for calling a native method. (C++ interpreter)
575 // This sets up a somewhat different looking stack for calling the native method
576 // than the typical interpreter frame setup.
577 //
578
// NOTE(review): fragment of a line-numbered source listing (second copy,
// after the '|' divider); the body of generate_native_entry continues beyond
// the last line shown in this chunk, so the code below is left byte-identical
// and only review comments are added.
// Purpose: entry stub that builds a C++ interpreter frame for invoking a
// native method (see the header comment preceding this function).
579 address InterpreterGenerator::generate_native_entry(bool synchronized) {
580 address entry = __ pc();
581
582 // the following temporary registers are used during frame creation
583 const Register Gtmp1 = G3_scratch ;
584 const Register Gtmp2 = G1_scratch;
585
586 bool inc_counter = UseCompiler || CountCompiledCalls;
587
588 // make sure registers are different!
589 assert_different_registers(G2_thread, G5_method, Gargs, Gtmp1, Gtmp2);
590
591 const Address access_flags (G5_method, 0, in_bytes(Method::access_flags_offset()));
592
593 Label Lentry;
594 __ bind(Lentry);
595
596 const Register Glocals_size = G3;
597 assert_different_registers(Glocals_size, G4_scratch, Gframe_size);
598
599 // make sure method is native & not abstract
600 // rethink these assertions - they can be simplified and shared (gri 2/25/2000)
601 #ifdef ASSERT
602 __ ld(access_flags, Gtmp1);
603 {
604 Label L;
605 __ btst(JVM_ACC_NATIVE, Gtmp1);
606 __ br(Assembler::notZero, false, Assembler::pt, L);
607 __ delayed()->nop();
608 __ stop("tried to execute non-native method as native");
609 __ bind(L);
610 }
611 { Label L;
612 __ btst(JVM_ACC_ABSTRACT, Gtmp1);
613 __ br(Assembler::zero, false, Assembler::pt, L);
614 __ delayed()->nop();
615 __ stop("tried to execute abstract method as non-abstract");
616 __ bind(L);
617 }
618 #endif // ASSERT
619
// Unlike the first copy of this listing, the parameter count is read via the
// ConstMethod: load Method::_constMethod, then
// ConstMethod::size_of_parameters_offset(). Gtmp1 is safely reused as both
// the ConstMethod* and then the loaded count.
620 __ ld_ptr(G5_method, in_bytes(Method::const_offset()), Gtmp1);
621 __ lduh(Gtmp1, in_bytes(ConstMethod::size_of_parameters_offset()), Gtmp1);
622 __ sll(Gtmp1, LogBytesPerWord, Gtmp2); // parameter size in bytes
623 __ add(Gargs, Gtmp2, Gargs); // points to first local + BytesPerWord
624 // NEW
625 __ add(Gargs, -wordSize, Gargs); // points to first local[0]
626 // generate the code to allocate the interpreter stack frame
627 // NEW FRAME ALLOCATED HERE
628 // save callers original sp
629 // __ mov(SP, I5_savedSP->after_restore());
630
// G0 (the SPARC zero register) = no previous frame-manager state; 'true'
// selects the native frame layout in generate_compute_interpreter_state.
631 generate_compute_interpreter_state(Lstate, G0, true);
632
633 // At this point Lstate points to new interpreter state
634 //
635
636 const Address do_not_unlock_if_synchronized(G2_thread, 0,
637 in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
638 // Since at this point in the method invocation the exception handler
639 // would try to exit the monitor of synchronized methods which hasn't
640 // been entered yet, we set the thread local variable
641 // _do_not_unlock_if_synchronized to true. If any exception was thrown by
1030
// NOTE(review): head of generate_compute_interpreter_state from a numbered
// listing (second copy, after the '|' divider); truncated right after the
// 'stack_ok'/'max_pages' declarations, so only comments are added.
1031 void CppInterpreterGenerator::generate_compute_interpreter_state(const Register state,
1032 const Register prev_state,
1033 bool native) {
1034
1035 // On entry
1036 // G5_method - caller's method
1037 // Gargs - points to initial parameters (i.e. locals[0])
1038 // G2_thread - valid? (C1 only??)
1039 // "prev_state" - contains any previous frame manager state which we must save a link
1040 //
1041 // On return
1042 // "state" is a pointer to the newly allocated state object. We must allocate and initialize
1043 // a new interpretState object and the method expression stack.
1044
1045 assert_different_registers(state, prev_state);
1046 assert_different_registers(prev_state, G3_scratch);
1047 const Register Gtmp = G3_scratch;
// In this copy the per-method sizes are all reached through constMethod;
// the direct Method::size_of_parameters/size_of_locals Address objects of
// the first copy are gone.
1048 const Address constMethod (G5_method, 0, in_bytes(Method::const_offset()));
1049 const Address access_flags (G5_method, 0, in_bytes(Method::access_flags_offset()));
1050
1051 // slop factor is two extra slots on the expression stack so that
1052 // we always have room to store a result when returning from a call without parameters
1053 // that returns a result.
1054
// slop_factor is in bytes (2*wordSize); it is converted to words below,
// together with sizeof(BytecodeInterpreter), by the >> LogBytesPerWord.
1055 const int slop_factor = 2*wordSize;
1056
// fixed_size is a word count: the first term is shifted from bytes to words
// and the frame:: constants appear to be word offsets already (their names
// say "word"/"words").
1057 const int fixed_size = ((sizeof(BytecodeInterpreter) + slop_factor) >> LogBytesPerWord) + // what is the slop factor?
1058 //6815692//Method::extra_stack_words() + // extra push slots for MH adapters
1059 frame::memory_parameter_word_sp_offset + // register save area + param window
1060 (native ? frame::interpreter_frame_extra_outgoing_argument_words : 0); // JNI, class
1061
1062 // XXX G5_method valid
1063
1064 // Now compute new frame size
1065
// Both branches now start by loading the ConstMethod*; native frames size
// the variable part from the outgoing native call parameters, Java frames
// from the method's maximum expression-stack depth.
1066 if (native) {
1067 __ ld_ptr(constMethod, Gtmp);
1068 __ lduh(Gtmp, in_bytes(ConstMethod::size_of_parameters_offset()), Gtmp );
1069 __ calc_mem_param_words(Gtmp, Gtmp); // space for native call parameters passed on the stack in words
1070 } else {
1071 // Full size expression stack
1072 __ ld_ptr(constMethod, Gtmp);
1073 __ lduh(Gtmp, in_bytes(ConstMethod::max_stack_offset()), Gtmp);
1074 }
1075 __ add(Gtmp, fixed_size, Gtmp); // plus the fixed portion
1076
1077 __ neg(Gtmp); // negative space for stack/parameters in words
1078 __ and3(Gtmp, -WordsPerLong, Gtmp); // make multiple of 2 (SP must be 2-word aligned)
1079 __ sll(Gtmp, LogBytesPerWord, Gtmp); // negative space for frame in bytes
1080
1081 // Need to do stack size check here before we fault on large frames
1082
1083 Label stack_ok;
1084
1085 const int max_pages = StackShadowPages > (StackRedPages+StackYellowPages) ? StackShadowPages :
1086 (StackRedPages+StackYellowPages);
1087
1088
// NOTE(review): mid-function fragment (the enclosing function's opening lines
// appear earlier in this listing, before a gap); code left byte-identical.
1221 //
1222 // Code to initialize locals
1223 //
1224 Register init_value = noreg; // will be G0 if we must clear locals
1225 // Now zero locals
// The literal 'true' makes the else/ASSERT garbage-pattern branch below
// unreachable: locals are always zero-initialized on this path.
1226 if (true /* zerolocals */ || ClearInterpreterLocals) {
1227 // explicitly initialize locals
1228 init_value = G0;
1229 } else {
1230 #ifdef ASSERT
1231 // initialize locals to a garbage pattern for better debugging
1232 init_value = O3;
1233 __ set( 0x0F0F0F0F, init_value );
1234 #endif // ASSERT
1235 }
1236 if (init_value != noreg) {
1237 Label clear_loop;
1238
1239 // NOTE: If you change the frame layout, this code will need to
1240 // be updated!
// This copy reads both sizes off the ConstMethod*: O1 first holds the
// ConstMethod*, O2 the locals count, then O1 is overwritten last with the
// parameter count — the load order makes the register reuse safe.
1241 __ ld_ptr( constMethod, O1 );
1242 __ lduh( O1, in_bytes(ConstMethod::size_of_locals_offset()), O2 );
1243 __ lduh(O1, in_bytes(ConstMethod::size_of_parameters_offset()), O1);
// O2 = _locals - size_of_locals (bytes), O1 = _locals - size_of_parameters:
// the loop stores init_value into each word between those two addresses,
// i.e. only the non-parameter locals get initialized.
1244 __ sll( O2, LogBytesPerWord, O2);
1245 __ sll( O1, LogBytesPerWord, O1 );
1246 __ ld_ptr(XXX_STATE(_locals), L2_scratch);
1247 __ sub( L2_scratch, O2, O2 );
1248 __ sub( L2_scratch, O1, O1 );
1249
1250 __ bind( clear_loop );
1251 __ inc( O2, wordSize );
1252
// The store sits in the branch delay slot with the annul bit set (second
// argument 'true'), so it executes only when the loop branch is taken.
1253 __ cmp( O2, O1 );
1254 __ br( Assembler::lessEqualUnsigned, true, Assembler::pt, clear_loop );
1255 __ delayed()->st_ptr( init_value, O2, 0 );
1256 }
1257 }
1258 }
1259 // Find preallocated monitor and lock method (C++ interpreter)
1260 //
1261 void InterpreterGenerator::lock_method(void) {
1262 // Lock the current method.
1263 // Destroys registers L2_scratch, L3_scratch, O0
1467 __ breakpoint_trap(); \
1468 __ emit_long(marker); \
1469 __ bind(skip); \
1470 }
1471 #else
1472 #define VALIDATE_STATE(scratch, marker)
1473 #endif /* ASSERT */
1474
// NOTE(review): fragment of a numbered listing; the end of
// adjust_callers_stack lies outside this chunk. One concrete defect fixed
// below; everything else is unchanged.
1475 void CppInterpreterGenerator::adjust_callers_stack(Register args) {
1476 //
1477 // Adjust caller's stack so that all the locals can be contiguous with
1478 // the parameters.
1479 // Worries about stack overflow make this a pain.
1480 //
1481 // Destroys args, G3_scratch (Gtmp), O2 (tmp)
1482 // In/Out O5_savedSP (sender's original SP)
1483 //
1484 // assert_different_registers(state, prev_state);
1485 const Register Gtmp = G3_scratch;
1486 const Register tmp = O2;
1487
// Load the ConstMethod* and from it the parameter count in words.
1488 __ ld_ptr(G5_method, in_bytes(Method::const_offset()), Gtmp);
1489 __ lduh(Gtmp, in_bytes(ConstMethod::size_of_parameters_offset()), tmp);
// BUG FIX: this previously did
//   __ sll(tmp, LogBytesPerWord, Gargs);
//   __ add(args, Gargs, Gargs);
// which clobbers the incoming argument pointer before the add reads it
// whenever 'args' aliases Gargs — and the only call site in this listing
// passes Gargs — yielding 2*param_bytes instead of args + param_bytes.
// Shift into Gtmp instead (as the pre-ConstMethod version of this routine
// did) and reload the ConstMethod* afterwards for the size_of_locals read.
1490 __ sll(tmp, LogBytesPerWord, Gtmp); // parameter size in bytes (clobbers the ConstMethod*)
1491 __ add(args, Gtmp, Gargs); // points to first local + BytesPerWord
1492 // NEW
1493 __ add(Gargs, -wordSize, Gargs); // points to first local[0]
1494 // determine extra space for non-argument locals & adjust caller's SP
1495 // tmp: parameter size in words
1496 __ ld_ptr(G5_method, in_bytes(Method::const_offset()), Gtmp); // reload ConstMethod*
1497 __ lduh(Gtmp, in_bytes(ConstMethod::size_of_locals_offset()), Gtmp);
// Presumably computes (locals - parameters) scaled to bytes into Gtmp —
// TODO confirm against compute_extra_locals_size_in_bytes.
1498 __ compute_extra_locals_size_in_bytes(tmp, Gtmp, Gtmp);
1499
1500 #if 1
1501 // c2i adapters place the final interpreter argument in the register save area for O0/I0
1502 // the call_stub will place the final interpreter argument at
1503 // frame::memory_parameter_word_sp_offset. This is mostly not noticeable for either asm
1504 // or c++ interpreter. However with the c++ interpreter when we do a recursive call
1505 // and try to make it look good in the debugger we will store the argument to
1506 // RecursiveInterpreterActivation in the register argument save area. Without allocating
1507 // extra space for the compiler this will overwrite locals in the local array of the
1508 // interpreter.
1509 // QQQ still needed with frameless adapters???
1510
1511 const int c2i_adjust_words = frame::memory_parameter_word_sp_offset - frame::callee_register_argument_save_area_sp_offset;
1512
1513 __ add(Gtmp, c2i_adjust_words*wordSize, Gtmp);
1514 #endif // 1
1515
1516 __ sub(SP, Gtmp, SP); // just caller's frame for the additional space we need.
1523 // Gargs: bottom of args (sender_sp)
1524 // O5: sender's sp
1525
1526 // A single frame manager is plenty as we don't specialize for synchronized. We could and
1527 // the code is pretty much ready. Would need to change the test below and for good measure
1528 // modify generate_interpreter_state to only do the (pre) sync stuff for synchronized
1529 // routines. Not clear this is worth it yet.
1530
1531 if (interpreter_frame_manager) {
1532 return interpreter_frame_manager;
1533 }
1534
1535 __ bind(frame_manager_entry);
1536
1537 // the following temporary registers are used during frame creation
1538 const Register Gtmp1 = G3_scratch;
1539 // const Register Lmirror = L1; // native mirror (native calls only)
1540
1541 const Address constMethod (G5_method, 0, in_bytes(Method::const_offset()));
1542 const Address access_flags (G5_method, 0, in_bytes(Method::access_flags_offset()));
1543
1544 address entry_point = __ pc();
1545 __ mov(G0, prevState); // no current activation
1546
1547
1548 Label re_dispatch;
1549
1550 __ bind(re_dispatch);
1551
1552 // Interpreter needs to have locals completely contiguous. In order to do that
1553 // We must adjust the caller's stack pointer for any locals beyond just the
1554 // parameters
1555 adjust_callers_stack(Gargs);
1556
1557 // O5_savedSP still contains sender's sp
1558
1559 // NEW FRAME
1560
1561 generate_compute_interpreter_state(Lstate, prevState, false);
1562
1730 VALIDATE_STATE(G3_scratch, 6);
1731
1732 // Result if any is in native abi result (O0..O1/F0..F1). The java expression
1733 // stack is in the state that the calling convention left it.
1734 // Copy the result from native abi result and place it on java expression stack.
1735
1736 // Current interpreter state is present in Lstate
1737
1738 // Exception pending?
1739
1740 __ ld_ptr(STATE(_frame_bottom), SP); // restore to full stack frame
1741 __ ld_ptr(exception_addr, Lscratch); // get any pending exception
1742 __ tst(Lscratch); // exception pending?
1743 __ brx(Assembler::notZero, false, Assembler::pt, return_with_exception);
1744 __ delayed()->nop();
1745
1746 // Process the native abi result to java expression stack
1747
1748 __ ld_ptr(STATE(_result._to_call._callee), L4_scratch); // called method
1749 __ ld_ptr(STATE(_stack), L1_scratch); // get top of java expr stack
1750 // get parameter size
1751 __ ld_ptr(L4_scratch, in_bytes(Method::const_offset()), L2_scratch);
1752 __ lduh(L2_scratch, in_bytes(ConstMethod::size_of_parameters_offset()), L2_scratch);
1753 __ sll(L2_scratch, LogBytesPerWord, L2_scratch ); // parameter size in bytes
1754 __ add(L1_scratch, L2_scratch, L1_scratch); // stack destination for result
1755 __ ld(L4_scratch, in_bytes(Method::result_index_offset()), L3_scratch); // called method result type index
1756
1757 // tosca is really just native abi
1758 __ set((intptr_t)CppInterpreter::_tosca_to_stack, L4_scratch);
1759 __ sll(L3_scratch, LogBytesPerWord, L3_scratch);
1760 __ ld_ptr(L4_scratch, L3_scratch, Lscratch); // get typed result converter address
1761 __ jmpl(Lscratch, G0, O7); // and convert it
1762 __ delayed()->nop();
1763
1764 // L1_scratch points to top of stack (prepushed)
1765
1766 __ ba(resume_interpreter);
1767 __ delayed()->mov(L1_scratch, O1);
1768
1769 // An exception is being caught on return to a vanilla interpreter frame.
1770 // Empty the stack and resume interpreter
1771
1772 __ bind(return_with_exception);
|