< prev index next >

src/cpu/ppc/vm/templateInterpreter_ppc.cpp

Print this page
rev 7507 : 8066964: ppc64: argument and return type profiling, fix problem with popframe


  74   __ empty_expression_stack();
  75   __ load_const_optimized(R4_ARG2, (address) name);
  76   // Index is in R17_tos.
  77   __ mr(R5_ARG3, R17_tos);
  78   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_ArrayIndexOutOfBoundsException));
  79   return entry;
  80 }
  81 
  82 #if 0
  83 // Call special ClassCastException constructor taking object to cast
  84 // and target class as arguments.
  85 address TemplateInterpreterGenerator::generate_ClassCastException_verbose_handler() {
  86   address entry = __ pc();
  87 
  88   // Expression stack must be empty before entering the VM if an
  89   // exception happened.
  90   __ empty_expression_stack();
  91 
  92   // Thread will be loaded to R3_ARG1.
  93   // Target class oop is in register R5_ARG3 by convention!
  94   // Note: CAST_FROM_FN_PTR takes exactly (type, fn); the runtime-call
  94   // arguments (object to cast, target class) are separate call_VM operands.
  94   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_ClassCastException_verbose), R17_tos, R5_ARG3);
  95   // Above call must not return here since exception pending.
  96   DEBUG_ONLY(__ should_not_reach_here();)
  97   return entry;
  98 }
  99 #endif
 100 
 101 address TemplateInterpreterGenerator::generate_ClassCastException_handler() {
 102   address entry = __ pc();
 103   // Expression stack must be empty before entering the VM if an
 104   // exception happened.
 105   __ empty_expression_stack();
 106 
 107   // Load exception object.
 108   // Thread will be loaded to R3_ARG1.
 109   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_ClassCastException), R17_tos);
 110 #ifdef ASSERT
 111   // Above call must not return here since exception pending.
 112   __ should_not_reach_here();
 113 #endif
 114   return entry;


 155   switch (state) {
 156     case ltos:
 157     case btos:
 158     case ctos:
 159     case stos:
 160     case atos:
 161     case itos: __ mr(R17_tos, R3_RET); break;   // RET -> TOS cache
 162     case ftos:
 163     case dtos: __ fmr(F15_ftos, F1_RET); break; // TOS cache -> GR_FRET
 164     case vtos: break;                           // Nothing to do, this was a void return.
 165     default  : ShouldNotReachHere();
 166   }
 167 
 168   __ restore_interpreter_state(R11_scratch1); // Sets R11_scratch1 = fp.
 169   __ ld(R12_scratch2, _ijava_state_neg(top_frame_sp), R11_scratch1);
 170   __ resize_frame_absolute(R12_scratch2, R11_scratch1, R0);
 171 
 172   // Compiled code destroys templateTableBase, reload.
 173   __ load_const_optimized(R25_templateTableBase, (address)Interpreter::dispatch_table((TosState)0), R12_scratch2);
 174 




 175   const Register cache = R11_scratch1;
 176   const Register size  = R12_scratch2;
 177   __ get_cache_and_index_at_bcp(cache, 1, index_size);
 178 
 179   // Get least significant byte of 64 bit value:
 180 #if defined(VM_LITTLE_ENDIAN)
 181   __ lbz(size, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()), cache);
 182 #else
 183   __ lbz(size, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()) + 7, cache);
 184 #endif
 185   __ sldi(size, size, Interpreter::logStackElementSize);
 186   __ add(R15_esp, R15_esp, size);
 187   __ dispatch_next(state, step);
 188   return entry;
 189 }
 190 
 191 address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state, int step) {
 192   address entry = __ pc();
 193   // If state != vtos, we're returning from a native method, which put its result
 194   // into the result register. So move the value out of the return register back


1172   // --------------------------------------------------------------------------
1173   // Counter increment and overflow check.
1174   Label invocation_counter_overflow,
1175         profile_method,
1176         profile_method_continue;
1177   if (inc_counter || ProfileInterpreter) {
1178 
1179     Register Rdo_not_unlock_if_synchronized_addr = R11_scratch1;
1180     if (synchronized) {
1181       // Since at this point in the method invocation the exception handler
1182       // would try to exit the monitor of synchronized methods which hasn't
1183       // been entered yet, we set the thread local variable
1184       // _do_not_unlock_if_synchronized to true. If any exception was thrown by
1185       // runtime, exception handling i.e. unlock_if_synchronized_method will
1186       // check this thread local flag.
1187       // This flag has two effects, one is to force an unwind in the topmost
1188       // interpreter frame and not perform an unlock while doing so.
1189       __ li(R0, 1);
1190       __ stb(R0, in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()), R16_thread);
1191     }




1192     // Increment invocation counter and check for overflow.
1193     if (inc_counter) {
1194       generate_counter_incr(&invocation_counter_overflow, &profile_method, &profile_method_continue);
1195     }
1196 
1197     __ bind(profile_method_continue);
1198 
1199     // Reset the _do_not_unlock_if_synchronized flag.
1200     if (synchronized) {
1201       __ li(R0, 0);
1202       __ stb(R0, in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()), R16_thread);
1203     }
1204   }
1205 
1206   // --------------------------------------------------------------------------
1207   // Locking of synchronized methods. Must happen AFTER invocation_counter
1208   // check and stack overflow check, so method is not locked if overflows.
1209   if (synchronized) {
1210     lock_method(R3_ARG1, R4_ARG2, R5_ARG3);
1211   }


1452     // end up in the deopt interpreter entry, deoptimization prepared everything that
1453     // we will reexecute the call that called us.
1454     __ merge_frames(/*top_frame_sp*/ R21_sender_SP, /*reload return_pc*/ return_pc, R11_scratch1, R12_scratch2);
1455     __ mtlr(return_pc);
1456     __ blr();
1457 
1458     // The non-deoptimized case.
1459     __ bind(Lcaller_not_deoptimized);
1460 
1461     // Clear the popframe condition flag.
1462     __ li(R0, 0);
1463     __ stw(R0, in_bytes(JavaThread::popframe_condition_offset()), R16_thread);
1464 
1465     // Get out of the current method and re-execute the call that called us.
1466     __ merge_frames(/*top_frame_sp*/ R21_sender_SP, /*return_pc*/ noreg, R11_scratch1, R12_scratch2);
1467     __ restore_interpreter_state(R11_scratch1);
1468     __ ld(R12_scratch2, _ijava_state_neg(top_frame_sp), R11_scratch1);
1469     __ resize_frame_absolute(R12_scratch2, R11_scratch1, R0);
1470     if (ProfileInterpreter) {
1471       __ set_method_data_pointer_for_bcp();


1472     }
1473 #if INCLUDE_JVMTI
1474     Label L_done;
1475 
1476     __ lbz(R11_scratch1, 0, R14_bcp);
1477     __ cmpwi(CCR0, R11_scratch1, Bytecodes::_invokestatic);
1478     __ bne(CCR0, L_done);
1479 
1480     // The member name argument must be restored if _invokestatic is re-executed after a PopFrame call.
1481     // Detect such a case in the InterpreterRuntime function and return the member name argument, or NULL.
1482     __ ld(R4_ARG2, 0, R18_locals);
1483     __ call_VM(R11_scratch1, CAST_FROM_FN_PTR(address, InterpreterRuntime::member_name_arg_or_null),
1484                R4_ARG2, R19_method, R14_bcp);
1485 
1486     __ cmpdi(CCR0, R11_scratch1, 0);
1487     __ beq(CCR0, L_done);
1488 
1489     __ std(R11_scratch1, wordSize, R15_esp);
1490     __ bind(L_done);
1491 #endif // INCLUDE_JVMTI
1492     __ dispatch_next(vtos);
1493   }
1494   // end of JVMTI PopFrame support
1495 
1496   // --------------------------------------------------------------------------
1497   // Remove activation exception entry.
1498   // This is jumped to if an interpreted method can't handle an exception itself
1499   // (we come from the throw/rethrow exception entry above). We're going to call
1500   // into the VM to find the exception handler in the caller, pop the current
1501   // frame and return the handler we calculated.
1502   Interpreter::_remove_activation_entry = __ pc();
1503   {
1504     __ pop_ptr(Rexception);
1505     __ verify_thread();
1506     __ verify_oop(Rexception);
1507     __ std(Rexception, in_bytes(JavaThread::vm_result_offset()), R16_thread);
1508 
1509     __ unlock_if_synchronized_method(vtos, /* throw_monitor_exception */ false, true);




  74   __ empty_expression_stack();
  75   __ load_const_optimized(R4_ARG2, (address) name);
  76   // Index is in R17_tos.
  77   __ mr(R5_ARG3, R17_tos);
  78   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_ArrayIndexOutOfBoundsException));
  79   return entry;
  80 }
  81 
  82 #if 0
  83 // Call special ClassCastException constructor taking object to cast
  84 // and target class as arguments.
  85 address TemplateInterpreterGenerator::generate_ClassCastException_verbose_handler() {
  86   address entry = __ pc();
  87 
  88   // Expression stack must be empty before entering the VM if an
  89   // exception happened.
  90   __ empty_expression_stack();
  91 
  92   // The thread argument (R3_ARG1) is loaded implicitly by call_VM.
  93   // R17_tos holds the object to cast, R5_ARG3 the target class (by convention!).
  94   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_ClassCastException_verbose), R17_tos, R5_ARG3);
  95   // Above call must not return here since exception pending.
  96   DEBUG_ONLY(__ should_not_reach_here();)
  97   return entry;
  98 }
  99 #endif
 100 
 101 address TemplateInterpreterGenerator::generate_ClassCastException_handler() {
 102   address entry = __ pc();
 103   // Expression stack must be empty before entering the VM if an
 104   // exception happened.
 105   __ empty_expression_stack();
 106 
 107   // Load exception object.
 108   // Thread will be loaded to R3_ARG1.
 109   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_ClassCastException), R17_tos);
 110 #ifdef ASSERT
 111   // Above call must not return here since exception pending.
 112   __ should_not_reach_here();
 113 #endif
 114   return entry;


 155   switch (state) {
 156     case ltos:
 157     case btos:
 158     case ctos:
 159     case stos:
 160     case atos:
 161     case itos: __ mr(R17_tos, R3_RET); break;   // RET -> TOS cache
 162     case ftos:
 163     case dtos: __ fmr(F15_ftos, F1_RET); break; // TOS cache -> GR_FRET
 164     case vtos: break;                           // Nothing to do, this was a void return.
 165     default  : ShouldNotReachHere();
 166   }
 167 
 168   __ restore_interpreter_state(R11_scratch1); // Sets R11_scratch1 = fp.
 169   __ ld(R12_scratch2, _ijava_state_neg(top_frame_sp), R11_scratch1);
 170   __ resize_frame_absolute(R12_scratch2, R11_scratch1, R0);
 171 
 172   // Compiled code destroys templateTableBase, reload.
 173   __ load_const_optimized(R25_templateTableBase, (address)Interpreter::dispatch_table((TosState)0), R12_scratch2);
 174 
 175   if (state == atos) {
 176     __ profile_return_type(R3_RET, R11_scratch1, R12_scratch2);
 177   }
 178 
 179   const Register cache = R11_scratch1;
 180   const Register size  = R12_scratch2;
 181   __ get_cache_and_index_at_bcp(cache, 1, index_size);
 182 
 183   // Get least significant byte of 64 bit value:
 184 #if defined(VM_LITTLE_ENDIAN)
 185   __ lbz(size, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()), cache);
 186 #else
 187   __ lbz(size, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()) + 7, cache);
 188 #endif
 189   __ sldi(size, size, Interpreter::logStackElementSize);
 190   __ add(R15_esp, R15_esp, size);
 191   __ dispatch_next(state, step);
 192   return entry;
 193 }
 194 
 195 address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state, int step) {
 196   address entry = __ pc();
 197   // If state != vtos, we're returning from a native method, which put its result
 198   // into the result register. So move the value out of the return register back


1176   // --------------------------------------------------------------------------
1177   // Counter increment and overflow check.
1178   Label invocation_counter_overflow,
1179         profile_method,
1180         profile_method_continue;
1181   if (inc_counter || ProfileInterpreter) {
1182 
1183     Register Rdo_not_unlock_if_synchronized_addr = R11_scratch1;
1184     if (synchronized) {
1185       // Since at this point in the method invocation the exception handler
1186       // would try to exit the monitor of synchronized methods which hasn't
1187       // been entered yet, we set the thread local variable
1188       // _do_not_unlock_if_synchronized to true. If any exception was thrown by
1189       // runtime, exception handling i.e. unlock_if_synchronized_method will
1190       // check this thread local flag.
1191       // This flag has two effects, one is to force an unwind in the topmost
1192       // interpreter frame and not perform an unlock while doing so.
1193       __ li(R0, 1);
1194       __ stb(R0, in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()), R16_thread);
1195     }
1196 
1197     // Argument and return type profiling.
1198     __ profile_parameters_type(R3_ARG1, R4_ARG2, R5_ARG3, R6_ARG4);
1199 
1200     // Increment invocation counter and check for overflow.
1201     if (inc_counter) {
1202       generate_counter_incr(&invocation_counter_overflow, &profile_method, &profile_method_continue);
1203     }
1204 
1205     __ bind(profile_method_continue);
1206 
1207     // Reset the _do_not_unlock_if_synchronized flag.
1208     if (synchronized) {
1209       __ li(R0, 0);
1210       __ stb(R0, in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()), R16_thread);
1211     }
1212   }
1213 
1214   // --------------------------------------------------------------------------
1215   // Locking of synchronized methods. Must happen AFTER invocation_counter
1216   // check and stack overflow check, so method is not locked if overflows.
1217   if (synchronized) {
1218     lock_method(R3_ARG1, R4_ARG2, R5_ARG3);
1219   }


1460     // end up in the deopt interpreter entry, deoptimization prepared everything that
1461     // we will reexecute the call that called us.
1462     __ merge_frames(/*top_frame_sp*/ R21_sender_SP, /*reload return_pc*/ return_pc, R11_scratch1, R12_scratch2);
1463     __ mtlr(return_pc);
1464     __ blr();
1465 
1466     // The non-deoptimized case.
1467     __ bind(Lcaller_not_deoptimized);
1468 
1469     // Clear the popframe condition flag.
1470     __ li(R0, 0);
1471     __ stw(R0, in_bytes(JavaThread::popframe_condition_offset()), R16_thread);
1472 
1473     // Get out of the current method and re-execute the call that called us.
1474     __ merge_frames(/*top_frame_sp*/ R21_sender_SP, /*return_pc*/ noreg, R11_scratch1, R12_scratch2);
1475     __ restore_interpreter_state(R11_scratch1);
1476     __ ld(R12_scratch2, _ijava_state_neg(top_frame_sp), R11_scratch1);
1477     __ resize_frame_absolute(R12_scratch2, R11_scratch1, R0);
1478     if (ProfileInterpreter) {
1479       __ set_method_data_pointer_for_bcp();
1480       __ ld(R11_scratch1, 0, R1_SP);
1481       __ std(R28_mdx, _ijava_state_neg(mdx), R11_scratch1);
1482     }
1483 #if INCLUDE_JVMTI
1484     Label L_done;
1485 
1486     __ lbz(R11_scratch1, 0, R14_bcp);
1487     __ cmpwi(CCR0, R11_scratch1, Bytecodes::_invokestatic);
1488     __ bne(CCR0, L_done);
1489 
1490     // The member name argument must be restored if _invokestatic is re-executed after a PopFrame call.
1491     // Detect such a case in the InterpreterRuntime function and return the member name argument, or NULL.
1492     __ ld(R4_ARG2, 0, R18_locals);
1493     __ MacroAssembler::call_VM(R4_ARG2, CAST_FROM_FN_PTR(address, InterpreterRuntime::member_name_arg_or_null), R4_ARG2, R19_method, R14_bcp, false);
1494     __ restore_interpreter_state(R11_scratch1, /*bcp_and_mdx_only*/ true);
1495     __ cmpdi(CCR0, R4_ARG2, 0);

1496     __ beq(CCR0, L_done);
1497     __ std(R4_ARG2, wordSize, R15_esp);

1498     __ bind(L_done);
1499 #endif // INCLUDE_JVMTI
1500     __ dispatch_next(vtos);
1501   }
1502   // end of JVMTI PopFrame support
1503 
1504   // --------------------------------------------------------------------------
1505   // Remove activation exception entry.
1506   // This is jumped to if an interpreted method can't handle an exception itself
1507   // (we come from the throw/rethrow exception entry above). We're going to call
1508   // into the VM to find the exception handler in the caller, pop the current
1509   // frame and return the handler we calculated.
1510   Interpreter::_remove_activation_entry = __ pc();
1511   {
1512     __ pop_ptr(Rexception);
1513     __ verify_thread();
1514     __ verify_oop(Rexception);
1515     __ std(Rexception, in_bytes(JavaThread::vm_result_offset()), R16_thread);
1516 
1517     __ unlock_if_synchronized_method(vtos, /* throw_monitor_exception */ false, true);


< prev index next >