src/cpu/x86/vm/templateInterpreter_x86_64.cpp

Old version:

 181   if (state == atos) {
 182     Register mdp = rbx;
 183     Register tmp = rcx;
 184     __ profile_return_type(mdp, rax, tmp);
 185   }
 186 
 187   const Register cache = rbx;
 188   const Register index = rcx;
 189   __ get_cache_and_index_at_bcp(cache, index, 1, index_size);
 190 
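       // the low bits of the ConstantPoolCacheEntry flags hold the invoke's
       // parameter size; use it to pop the arguments off the expression stack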
 191   const Register flags = cache;
 192   __ movl(flags, Address(cache, index, Address::times_ptr, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
 193   __ andl(flags, ConstantPoolCacheEntry::parameter_size_mask);
 194   __ lea(rsp, Address(rsp, flags, Interpreter::stackElementScale()));
 195   __ dispatch_next(state, step);
 196 
 197   return entry;
 198 }
 199 
 200 
 201 address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state,
 202                                                                int step) {
 203   address entry = __ pc();
 204   // NULL last_sp until next java call
 205   __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), (int32_t)NULL_WORD);
 206   __ restore_bcp();
 207   __ restore_locals();
 208   // handle exceptions
 209   {
 210     Label L;
 211     __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t) NULL_WORD);
 212     __ jcc(Assembler::zero, L);
 213     __ call_VM(noreg,
 214                CAST_FROM_FN_PTR(address,
 215                                 InterpreterRuntime::throw_pending_exception));
 216     __ should_not_reach_here();
 217     __ bind(L);
 218   }
 219   __ dispatch_next(state, step);
 220   return entry;
 221 }
 222 
 223 int AbstractInterpreter::BasicType_as_index(BasicType type) {
 224   int i = 0;
 225   switch (type) {
 226     case T_BOOLEAN: i = 0; break;
 227     case T_CHAR   : i = 1; break;


 483 
 484   // Note: the restored frame is not necessarily interpreted.
 485   // Use the shared runtime version of the StackOverflowError.
 486   assert(StubRoutines::throw_StackOverflowError_entry() != NULL, "stub not yet generated");
 487   __ jump(ExternalAddress(StubRoutines::throw_StackOverflowError_entry()));
 488 
 489   // all done with frame size check
 490   __ bind(after_frame_check);
 491 }
 492 
 493 // Allocate monitor and lock method (asm interpreter)
 494 //
 495 // Args:
 496 //      rbx: Method*
 497 //      r14: locals
 498 //
 499 // Kills:
 500 //      rax
 501 //      c_rarg0, c_rarg1, c_rarg2, c_rarg3, ...(param regs)
 502 //      rscratch1, rscratch2 (scratch regs)
 503 void InterpreterGenerator::lock_method(void) {
 504   // synchronize method
 505   const Address access_flags(rbx, Method::access_flags_offset());
 506   const Address monitor_block_top(
 507         rbp,
 508         frame::interpreter_frame_monitor_block_top_offset * wordSize);
 509   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
 510 
 511 #ifdef ASSERT
 512   {
 513     Label L;
 514     __ movl(rax, access_flags);
 515     __ testl(rax, JVM_ACC_SYNCHRONIZED);
 516     __ jcc(Assembler::notZero, L);
 517     __ stop("method doesn't need synchronization");
 518     __ bind(L);
 519   }
 520 #endif // ASSERT
 521 
 522   // get synchronization object
 523   {

New version:

 181   if (state == atos) {
 182     Register mdp = rbx;
 183     Register tmp = rcx;
 184     __ profile_return_type(mdp, rax, tmp);
 185   }
 186 
 187   const Register cache = rbx;
 188   const Register index = rcx;
 189   __ get_cache_and_index_at_bcp(cache, index, 1, index_size);
 190 
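       // the low bits of the ConstantPoolCacheEntry flags hold the invoke's
       // parameter size; use it to pop the arguments off the expression stack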
 191   const Register flags = cache;
 192   __ movl(flags, Address(cache, index, Address::times_ptr, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
 193   __ andl(flags, ConstantPoolCacheEntry::parameter_size_mask);
 194   __ lea(rsp, Address(rsp, flags, Interpreter::stackElementScale()));
 195   __ dispatch_next(state, step);
 196 
 197   return entry;
 198 }
 199 
 200 
 201   address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state, int step) {
 202   address entry = __ pc();
 203   // NULL last_sp until next java call
 204   __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), (int32_t)NULL_WORD);
 205   __ restore_bcp();
 206   __ restore_locals();
 207 #if INCLUDE_JVMCI
 208   // Check if we need to take lock at entry of synchronized method.
 209   if (UseJVMCICompiler) {
 210     Label L;
 211     __ cmpb(Address(r15_thread, JavaThread::pending_monitorenter_offset()), 0);
 212     __ jcc(Assembler::zero, L);
 213     // Clear flag.
 214     __ movb(Address(r15_thread, JavaThread::pending_monitorenter_offset()), 0);
 215     // Satisfy calling convention for lock_method().
 216     __ get_method(rbx);
 217     // Take lock.
 218     lock_method();
 219     __ bind(L);
 220   }
 221 #endif
 222   // handle exceptions
 223   {
 224     Label L;
 225     __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t) NULL_WORD);
 226     __ jcc(Assembler::zero, L);
 227     __ call_VM(noreg,
 228                CAST_FROM_FN_PTR(address,
 229                                 InterpreterRuntime::throw_pending_exception));
 230     __ should_not_reach_here();
 231     __ bind(L);
 232   }
 233   __ dispatch_next(state, step);
 234   return entry;
 235 }
 236 
 237 int AbstractInterpreter::BasicType_as_index(BasicType type) {
 238   int i = 0;
 239   switch (type) {
 240     case T_BOOLEAN: i = 0; break;
 241     case T_CHAR   : i = 1; break;


 497 
 498   // Note: the restored frame is not necessarily interpreted.
 499   // Use the shared runtime version of the StackOverflowError.
 500   assert(StubRoutines::throw_StackOverflowError_entry() != NULL, "stub not yet generated");
 501   __ jump(ExternalAddress(StubRoutines::throw_StackOverflowError_entry()));
 502 
 503   // all done with frame size check
 504   __ bind(after_frame_check);
 505 }
 506 
 507 // Allocate monitor and lock method (asm interpreter)
 508 //
 509 // Args:
 510 //      rbx: Method*
 511 //      r14: locals
 512 //
 513 // Kills:
 514 //      rax
 515 //      c_rarg0, c_rarg1, c_rarg2, c_rarg3, ...(param regs)
 516 //      rscratch1, rscratch2 (scratch regs)
 517 void TemplateInterpreterGenerator::lock_method() {
 518   // synchronize method
 519   const Address access_flags(rbx, Method::access_flags_offset());
 520   const Address monitor_block_top(
 521         rbp,
 522         frame::interpreter_frame_monitor_block_top_offset * wordSize);
 523   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
 524 
 525 #ifdef ASSERT
 526   {
 527     Label L;
 528     __ movl(rax, access_flags);
 529     __ testl(rax, JVM_ACC_SYNCHRONIZED);
 530     __ jcc(Assembler::notZero, L);
 531     __ stop("method doesn't need synchronization");
 532     __ bind(L);
 533   }
 534 #endif // ASSERT
 535 
 536   // get synchronization object
 537   {

