< prev index next >

src/hotspot/cpu/x86/interp_masm_x86.cpp

Print this page




// ---- webrev OLD version of this chunk ----
// Tail of the monitor-block scan inside remove_activation(): verify that
// every BasicObjectLock in the frame's monitor area has been unlocked.
1062     bind(loop);
1063     // check if current entry is used
// A non-NULL obj slot means this monitor is still locked; branch to the
// 'exception' handler (bound earlier, outside this excerpt).
1064     cmpptr(Address(rmon, BasicObjectLock::obj_offset_in_bytes()), (int32_t) NULL);
1065     jcc(Assembler::notEqual, exception);
1066 
1067     addptr(rmon, entry_size); // otherwise advance to next entry
1068     bind(entry);
1069     cmpptr(rmon, rbx); // check if bottom reached
1070     jcc(Assembler::notEqual, loop); // if not at bottom then check this entry
1071   }
1072 
1073   bind(no_unlock);
1074 
1075   // jvmti support
// Post (or skip) the JVMTI method-exit notification; both calls preserve
// the cached top-of-stack value described by 'state' (TOSCA).
1076   if (notify_jvmdi) {
1077     notify_method_exit(state, NotifyJVMTI);    // preserve TOSCA
1078   } else {
1079     notify_method_exit(state, SkipNotifyJVMTI); // preserve TOSCA
1080   }
1081 
// Labels for the value-type buffer (TLVB) recycling sequence that follows.
1082   Label vtbuffer_slow, vtbuffer_done;


























// thread register: r15 holds the JavaThread on LP64; on 32-bit we must
// materialize it into rcx with get_thread() below.
1083   const Register thread1 = NOT_LP64(rcx) LP64_ONLY(r15_thread);
1084   const uintptr_t chunk_mask = VTBufferChunk::chunk_mask();










// Value-type buffer recycling:
// rbx = vt_alloc_ptr saved in this frame, rcx = thread's current vt_alloc_ptr.
// Equal pointers -> this activation allocated nothing in the buffer -> done.
1085   movptr(rbx, Address(rbp, frame::interpreter_frame_vt_alloc_ptr_offset * wordSize));
1086   NOT_LP64(get_thread(thread1));
1087   movptr(rcx, Address(thread1, JavaThread::vt_alloc_ptr_offset()));
1088   cmpptr(rbx, rcx);
1089   jcc(Assembler::equal, vtbuffer_done);
// Mask both pointers down to their chunk base; if they land in the same
// chunk the fast path just rewinds the thread's vt_alloc_ptr to the
// frame's saved value, otherwise fall into the slow runtime call.
1090   andptr(rbx, chunk_mask);
1091   andptr(rcx, chunk_mask);
1092   cmpptr(rbx, rcx);
1093   jcc(Assembler::notEqual, vtbuffer_slow);
1094   movptr(rbx, Address(rbp, frame::interpreter_frame_vt_alloc_ptr_offset * wordSize));
1095   movptr(Address(thread1, JavaThread::vt_alloc_ptr_offset()), rbx);
1096   jmp(vtbuffer_done);
// Slow path: allocation crossed a chunk boundary; let the runtime recycle
// the buffer.  push/pop(state) preserves the cached TOS value across the
// VM call.
1097   bind(vtbuffer_slow);
1098   push(state);
1099   call_VM(noreg, CAST_FROM_FN_PTR(address,
1100                                   InterpreterRuntime::recycle_vtbuffer));

1101   pop(state);
1102   bind(vtbuffer_done);
1103 
1104   // remove activation
1105   // get sender sp
1106   movptr(rbx,
1107          Address(rbp, frame::interpreter_frame_sender_sp_offset * wordSize));
// Reserved stack area (JEP 270): if the guard pages are not in the normal
// 'enabled' state and the sender sp is above reserved_stack_activation,
// re-enable the reserved zone and throw the delayed StackOverflowError.
1108   if (StackReservedPages > 0) {
1109     // testing if reserved zone needs to be re-enabled
1110     Register rthread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
1111     Label no_reserved_zone_enabling;
1112 
1113     NOT_LP64(get_thread(rthread);)
1114 
1115     cmpl(Address(rthread, JavaThread::stack_guard_state_offset()), JavaThread::stack_guard_enabled);
1116     jcc(Assembler::equal, no_reserved_zone_enabling);
1117 
1118     cmpptr(rbx, Address(rthread, JavaThread::reserved_stack_activation_offset()));
1119     jcc(Assembler::lessEqual, no_reserved_zone_enabling);
1120 
1121     call_VM_leaf(
1122       CAST_FROM_FN_PTR(address, SharedRuntime::enable_stack_reserved_zone), rthread);
// throw_delayed_StackOverflowError never returns here, hence the guard.
1123     call_VM(noreg, CAST_FROM_FN_PTR(address,
1124                    InterpreterRuntime::throw_delayed_StackOverflowError));
1125     should_not_reach_here();
1126 
1127     bind(no_reserved_zone_enabling);
1128   }
// Returning a value type: scatter its fields into registers.  rax is
// presumably the returned (buffered) value at this point -- the caller of
// this excerpt establishes that; confirm against the full method.
1129   if (load_values) {
1130     // We are returning a value type, load its fields into registers
1131 #ifndef _LP64
1132     super_call_VM_leaf(StubRoutines::load_value_type_fields_in_regs());
1133 #else
// Fetch the value-class-specific unpack handler from the klass; a NULL
// handler means there is nothing to unpack, so skip the call.
1134     load_klass(rdi, rax);
1135     movptr(rdi, Address(rdi, ValueKlass::unpack_handler_offset()));
1136 
1137     Label skip;
1138     testptr(rdi, rdi);
1139     jcc(Assembler::equal, skip);
1140 
1141     // Load fields from a buffered value with a value class specific
1142     // handler
1143     call(rdi);
1144 
1145     bind(skip);
1146 #endif
1147     // call above kills the value in rbx. Reload it.
1148     movptr(rbx, Address(rbp, frame::interpreter_frame_sender_sp_offset * wordSize));




// ---- webrev NEW version of this chunk ----
// Tail of the monitor-block scan inside remove_activation(): verify that
// every BasicObjectLock in the frame's monitor area has been unlocked.
1062     bind(loop);
1063     // check if current entry is used
// A non-NULL obj slot means this monitor is still locked; branch to the
// 'exception' handler (bound earlier, outside this excerpt).
1064     cmpptr(Address(rmon, BasicObjectLock::obj_offset_in_bytes()), (int32_t) NULL);
1065     jcc(Assembler::notEqual, exception);
1066 
1067     addptr(rmon, entry_size); // otherwise advance to next entry
1068     bind(entry);
1069     cmpptr(rmon, rbx); // check if bottom reached
1070     jcc(Assembler::notEqual, loop); // if not at bottom then check this entry
1071   }
1072 
1073   bind(no_unlock);
1074 
1075   // jvmti support
// Post (or skip) the JVMTI method-exit notification; both calls preserve
// the cached top-of-stack value described by 'state' (TOSCA).
1076   if (notify_jvmdi) {
1077     notify_method_exit(state, NotifyJVMTI);    // preserve TOSCA
1078   } else {
1079     notify_method_exit(state, SkipNotifyJVMTI); // preserve TOSCA
1080   }
1081 
// Reserved stack area (JEP 270).  NOTE: in this version the check was moved
// BEFORE the TLVB recycling so no safepointing VM call happens after the
// buffer bookkeeping (see the comment at 1106-1107 below).
1082   if (StackReservedPages > 0) {
1083     movptr(rbx,
1084                Address(rbp, frame::interpreter_frame_sender_sp_offset * wordSize));
1085     // testing if reserved zone needs to be re-enabled
1086     Register rthread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
1087     Label no_reserved_zone_enabling;
1088 
1089     NOT_LP64(get_thread(rthread);)
1090 
1091     cmpl(Address(rthread, JavaThread::stack_guard_state_offset()), JavaThread::stack_guard_enabled);
1092     jcc(Assembler::equal, no_reserved_zone_enabling);
1093 
1094     cmpptr(rbx, Address(rthread, JavaThread::reserved_stack_activation_offset()));
1095     jcc(Assembler::lessEqual, no_reserved_zone_enabling);
1096 
1097     call_VM_leaf(
1098       CAST_FROM_FN_PTR(address, SharedRuntime::enable_stack_reserved_zone), rthread);
// throw_delayed_StackOverflowError never returns here, hence the guard.
1099     call_VM(noreg, CAST_FROM_FN_PTR(address,
1100                    InterpreterRuntime::throw_delayed_StackOverflowError));
1101     should_not_reach_here();
1102 
1103     bind(no_reserved_zone_enabling);
1104   }
1105 
1106   // Code below is taking care of recycling TLVB memory, no safepoint should
1107   // occur between this point and the end of the remove_activation() method
1108   Label vtbuffer_slow, vtbuffer_done, no_buffered_value_returned;
1109   const Register thread1 = NOT_LP64(rcx) LP64_ONLY(r15_thread);
1110   const uintptr_t chunk_mask = VTBufferChunk::chunk_mask();
// New in this version: if the thread has a buffered value being returned
// (return_buffered_value slot non-NULL), hand rax and the frame's saved
// vt_alloc_ptr to return_value_step2 (leaf call -- no safepoint) and fetch
// the relocated result from the thread's vm_result slot into rax.
1111   NOT_LP64(get_thread(thread1));
1112   cmpptr(Address(thread1, JavaThread::return_buffered_value_offset()), (intptr_t)NULL_WORD);
1113   jcc(Assembler::equal, no_buffered_value_returned);
1114   movptr(rbx, Address(rbp, frame::interpreter_frame_vt_alloc_ptr_offset * wordSize));
1115   call_VM_leaf(CAST_FROM_FN_PTR(address,
1116                                   InterpreterRuntime::return_value_step2), rax, rbx);
1117   NOT_LP64(get_thread(thread1));
1118   get_vm_result(rax, thread1);
1119   jmp(vtbuffer_done);
// No buffered value returned: same fast/slow recycling as before.
// rbx = frame's saved vt_alloc_ptr, rcx = thread's current vt_alloc_ptr;
// equal -> nothing allocated -> done.
1120   bind(no_buffered_value_returned);
1121   movptr(rbx, Address(rbp, frame::interpreter_frame_vt_alloc_ptr_offset * wordSize));
1122   NOT_LP64(get_thread(thread1));
1123   movptr(rcx, Address(thread1, JavaThread::vt_alloc_ptr_offset()));
1124   cmpptr(rbx, rcx);
1125   jcc(Assembler::equal, vtbuffer_done);
// Same chunk (after masking to the chunk base) -> just rewind the thread's
// vt_alloc_ptr; different chunk -> slow path.
1126   andptr(rbx, chunk_mask);
1127   andptr(rcx, chunk_mask);
1128   cmpptr(rbx, rcx);
1129   jcc(Assembler::notEqual, vtbuffer_slow);
1130   movptr(rbx, Address(rbp, frame::interpreter_frame_vt_alloc_ptr_offset * wordSize));
1131   movptr(Address(thread1, JavaThread::vt_alloc_ptr_offset()), rbx);
1132   jmp(vtbuffer_done);
// Slow path: recycle_vtbuffer is now a LEAF call (no safepoint, matching
// the constraint stated at 1106-1107) and takes the frame's saved
// vt_alloc_ptr explicitly in rbx.  push/pop(state) preserves the cached TOS.
1133   bind(vtbuffer_slow);
1134   push(state);
1135   movptr(rbx, Address(rbp, frame::interpreter_frame_vt_alloc_ptr_offset * wordSize));
1136   call_VM_leaf(CAST_FROM_FN_PTR(address,
1137                                   InterpreterRuntime::recycle_vtbuffer), rbx);
1138   pop(state);
1139   bind(vtbuffer_done);
1140 
1141   // remove activation
1142   // get sender sp
1143   movptr(rbx,
1144          Address(rbp, frame::interpreter_frame_sender_sp_offset * wordSize));






1145 














// Returning a value type: scatter its fields into registers.  rax is
// presumably the returned (buffered) value at this point -- established by
// the code above this excerpt; confirm against the full method.
1146   if (load_values) {
1147     // We are returning a value type, load its fields into registers
1148 #ifndef _LP64
1149     super_call_VM_leaf(StubRoutines::load_value_type_fields_in_regs());
1150 #else
// Fetch the value-class-specific unpack handler from the klass; a NULL
// handler means there is nothing to unpack, so skip the call.
1151     load_klass(rdi, rax);
1152     movptr(rdi, Address(rdi, ValueKlass::unpack_handler_offset()));
1153 
1154     Label skip;
1155     testptr(rdi, rdi);
1156     jcc(Assembler::equal, skip);
1157 
1158     // Load fields from a buffered value with a value class specific
1159     // handler
1160     call(rdi);
1161 
1162     bind(skip);
1163 #endif
1164     // call above kills the value in rbx. Reload it.
1165     movptr(rbx, Address(rbp, frame::interpreter_frame_sender_sp_offset * wordSize));


< prev index next >