// Fast-path monitor unlock: atomically swap a value back into the object's
// mark word with CMPXCHG.  rax and rbx are loaded above this chunk —
// presumably rax holds the expected current mark and rbx the value to
// restore (displaced header); TODO confirm against the setup code.
2222 if (os::is_MP()) {
2223 __ lock();
2224 }
2225
2226 // src -> dest iff dest == rax, else rax, <- dest
2227 // *obj_reg = rbx, iff *obj_reg == rax, else rax, = *(obj_reg)
2228 __ cmpxchgptr(rbx, Address(obj_reg, 0));
// The compare-and-exchange failed (object mark was not what we expected),
// so the lock cannot be released inline — go to the runtime slow path.
2229 __ jcc(Assembler::notEqual, slow_path_unlock);
2230
2231 // slow path re-enters here
2232 __ bind(unlock_done);
// Reload the native call's result registers, which may have been saved
// around the unlock.  Float/double/void results are deliberately skipped
// here — presumably they are preserved elsewhere; verify before changing.
2233 if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
2234 restore_native_result(masm, ret_type, stack_slots);
2235 }
2236
2237 __ bind(done);
2238
// Closes a scope opened before this chunk (the synchronized-method
// unlock region, judging by the labels above).
2239 }
2240
2241 {
// SkipIfEqual (per its name) emits a runtime check that jumps over the
// enclosed code while DTraceMethodProbes is 0, so the probe costs only a
// test+branch when disabled — confirm against SkipIfEqual's definition.
2242 SkipIfEqual skip_if(masm, &DTraceMethodProbes, 0);
2243 // Tell dtrace about this method exit
// Spill the native result first: mov_metadata clobbers rax (which may
// hold the return value) and the leaf call may clobber more.
2244 save_native_result(masm, ret_type, stack_slots);
2245 __ mov_metadata(rax, method());
2246 __ call_VM_leaf(
2247 CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit),
2248 thread, rax);
2249 restore_native_result(masm, ret_type, stack_slots);
2250 }
2251
2252 // We can finally stop using that last_Java_frame we setup ages ago
2253
2254 __ reset_last_Java_frame(thread, false, true);
2255
2256 // Unpack oop result
// A reference-typed native result comes back as an indirect handle
// (presumably a JNI local handle — confirm): NULL stays NULL, otherwise
// dereference once to get the raw oop into rax.
2257 if (ret_type == T_OBJECT || ret_type == T_ARRAY) {
2258 Label L;
2259 __ cmpptr(rax, (int32_t)NULL_WORD);
2260 __ jcc(Assembler::equal, L);
2261 __ movptr(rax, Address(rax, 0));
// NOTE: __ bind(L) and the rest of this if-block follow after this chunk.
|
// Fast-path monitor unlock: atomically swap a value back into the object's
// mark word with CMPXCHG.  rax and rbx are loaded above this chunk —
// presumably rax holds the expected current mark and rbx the value to
// restore (displaced header); TODO confirm against the setup code.
2222 if (os::is_MP()) {
2223 __ lock();
2224 }
2225
2226 // src -> dest iff dest == rax, else rax, <- dest
2227 // *obj_reg = rbx, iff *obj_reg == rax, else rax, = *(obj_reg)
2228 __ cmpxchgptr(rbx, Address(obj_reg, 0));
// The compare-and-exchange failed (object mark was not what we expected),
// so the lock cannot be released inline — go to the runtime slow path.
2229 __ jcc(Assembler::notEqual, slow_path_unlock);
2230
2231 // slow path re-enters here
2232 __ bind(unlock_done);
// Reload the native call's result registers, which may have been saved
// around the unlock.  Float/double/void results are deliberately skipped
// here — presumably they are preserved elsewhere; verify before changing.
2233 if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
2234 restore_native_result(masm, ret_type, stack_slots);
2235 }
2236
2237 __ bind(done);
2238
// Closes a scope opened before this chunk (the synchronized-method
// unlock region, judging by the labels above).
2239 }
2240
2241 {
2242 // Normally we do not post method_entry and method_exit events from
2243 // compiled code, only from the interpreter. If method_entry/exit
2244 // events are switched on at runtime, we will deoptimize everything
2245 // (see VM_EnterInterpOnlyMode) on the stack and call method_entry/exit
2246 // from the interpreter. But when we do that, we will not deoptimize
2247 // this native wrapper frame. Thus we have an extra check here to see
2248 // if we are now in interp_only_mode and in that case we do the jvmti
2249 // callback.
2250 Label skip_jvmti_method_exit;
// interp_only_mode == 0 means events are off for this thread: skip the
// (expensive) runtime call entirely.  jccb: short jump, target is near.
2251 __ cmpb(Address(thread, JavaThread::interp_only_mode_offset()), 0);
2252 __ jccb(Assembler::zero, skip_jvmti_method_exit);
2253
// Spill the native result first: mov_metadata clobbers rax (which may
// hold the return value) and the leaf call may clobber more.
2254 save_native_result(masm, ret_type, stack_slots);
2255 __ mov_metadata(rax, method());
2256 __ call_VM_leaf(
2257 CAST_FROM_FN_PTR(address, SharedRuntime::jvmti_method_exit),
2258 thread, rax);
2259 restore_native_result(masm, ret_type, stack_slots);
2260 __ bind(skip_jvmti_method_exit);
2261 }
2262
2263 {
// SkipIfEqual (per its name) emits a runtime check that jumps over the
// enclosed code while DTraceMethodProbes is 0, so the probe costs only a
// test+branch when disabled — confirm against SkipIfEqual's definition.
2264 SkipIfEqual skip_if(masm, &DTraceMethodProbes, 0);
2265 // Tell dtrace about this method exit
// Spill the native result first: mov_metadata clobbers rax (which may
// hold the return value) and the leaf call may clobber more.
2266 save_native_result(masm, ret_type, stack_slots);
2267 __ mov_metadata(rax, method());
2268 __ call_VM_leaf(
2269 CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit),
2270 thread, rax);
2271 restore_native_result(masm, ret_type, stack_slots);
2272 }
2273
2274 // We can finally stop using that last_Java_frame we setup ages ago
2275
2276 __ reset_last_Java_frame(thread, false, true);
2277
2278 // Unpack oop result
// A reference-typed native result comes back as an indirect handle
// (presumably a JNI local handle — confirm): NULL stays NULL, otherwise
// dereference once to get the raw oop into rax.
2279 if (ret_type == T_OBJECT || ret_type == T_ARRAY) {
2280 Label L;
2281 __ cmpptr(rax, (int32_t)NULL_WORD);
2282 __ jcc(Assembler::equal, L);
2283 __ movptr(rax, Address(rax, 0));
// NOTE: __ bind(L) and the rest of this if-block follow after this chunk.
|