2457
// NOTE(review): numbered listing of a fragment from inside a larger
// code-generating function (HotSpot x86_64 native-method wrapper).  The
// enclosing definition and original lines 2498-2637 are outside this view.
// Code lines are byte-identical to the original; only comments were edited.
2458 Label slow_path_lock;
2459 Label lock_done;
2460
// Fast-path monitor enter for a synchronized native method: try to install
// the on-stack BasicLock into the object's mark word with a CAS.
2461 if (method->is_synchronized()) {
2462 assert(!is_critical_native, "unhandled");
2463
2464
2465 const int mark_word_offset = BasicLock::displaced_header_offset_in_bytes();
2466
2467 // Get the handle (the 2nd argument)
2468 __ mov(oop_handle_reg, c_rarg1);
2469
2470 // Get address of the box
2471
2472 __ lea(lock_reg, Address(rsp, lock_slot_offset * VMRegImpl::stack_slot_size));
2473
2474 // Load the oop from the handle
2475 __ movptr(obj_reg, Address(oop_handle_reg, 0));
2476
// NOTE(review): decorator argument is the literal 0 here, while the sibling
// listing in this file passes OOP_NOT_NULL for the same two call sites.  The
// oop just loaded from the JNI handle of a synchronized method's receiver
// should not be null, so the named decorator looks like the intended form --
// confirm against the resolve_for_write declaration before changing.
2477 __ resolve_for_write(0, obj_reg);
2478 if (UseBiasedLocking) {
2479 __ biased_locking_enter(lock_reg, obj_reg, swap_reg, rscratch1, false, lock_done, &slow_path_lock);
2480 }
2481
2482 // Load immediate 1 into swap_reg %rax
2483 __ movl(swap_reg, 1);
2484
2485 // Load (object->mark() | 1) into swap_reg %rax
2486 __ orptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
2487
2488 // Save (object->mark() | 1) into BasicLock's displaced header
2489 __ movptr(Address(lock_reg, mark_word_offset), swap_reg);
2490
// Emit the lock prefix only on multiprocessor systems; cmpxchg itself is
// atomic on a single CPU.
2491 if (os::is_MP()) {
2492 __ lock();
2493 }
2494
2495 // src -> dest iff dest == rax else rax <- dest
2496 __ cmpxchgptr(lock_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
2497 __ jcc(Assembler::equal, lock_done);
// NOTE(review): original lines 2498-2637 are elided from this listing; the
// code resumes after the native call, at the transition back to Java state.
2638
2639 // change thread state
2640 __ movl(Address(r15_thread, JavaThread::thread_state_offset()), _thread_in_Java);
2641 __ bind(after_transition);
2642
// If the stack yellow/reserved guard zone was disabled while in native code,
// branch to the reguard stub (bound outside this view).
2643 Label reguard;
2644 Label reguard_done;
2645 __ cmpl(Address(r15_thread, JavaThread::stack_guard_state_offset()), JavaThread::stack_guard_yellow_reserved_disabled);
2646 __ jcc(Assembler::equal, reguard);
2647 __ bind(reguard_done);
2648
2649 // native result if any is live
2650
2651 // Unlock
2652 Label unlock_done;
2653 Label slow_path_unlock;
2654 if (method->is_synchronized()) {
2655
2656 // Get locked oop from the handle we passed to jni
2657 __ movptr(obj_reg, Address(oop_handle_reg, 0));
// NOTE(review): same literal-0 decorator as the lock path above; the sibling
// listing uses OOP_NOT_NULL here as well.
2658 __ resolve_for_write(0, obj_reg);
2659
2660 Label done;
2661
2662 if (UseBiasedLocking) {
2663 __ biased_locking_exit(obj_reg, old_hdr, done);
2664 }
2665
2666 // Simple recursive lock?
2667
// A NULL displaced header in the stack BasicLock indicates a recursive
// enter, so there is nothing to restore into the mark word.
2668 __ cmpptr(Address(rsp, lock_slot_offset * VMRegImpl::stack_slot_size), (int32_t)NULL_WORD);
2669 __ jcc(Assembler::equal, done);
2670
2671 // Must save rax if it is live now because cmpxchg must use it
2672 if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
2673 save_native_result(masm, ret_type, stack_slots);
2674 }
2675
2676
2677 // get address of the stack lock
2678 __ lea(rax, Address(rsp, lock_slot_offset * VMRegImpl::stack_slot_size));
// NOTE(review): the fragment ends here; the unlock CAS and the slow-path
// stubs that follow in the original are outside this view.
|
2457
// NOTE(review): second copy of the same fragment (HotSpot x86_64 native
// wrapper, synchronized lock/unlock paths).  It differs from the other
// listing only in passing the named OOP_NOT_NULL decorator to
// resolve_for_write instead of the literal 0.  The enclosing definition and
// original lines 2498-2637 are outside this view.  Code lines are
// byte-identical to the original; only comments were edited.
2458 Label slow_path_lock;
2459 Label lock_done;
2460
// Fast-path monitor enter for a synchronized native method: try to install
// the on-stack BasicLock into the object's mark word with a CAS.
2461 if (method->is_synchronized()) {
2462 assert(!is_critical_native, "unhandled");
2463
2464
2465 const int mark_word_offset = BasicLock::displaced_header_offset_in_bytes();
2466
2467 // Get the handle (the 2nd argument)
2468 __ mov(oop_handle_reg, c_rarg1);
2469
2470 // Get address of the box
2471
2472 __ lea(lock_reg, Address(rsp, lock_slot_offset * VMRegImpl::stack_slot_size));
2473
2474 // Load the oop from the handle
2475 __ movptr(obj_reg, Address(oop_handle_reg, 0));
2476
// The oop from the JNI handle of a synchronized method's receiver is never
// null, hence the OOP_NOT_NULL decorator on the write-barrier resolve.
2477 __ resolve_for_write(OOP_NOT_NULL, obj_reg);
2478 if (UseBiasedLocking) {
2479 __ biased_locking_enter(lock_reg, obj_reg, swap_reg, rscratch1, false, lock_done, &slow_path_lock);
2480 }
2481
2482 // Load immediate 1 into swap_reg %rax
2483 __ movl(swap_reg, 1);
2484
2485 // Load (object->mark() | 1) into swap_reg %rax
2486 __ orptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
2487
2488 // Save (object->mark() | 1) into BasicLock's displaced header
2489 __ movptr(Address(lock_reg, mark_word_offset), swap_reg);
2490
// Emit the lock prefix only on multiprocessor systems; cmpxchg itself is
// atomic on a single CPU.
2491 if (os::is_MP()) {
2492 __ lock();
2493 }
2494
2495 // src -> dest iff dest == rax else rax <- dest
2496 __ cmpxchgptr(lock_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
2497 __ jcc(Assembler::equal, lock_done);
// NOTE(review): original lines 2498-2637 are elided from this listing; the
// code resumes after the native call, at the transition back to Java state.
2638
2639 // change thread state
2640 __ movl(Address(r15_thread, JavaThread::thread_state_offset()), _thread_in_Java);
2641 __ bind(after_transition);
2642
// If the stack yellow/reserved guard zone was disabled while in native code,
// branch to the reguard stub (bound outside this view).
2643 Label reguard;
2644 Label reguard_done;
2645 __ cmpl(Address(r15_thread, JavaThread::stack_guard_state_offset()), JavaThread::stack_guard_yellow_reserved_disabled);
2646 __ jcc(Assembler::equal, reguard);
2647 __ bind(reguard_done);
2648
2649 // native result if any is live
2650
2651 // Unlock
2652 Label unlock_done;
2653 Label slow_path_unlock;
2654 if (method->is_synchronized()) {
2655
2656 // Get locked oop from the handle we passed to jni
2657 __ movptr(obj_reg, Address(oop_handle_reg, 0))
2658 __ resolve_for_write(OOP_NOT_NULL, obj_reg);
2659
2660 Label done;
2661
2662 if (UseBiasedLocking) {
2663 __ biased_locking_exit(obj_reg, old_hdr, done);
2664 }
2665
2666 // Simple recursive lock?
2667
// A NULL displaced header in the stack BasicLock indicates a recursive
// enter, so there is nothing to restore into the mark word.
2668 __ cmpptr(Address(rsp, lock_slot_offset * VMRegImpl::stack_slot_size), (int32_t)NULL_WORD);
2669 __ jcc(Assembler::equal, done);
2670
2671 // Must save rax if it is live now because cmpxchg must use it
2672 if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
2673 save_native_result(masm, ret_type, stack_slots);
2674 }
2675
2676
2677 // get address of the stack lock
2678 __ lea(rax, Address(rsp, lock_slot_offset * VMRegImpl::stack_slot_size));
// NOTE(review): the fragment ends here; the unlock CAS and the slow-path
// stubs that follow in the original are outside this view.
|