< prev index next >

src/hotspot/cpu/x86/sharedRuntime_x86_64.cpp

Print this page
rev 50904 : 8205523: Explicit barriers for interpreter
rev 50905 : [mq]: JDK-8205523-01-x86.patch


2352 
2353   Label slow_path_lock;
2354   Label lock_done;
2355 
2356   if (method->is_synchronized()) {
2357     assert(!is_critical_native, "unhandled");
2358 
2359 
2360     const int mark_word_offset = BasicLock::displaced_header_offset_in_bytes();
2361 
2362     // Get the handle (the 2nd argument)
2363     __ mov(oop_handle_reg, c_rarg1);
2364 
2365     // Get address of the box
2366 
2367     __ lea(lock_reg, Address(rsp, lock_slot_offset * VMRegImpl::stack_slot_size));
2368 
2369     // Load the oop from the handle
2370     __ movptr(obj_reg, Address(oop_handle_reg, 0));
2371 
2372     __ resolve_for_write(OOP_NOT_NULL, obj_reg);
2373     if (UseBiasedLocking) {
2374       __ biased_locking_enter(lock_reg, obj_reg, swap_reg, rscratch1, false, lock_done, &slow_path_lock);
2375     }
2376 
2377     // Load immediate 1 into swap_reg %rax
2378     __ movl(swap_reg, 1);
2379 
2380     // Load (object->mark() | 1) into swap_reg %rax
2381     __ orptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
2382 
2383     // Save (object->mark() | 1) into BasicLock's displaced header
2384     __ movptr(Address(lock_reg, mark_word_offset), swap_reg);
2385 
2386     if (os::is_MP()) {
2387       __ lock();
2388     }
2389 
2390     // src -> dest iff dest == rax else rax <- dest
2391     __ cmpxchgptr(lock_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
2392     __ jcc(Assembler::equal, lock_done);


2520 
2521   // change thread state
2522   __ movl(Address(r15_thread, JavaThread::thread_state_offset()), _thread_in_Java);
2523   __ bind(after_transition);
2524 
2525   Label reguard;
2526   Label reguard_done;
2527   __ cmpl(Address(r15_thread, JavaThread::stack_guard_state_offset()), JavaThread::stack_guard_yellow_reserved_disabled);
2528   __ jcc(Assembler::equal, reguard);
2529   __ bind(reguard_done);
2530 
2531   // native result if any is live
2532 
2533   // Unlock
2534   Label unlock_done;
2535   Label slow_path_unlock;
2536   if (method->is_synchronized()) {
2537 
2538     // Get locked oop from the handle we passed to jni
2539     __ movptr(obj_reg, Address(oop_handle_reg, 0));
2540     __ resolve_for_write(OOP_NOT_NULL, obj_reg);
2541 
2542     Label done;
2543 
2544     if (UseBiasedLocking) {
2545       __ biased_locking_exit(obj_reg, old_hdr, done);
2546     }
2547 
2548     // Simple recursive lock?
2549 
2550     __ cmpptr(Address(rsp, lock_slot_offset * VMRegImpl::stack_slot_size), (int32_t)NULL_WORD);
2551     __ jcc(Assembler::equal, done);
2552 
2553     // Must save rax if it is live now because cmpxchg must use it
2554     if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
2555       save_native_result(masm, ret_type, stack_slots);
2556     }
2557 
2558 
2559     // get address of the stack lock
2560     __ lea(rax, Address(rsp, lock_slot_offset * VMRegImpl::stack_slot_size));




2352 
2353   Label slow_path_lock;
2354   Label lock_done;
2355 
2356   if (method->is_synchronized()) {
2357     assert(!is_critical_native, "unhandled");
2358 
2359 
2360     const int mark_word_offset = BasicLock::displaced_header_offset_in_bytes();
2361 
2362     // Get the handle (the 2nd argument)
2363     __ mov(oop_handle_reg, c_rarg1);
2364 
2365     // Get address of the box
2366 
2367     __ lea(lock_reg, Address(rsp, lock_slot_offset * VMRegImpl::stack_slot_size));
2368 
2369     // Load the oop from the handle
2370     __ movptr(obj_reg, Address(oop_handle_reg, 0));
2371 
2372     __ resolve(IS_NOT_NULL, obj_reg);
2373     if (UseBiasedLocking) {
2374       __ biased_locking_enter(lock_reg, obj_reg, swap_reg, rscratch1, false, lock_done, &slow_path_lock);
2375     }
2376 
2377     // Load immediate 1 into swap_reg %rax
2378     __ movl(swap_reg, 1);
2379 
2380     // Load (object->mark() | 1) into swap_reg %rax
2381     __ orptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
2382 
2383     // Save (object->mark() | 1) into BasicLock's displaced header
2384     __ movptr(Address(lock_reg, mark_word_offset), swap_reg);
2385 
2386     if (os::is_MP()) {
2387       __ lock();
2388     }
2389 
2390     // src -> dest iff dest == rax else rax <- dest
2391     __ cmpxchgptr(lock_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
2392     __ jcc(Assembler::equal, lock_done);


2520 
2521   // change thread state
2522   __ movl(Address(r15_thread, JavaThread::thread_state_offset()), _thread_in_Java);
2523   __ bind(after_transition);
2524 
2525   Label reguard;
2526   Label reguard_done;
2527   __ cmpl(Address(r15_thread, JavaThread::stack_guard_state_offset()), JavaThread::stack_guard_yellow_reserved_disabled);
2528   __ jcc(Assembler::equal, reguard);
2529   __ bind(reguard_done);
2530 
2531   // native result if any is live
2532 
2533   // Unlock
2534   Label unlock_done;
2535   Label slow_path_unlock;
2536   if (method->is_synchronized()) {
2537 
2538     // Get locked oop from the handle we passed to jni
2539     __ movptr(obj_reg, Address(oop_handle_reg, 0));
2540     __ resolve(IS_NOT_NULL, obj_reg);
2541 
2542     Label done;
2543 
2544     if (UseBiasedLocking) {
2545       __ biased_locking_exit(obj_reg, old_hdr, done);
2546     }
2547 
2548     // Simple recursive lock?
2549 
2550     __ cmpptr(Address(rsp, lock_slot_offset * VMRegImpl::stack_slot_size), (int32_t)NULL_WORD);
2551     __ jcc(Assembler::equal, done);
2552 
2553     // Must save rax if it is live now because cmpxchg must use it
2554     if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
2555       save_native_result(masm, ret_type, stack_slots);
2556     }
2557 
2558 
2559     // get address of the stack lock
2560     __ lea(rax, Address(rsp, lock_slot_offset * VMRegImpl::stack_slot_size));


< prev index next >