src/hotspot/cpu/aarch64/aarch64.ad

rev 53382 : 8217368: AArch64: C2 recursive stack locking optimisation not triggered
Reviewed-by: duke


3403     Register oop = as_Register($object$$reg);
3404     Register box = as_Register($box$$reg);
3405     Register disp_hdr = as_Register($tmp$$reg);
3406     Register tmp = as_Register($tmp2$$reg);
3407     Label cont;
3408     Label object_has_monitor;
3409     Label cas_failed;
3410 
3411     assert_different_registers(oop, box, tmp, disp_hdr);
3412 
3413     // Load markOop from object into displaced_header.
3414     __ ldr(disp_hdr, Address(oop, oopDesc::mark_offset_in_bytes()));
3415 
3416     if (UseBiasedLocking && !UseOptoBiasInlining) {
3417       __ biased_locking_enter(box, oop, disp_hdr, tmp, true, cont);
3418     }
3419 
3420     // Handle existing monitor
3421     __ tbnz(disp_hdr, exact_log2(markOopDesc::monitor_value), object_has_monitor);
3422 
3423     // Set displaced_header to be (markOop of object | UNLOCK_VALUE).
3424     __ orr(disp_hdr, disp_hdr, markOopDesc::unlocked_value);
3425 
3426     // Load Compare Value application register.
3427 
3428     // Initialize the box. (Must happen before we update the object mark!)
3429     __ str(disp_hdr, Address(box, BasicLock::displaced_header_offset_in_bytes()));
3430 
3431     // Compare object markOop with mark and if equal exchange scratch1
3432     // with object markOop.

3433     if (UseLSE) {
3434       __ mov(tmp, disp_hdr);
3435       __ casal(Assembler::xword, tmp, box, oop);
3436       __ cmp(tmp, disp_hdr);
3437       __ br(Assembler::EQ, cont);
3438     } else {
3439       Label retry_load;
3440       if ((VM_Version::features() & VM_Version::CPU_STXR_PREFETCH))
3441         __ prfm(Address(oop), PSTL1STRM);
3442       __ bind(retry_load);
3443       __ ldaxr(tmp, oop);
3444       __ cmp(tmp, disp_hdr);
3445       __ br(Assembler::NE, cas_failed);
3446       // use stlxr to ensure update is immediately visible
3447       __ stlxr(tmp, box, oop);
3448       __ cbzw(tmp, cont);
3449       __ b(retry_load);
3450     }
3451 
3452     assert(oopDesc::mark_offset_in_bytes() == 0, "offset of _mark is not 0");
3453 
3454     // If the compare-and-exchange succeeded, then we found an unlocked
3455     // object, have now locked it, and will continue at label cont.
3456 
3457     __ bind(cas_failed);
3458     // We did not see an unlocked object so try the fast recursive case.
3459 
3460     // Check if the owner is self by comparing the value in the
3461     // markOop of object (disp_hdr) with the stack pointer.
3462     __ mov(rscratch1, sp);
3463     __ sub(disp_hdr, disp_hdr, rscratch1);
3464     __ mov(tmp, (address) (~(os::vm_page_size()-1) | markOopDesc::lock_mask_in_place));
3465     // If the condition holds we continue at label cont, and hence we can store 0
3466     // as the displaced header in the box, which indicates a recursive lock.
3467     __ ands(tmp/*==0?*/, disp_hdr, tmp);
3468     __ str(tmp/*==0, perhaps*/, Address(box, BasicLock::displaced_header_offset_in_bytes()));
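
In the original sequence above, unlocked_value is OR'ed into disp_hdr before the box is initialised, and that same register later feeds the recursive stack-lock test at cas_failed. Below is a minimal stand-alone sketch of that test's arithmetic, assuming the usual values (lock_mask_in_place == 3, unlocked_value == 1, a 4 KB page); the helper constants are illustrative, not HotSpot code. It shows why the recursive case is never taken once the low bit has been set:

    #include <cstdint>
    #include <cstdio>

    // Stand-alone sketch, not part of the patch: models the "owner is self"
    // test with assumed constants -- lock_mask_in_place == 3,
    // unlocked_value == 1, os::vm_page_size() == 4096.
    int main() {
      const uintptr_t page_size      = 4096;
      const uintptr_t lock_mask      = 3;
      const uintptr_t unlocked_value = 1;
      const uintptr_t mask = ~(page_size - 1) | lock_mask;  // value moved into tmp

      uintptr_t sp   = 0x7ffff000;   // pretend stack pointer
      uintptr_t mark = sp + 0x40;    // mark of an object already stack-locked by
                                     // this thread: a BasicLock address near sp,
                                     // low two bits 00

      // Test as intended, on the unmodified mark: the difference fits in one
      // page and the lock bits are 00, so the lock is recognised as recursive.
      printf("plain mark:            %s\n",
             ((mark - sp) & mask) == 0 ? "recursive" : "not recursive");

      // What the old code actually feeds to the test: disp_hdr already had
      // unlocked_value OR'ed in, so the low bit survives the AND and the
      // recursive case is never taken.
      uintptr_t ored = mark | unlocked_value;
      printf("mark | unlocked_value: %s\n",
             ((ored - sp) & mask) == 0 ? "recursive" : "not recursive");
      return 0;
    }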




3403     Register oop = as_Register($object$$reg);
3404     Register box = as_Register($box$$reg);
3405     Register disp_hdr = as_Register($tmp$$reg);
3406     Register tmp = as_Register($tmp2$$reg);
3407     Label cont;
3408     Label object_has_monitor;
3409     Label cas_failed;
3410 
3411     assert_different_registers(oop, box, tmp, disp_hdr);
3412 
3413     // Load markOop from object into displaced_header.
3414     __ ldr(disp_hdr, Address(oop, oopDesc::mark_offset_in_bytes()));
3415 
3416     if (UseBiasedLocking && !UseOptoBiasInlining) {
3417       __ biased_locking_enter(box, oop, disp_hdr, tmp, true, cont);
3418     }
3419 
3420     // Handle existing monitor
3421     __ tbnz(disp_hdr, exact_log2(markOopDesc::monitor_value), object_has_monitor);
3422 
3423     // Set tmp to be (markOop of object | UNLOCK_VALUE).
3424     __ orr(tmp, disp_hdr, markOopDesc::unlocked_value);
3425 
3426     // Load Compare Value application register.
3427 
3428     // Initialize the box. (Must happen before we update the object mark!)
3429     __ str(tmp, Address(box, BasicLock::displaced_header_offset_in_bytes()));
3430 
3431     // Compare object markOop with an unlocked value (tmp) and if
3432     // equal exchange the stack address of our box with object markOop.
3433     // On failure disp_hdr contains the possibly locked markOop.
3434     if (UseLSE) {
3435       __ mov(disp_hdr, tmp);
3436       __ casal(Assembler::xword, disp_hdr, box, oop);  // Updates disp_hdr
3437       __ cmp(tmp, disp_hdr);
3438       __ br(Assembler::EQ, cont);
3439     } else {
3440       Label retry_load;
3441       if ((VM_Version::features() & VM_Version::CPU_STXR_PREFETCH))
3442         __ prfm(Address(oop), PSTL1STRM);
3443       __ bind(retry_load);
3444       __ ldaxr(disp_hdr, oop);
3445       __ cmp(tmp, disp_hdr);
3446       __ br(Assembler::NE, cas_failed);
3447       // use stlxr to ensure update is immediately visible
3448       __ stlxr(disp_hdr, box, oop);
3449       __ cbzw(disp_hdr, cont);
3450       __ b(retry_load);
3451     }
3452 
3453     assert(oopDesc::mark_offset_in_bytes() == 0, "offset of _mark is not 0");
3454 
3455     // If the compare-and-exchange succeeded, then we found an unlocked
3456     // object, have now locked it, and will continue at label cont.
3457 
3458     __ bind(cas_failed);
3459     // We did not see an unlocked object so try the fast recursive case.
3460 
3461     // Check if the owner is self by comparing the value in the
3462     // markOop of object (disp_hdr) with the stack pointer.
3463     __ mov(rscratch1, sp);
3464     __ sub(disp_hdr, disp_hdr, rscratch1);
3465     __ mov(tmp, (address) (~(os::vm_page_size()-1) | markOopDesc::lock_mask_in_place));
3466     // If the condition holds we continue at label cont, and hence we can store 0
3467     // as the displaced header in the box, which indicates a recursive lock.
3468     __ ands(tmp/*==0?*/, disp_hdr, tmp);
3469     __ str(tmp/*==0, perhaps*/, Address(box, BasicLock::displaced_header_offset_in_bytes()));
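
In the rewritten sequence, tmp keeps the expected unlocked mark while disp_hdr receives whatever the object header actually contains (casal writes the old memory value back into its first register; on the LL/SC path ldaxr loads it directly), so at cas_failed the recursive test operates on the unmodified markOop. A rough stand-alone model of that register flow, with an assumed casal helper and assumed constants rather than HotSpot code:

    #include <cstdint>
    #include <cstdio>

    static const uintptr_t unlocked_value = 1;                     // assumed
    static const uintptr_t recursive_mask = ~(uintptr_t)4095 | 3;  // assumed 4 KB page, lock_mask 3

    // Toy model of CASAL Xs, Xt, [Xn]: compare the first operand with memory,
    // store the new value on a match, and always return the old memory value.
    uintptr_t casal(uintptr_t compare, uintptr_t new_value, uintptr_t* addr) {
      uintptr_t old = *addr;
      if (old == compare) *addr = new_value;
      return old;
    }

    int main() {
      uintptr_t sp     = 0x7ffff000;
      uintptr_t box    = sp + 0x40;   // BasicLock for this monitorenter
      uintptr_t header = sp + 0x10;   // object header: already stack-locked by us,
                                      // so it holds an older BasicLock address (low bits 00)

      uintptr_t disp_hdr = header;                      // ldr   disp_hdr, [oop]
      uintptr_t tmp      = disp_hdr | unlocked_value;   // orr   tmp, disp_hdr, unlocked_value
                                                        // str   tmp, [box]  (box initialised)
      disp_hdr = tmp;                                   // mov   disp_hdr, tmp
      disp_hdr = casal(disp_hdr, box, &header);         // casal disp_hdr, box, [oop]

      if (tmp == disp_hdr) {
        printf("CAS succeeded: object was unlocked and is now locked by us\n");
      } else {
        // cas_failed: disp_hdr now holds the real markOop, so the recursive
        // stack-lock test can recognise our own earlier lock.
        printf("CAS failed; recursive stack lock detected: %s\n",
               ((disp_hdr - sp) & recursive_mask) == 0 ? "yes" : "no");
      }
      return 0;
    }

With the mark left untouched, the page/lock-bit mask can come out zero for an object already locked on the current stack, so the recursive fast path is reachable again.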

