< prev index next >

src/cpu/aarch64/vm/aarch64.ad

Print this page
rev 8869 : 8080293: AARCH64: Remove unnecessary dmbs from generated CAS code
Summary: The current encoding for CAS generates unnecessary leading and trailing dmbs for the MemBarAcquire and MemBarRelease which ought to be elided
Reviewed-by: kvn
rev 8870 : 8134322: AArch64: Fix several errors in C2 biased locking implementation
Summary: Fix several errors in the C2 biased locking implementation (respect UseOptoBiasInlining, correct biased_locking_enter register arguments, and correct the inverted EmitSync & 0x02 test)
Reviewed-by: adinn
Contributed-by: Hui Shi (hui.shi@linaro.org)

@@ -4894,16 +4894,16 @@
     if (EmitSync & 0x01) {
       __ cmp(oop, zr);
       return;
     }
 
-    if (UseBiasedLocking) {
-      __ biased_locking_enter(disp_hdr, oop, box, tmp, true, cont);
+    if (UseBiasedLocking && !UseOptoBiasInlining) {
+      __ biased_locking_enter(box, oop, disp_hdr, tmp, true, cont);
     }
 
     // Handle existing monitor
-    if (EmitSync & 0x02) {
+    if ((EmitSync & 0x02) == 0) {
       // we can use AArch64's bit test and branch here but
       // markoopDesc does not define a bit index just the bit value
       // so assert in case the bit pos changes
 #     define __monitor_value_log2 1
       assert(markOopDesc::monitor_value == (1 << __monitor_value_log2), "incorrect bit position");

@@ -5039,11 +5039,11 @@
     if (EmitSync & 0x01) {
       __ cmp(oop, zr); // Oop can't be 0 here => always false.
       return;
     }
 
-    if (UseBiasedLocking) {
+    if (UseBiasedLocking && !UseOptoBiasInlining) {
       __ biased_locking_exit(oop, tmp, cont);
     }
 
     // Find the lock address and load the displaced header from the stack.
     __ ldr(disp_hdr, Address(box, BasicLock::displaced_header_offset_in_bytes()));
< prev index next >