
src/cpu/aarch64/vm/c1_LIRAssembler_aarch64.cpp

rev 10278 : 8150394: aarch64: add support for 8.1 LSE CAS instructions
Reviewed-by: duke
Contributed-by: ananth.jasty@caviumnetworks.com, edward.nevill@gmail.com

@@ -1554,10 +1554,16 @@
     ShouldNotReachHere();
   }
 }
 
 void LIR_Assembler::casw(Register addr, Register newval, Register cmpval) {
+  if (UseLSE) {
+    __ mov(rscratch1, cmpval);          // save the expected value
+    __ casalw(cmpval, newval, addr);    // cmpval returns the value found in memory
+    __ cmpw(rscratch1, cmpval);         // did it match what we expected?
+    __ cset(rscratch1, Assembler::NE);  // 0 on success, 1 on failure
+  } else {
   Label retry_load, nope;
   // flush and load exclusive from the memory location
   // and fail if it is not what we expect
   __ bind(retry_load);
   __ ldaxrw(rscratch1, addr);

@@ -1568,14 +1574,21 @@
   __ stlxrw(rscratch1, newval, addr);
   // retry so we only ever return after a load fails to compare
   // ensures we don't return a stale value after a failed write.
   __ cbnzw(rscratch1, retry_load);
   __ bind(nope);
+  }
   __ membar(__ AnyAny);
 }
 
 void LIR_Assembler::casl(Register addr, Register newval, Register cmpval) {
+  if (UseLSE) {
+    __ mov(rscratch1, cmpval);          // save the expected value
+    __ casal(cmpval, newval, addr);     // cmpval returns the value found in memory
+    __ cmp(rscratch1, cmpval);          // did it match what we expected?
+    __ cset(rscratch1, Assembler::NE);  // 0 on success, 1 on failure
+  } else {
   Label retry_load, nope;
   // flush and load exclusive from the memory location
   // and fail if it is not what we expect
   __ bind(retry_load);
   __ ldaxr(rscratch1, addr);

@@ -1586,10 +1599,11 @@
   __ stlxr(rscratch1, newval, addr);
   // retry so we only ever return after a load fails to compare
   // ensures we don't return a stale value after a failed write.
   __ cbnz(rscratch1, retry_load);
   __ bind(nope);
+  }
   __ membar(__ AnyAny);
 }
 
 
 void LIR_Assembler::emit_compare_and_swap(LIR_OpCompareAndSwap* op) {
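Note on the two code paths above: both the LSE fast path (casalw/casal) and the ldaxr/stlxr retry loop implement the same compare-and-swap contract, leaving 0 in rscratch1 on success and 1 on failure before the trailing AnyAny barrier. A minimal C++ sketch of that contract, using a hypothetical helper name and std::atomic purely as a stand-in for the generated code (not part of this patch):

  #include <atomic>
  #include <cstdint>

  // Illustration only: returns 0 on success and 1 on failure, matching the
  // value the generated casw code leaves in rscratch1. compare_exchange_strong
  // with seq_cst ordering stands in for the acquire/release CAS plus the
  // trailing AnyAny membar.
  static int casw_equivalent(std::atomic<uint32_t>* addr,
                             uint32_t newval, uint32_t cmpval) {
    return addr->compare_exchange_strong(cmpval, newval,
                                         std::memory_order_seq_cst) ? 0 : 1;
  }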