
src/cpu/aarch64/vm/macroAssembler_aarch64.cpp

rev 12203 : [mq]: casfixes.patch

*** 2200,2225 ****
  void MacroAssembler::cmpxchg_oop_shenandoah(Register addr, Register expected,
                                              Register new_val,
                                              enum operand_size size,
                                              bool acquire, bool release,
                                              bool weak,
!                                             Register res, Register tmp2) {
    assert(UseShenandoahGC, "only for shenandoah");
!   Register result = res;
!   if (result == noreg) result = rscratch1;

    assert_different_registers(addr, expected, new_val, result, tmp2);

    Label retry, done, fail;

    // CAS, using LL/SC pair.
    bind(retry);
    load_exclusive(result, addr, size, acquire);
!   if (size == xword) {
!     cmp(result, expected);
!   } else {
      cmpw(result, expected);
    }
    br(Assembler::NE, fail);
    store_exclusive(tmp2, new_val, addr, size, release);
    if (weak) {
      cmpw(tmp2, 0u); // If the store fails, return NE to our caller
--- 2200,2235 ----
  void MacroAssembler::cmpxchg_oop_shenandoah(Register addr, Register expected,
                                              Register new_val,
                                              enum operand_size size,
                                              bool acquire, bool release,
                                              bool weak,
!                                             Register result, Register tmp2) {
    assert(UseShenandoahGC, "only for shenandoah");
!   bool is_cae = (result != noreg);
!   bool is_narrow = (size == word);
!
!   if (! is_cae) result = rscratch1;

    assert_different_registers(addr, expected, new_val, result, tmp2);

+   if (ShenandoahStoreCheck) {
+     if (is_narrow) {
+       decode_heap_oop(tmp2, new_val);
+       shenandoah_store_check(addr, tmp2);
+     } else {
+       shenandoah_store_check(addr, new_val);
+     }
+   }
    Label retry, done, fail;

    // CAS, using LL/SC pair.
    bind(retry);
    load_exclusive(result, addr, size, acquire);
!   if (is_narrow) {
      cmpw(result, expected);
+   } else {
+     cmp(result, expected);
    }
    br(Assembler::NE, fail);
    store_exclusive(tmp2, new_val, addr, size, release);
    if (weak) {
      cmpw(tmp2, 0u); // If the store fails, return NE to our caller
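Reviewer note: the hunk above changes the result-register convention (a real register means compare-and-exchange, noreg means plain CAS with a flag result) and the comparison width for narrow oops. The following is a minimal, stand-alone C++ sketch of that semantics under stated assumptions; it is an illustration, not HotSpot code. std::atomic stands in for the load_exclusive/store_exclusive LL/SC pair, and the names cas_attempt, slot, observed are made up here.

    #include <atomic>
    #include <cstdint>

    // Hypothetical illustration only; none of these names exist in HotSpot.
    // Word = uint32_t for narrow oops (cmpw), uint64_t otherwise (cmp).
    template <typename Word>
    Word cas_attempt(std::atomic<Word>& slot,   // plays the role of 'addr'
                     Word expected, Word new_val,
                     bool is_cae, bool& success) {
      Word observed = expected;
      // compare_exchange_strong stands in for the load_exclusive / cmp(w) /
      // store_exclusive retry loop; on failure it writes the value actually
      // found in memory back into 'observed'.
      success = slot.compare_exchange_strong(observed, new_val,
                                             std::memory_order_acq_rel,
                                             std::memory_order_acquire);
      // cmp-and-exchange callers want the old memory value; plain CAS
      // callers only inspect the success flag.
      return is_cae ? observed : Word(success);
    }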
*** 2231,2252 ****
    bind(fail);
    // Check if rb(expected)==rb(result)
    // Shuffle registers so that we have memory value ready for next expected.
    mov(tmp2, expected);
    mov(expected, result);
!   if (size == word) {
      decode_heap_oop(result, result);
      decode_heap_oop(tmp2, tmp2);
    }
    oopDesc::bs()->interpreter_read_barrier(this, result);
    oopDesc::bs()->interpreter_read_barrier(this, tmp2);
    cmp(result, tmp2);
    // Retry with expected now being the value we just loaded from addr.
    br(Assembler::EQ, retry);
!   if (size == word && res != noreg) {
      // For cmp-and-exchange and narrow oops, we need to restore
!     // the compressed old-value.
      mov(result, expected);
    }
    bind(done);
  }
--- 2241,2262 ----
    bind(fail);
    // Check if rb(expected)==rb(result)
    // Shuffle registers so that we have memory value ready for next expected.
    mov(tmp2, expected);
    mov(expected, result);
!   if (is_narrow) {
      decode_heap_oop(result, result);
      decode_heap_oop(tmp2, tmp2);
    }
    oopDesc::bs()->interpreter_read_barrier(this, result);
    oopDesc::bs()->interpreter_read_barrier(this, tmp2);
    cmp(result, tmp2);
    // Retry with expected now being the value we just loaded from addr.
    br(Assembler::EQ, retry);
!   if (is_narrow && is_cae) {
      // For cmp-and-exchange and narrow oops, we need to restore
!     // the compressed old-value. We moved it to 'expected' a few lines up.
      mov(result, expected);
    }
    bind(done);
  }
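Reviewer note: the failure path above handles Shenandoah's "false failures": the slot may still hold the from-space copy of exactly the object the caller expected, so the raw compare fails even though the CAS should logically succeed. A hedged, stand-alone sketch of that check follows; decode and resolve are hypothetical stand-ins for decode_heap_oop() and oopDesc::bs()->interpreter_read_barrier(), not the real API.

    #include <cstdint>
    #include <functional>

    // Illustration only, not HotSpot code.  Returns true when a CAS failure
    // was caused only by a stale (from-space) copy, in which case the caller
    // retries with 'observed' as the new expected value.
    bool is_false_failure(uint64_t expected, uint64_t observed, bool is_narrow,
                          const std::function<uint64_t(uint64_t)>& decode,    // ~ decode_heap_oop
                          const std::function<uint64_t(uint64_t)>& resolve) { // ~ read barrier
      if (is_narrow) {          // compressed oops: widen before resolving
        expected = decode(expected);
        observed = decode(observed);
      }
      // Same object once forwarding pointers are followed => false failure.
      return resolve(expected) == resolve(observed);
    }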