< prev index next >

src/cpu/aarch64/vm/macroAssembler_aarch64.cpp

Print this page
rev 12202 : imported patch weakex.patch


2180     if ((VM_Version::features() & VM_Version::CPU_STXR_PREFETCH))
2181       prfm(Address(addr), PSTL1STRM);
2182     bind(retry_load);
2183     load_exclusive(result, addr, size, acquire);
2184     if (size == xword)
2185       cmp(result, expected);
2186     else
2187       cmpw(result, expected);
2188     br(Assembler::NE, done);
2189     store_exclusive(rscratch1, new_val, addr, size, release);
2190     if (weak) {
2191       cmpw(rscratch1, 0u);  // If the store fails, return NE to our caller.
2192     } else {
2193       cbnzw(rscratch1, retry_load);
2194     }
2195     bind(done);
2196     BLOCK_COMMENT("} cmpxchg");
2197   }
2198 }
2199 
2200 void MacroAssembler::cmpxchg_oop_shenandoah(Register res, Register addr, Register expected,
2201                                             Register new_val,
2202                                             bool narrow,
2203                                             bool acquire, bool release,
2204                                             Register tmp1, Register tmp2) {

2205   assert(UseShenandoahGC, "only for shenandoah");
2206   assert_different_registers(res, addr, expected, new_val, tmp1, tmp2);
2207 
2208   Label retry, done, fail;
2209 
2210   mov(res, 0);
2211 
2212   // CAS, using LL/SC pair.
2213   bind(retry);
2214   load_exclusive(tmp1, addr, narrow ? word : xword, true);
2215   if (narrow) cmpw(tmp1, expected);
2216   else        cmp(tmp1, expected);



2217   br(Assembler::NE, fail);
2218   store_exclusive(tmp2, new_val, addr, narrow ? word : xword, true);



2219   cbnzw(tmp2, retry);
2220   mov(res, 1);
2221   b(done);
2222 
2223   bind(fail);
2224   // Check if rb(expected)==rb(tmp1)
2225   // Shuffle registers so that we have memory value ready for next expected.
2226   mov(tmp2, expected);
2227   mov(expected, tmp1);
2228   if (narrow) {
2229     decode_heap_oop(tmp1, tmp1);
2230     decode_heap_oop(tmp2, tmp2);
2231   }
2232   oopDesc::bs()->interpreter_read_barrier(this, tmp1);
2233   oopDesc::bs()->interpreter_read_barrier(this, tmp2);
2234   cmp(tmp1, tmp2);
2235   // Retry with expected now being the value we just loaded from addr.
2236   br(Assembler::EQ, retry);
2237 
2238   bind(done);
2239   membar(AnyAny);
2240 }
2241 
2242 static bool different(Register a, RegisterOrConstant b, Register c) {
2243   if (b.is_constant())
2244     return a != c;
2245   else
2246     return a != b.as_register() && a != c && b.as_register() != c;
2247 }
2248 
2249 #define ATOMIC_OP(NAME, LDXR, OP, IOP, AOP, STXR, sz)                   \
2250 void MacroAssembler::atomic_##NAME(Register prev, RegisterOrConstant incr, Register addr) { \
2251   if (UseLSE) {                                                         \
2252     prev = prev->is_valid() ? prev : zr;                                \
2253     if (incr.is_register()) {                                           \
2254       AOP(sz, incr.as_register(), prev, addr);                          \
2255     } else {                                                            \
2256       mov(rscratch2, incr.as_constant());                               \
2257       AOP(sz, rscratch2, prev, addr);                                   \
2258     }                                                                   \
2259     return;                                                             \




2180     if ((VM_Version::features() & VM_Version::CPU_STXR_PREFETCH))
2181       prfm(Address(addr), PSTL1STRM);
2182     bind(retry_load);
2183     load_exclusive(result, addr, size, acquire);
2184     if (size == xword)
2185       cmp(result, expected);
2186     else
2187       cmpw(result, expected);
2188     br(Assembler::NE, done);
2189     store_exclusive(rscratch1, new_val, addr, size, release);
2190     if (weak) {
2191       cmpw(rscratch1, 0u);  // If the store fails, return NE to our caller.
2192     } else {
2193       cbnzw(rscratch1, retry_load);
2194     }
2195     bind(done);
2196     BLOCK_COMMENT("} cmpxchg");
2197   }
2198 }
2199 
2200 void MacroAssembler::cmpxchg_oop_shenandoah(Register addr, Register expected,
2201                                             Register new_val,
2202                                             enum operand_size size,
2203                                             bool acquire, bool release,
2204                                             bool weak,
2205                                             Register result, Register tmp2) {
  // CAS an oop (compressed when size == word) at [addr] from 'expected' to
  // 'new_val' via an LL/SC loop.  On return the condition flags report the
  // outcome (EQ == success): the success path falls through with EQ from the
  // value compare (or from the cmpw on the weak store status), and both
  // failure exits reach 'done' with NE set.  'result' holds the last value
  // loaded from [addr] (decoded and read-barrier'd on the failure path), and
  // 'expected' and 'tmp2' are clobbered.  A 'weak' CAS does not loop on a
  // spurious store-exclusive failure; it just returns NE to the caller.
2206   assert(UseShenandoahGC, "only for shenandoah");
2207   assert_different_registers(addr, expected, new_val, result, tmp2);
2208 
2209   Label retry, done, fail;
2210 


2211   // CAS, using LL/SC pair.
2212   bind(retry);
2213   load_exclusive(result, addr, size, acquire);
2214   if (size == xword) {
2215     cmp(result, expected);
2216   } else {
2217     cmpw(result, expected);
2218   }
2219   br(Assembler::NE, fail);
2220   store_exclusive(tmp2, new_val, addr, size, release);
2221   if (weak) {
2222     cmpw(tmp2, 0u); // If the store fails, return NE to our caller
2223   } else {
       // Strong CAS: a lost exclusive reservation is retried transparently.
2224     cbnzw(tmp2, retry);
2225   }
2226   b(done);
2227 
2228   bind(fail);
2229   // Check if rb(expected)==rb(result)
2230   // Shuffle registers so that we have memory value ready for next expected.
2231   mov(tmp2, expected);
2232   mov(expected, result);
2233   if (size == word) {
2234     decode_heap_oop(result, result);
2235     decode_heap_oop(tmp2, tmp2);
2236   }
     // Resolve both values through the read barrier before declaring failure:
     // under Shenandoah a raw bit mismatch may only reflect object forwarding,
     // in which case the CAS is retried with the freshly loaded 'expected'.
2237   oopDesc::bs()->interpreter_read_barrier(this, result);
2238   oopDesc::bs()->interpreter_read_barrier(this, tmp2);
2239   cmp(result, tmp2);
2240   // Retry with expected now being the value we just loaded from addr.
2241   br(Assembler::EQ, retry);
2242   
2243   bind(done);

2244 }
2245 
2246 static bool different(Register a, RegisterOrConstant b, Register c) {
2247   if (b.is_constant())
2248     return a != c;
2249   else
2250     return a != b.as_register() && a != c && b.as_register() != c;
2251 }
2252 
2253 #define ATOMIC_OP(NAME, LDXR, OP, IOP, AOP, STXR, sz)                   \
2254 void MacroAssembler::atomic_##NAME(Register prev, RegisterOrConstant incr, Register addr) { \
2255   if (UseLSE) {                                                         \
2256     prev = prev->is_valid() ? prev : zr;                                \
2257     if (incr.is_register()) {                                           \
2258       AOP(sz, incr.as_register(), prev, addr);                          \
2259     } else {                                                            \
2260       mov(rscratch2, incr.as_constant());                               \
2261       AOP(sz, rscratch2, prev, addr);                                   \
2262     }                                                                   \
2263     return;                                                             \


< prev index next >