// This simulates the behaviour of the x86 cmpxchg instruction using a
// load linked/store conditional pair. We use the acquire/release
// versions of these instructions so that we flush pending writes as
// per Java semantics.

// N.B. the x86 version assumes the old value to be compared against is
// in rax and updates rax with the value located in memory if the
// cmpxchg fails. We supply a register for the old value explicitly.

// The aarch64 load linked/store conditional instructions do not
// accept an offset. So, unlike x86, we must provide a plain register
// to identify the memory word to be compared/exchanged rather than a
// register+offset Address.
// Compare-and-exchange of a 64-bit (pointer-sized) memory word using a
// load-exclusive/store-exclusive retry loop.
//
//   oldv - expected value; on failure it is overwritten with the value
//          actually observed in memory (mirroring x86, where rax
//          receives the memory value when cmpxchg fails)
//   newv - value to store if the comparison succeeds
//   addr - register holding the address of the word (ldaxr/stlxr take
//          no offset, so a plain register is required, not an Address)
//   tmp  - scratch; holds the loaded value, then the store-exclusive
//          status (0 = store succeeded, 1 = reservation lost)
//
// Branches to `succeed` on success; on failure branches to `*fail`
// when supplied, otherwise falls through.
void MacroAssembler::cmpxchgptr(Register oldv, Register newv, Register addr, Register tmp,
                                Label &succeed, Label *fail) {
  // oldv holds comparison value
  // newv holds value to write in exchange
  // addr identifies memory word to compare against/update
  // tmp returns 0/1 for success/failure
  Label retry_load, nope;

  bind(retry_load);
  // flush and load exclusive from the memory location
  // and fail if it is not what we expect
  ldaxr(tmp, addr);
  cmp(tmp, oldv);
  br(Assembler::NE, nope);
  // if we store+flush with no intervening write tmp will be zero
  stlxr(tmp, newv, addr);
  cbzw(tmp, succeed);
  // retry so we only ever return after a load fails to compare
  // ensures we don't return a stale value after a failed write.
  b(retry_load);
  // if the memory word differs we return it in oldv and signal a fail
  bind(nope);
  membar(AnyAny);
  mov(oldv, tmp);
  if (fail)
    b(*fail);
}
2095
// Compare-and-exchange of a 32-bit memory word using a
// load-exclusive/store-exclusive retry loop.
//
//   oldv - expected value; on failure it is overwritten with the value
//          actually observed in memory (mirroring x86 semantics)
//   newv - value to store if the comparison succeeds
//   addr - register holding the address of the word (ldaxrw/stlxrw
//          take no offset, so a plain register is required)
//   tmp  - scratch; holds the loaded value, then the store-exclusive
//          status (0 = store succeeded, 1 = reservation lost)
//
// Branches to `succeed` on success; on failure branches to `*fail`
// when supplied, otherwise falls through.
void MacroAssembler::cmpxchgw(Register oldv, Register newv, Register addr, Register tmp,
                              Label &succeed, Label *fail) {
  // oldv holds comparison value
  // newv holds value to write in exchange
  // addr identifies memory word to compare against/update
  // tmp returns 0/1 for success/failure
  Label retry_load, nope;

  bind(retry_load);
  // flush and load exclusive from the memory location
  // and fail if it is not what we expect
  ldaxrw(tmp, addr);
  cmp(tmp, oldv);
  br(Assembler::NE, nope);
  // if we store+flush with no intervening write tmp will be zero
  stlxrw(tmp, newv, addr);
  cbzw(tmp, succeed);
  // retry so we only ever return after a load fails to compare
  // ensures we don't return a stale value after a failed write.
  b(retry_load);
  // if the memory word differs we return it in oldv and signal a fail
  bind(nope);
  membar(AnyAny);
  mov(oldv, tmp);
  if (fail)
    b(*fail);
}
2123
2124 static bool different(Register a, RegisterOrConstant b, Register c) {
2125 if (b.is_constant())
2126 return a != c;
2127 else
2128 return a != b.as_register() && a != c && b.as_register() != c;
2129 }
2130
2131 #define ATOMIC_OP(LDXR, OP, IOP, STXR) \
2132 void MacroAssembler::atomic_##OP(Register prev, RegisterOrConstant incr, Register addr) { \
2133 Register result = rscratch2; \
2134 if (prev->is_valid()) \
2135 result = different(prev, incr, addr) ? prev : rscratch2; \
2136 \
2137 Label retry_load; \
2138 bind(retry_load); \
2139 LDXR(result, addr); \
2140 OP(rscratch1, result, incr); \
2141 STXR(rscratch2, rscratch1, addr); \
|
// This simulates the behaviour of the x86 cmpxchg instruction using a
// load linked/store conditional pair. We use the acquire/release
// versions of these instructions so that we flush pending writes as
// per Java semantics.

// N.B. the x86 version assumes the old value to be compared against is
// in rax and updates rax with the value located in memory if the
// cmpxchg fails. We supply a register for the old value explicitly.

// The aarch64 load linked/store conditional instructions do not
// accept an offset. So, unlike x86, we must provide a plain register
// to identify the memory word to be compared/exchanged rather than a
// register+offset Address.
// Compare-and-exchange of a 64-bit (pointer-sized) memory word.
// Uses a single ARMv8.1 CASAL when LSE atomics are available,
// otherwise a load-exclusive/store-exclusive retry loop.
//
//   oldv - expected value; on failure it holds the value actually
//          observed in memory (casal writes it back directly; the
//          LL/SC path copies it in explicitly)
//   newv - value to store if the comparison succeeds
//   addr - register holding the address of the word (the exclusive
//          instructions take no offset, so a plain register is used)
//   tmp  - scratch register
//
// Branches to `succeed` on success; on failure branches to `*fail`
// when supplied, otherwise falls through.
void MacroAssembler::cmpxchgptr(Register oldv, Register newv, Register addr, Register tmp,
                                Label &succeed, Label *fail) {
  // oldv holds comparison value
  // newv holds value to write in exchange
  // addr identifies memory word to compare against/update
  if (UseLSE) {
    // casal overwrites oldv with the value found in memory, so stash
    // the expected value in tmp first and compare afterwards to
    // detect success.
    mov(tmp, oldv);
    casal(Assembler::xword, oldv, newv, addr);
    cmp(tmp, oldv);
    br(Assembler::EQ, succeed);
    // failure: full barrier, matching the LL/SC failure path below
    membar(AnyAny);
  } else {
    Label retry_load, nope;

    bind(retry_load);
    // flush and load exclusive from the memory location
    // and fail if it is not what we expect
    ldaxr(tmp, addr);
    cmp(tmp, oldv);
    br(Assembler::NE, nope);
    // if we store+flush with no intervening write tmp will be zero
    stlxr(tmp, newv, addr);
    cbzw(tmp, succeed);
    // retry so we only ever return after a load fails to compare
    // ensures we don't return a stale value after a failed write.
    b(retry_load);
    // if the memory word differs we return it in oldv and signal a fail
    bind(nope);
    membar(AnyAny);
    mov(oldv, tmp);
  }
  if (fail)
    b(*fail);
}
2102
// Compare-and-exchange of a 32-bit memory word. Uses a single ARMv8.1
// CASAL (word form) when LSE atomics are available, otherwise a
// load-exclusive/store-exclusive retry loop.
//
//   oldv - expected value; on failure it holds the value actually
//          observed in memory (casal writes it back directly; the
//          LL/SC path copies it in explicitly)
//   newv - value to store if the comparison succeeds
//   addr - register holding the address of the word (the exclusive
//          instructions take no offset, so a plain register is used)
//   tmp  - scratch register
//
// Branches to `succeed` on success; on failure branches to `*fail`
// when supplied, otherwise falls through.
void MacroAssembler::cmpxchgw(Register oldv, Register newv, Register addr, Register tmp,
                              Label &succeed, Label *fail) {
  // oldv holds comparison value
  // newv holds value to write in exchange
  // addr identifies memory word to compare against/update
  if (UseLSE) {
    // casal overwrites oldv with the value found in memory, so stash
    // the expected value in tmp first and compare afterwards to
    // detect success.
    mov(tmp, oldv);
    casal(Assembler::word, oldv, newv, addr);
    cmp(tmp, oldv);
    br(Assembler::EQ, succeed);
    // failure: full barrier, matching the LL/SC failure path below
    membar(AnyAny);
  } else {
    Label retry_load, nope;

    bind(retry_load);
    // flush and load exclusive from the memory location
    // and fail if it is not what we expect
    ldaxrw(tmp, addr);
    cmp(tmp, oldv);
    br(Assembler::NE, nope);
    // if we store+flush with no intervening write tmp will be zero
    stlxrw(tmp, newv, addr);
    cbzw(tmp, succeed);
    // retry so we only ever return after a load fails to compare
    // ensures we don't return a stale value after a failed write.
    b(retry_load);
    // if the memory word differs we return it in oldv and signal a fail
    bind(nope);
    membar(AnyAny);
    mov(oldv, tmp);
  }
  if (fail)
    b(*fail);
}
2137
// Generic compare-and-exchange of a memory word of size `sz`, with
// acquire and/or release semantics as requested.
//
//   oldv - expected value (not modified by this routine)
//   newv - value to store if the comparison succeeds
//   addr - register holding the address of the word
//   tmp  - scratch; ends up holding the value observed in memory
//
// Unlike cmpxchgptr/cmpxchgw this variant takes no labels: the result
// is reported in the condition flags — EQ == success, NE == failure.
void MacroAssembler::cmpxchg(operand_size sz,
                             Register oldv, Register newv, Register addr,
                             bool acquire, bool release,
                             Register tmp)
{
  // oldv holds comparison value
  // newv holds value to write in exchange
  // addr identifies memory word to compare against/update
  // returns EQ == success, NE == failure
  if (UseLSE) {
    // compare_and_swap writes the observed memory value back into its
    // first operand, so operate on a copy in tmp and compare the
    // result against the untouched expected value.
    mov(tmp, oldv);
    compare_and_swap(tmp, newv, addr, sz, acquire?1:0, release?1:0);
    cmp(tmp, oldv);
  } else {
    Label retry_load, done;

    bind(retry_load);
    // flush and load exclusive from the memory location
    // and fail if it is not what we expect
    cas_load(sz, tmp, addr, acquire?1:0);
    cmp(tmp, oldv);
    // mismatch: exit with NE still set for the caller
    br(Assembler::NE, done);
    // if we store+flush with no intervening write tmp will be zero
    cas_store(sz, tmp, newv, addr, release?1:0);
    // non-zero status means the store-exclusive lost its reservation:
    // retry. On the fall-through (success) path the EQ from the cmp
    // above survives — store-exclusive and cbnzw do not set flags.
    cbnzw(tmp, retry_load);
    bind(done);
  }
}
2167
2168 static bool different(Register a, RegisterOrConstant b, Register c) {
2169 if (b.is_constant())
2170 return a != c;
2171 else
2172 return a != b.as_register() && a != c && b.as_register() != c;
2173 }
2174
2175 #define ATOMIC_OP(LDXR, OP, IOP, STXR) \
2176 void MacroAssembler::atomic_##OP(Register prev, RegisterOrConstant incr, Register addr) { \
2177 Register result = rscratch2; \
2178 if (prev->is_valid()) \
2179 result = different(prev, incr, addr) ? prev : rscratch2; \
2180 \
2181 Label retry_load; \
2182 bind(retry_load); \
2183 LDXR(result, addr); \
2184 OP(rscratch1, result, incr); \
2185 STXR(rscratch2, rscratch1, addr); \
|