/*
 * Copyright (c) 2018, Red Hat, Inc. All rights reserved.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "c1/c1_MacroAssembler.hpp"
#include "c1/c1_LIRAssembler.hpp"
#include "macroAssembler_x86.hpp"
#include "shenandoahBarrierSetAssembler_x86.hpp"
#include "gc_implementation/shenandoah/shenandoahBarrierSet.hpp"
#include "gc_implementation/shenandoah/shenandoahBarrierSetC1.hpp"
#include "gc_implementation/shenandoah/shenandoahForwarding.hpp"
#include "gc_implementation/shenandoah/shenandoahHeap.hpp"
#include "gc_implementation/shenandoah/shenandoahHeapRegion.hpp"
#include "gc_implementation/shenandoah/shenandoahRuntime.hpp"
#include "runtime/stubCodeGenerator.hpp"

// Accessor for the singleton barrier-set assembler owned by the
// Shenandoah barrier set.
ShenandoahBarrierSetAssembler* ShenandoahBarrierSetAssembler::bsasm() {
  return ShenandoahBarrierSet::barrier_set()->bsasm();
}

#define __ masm->

// Emits code that replaces the oop in dst with its forwardee, if the
// object has been forwarded. Handles a null dst by branching around the
// not-null variant below. tmp may be noreg (see the not-null variant,
// which then borrows and spills a register).
void ShenandoahBarrierSetAssembler::resolve_forward_pointer(MacroAssembler* masm, Register dst, Register tmp) {
  assert(ShenandoahCASBarrier, "should be enabled");
  Label is_null;
  __ testptr(dst, dst);
  __ jcc(Assembler::zero, is_null);
  resolve_forward_pointer_not_null(masm, dst, tmp);
  __ bind(is_null);
}

// Emits code that replaces the (known non-null) oop in dst with its
// forwardee if the object's mark word indicates it is forwarded;
// otherwise dst is left untouched. Flags are clobbered.
void ShenandoahBarrierSetAssembler::resolve_forward_pointer_not_null(MacroAssembler* masm, Register dst, Register tmp) {
  assert(ShenandoahCASBarrier || ShenandoahLoadRefBarrier, "should be enabled");
  // The below loads the mark word, checks if the lowest two bits are
  // set, and if so, clear the lowest two bits and copy the result
  // to dst. Otherwise it leaves dst alone.
  // Implementing this is surprisingly awkward. I do it here by:
  // - Inverting the mark word
  // - Test lowest two bits == 0
  // - If so, set the lowest two bits
  // - Invert the result back, and copy to dst

  bool borrow_reg = (tmp == noreg);
  if (borrow_reg) {
    // No free registers available. Make one useful.
    tmp = LP64_ONLY(rscratch1) NOT_LP64(rdx);
    if (tmp == dst) {
      tmp = LP64_ONLY(rscratch2) NOT_LP64(rcx);
    }
    // Spill the borrowed register; restored at the end.
    __ push(tmp);
  }

  assert_different_registers(dst, tmp);

  Label done;
  __ movptr(tmp, Address(dst, oopDesc::mark_offset_in_bytes()));
  __ notptr(tmp);
  // After inversion: low bits zero <=> object is forwarded
  // (mark word low bits were markOopDesc::marked_value).
  __ testb(tmp, markOopDesc::marked_value);
  __ jccb(Assembler::notZero, done);
  // Set the low bits, invert back: yields mark word with the low two
  // bits cleared, i.e. the forwardee pointer.
  __ orptr(tmp, markOopDesc::marked_value);
  __ notptr(tmp);
  __ mov(dst, tmp);
  __ bind(done);

  if (borrow_reg) {
    __ pop(tmp);
  }
}

// Emits the load-reference barrier for a known non-null oop in dst.
// Fast path: if the heap has no forwarded objects (gc_state check),
// fall straight through. Slow path: save all general-purpose and vector
// registers, call the interpreter LRB runtime entry, and move the
// result (returned in rax) into dst.
void ShenandoahBarrierSetAssembler::load_reference_barrier_not_null(MacroAssembler* masm, Register dst) {
  assert(ShenandoahLoadRefBarrier, "Should be enabled");

  Label done;

#ifdef _LP64
  Register thread = r15_thread;
#else
  // 32-bit has no dedicated thread register; pick one that does not
  // alias dst, spill it, and load the current thread into it.
  Register thread = rcx;
  if (thread == dst) {
    thread = rbx;
  }
  __ push(thread);
  __ get_thread(thread);
#endif
  assert_different_registers(dst, thread);

  // Fast path: nothing to do unless the heap currently has forwarded objects.
  Address gc_state(thread, in_bytes(JavaThread::gc_state_offset()));
  __ testb(gc_state, ShenandoahHeap::HAS_FORWARDED);
  __ jcc(Assembler::zero, done);

  {
    // Save caller-visible state around the runtime call:
    // vector registers first, then a 16-slot (LP64) / 8-slot (32-bit)
    // frame for the general-purpose registers. Slot 4 is deliberately
    // left unused in place of rsp itself.
    __ save_vector_registers();

    __ subptr(rsp, LP64_ONLY(16) NOT_LP64(8) * wordSize);

    __ movptr(Address(rsp, 0 * wordSize), rax);
    __ movptr(Address(rsp, 1 * wordSize), rcx);
    __ movptr(Address(rsp, 2 * wordSize), rdx);
    __ movptr(Address(rsp, 3 * wordSize), rbx);
    // skip rsp
    __ movptr(Address(rsp, 5 * wordSize), rbp);
    __ movptr(Address(rsp, 6 * wordSize), rsi);
    __ movptr(Address(rsp, 7 * wordSize), rdi);
#ifdef _LP64
    __ movptr(Address(rsp, 8 * wordSize), r8);
    __ movptr(Address(rsp, 9 * wordSize), r9);
    __ movptr(Address(rsp, 10 * wordSize), r10);
    __ movptr(Address(rsp, 11 * wordSize), r11);
    __ movptr(Address(rsp, 12 * wordSize), r12);
    __ movptr(Address(rsp, 13 * wordSize), r13);
    __ movptr(Address(rsp, 14 * wordSize), r14);
    __ movptr(Address(rsp, 15 * wordSize), r15);
#endif
  }
  __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_interpreter), dst);
  {
    // Restore in reverse order of the saves above.
#ifdef _LP64
    __ movptr(r15, Address(rsp, 15 * wordSize));
    __ movptr(r14, Address(rsp, 14 * wordSize));
    __ movptr(r13, Address(rsp, 13 * wordSize));
    __ movptr(r12, Address(rsp, 12 * wordSize));
    __ movptr(r11, Address(rsp, 11 * wordSize));
    __ movptr(r10, Address(rsp, 10 * wordSize));
    __ movptr(r9, Address(rsp, 9 * wordSize));
    __ movptr(r8, Address(rsp, 8 * wordSize));
#endif
    __ movptr(rdi, Address(rsp, 7 * wordSize));
    __ movptr(rsi, Address(rsp, 6 * wordSize));
    __ movptr(rbp, Address(rsp, 5 * wordSize));
    // skip rsp
    __ movptr(rbx, Address(rsp, 3 * wordSize));
    __ movptr(rdx, Address(rsp, 2 * wordSize));
    __ movptr(rcx, Address(rsp, 1 * wordSize));
    // The call returns its result in rax. If dst is a different
    // register, move the result over and restore the saved rax;
    // if dst == rax the result is already in place and the saved
    // value is intentionally discarded.
    if (dst != rax) {
      __ movptr(dst, rax);
      __ movptr(rax, Address(rsp, 0 * wordSize));
    }
    __ addptr(rsp, LP64_ONLY(16) NOT_LP64(8) * wordSize);

    __ restore_vector_registers();
  }
  __ bind(done);

#ifndef _LP64
  __ pop(thread);
#endif
}
// Null-checking wrapper around the load-reference barrier: emits
// nothing unless ShenandoahLoadRefBarrier is enabled, and skips the
// barrier entirely for a null oop in dst.
void ShenandoahBarrierSetAssembler::load_reference_barrier(MacroAssembler* masm, Register dst) {
  if (ShenandoahLoadRefBarrier) {
    Label done;
    __ testptr(dst, dst);
    __ jcc(Assembler::zero, done);
    load_reference_barrier_not_null(masm, dst);
    __ bind(done);
  }
}

// Special Shenandoah CAS implementation that handles false negatives
// due to concurrent evacuation.
//
// oldval must be in rax (implicit cmpxchg operand). tmp1/tmp2 are
// scratch registers for the resolved to-space pointers. If exchange is
// false, res receives 1 on success and 0 on failure; if exchange is
// true, the witness value is left per cmpxchg semantics and res is
// unused.
void ShenandoahBarrierSetAssembler::cmpxchg_oop(MacroAssembler* masm,
                                                Register res, Address addr, Register oldval, Register newval,
                                                bool exchange, Register tmp1, Register tmp2) {
  assert(ShenandoahCASBarrier, "Should only be used when CAS barrier is enabled");
  assert(oldval == rax, "must be in rax for implicit use in cmpxchg");

  Label retry, done;

  // Remember oldval for retry logic below
#ifdef _LP64
  if (UseCompressedOops) {
    __ movl(tmp1, oldval);
  } else
#endif
  {
    __ movptr(tmp1, oldval);
  }

  // Step 1. Try to CAS with given arguments. If successful, then we are done,
  // and can safely return.
  if (os::is_MP()) __ lock();
#ifdef _LP64
  if (UseCompressedOops) {
    __ cmpxchgl(newval, addr);
  } else
#endif
  {
    __ cmpxchgptr(newval, addr);
  }
  __ jcc(Assembler::equal, done, true);

  // Step 2. CAS had failed. This may be a false negative.
  //
  // The trouble comes when we compare the to-space pointer with the from-space
  // pointer to the same object. To resolve this, it will suffice to resolve both
  // oldval and the value from memory -- this will give both to-space pointers.
  // If they mismatch, then it was a legitimate failure.
  //
#ifdef _LP64
  if (UseCompressedOops) {
    __ decode_heap_oop(tmp1);
  }
#endif
  resolve_forward_pointer(masm, tmp1);

  // After the failed cmpxchg, rax (oldval) holds the witness value
  // loaded from memory; resolve it into tmp2 for comparison.
#ifdef _LP64
  if (UseCompressedOops) {
    __ movl(tmp2, oldval);
    __ decode_heap_oop(tmp2);
  } else
#endif
  {
    __ movptr(tmp2, oldval);
  }
  resolve_forward_pointer(masm, tmp2);

  __ cmpptr(tmp1, tmp2);
  __ jcc(Assembler::notEqual, done, true);

  // Step 3. Try to CAS again with resolved to-space pointers.
  //
  // Corner case: it may happen that somebody stored the from-space pointer
  // to memory while we were preparing for retry. Therefore, we can fail again
  // on retry, and so need to do this in loop, always resolving the failure
  // witness.
  __ bind(retry);
  if (os::is_MP()) __ lock();
#ifdef _LP64
  if (UseCompressedOops) {
    __ cmpxchgl(newval, addr);
  } else
#endif
  {
    __ cmpxchgptr(newval, addr);
  }
  __ jcc(Assembler::equal, done, true);

  // Resolve the new failure witness (now in rax) and loop while it
  // still resolves to the expected to-space pointer in tmp1.
#ifdef _LP64
  if (UseCompressedOops) {
    __ movl(tmp2, oldval);
    __ decode_heap_oop(tmp2);
  } else
#endif
  {
    __ movptr(tmp2, oldval);
  }
  resolve_forward_pointer(masm, tmp2);

  __ cmpptr(tmp1, tmp2);
  __ jcc(Assembler::equal, retry, true);

  // Step 4. If we need a boolean result out of CAS, check the flag again,
  // and promote the result. Note that we handle the flag from both the CAS
  // itself and from the retry loop.
  __ bind(done);
  if (!exchange) {
    assert(res != NULL, "need result register");
#ifdef _LP64
    __ setb(Assembler::equal, res);
    __ movzbl(res, res);
#else
    // Need something else to clean the result, because some registers
    // do not have byte encoding that movzbl wants. Cannot do the xor first,
    // because it modifies the flags.
    Label res_non_zero;
    __ movptr(res, 1);
    __ jcc(Assembler::equal, res_non_zero, true);
    __ xorptr(res, res);
    __ bind(res_non_zero);
#endif
  }
}

#undef __

#ifdef COMPILER1

#define __ ce->masm()->

// C1 slow-path stub for the load-reference barrier: copies obj into
// the result register, optionally null-checks it, applies the barrier,
// and jumps back to the continuation.
void ShenandoahBarrierSetAssembler::gen_load_reference_barrier_stub(LIR_Assembler* ce, ShenandoahLoadReferenceBarrierStub* stub) {
  __ bind(*stub->entry());

  Label done;
  Register obj = stub->obj()->as_register();
  Register res = stub->result()->as_register();

  if (res != obj) {
    __ mov(res, obj);
  }

  // Check for null.
  if (stub->needs_null_check()) {
    __ testptr(res, res);
    __ jcc(Assembler::zero, done);
  }

  load_reference_barrier_not_null(ce->masm(), res);

  __ bind(done);
  __ jmp(*stub->continuation());
}

#undef __

#endif // COMPILER1