/*
 * Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/codeBlob.hpp"
#include "gc/z/zBarrier.inline.hpp"
#include "gc/z/zBarrierSet.hpp"
#include "gc/z/zBarrierSetAssembler.hpp"
#include "gc/z/zBarrierSetRuntime.hpp"
#include "memory/resourceArea.hpp"
#ifdef COMPILER1
#include "c1/c1_LIRAssembler.hpp"
#include "c1/c1_MacroAssembler.hpp"
#include "gc/z/c1/zBarrierSetC1.hpp"
#endif // COMPILER1

#include "gc/z/zThreadLocalData.hpp"

ZBarrierSetAssembler::ZBarrierSetAssembler() :
    _load_barrier_slow_stub(),
    _load_barrier_weak_slow_stub() {}

#ifdef PRODUCT
#define BLOCK_COMMENT(str) /* nothing */
#else
#define BLOCK_COMMENT(str) __ block_comment(str)
#endif

#undef __
#define __ masm->

void ZBarrierSetAssembler::load_at(MacroAssembler* masm,
                                   DecoratorSet decorators,
                                   BasicType type,
                                   Register dst,
                                   Address src,
                                   Register tmp1,
                                   Register tmp_thread) {
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    // Barrier not needed
    BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
    return;
  }

  // rscratch1 can be passed as src or dst, so don't use it.
  RegSet savedRegs = RegSet::of(rscratch2, rheapbase);

  Label done;
  assert_different_registers(rheapbase, rscratch2, dst);
  assert_different_registers(rheapbase, rscratch2, src.base());

  __ push(savedRegs, sp);

  // Load bad mask into scratch register.
  __ ldr(rheapbase, address_bad_mask_from_thread(rthread));
  __ lea(rscratch2, src);
  __ ldr(dst, src);

  // Test reference against bad mask. If the bad bits are set, the
  // reference needs to be healed by the slow path below.
  __ tst(dst, rheapbase);
  __ br(Assembler::EQ, done);

  __ enter();

  __ push(RegSet::range(r0,r28) - RegSet::of(dst), sp);

  if (c_rarg0 != dst) {
    __ mov(c_rarg0, dst);
  }
  __ mov(c_rarg1, rscratch2);

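  // The runtime call below follows the C calling convention, so any
  // floating-point/SIMD state live in compiled code must be preserved here.
  // The low 64 bits of v0..v31 are spilled in groups of four, growing the
  // stack downwards, and reloaded in mirror order after the call.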
  int step = 4 * wordSize;
  __ mov(rscratch1, -step);
  __ sub(sp, sp, step);

  for (int i = 28; i >= 4; i -= 4) {
    __ st1(as_FloatRegister(i), as_FloatRegister(i+1), as_FloatRegister(i+2),
        as_FloatRegister(i+3), __ T1D, Address(__ post(sp, rscratch1)));
  }
  // Store v0-v3 at the final sp position so that the reload loop below,
  // which restores v0..v31 from sp upwards, finds every group it expects.
  __ st1(as_FloatRegister(0), as_FloatRegister(1), as_FloatRegister(2),
      as_FloatRegister(3), __ T1D, Address(sp));

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);

  for (int i = 0; i <= 28; i += 4) {
    __ ld1(as_FloatRegister(i), as_FloatRegister(i+1), as_FloatRegister(i+2),
        as_FloatRegister(i+3), __ T1D, Address(__ post(sp, step)));
  }

  // Make sure dst has the return value.
  if (dst != r0) {
    __ mov(dst, r0);
  }

  __ pop(RegSet::range(r0,r28) - RegSet::of(dst), sp);
  __ leave();

  __ bind(done);

  // Restore tmps
  __ pop(savedRegs, sp);
}

#ifdef ASSERT

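// ZGC has no store barrier, so in debug builds we verify here that an oop
// about to be stored has already been healed, i.e. has no bad mask bits set.
// Hitting the stop() below indicates a missing load barrier earlier on.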
void ZBarrierSetAssembler::store_at(MacroAssembler* masm,
                                    DecoratorSet decorators,
                                    BasicType type,
                                    Address dst,
                                    Register val,
                                    Register tmp1,
                                    Register tmp2) {
  // Verify value
  if (type == T_OBJECT || type == T_ARRAY) {
    // Note that val could be noreg, which means we
    // are storing null and can skip verification.
    if (val != noreg) {
      Label done;

      // tmp1 and tmp2 are often set to noreg.
      RegSet savedRegs = RegSet::of(rscratch1);
      __ push(savedRegs, sp);

      __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
      __ tst(val, rscratch1);
      __ br(Assembler::EQ, done);
      __ stop("Verify oop store failed");
      __ should_not_reach_here();
      __ bind(done);
      __ pop(savedRegs, sp);
    }
  }

  // Store value
  BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2);
}

#endif // ASSERT

void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm,
                                              DecoratorSet decorators,
                                              bool is_oop,
                                              Register src,
                                              Register dst,
                                              Register count,
                                              RegSet saved_regs) {
  if (!is_oop) {
    // Barrier not needed
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::arraycopy_prologue {");

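  // Heal every oop in the source array up front by calling the array load
  // barrier on the count elements starting at src; the copy that follows
  // can then proceed without per-element barriers.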
  assert_different_registers(src, count, rscratch1);

  __ pusha();

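  // Shuffle src and count into c_rarg0/c_rarg1 without clobbering either,
  // taking care of the case where they already sit in the opposite
  // argument registers.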
  if (count == c_rarg0) {
    if (src == c_rarg1) {
      // exactly backwards!!
      __ mov(rscratch1, c_rarg0);
      __ mov(c_rarg0, c_rarg1);
      __ mov(c_rarg1, rscratch1);
    } else {
      __ mov(c_rarg1, count);
      __ mov(c_rarg0, src);
    }
  } else {
    __ mov(c_rarg0, src);
    __ mov(c_rarg1, count);
  }

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_array_addr(), 2);

  __ popa();
  BLOCK_COMMENT("} ZBarrierSetAssembler::arraycopy_prologue");
}

void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,
                                                         Register jni_env,
                                                         Register robj,
                                                         Register tmp,
                                                         Label& slowpath) {
  BLOCK_COMMENT("ZBarrierSetAssembler::try_resolve_jobject_in_native {");

  assert_different_registers(jni_env, robj, tmp);

  // Resolve jobject
  BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, robj, tmp, slowpath);

  // The offset of the address bad mask relative to jni_env is too large
  // (-784) for a direct load, whose offset range here is -128 to +127, so
  // materialize the offset in tmp and add it explicitly.
  __ mov(tmp, (long int)(in_bytes(ZThreadLocalData::address_bad_mask_offset()) -
      in_bytes(JavaThread::jni_environment_offset())));
  // Load address bad mask
  __ add(tmp, jni_env, tmp);
  __ ldr(tmp, Address(tmp));

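  // A null resolved jobject has no bits in common with the mask and falls
  // through; a reference with bad mask bits set must be healed, so take
  // the slow path instead.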
  // Check address bad mask
  __ tst(robj, tmp);
  __ br(Assembler::NE, slowpath);

  BLOCK_COMMENT("} ZBarrierSetAssembler::try_resolve_jobject_in_native");
}

#ifdef COMPILER1

#undef __
#define __ ce->masm()->

void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                         LIR_Opr ref) const {
  assert_different_registers(rheapbase, rthread, ref->as_register());

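  // Only the condition flags are produced here; the C1 code emitted around
  // this test branches to the load barrier stub when the result is
  // non-zero (NE), i.e. when the reference has bad mask bits set.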
  __ ldr(rheapbase, address_bad_mask_from_thread(rthread));
  __ tst(ref->as_register(), rheapbase);
}

void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,
                                                         ZLoadBarrierStubC1* stub) const {
  // Stub entry
  __ bind(*stub->entry());

  Register ref = stub->ref()->as_register();
  Register ref_addr = noreg;
  Register tmp = noreg;

  if (stub->tmp()->is_valid()) {
    // Load address into tmp register
    ce->leal(stub->ref_addr(), stub->tmp());
    ref_addr = tmp = stub->tmp()->as_pointer_register();
  } else {
    // Address already in register
    ref_addr = stub->ref_addr()->as_address_ptr()->base()->as_pointer_register();
  }

  assert_different_registers(ref, ref_addr, noreg);

  // Save r0 unless it is the result (ref) or tmp register.
  // Set up SP to accommodate the parameters and, if needed, the saved r0.
  if (ref != r0 && tmp != r0) {
    __ sub(sp, sp, 32);
    __ str(r0, Address(sp, 16));
  } else {
    __ sub(sp, sp, 16);
  }

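  // store_parameter() writes the two arguments into the stack space just
  // reserved; the runtime stub generated below pulls them back out with
  // load_parameter().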
  // Setup arguments and call runtime stub
  ce->store_parameter(ref_addr, 1);
  ce->store_parameter(ref, 0);

  __ far_call(stub->runtime_stub());

  // Verify result
  __ verify_oop(r0, "Bad oop");

  // Move result into place
  if (ref != r0) {
    __ mov(ref, r0);
  }

  // Restore r0 unless it is the result or tmp register
  if (ref != r0 && tmp != r0) {
    __ ldr(r0, Address(sp, 16));
    __ add(sp, sp, 32);
  } else {
    __ add(sp, sp, 16);
  }

  // Stub exit
  __ b(*stub->continuation());
}

#undef __
#define __ sasm->

void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,
                                                                 DecoratorSet decorators) const {
  __ prologue("zgc_load_barrier stub", false);

  // We don't use push/pop_clobbered_registers() - we need to pull out the result from r0.
  for (int i = 0; i < 32; i += 2) {
    __ stpd(as_FloatRegister(i), as_FloatRegister(i+1), Address(__ pre(sp,-16)));
  }

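  // Save the general purpose registers r1 through r28; r0 is excluded so
  // that the healed oop returned by the runtime call can be picked up from
  // it afterwards.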
  RegSet saveRegs = RegSet::range(r0,r28) - RegSet::of(r0);
  __ push(saveRegs, sp);

  // Setup arguments
  __ load_parameter(0, c_rarg0);
  __ load_parameter(1, c_rarg1);

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);

  __ pop(saveRegs, sp);

  // Reload all saved FP register pairs, including v0/v1, mirroring the
  // save loop above.
  for (int i = 30; i >= 0; i -= 2) {
    __ ldpd(as_FloatRegister(i), as_FloatRegister(i+1), Address(__ post(sp, 16)));
  }

  __ epilogue();
}
#endif // COMPILER1

#undef __
#define __ cgen->assembler()->

// Generates a register specific stub for calling
// ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
// ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
//
// The raddr register serves as both input and output for this stub. When the stub is
// called the raddr register contains the object field address (oop*) where the bad oop
// was loaded from, which caused the slow path to be taken. On return from the stub the
// raddr register contains the good/healed oop returned from
// ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
// ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
static address generate_load_barrier_stub(StubCodeGenerator* cgen, Register raddr, DecoratorSet decorators) {
  // Don't generate stub for invalid registers
  if (raddr == zr || raddr == r29 || raddr == r30) {
    return NULL;
  }

  // Create stub name
  char name[64];
  const bool weak = (decorators & ON_WEAK_OOP_REF) != 0;
  os::snprintf(name, sizeof(name), "zgc_load_barrier%s_stub_%s", weak ? "_weak" : "", raddr->name());

  __ align(CodeEntryAlignment);
  StubCodeMark mark(cgen, "StubRoutines", os::strdup(name, mtCode));
  address start = __ pc();

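  // The runtime call preserves the callee-saved registers r19-r28, so only
  // the caller-saved registers r0-r18 need to be saved here. raddr is
  // excluded because it is overwritten with the healed oop on return.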
  // Save live registers
  RegSet savedRegs = RegSet::range(r0,r18) - RegSet::of(raddr);

  __ enter();
  __ push(savedRegs, sp);

  // Setup arguments
  if (raddr != c_rarg1) {
    __ mov(c_rarg1, raddr);
  }

  __ ldr(c_rarg0, Address(raddr));

  // Call barrier function
  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), c_rarg0, c_rarg1);

  // Move result returned in r0 to raddr, if needed
  if (raddr != r0) {
    __ mov(raddr, r0);
  }

  __ pop(savedRegs, sp);
  __ leave();
  __ ret(lr);

  return start;
}

#undef __

static void barrier_stubs_init_inner(const char* label, const DecoratorSet decorators, address* stub) {
  const int nregs = 28;              // Exclude FP, XZR, SP from calculation.
  const int code_size = nregs * 254; // Rough estimate of code size

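  // One stub is generated per general purpose register r0..r27, so that
  // compiled code can call the variant whose register holds the field
  // address; the resulting entry points are indexed by register encoding.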
  ResourceMark rm;

  CodeBuffer buf(BufferBlob::create(label, code_size));
  StubCodeGenerator cgen(&buf);

  for (int i = 0; i < nregs; i++) {
    const Register reg = as_Register(i);
    stub[i] = generate_load_barrier_stub(&cgen, reg, decorators);
  }
}

void ZBarrierSetAssembler::barrier_stubs_init() {
  barrier_stubs_init_inner("zgc_load_barrier_stubs", ON_STRONG_OOP_REF, _load_barrier_slow_stub);
  barrier_stubs_init_inner("zgc_load_barrier_weak_stubs", ON_WEAK_OOP_REF, _load_barrier_weak_slow_stub);
}

address ZBarrierSetAssembler::load_barrier_slow_stub(Register reg) {
  return _load_barrier_slow_stub[reg->encoding()];
}

address ZBarrierSetAssembler::load_barrier_weak_slow_stub(Register reg) {
  return _load_barrier_weak_slow_stub[reg->encoding()];
}