1 /*
   2  * Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 
  24 #include "precompiled.hpp"
  25 #include "asm/macroAssembler.inline.hpp"
  26 #include "code/codeBlob.hpp"
  27 #include "gc/z/zBarrier.inline.hpp"
  28 #include "gc/z/zBarrierSet.hpp"
  29 #include "gc/z/zBarrierSetAssembler.hpp"
  30 #include "gc/z/zBarrierSetRuntime.hpp"
  31 #include "memory/resourceArea.hpp"
  32 #ifdef COMPILER1
  33 #include "c1/c1_LIRAssembler.hpp"
  34 #include "c1/c1_MacroAssembler.hpp"
  35 #include "gc/z/c1/zBarrierSetC1.hpp"
  36 #endif // COMPILER1
  37 
  38 #include "gc/z/zThreadLocalData.hpp"
  39 
// Value-initialize (zero) both per-register stub tables; the actual stub
// entry addresses are generated and filled in later by barrier_stubs_init().
ZBarrierSetAssembler::ZBarrierSetAssembler() :
    _load_barrier_slow_stub(),
    _load_barrier_weak_slow_stub() {}
  43 
  44 #ifdef PRODUCT
  45 #define BLOCK_COMMENT(str) /* nothing */
  46 #else
  47 #define BLOCK_COMMENT(str) __ block_comment(str)
  48 #endif
  49 
  50 #undef __
  51 #define __ masm->
  52 
  53 void ZBarrierSetAssembler::load_at(MacroAssembler* masm,
  54                                    DecoratorSet decorators,
  55                                    BasicType type,
  56                                    Register dst,
  57                                    Address src,
  58                                    Register tmp1,
  59                                    Register tmp_thread) {
  60   if (!ZBarrierSet::barrier_needed(decorators, type)) {
  61     // Barrier not needed
  62     BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
  63     return;
  64   }
  65 
  66   // rscratch1 can be passed as src or dst, so don't use it.
  67   RegSet savedRegs = RegSet::of(rscratch2, rheapbase);
  68 
  69   Label done;
  70   assert_different_registers(rheapbase, rscratch2, dst);
  71   assert_different_registers(rheapbase, rscratch2, src.base());
  72 
  73   __ push(savedRegs, sp);
  74 
  75   // Load bad mask into scratch register.
  76   __ ldr(rheapbase, address_bad_mask_from_thread(rthread));
  77   __ lea(rscratch2, src);
  78   __ ldr(dst, src);
  79 
  80   // Test reference against bad mask. If mask bad, then we need to fix it up.
  81   __ tst(dst, rheapbase);
  82   __ br(Assembler::EQ, done);
  83 
  84   __ enter();
  85 
  86   __ push(RegSet::range(r0,r28) - RegSet::of(dst), sp);
  87 
  88   if (c_rarg0 != dst) {
  89     __ mov(c_rarg0, dst);
  90   }
  91   __ mov(c_rarg1, rscratch2);
  92 
  93   int step = 4 * wordSize;
  94   __ mov(rscratch1, -step);
  95   __ sub(sp, sp, step);
  96 
  97   for (int i = 28; i >= 4; i -= 4) {
  98     __ st1(as_FloatRegister(i), as_FloatRegister(i+1), as_FloatRegister(i+2),
  99         as_FloatRegister(i+3), __ T1D, Address(__ post(sp, rscratch1)));
 100   }
 101 
 102   __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);
 103 
 104   for (int i = 0; i <= 28; i += 4) {
 105     __ ld1(as_FloatRegister(i), as_FloatRegister(i+1), as_FloatRegister(i+2),
 106         as_FloatRegister(i+3), __ T1D, Address(__ post(sp, step)));
 107   }
 108 
 109   // Make sure dst has the return value.
 110   if (dst != r0) {
 111     __ mov(dst, r0);
 112   }
 113 
 114   __ pop(RegSet::range(r0,r28) - RegSet::of(dst), sp);
 115   __ leave();
 116 
 117   __ bind(done);
 118 
 119   // Restore tmps
 120   __ pop(savedRegs, sp);
 121 }
 122 
 123 #ifdef ASSERT
 124 
// Debug-only (ASSERT builds) oop store: before delegating to the plain
// BarrierSetAssembler store, verify that any oop value being stored has
// no address bad mask bits set, i.e. ZGC never stores a stale oop.
void ZBarrierSetAssembler::store_at(MacroAssembler* masm,
                                        DecoratorSet decorators,
                                        BasicType type,
                                        Address dst,
                                        Register val,
                                        Register tmp1,
                                        Register tmp2,
                                        Register tmp3) {
  // Verify value
  if (type == T_OBJECT || type == T_ARRAY) {
    // Note that src could be noreg, which means we
    // are storing null and can skip verification.
    if (val != noreg) {
      Label done;

      // tmp1 and tmp2 are often set to noreg.
      // Use rscratch1 for the bad-mask load instead, and preserve it.
      RegSet savedRegs = RegSet::of(rscratch1);
      __ push(savedRegs, sp);

      // val & bad_mask must be zero for a valid (healed) oop.
      __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
      __ tst(val, rscratch1);
      __ br(Assembler::EQ, done);
      __ stop("Verify oop store failed");
      __ should_not_reach_here();
      __ bind(done);
      __ pop(savedRegs, sp);
    }
  }

  // Store value (tmp3 is not needed by the plain store, which only
  // takes tmp1/tmp2).
  BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2);
}
 157 
 158 #endif // ASSERT
 159 
// Before an oop arraycopy, heal every oop in the source range by calling
// ZBarrierSetRuntime::load_barrier_on_oop_array(src, count), so the copy
// itself can proceed without per-element barriers.
void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm,
                                              DecoratorSet decorators,
                                              bool is_oop,
                                              Register src,
                                              Register dst,
                                              Register count,
                                              RegSet saved_regs) {
  if (!is_oop) {
    // Barrier not needed
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::arraycopy_prologue {");

  assert_different_registers(src, count, rscratch1);

  // Save all registers around the runtime call.
  __ pusha();

  // Shuffle (src, count) into (c_rarg0, c_rarg1) without clobbering either
  // when they already overlap the argument registers.
  if (count == c_rarg0) {
    if (src == c_rarg1) {
      // exactly backwards!!
      __ mov(rscratch1, c_rarg0);
      __ mov(c_rarg0, c_rarg1);
      __ mov(c_rarg1, rscratch1);
    } else {
      // count occupies c_rarg0, so fill c_rarg1 first.
      __ mov(c_rarg1, count);
      __ mov(c_rarg0, src);
    }
  } else {
    __ mov(c_rarg0, src);
    __ mov(c_rarg1, count);
  }

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_array_addr(), 2);

  __ popa();
  BLOCK_COMMENT("} ZBarrierSetAssembler::arraycopy_prologue");
}
 198 
// Fast-path jobject resolution for JNI: resolve the handle, then check the
// resolved oop against the thread's address bad mask. If any bad bits are
// set, branch to slowpath so the caller can take the full transition.
void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,
                                                         Register jni_env,
                                                         Register robj,
                                                         Register tmp,
                                                         Label& slowpath) {
  BLOCK_COMMENT("ZBarrierSetAssembler::try_resolve_jobject_in_native {");

  assert_different_registers(jni_env, robj, tmp);

  // Resolve jobject
  BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, robj, tmp, slowpath);

  // The bad-mask offset relative to jni_env (about -784) is outside the
  // signed immediate range of a direct load (+127..-128), so materialize
  // the displacement in tmp and form the address explicitly.
  __ mov(tmp, (long int)(in_bytes(ZThreadLocalData::address_bad_mask_offset()) -
      in_bytes(JavaThread::jni_environment_offset())));
  // Load address bad mask
  __ add(tmp, jni_env, tmp);
  __ ldr(tmp, Address(tmp));

  // Check address bad mask; NE means the oop needs healing.
  __ tst(robj, tmp);
  __ br(Assembler::NE, slowpath);

  BLOCK_COMMENT("} ZBarrierSetAssembler::try_resolve_jobject_in_native");
}
 224 
 225 #ifdef COMPILER1
 226 
 227 #undef __
 228 #define __ ce->masm()->
 229 
// Emit the C1 load-barrier fast-path test: AND the loaded ref with the
// thread's address bad mask, setting the condition flags (EQ = good oop).
// The branch on the result is emitted by the C1 caller.
void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                         LIR_Opr ref) const {
  // rheapbase is used as a scratch register here (ZGC does not use it
  // for a heap base); it must not alias rthread or the ref register.
  assert_different_registers(rheapbase, rthread, ref->as_register());

  __ ldr(rheapbase, address_bad_mask_from_thread(rthread));
  __ tst(ref->as_register(), rheapbase);
}
 237 
// Emit the out-of-line C1 slow path taken when the load-barrier test fails.
// Passes (ref, ref_addr) to the shared runtime stub and moves the healed
// oop returned in r0 into the stub's ref register.
void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,
                                                         ZLoadBarrierStubC1* stub) const {
  // Stub entry
  __ bind(*stub->entry());

  Register ref = stub->ref()->as_register();
  Register ref_addr = noreg;
  Register tmp = noreg;

  if (stub->tmp()->is_valid()) {
    // Load address into tmp register
    ce->leal(stub->ref_addr(), stub->tmp());
    ref_addr = tmp = stub->tmp()->as_pointer_register();
  } else {
    // Address already in register
    ref_addr = stub->ref_addr()->as_address_ptr()->base()->as_pointer_register();
  }

  assert_different_registers(ref, ref_addr, noreg);

  // Save r0 unless it is the result or tmp register
  // Set up SP to accomodate parameters and maybe r0..
  // 32 bytes keeps sp 16-byte aligned: the low 16 bytes hold the two
  // stub parameters (see store_parameter below) and [sp, 16] spills r0.
  // NOTE(review): assumes store_parameter() writes within the low 16
  // bytes reserved here - confirm against LIR_Assembler::store_parameter.
  if (ref != r0 && tmp != r0) {
    __ sub(sp, sp, 32);
    __ str(r0, Address(sp, 16));
  } else {
    __ sub(sp, sp, 16);
  }

  // Setup arguments and call runtime stub
  ce->store_parameter(ref_addr, 1);
  ce->store_parameter(ref, 0);

  __ far_call(stub->runtime_stub());

  // Verify result
  __ verify_oop(r0, "Bad oop");

  // Move result into place
  if (ref != r0) {
    __ mov(ref, r0);
  }

  // Restore r0 unless it is the result or tmp register
  if (ref != r0 && tmp != r0) {
    __ ldr(r0, Address(sp, 16));
    __ add(sp, sp, 32);
  } else {
    __ add(sp, sp, 16);
  }

  // Stub exit
  __ b(*stub->continuation());
}
 292 
 293 #undef __
 294 #define __ sasm->
 295 
 296 void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,
 297                                                                  DecoratorSet decorators) const {
 298   __ prologue("zgc_load_barrier stub", false);
 299 
 300   // We don't use push/pop_clobbered_registers() - we need to pull out the result from r0.
 301   for (int i = 0; i < 32; i +=2) {
 302     __ stpd(as_FloatRegister(i), as_FloatRegister(i+1), Address(__ pre(sp,-16)));
 303   }
 304 
 305   RegSet saveRegs = RegSet::range(r0,r28) - RegSet::of(r0);
 306   __ push(saveRegs, sp);
 307 
 308   // Setup arguments
 309   __ load_parameter(0, c_rarg0);
 310   __ load_parameter(1, c_rarg1);
 311 
 312   __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);
 313 
 314   __ pop(saveRegs, sp);
 315 
 316   for (int i = 30; i >0; i -=2) {
 317       __ ldpd(as_FloatRegister(i), as_FloatRegister(i+1), Address(__ post(sp, 16)));
 318     }
 319 
 320   __ epilogue();
 321 }
 322 #endif // COMPILER1
 323 
 324 #undef __
 325 #define __ cgen->assembler()->
 326 
 327 // Generates a register specific stub for calling
 328 // ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
 329 // ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
 330 //
 331 // The raddr register serves as both input and output for this stub. When the stub is
 332 // called the raddr register contains the object field address (oop*) where the bad oop
 333 // was loaded from, which caused the slow path to be taken. On return from the stub the
 334 // raddr register contains the good/healed oop returned from
 335 // ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
 336 // ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
static address generate_load_barrier_stub(StubCodeGenerator* cgen, Register raddr, DecoratorSet decorators) {
  // Don't generate stub for invalid registers (zero register, fp, lr).
  if (raddr == zr || raddr == r29 || raddr == r30) {
    return NULL;
  }

  // Create stub name
  char name[64];
  const bool weak = (decorators & ON_WEAK_OOP_REF) != 0;
  os::snprintf(name, sizeof(name), "zgc_load_barrier%s_stub_%s", weak ? "_weak" : "", raddr->name());

  __ align(CodeEntryAlignment);
  StubCodeMark mark(cgen, "StubRoutines", os::strdup(name, mtCode));
  address start = __ pc();

  // Save live registers r0-r18, excluding raddr since it is overwritten
  // with the result below.
  // NOTE(review): assumes r19-r28 are callee-saved (AAPCS64) and thus
  // preserved by the barrier runtime - confirm against the platform ABI.
  RegSet savedRegs = RegSet::range(r0,r18) - RegSet::of(raddr);

  __ enter();
  __ push(savedRegs, sp);

  // Setup arguments: c_rarg1 = field address, c_rarg0 = the loaded oop.
  // Copy raddr into c_rarg1 first, so a raddr == c_rarg0 overlap is not
  // clobbered before the load through raddr below.
  if (raddr != c_rarg1) {
    __ mov(c_rarg1, raddr);
  }

  __ ldr(c_rarg0, Address(raddr));

  // Call barrier function
  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), c_rarg0, c_rarg1);

  // Move result returned in r0 to raddr, if needed
  if (raddr != r0) {
    __ mov(raddr, r0);
  }

  __ pop(savedRegs, sp);
  __ leave();
  __ ret(lr);

  return start;
}
 379 
 380 #undef __
 381 
 382 static void barrier_stubs_init_inner(const char* label, const DecoratorSet decorators, address* stub) {
 383   const int nregs = 28;              // Exclude FP, XZR, SP from calculation.
 384   const int code_size = nregs * 254; // Rough estimate of code size
 385 
 386   ResourceMark rm;
 387 
 388   CodeBuffer buf(BufferBlob::create(label, code_size));
 389   StubCodeGenerator cgen(&buf);
 390 
 391   for (int i = 0; i < nregs; i++) {
 392     const Register reg = as_Register(i);
 393     stub[i] = generate_load_barrier_stub(&cgen, reg, decorators);
 394   }
 395 }
 396 
// Pre-generate the per-register slow-path stub tables, one set for strong
// oop references and one for weak oop references.
void ZBarrierSetAssembler::barrier_stubs_init() {
  barrier_stubs_init_inner("zgc_load_barrier_stubs", ON_STRONG_OOP_REF, _load_barrier_slow_stub);
  barrier_stubs_init_inner("zgc_load_barrier_weak_stubs", ON_WEAK_OOP_REF, _load_barrier_weak_slow_stub);
}
 401 
// Return the strong-reference load barrier stub specialized for reg.
address ZBarrierSetAssembler::load_barrier_slow_stub(Register reg) {
  return _load_barrier_slow_stub[reg->encoding()];
}
 405 
// Return the weak-reference load barrier stub specialized for reg.
address ZBarrierSetAssembler::load_barrier_weak_slow_stub(Register reg) {
  return _load_barrier_weak_slow_stub[reg->encoding()];
}