1 /*
   2  * Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 
  24 #include "precompiled.hpp"
  25 #include "asm/macroAssembler.inline.hpp"
  26 #include "code/codeBlob.hpp"
  27 #include "gc/z/zBarrier.inline.hpp"
  28 #include "gc/z/zBarrierSet.hpp"
  29 #include "gc/z/zBarrierSetAssembler.hpp"
  30 #include "gc/z/zBarrierSetRuntime.hpp"
  31 #include "memory/resourceArea.hpp"
  32 #ifdef COMPILER1
  33 #include "c1/c1_LIRAssembler.hpp"
  34 #include "c1/c1_MacroAssembler.hpp"
  35 #include "gc/z/c1/zBarrierSetC1.hpp"
  36 #endif // COMPILER1
  37 
  38 #include "gc/z/zThreadLocalData.hpp"
  39 
// Zero-initialize the per-register slow-path stub tables. The actual stub
// code is generated later by barrier_stubs_init().
ZBarrierSetAssembler::ZBarrierSetAssembler() :
    _load_barrier_slow_stub(),
    _load_barrier_weak_slow_stub() {}
  43 
  44 #ifdef PRODUCT
  45 #define BLOCK_COMMENT(str) /* nothing */
  46 #else
  47 #define BLOCK_COMMENT(str) __ block_comment(str)
  48 #endif
  49 
  50 #undef __
  51 #define __ masm->
  52 
// Emits a ZGC load barrier for an oop load from 'src' into 'dst'.
//
// Fast path: load the oop and AND it against the per-thread address bad
// mask; if no bad bits are set (EQ) the oop is good and we fall through.
// Slow path: save live state, call the ZBarrierSetRuntime fix-up function
// (which returns the healed oop in r0), and restore.
void ZBarrierSetAssembler::load_at(MacroAssembler* masm,
                                   DecoratorSet decorators,
                                   BasicType type,
                                   Register dst,
                                   Address src,
                                   Register tmp1,
                                   Register tmp_thread) {
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    // Barrier not needed
    BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
    return;
  }

  // rscratch1 holds the bad mask and rscratch2 the field address below,
  // so neither may alias the source base register or the destination.
  assert_different_registers(rscratch1, rscratch2, src.base());
  assert_different_registers(rscratch1, rscratch2, dst);

  // Registers to preserve across the runtime call: everything except dst
  // (which receives the result) and the two scratch registers.
  RegSet savedRegs = RegSet::range(r0,r28) - RegSet::of(dst, rscratch1, rscratch2);

  Label done;

  // Load bad mask into scratch register.
  __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
  __ lea(rscratch2, src);  // Keep the field address; the slow path needs it
  __ ldr(dst, src);

  // Test reference against bad mask. If mask bad, then we need to fix it up.
  __ tst(dst, rscratch1);
  __ br(Assembler::EQ, done);

  __ enter();

  __ push(savedRegs, sp);

  // Slow-path arguments: c_rarg0 = bad oop, c_rarg1 = field address.
  if (c_rarg0 != dst) {
    __ mov(c_rarg0, dst);
  }
  __ mov(c_rarg1, rscratch2);

  // Save all 32 SIMD/FP registers (v0..v31) in groups of four, growing
  // the stack downwards by 'step' bytes per group (post-indexed by the
  // negative step held in rscratch2).
  int step = 4 * wordSize;
  __ mov(rscratch2, -step);
  __ sub(sp, sp, step);

  for (int i = 28; i >= 4; i -= 4) {
    __ st1(as_FloatRegister(i), as_FloatRegister(i+1), as_FloatRegister(i+2),
        as_FloatRegister(i+3), __ T1D, Address(__ post(sp, rscratch2)));
  }
  // Final group v0..v3 at the current stack pointer, no writeback.
  __ st1(as_FloatRegister(0), as_FloatRegister(1), as_FloatRegister(2),
      as_FloatRegister(3), __ T1D, Address(sp));

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);

  // Restore v0..v31 in ascending order, unwinding the stack by 'step'
  // bytes per group (mirrors the save sequence above exactly).
  for (int i = 0; i <= 28; i += 4) {
    __ ld1(as_FloatRegister(i), as_FloatRegister(i+1), as_FloatRegister(i+2),
        as_FloatRegister(i+3), __ T1D, Address(__ post(sp, step)));
  }

  // Make sure dst has the return value.
  if (dst != r0) {
    __ mov(dst, r0);
  }

  __ pop(savedRegs, sp);
  __ leave();

  __ bind(done);
}
 119 
 120 #ifdef ASSERT
 121 
// Debug-only (ASSERT build) store barrier: before an oop store, verify the
// value being stored has no bad-mask bits set, then delegate to the plain
// store. ZGC needs no store barrier in product builds, hence the #ifdef.
void ZBarrierSetAssembler::store_at(MacroAssembler* masm,
                                        DecoratorSet decorators,
                                        BasicType type,
                                        Address dst,
                                        Register val,
                                        Register tmp1,
                                        Register tmp2) {
  // Verify value
  if (is_reference_type(type)) {
    // Note that src could be noreg, which means we
    // are storing null and can skip verification.
    if (val != noreg) {
      Label done;

      // tmp1 and tmp2 are often set to noreg, so spill rscratch1 instead
      // of relying on the callers' temporaries.
      RegSet savedRegs = RegSet::of(rscratch1);
      __ push(savedRegs, sp);

      // AND the value against the per-thread bad mask; a good oop gives EQ.
      __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
      __ tst(val, rscratch1);
      __ br(Assembler::EQ, done);
      __ stop("Verify oop store failed");
      __ should_not_reach_here();
      __ bind(done);
      __ pop(savedRegs, sp);
    }
  }

  // Store value
  BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2);
}
 153 
 154 #endif // ASSERT
 155 
// Arraycopy prologue: before copying an array of oops, heal every element
// of the source range by calling ZBarrierSetRuntime::load_barrier_on_oop_array()
// with (src, count). Non-oop copies need no barrier.
void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm,
                                              DecoratorSet decorators,
                                              bool is_oop,
                                              Register src,
                                              Register dst,
                                              Register count,
                                              RegSet saved_regs) {
  if (!is_oop) {
    // Barrier not needed
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::arraycopy_prologue {");

  assert_different_registers(src, count, rscratch1);

  // Preserve all general-purpose registers around the runtime call.
  __ pusha();

  // Shuffle (src, count) into (c_rarg0, c_rarg1) without clobbering
  // either value before it has been read.
  if (count == c_rarg0) {
    if (src == c_rarg1) {
      // exactly backwards!! Swap via rscratch1.
      __ mov(rscratch1, c_rarg0);
      __ mov(c_rarg0, c_rarg1);
      __ mov(c_rarg1, rscratch1);
    } else {
      // count already sits in c_rarg0, so move it out first.
      __ mov(c_rarg1, count);
      __ mov(c_rarg0, src);
    }
  } else {
    // Safe to fill the argument registers in natural order.
    __ mov(c_rarg0, src);
    __ mov(c_rarg1, count);
  }

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_array_addr(), 2);

  __ popa();
  BLOCK_COMMENT("} ZBarrierSetAssembler::arraycopy_prologue");
}
 194 
// Resolves a jobject from native code without a thread-state transition:
// after the generic resolve, the referent is tested against the per-thread
// address bad mask; a reference with bad bits set branches to 'slowpath'.
// Clobbers tmp.
void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,
                                                         Register jni_env,
                                                         Register robj,
                                                         Register tmp,
                                                         Label& slowpath) {
  BLOCK_COMMENT("ZBarrierSetAssembler::try_resolve_jobject_in_native {");

  assert_different_registers(jni_env, robj, tmp);

  // Resolve jobject
  BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, robj, tmp, slowpath);

  // The bad-mask offset relative to jni_env is a large negative value
  // (around -784), outside the signed immediate range of a direct load,
  // so materialize the displacement in tmp and add it explicitly.
  __ mov(tmp, (long int)(in_bytes(ZThreadLocalData::address_bad_mask_offset()) -
      in_bytes(JavaThread::jni_environment_offset())));
  // Load address bad mask
  __ add(tmp, jni_env, tmp);
  __ ldr(tmp, Address(tmp));

  // Check address bad mask; NE means bad bits present -> take slow path.
  __ tst(robj, tmp);
  __ br(Assembler::NE, slowpath);

  BLOCK_COMMENT("} ZBarrierSetAssembler::try_resolve_jobject_in_native");
}
 220 
 221 #ifdef COMPILER1
 222 
 223 #undef __
 224 #define __ ce->masm()->
 225 
// Emits the inline part of the C1 load barrier: load the per-thread bad
// mask (rheapbase is used as a scratch register here) and AND it against
// the reference, setting the condition flags. NE indicates bad bits are
// present; the branch to the slow-path stub is emitted by the C1 caller.
void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                         LIR_Opr ref) const {
  assert_different_registers(rheapbase, rthread, ref->as_register());

  __ ldr(rheapbase, address_bad_mask_from_thread(rthread));
  __ tst(ref->as_register(), rheapbase);
}
 233 
// Emits the out-of-line C1 slow path taken when the inline barrier test
// (generate_c1_load_barrier_test) found bad bits. Resolves the field
// address, spills arguments (and possibly r0) onto the stack, calls the
// runtime stub, and moves the healed oop (returned in r0) into 'ref'.
void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,
                                                         ZLoadBarrierStubC1* stub) const {
  // Stub entry
  __ bind(*stub->entry());

  Register ref = stub->ref()->as_register();
  Register ref_addr = noreg;
  Register tmp = noreg;

  if (stub->tmp()->is_valid()) {
    // Load address into tmp register
    ce->leal(stub->ref_addr(), stub->tmp());
    ref_addr = tmp = stub->tmp()->as_pointer_register();
  } else {
    // Address already in register
    ref_addr = stub->ref_addr()->as_address_ptr()->base()->as_pointer_register();
  }

  assert_different_registers(ref, ref_addr, noreg);

  // Save r0 unless it is the result or tmp register
  // Set up SP to accommodate parameters and maybe r0:
  // 16 bytes for the two store_parameter() slots, plus another 16 when
  // r0 also has to be preserved.
  if (ref != r0 && tmp != r0) {
    __ sub(sp, sp, 32);
    __ str(r0, Address(sp, 16));
  } else {
    __ sub(sp, sp, 16);
  }

  // Setup arguments and call runtime stub
  ce->store_parameter(ref_addr, 1);
  ce->store_parameter(ref, 0);

  __ far_call(stub->runtime_stub());

  // Verify result
  __ verify_oop(r0, "Bad oop");

  // Move result into place
  if (ref != r0) {
    __ mov(ref, r0);
  }

  // Restore r0 unless it is the result or tmp register
  // (mirrors the save logic above, including the SP adjustment).
  if (ref != r0 && tmp != r0) {
    __ ldr(r0, Address(sp, 16));
    __ add(sp, sp, 32);
  } else {
    __ add(sp, sp, 16);
  }

  // Stub exit
  __ b(*stub->continuation());
}
 288 
 289 #undef __
 290 #define __ sasm->
 291 
 292 void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,
 293                                                                  DecoratorSet decorators) const {
 294   __ prologue("zgc_load_barrier stub", false);
 295 
 296   // We don't use push/pop_clobbered_registers() - we need to pull out the result from r0.
 297   for (int i = 0; i < 32; i +=2) {
 298     __ stpd(as_FloatRegister(i), as_FloatRegister(i+1), Address(__ pre(sp,-16)));
 299   }
 300 
 301   RegSet saveRegs = RegSet::range(r0,r28) - RegSet::of(r0);
 302   __ push(saveRegs, sp);
 303 
 304   // Setup arguments
 305   __ load_parameter(0, c_rarg0);
 306   __ load_parameter(1, c_rarg1);
 307 
 308   __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);
 309 
 310   __ pop(saveRegs, sp);
 311 
 312   for (int i = 30; i >0; i -=2) {
 313       __ ldpd(as_FloatRegister(i), as_FloatRegister(i+1), Address(__ post(sp, 16)));
 314     }
 315 
 316   __ epilogue();
 317 }
 318 #endif // COMPILER1
 319 
 320 #undef __
 321 #define __ cgen->assembler()->
 322 
 323 // Generates a register specific stub for calling
 324 // ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
 325 // ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
 326 //
 327 // The raddr register serves as both input and output for this stub. When the stub is
 328 // called the raddr register contains the object field address (oop*) where the bad oop
 329 // was loaded from, which caused the slow path to be taken. On return from the stub the
 330 // raddr register contains the good/healed oop returned from
 331 // ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
 332 // ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
// Generates one register-specific slow-path stub. On entry 'raddr' holds
// the field address of the bad oop; on return it holds the healed oop.
static address generate_load_barrier_stub(StubCodeGenerator* cgen, Register raddr, DecoratorSet decorators) {
  // Don't generate stub for invalid registers (XZR, FP, LR cannot carry
  // the address/result).
  if (raddr == zr || raddr == r29 || raddr == r30) {
    return NULL;
  }

  // Create stub name
  char name[64];
  const bool weak = (decorators & ON_WEAK_OOP_REF) != 0;
  os::snprintf(name, sizeof(name), "zgc_load_barrier%s_stub_%s", weak ? "_weak" : "", raddr->name());

  __ align(CodeEntryAlignment);
  // 'name' is stack-local, so hand StubCodeMark a durable copy.
  StubCodeMark mark(cgen, "StubRoutines", os::strdup(name, mtCode));
  address start = __ pc();

  // Save live registers r0..r18 (presumably the caller-saved set per the
  // AArch64 calling convention - the stub preserves everything else by
  // not touching it), except raddr itself, which carries the result out.
  RegSet savedRegs = RegSet::range(r0,r18) - RegSet::of(raddr);

  __ enter();
  __ push(savedRegs, sp);

  // Setup arguments: c_rarg1 = field address, c_rarg0 = oop loaded from it.
  if (raddr != c_rarg1) {
    __ mov(c_rarg1, raddr);
  }

  __ ldr(c_rarg0, Address(raddr));

  // Call barrier function
  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), c_rarg0, c_rarg1);

  // Move result returned in r0 to raddr, if needed
  if (raddr != r0) {
    __ mov(raddr, r0);
  }

  __ pop(savedRegs, sp);
  __ leave();
  __ ret(lr);

  return start;
}
 375 
 376 #undef __
 377 
 378 static void barrier_stubs_init_inner(const char* label, const DecoratorSet decorators, address* stub) {
 379   const int nregs = 28;              // Exclude FP, XZR, SP from calculation.
 380   const int code_size = nregs * 254; // Rough estimate of code size
 381 
 382   ResourceMark rm;
 383 
 384   CodeBuffer buf(BufferBlob::create(label, code_size));
 385   StubCodeGenerator cgen(&buf);
 386 
 387   for (int i = 0; i < nregs; i++) {
 388     const Register reg = as_Register(i);
 389     stub[i] = generate_load_barrier_stub(&cgen, reg, decorators);
 390   }
 391 }
 392 
// Populates both per-register stub tables: strong-reference barriers
// first, then weak-reference barriers.
void ZBarrierSetAssembler::barrier_stubs_init() {
  barrier_stubs_init_inner("zgc_load_barrier_stubs", ON_STRONG_OOP_REF, _load_barrier_slow_stub);
  barrier_stubs_init_inner("zgc_load_barrier_weak_stubs", ON_WEAK_OOP_REF, _load_barrier_weak_slow_stub);
}
 397 
 398 address ZBarrierSetAssembler::load_barrier_slow_stub(Register reg) {
 399   return _load_barrier_slow_stub[reg->encoding()];
 400 }
 401 
 402 address ZBarrierSetAssembler::load_barrier_weak_slow_stub(Register reg) {
 403   return _load_barrier_weak_slow_stub[reg->encoding()];
 404 }