1 /*
   2  * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 
  24 #include "precompiled.hpp"
  25 #include "asm/macroAssembler.inline.hpp"
  26 #include "gc/z/zBarrier.inline.hpp"
  27 #include "gc/z/zBarrierSet.hpp"
  28 #include "gc/z/zBarrierSetAssembler.hpp"
  29 #include "gc/z/zBarrierSetRuntime.hpp"
  30 #include "runtime/stubCodeGenerator.hpp"
  31 #include "utilities/macros.hpp"
  32 #ifdef COMPILER1
  33 #include "c1/c1_LIRAssembler.hpp"
  34 #include "c1/c1_MacroAssembler.hpp"
  35 #include "gc/z/c1/zBarrierSetC1.hpp"
  36 #endif // COMPILER1
  37 
  38 #undef __
  39 #define __ masm->
  40 
  41 #ifdef PRODUCT
  42 #define BLOCK_COMMENT(str) /* nothing */
  43 #else
  44 #define BLOCK_COMMENT(str) __ block_comment(str)
  45 #endif
  46 
// Emits a call to a two-argument VM leaf entry point, first shuffling
// arg0/arg1 into the C calling convention registers c_rarg0/c_rarg1.
// The move order is chosen so neither argument is clobbered before it
// has been read:
//  - arg1 in c_rarg0 and arg0 in c_rarg1: the arguments are exactly
//    crossed, a single exchange puts both in place.
//  - only arg1 in c_rarg0: arg1 is moved out to c_rarg1 before
//    c_rarg0 is overwritten with arg0.
//  - otherwise: writing c_rarg0 first is safe (arg1 is not there),
//    and this also moves arg0 out of c_rarg1, if that is where it
//    lives, before c_rarg1 is written.
static void call_vm(MacroAssembler* masm,
                    address entry_point,
                    Register arg0,
                    Register arg1) {
  // Setup arguments
  if (arg1 == c_rarg0) {
    if (arg0 == c_rarg1) {
      // Arguments exactly crossed - swap in one instruction
      __ xchgptr(c_rarg1, c_rarg0);
    } else {
      // Move arg1 out of c_rarg0 before c_rarg0 is clobbered below
      __ movptr(c_rarg1, arg1);
      __ movptr(c_rarg0, arg0);
    }
  } else {
    // arg0 first: reads arg0 (possibly from c_rarg1) before c_rarg1 is written
    if (arg0 != c_rarg0) {
      __ movptr(c_rarg0, arg0);
    }
    if (arg1 != c_rarg1) {
      __ movptr(c_rarg1, arg1);
    }
  }

  // Call VM
  __ MacroAssembler::call_VM_leaf_base(entry_point, 2);
}
  71 
// Emits a ZGC load barrier for an oop load from src into dst.
//
// Fast path: load the oop and test it against the thread-local address
// bad mask; if no bad bits are set the oop is good and no healing is
// needed. Slow path: save all caller-saved GP registers and the java
// float-argument XMM registers, call the runtime load barrier to heal
// the oop, and move the healed oop (returned in rax) into dst.
//
// tmp1 may be noreg, in which case r12 is borrowed as scratch and
// preserved on the stack around the barrier.
void ZBarrierSetAssembler::load_at(MacroAssembler* masm,
                                   DecoratorSet decorators,
                                   BasicType type,
                                   Register dst,
                                   Address src,
                                   Register tmp1,
                                   Register tmp_thread) {
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    // Barrier not needed
    BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::load_at {");

  // Allocate scratch register
  Register scratch = tmp1;
  if (tmp1 == noreg) {
    // No tmp provided by the caller - borrow r12 and preserve it
    scratch = r12;
    __ push(scratch);
  }

  assert_different_registers(dst, scratch);

  Label done;

  //
  // Fast Path
  //

  // Load address (kept in scratch: the slow path needs the field
  // address, not just the loaded value, to heal the field in place)
  __ lea(scratch, src);

  // Load oop at address
  __ movptr(dst, Address(scratch, 0));

  // Test address bad mask - all bits clear means the oop is good
  __ testptr(dst, address_bad_mask_from_thread(r15_thread));
  __ jcc(Assembler::zero, done);

  //
  // Slow path
  //

  // Save registers (all caller-saved GP registers; rax doubles as the
  // runtime call's return value, see the dst handling below)
  __ push(rax);
  __ push(rcx);
  __ push(rdx);
  __ push(rdi);
  __ push(rsi);
  __ push(r8);
  __ push(r9);
  __ push(r10);
  __ push(r11);

  // We may end up here from generate_native_wrapper, then the method may have
  // floats as arguments, and we must spill them before calling the VM runtime
  // leaf. From the interpreter all floats are passed on the stack.
  assert(Argument::n_float_register_parameters_j == 8, "Assumption");
  const int xmm_size = wordSize * 2;
  const int xmm_spill_size = xmm_size * Argument::n_float_register_parameters_j;
  __ subptr(rsp, xmm_spill_size);
  __ movdqu(Address(rsp, xmm_size * 7), xmm7);
  __ movdqu(Address(rsp, xmm_size * 6), xmm6);
  __ movdqu(Address(rsp, xmm_size * 5), xmm5);
  __ movdqu(Address(rsp, xmm_size * 4), xmm4);
  __ movdqu(Address(rsp, xmm_size * 3), xmm3);
  __ movdqu(Address(rsp, xmm_size * 2), xmm2);
  __ movdqu(Address(rsp, xmm_size * 1), xmm1);
  __ movdqu(Address(rsp, xmm_size * 0), xmm0);

  // Call VM with the bad oop (dst) and the field address (scratch)
  call_vm(masm, ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), dst, scratch);

  // Restore registers
  __ movdqu(xmm0, Address(rsp, xmm_size * 0));
  __ movdqu(xmm1, Address(rsp, xmm_size * 1));
  __ movdqu(xmm2, Address(rsp, xmm_size * 2));
  __ movdqu(xmm3, Address(rsp, xmm_size * 3));
  __ movdqu(xmm4, Address(rsp, xmm_size * 4));
  __ movdqu(xmm5, Address(rsp, xmm_size * 5));
  __ movdqu(xmm6, Address(rsp, xmm_size * 6));
  __ movdqu(xmm7, Address(rsp, xmm_size * 7));
  __ addptr(rsp, xmm_spill_size);

  __ pop(r11);
  __ pop(r10);
  __ pop(r9);
  __ pop(r8);
  __ pop(rsi);
  __ pop(rdi);
  __ pop(rdx);
  __ pop(rcx);

  // The healed oop was returned in rax. If dst is rax, just discard
  // the saved rax slot; otherwise copy the result and restore rax.
  if (dst == rax) {
    __ addptr(rsp, wordSize);
  } else {
    __ movptr(dst, rax);
    __ pop(rax);
  }

  __ bind(done);

  // Restore scratch register
  if (tmp1 == noreg) {
    __ pop(scratch);
  }

  BLOCK_COMMENT("} ZBarrierSetAssembler::load_at");
}
 182 
 183 #ifdef ASSERT
 184 
 185 void ZBarrierSetAssembler::store_at(MacroAssembler* masm,
 186                                     DecoratorSet decorators,
 187                                     BasicType type,
 188                                     Address dst,
 189                                     Register src,
 190                                     Register tmp1,
 191                                     Register tmp2) {
 192   BLOCK_COMMENT("ZBarrierSetAssembler::store_at {");
 193 
 194   // Verify oop store
 195   if (type == T_OBJECT || type == T_ARRAY) {
 196     // Note that src could be noreg, which means we
 197     // are storing null and can skip verification.
 198     if (src != noreg) {
 199       Label done;
 200       __ testptr(src, address_bad_mask_from_thread(r15_thread));
 201       __ jcc(Assembler::zero, done);
 202       __ stop("Verify oop store failed");
 203       __ should_not_reach_here();
 204       __ bind(done);
 205     }
 206   }
 207 
 208   // Store value
 209   BarrierSetAssembler::store_at(masm, decorators, type, dst, src, tmp1, tmp2);
 210 
 211   BLOCK_COMMENT("} ZBarrierSetAssembler::store_at");
 212 }
 213 
 214 #endif // ASSERT
 215 
 216 void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm,
 217                                               DecoratorSet decorators,
 218                                               BasicType type,
 219                                               Register src,
 220                                               Register dst,
 221                                               Register count) {
 222   if (!ZBarrierSet::barrier_needed(decorators, type)) {
 223     // Barrier not needed
 224     return;
 225   }
 226 
 227   BLOCK_COMMENT("ZBarrierSetAssembler::arraycopy_prologue {");
 228 
 229   // Save registers
 230   __ pusha();
 231 
 232   // Call VM
 233   call_vm(masm, ZBarrierSetRuntime::load_barrier_on_oop_array_addr(), src, count);
 234 
 235   // Restore registers
 236   __ popa();
 237 
 238   BLOCK_COMMENT("} ZBarrierSetAssembler::arraycopy_prologue");
 239 }
 240 
// Emits a fast-path jobject resolve for native code. Resolves the
// jobject via the generic path, then tests the resolved oop against
// the bad mask reachable from the JNIEnv; if any bad bits are set,
// control branches to slowpath (which is expected to heal the oop).
void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,
                                                         Register jni_env,
                                                         Register obj,
                                                         Register tmp,
                                                         Label& slowpath) {
  BLOCK_COMMENT("ZBarrierSetAssembler::try_resolve_jobject_in_native {");

  // Resolve jobject
  BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, obj, tmp, slowpath);

  // Test address bad mask - any bad bit set means the oop needs healing
  __ testptr(obj, address_bad_mask_from_jni_env(jni_env));
  __ jcc(Assembler::notZero, slowpath);

  BLOCK_COMMENT("} ZBarrierSetAssembler::try_resolve_jobject_in_native");
}
 257 
 258 #ifdef COMPILER1
 259 
 260 #undef __
 261 #define __ ce->masm()->
 262 
// Emits the C1 load barrier fast-path test: ZF is set iff ref has no
// address bad mask bits set, i.e. the oop is good and the slow path
// can be skipped.
void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                         LIR_Opr ref) const {
  __ testptr(ref->as_register(), address_bad_mask_from_thread(r15_thread));
}
 267 
// Emits the C1 out-of-line load barrier stub. Materializes the field
// address if needed, calls the shared runtime stub with (ref, ref_addr)
// as parameters, and moves the healed oop (returned in rax) into the
// result register before jumping back to the continuation.
void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,
                                                         ZLoadBarrierStubC1* stub) const {
  // Stub entry
  __ bind(*stub->entry());

  Register ref = stub->ref()->as_register();
  Register ref_addr = noreg;

  if (stub->ref_addr()->is_register()) {
    // Address already in register
    ref_addr = stub->ref_addr()->as_pointer_register();
  } else {
    // Load address into tmp register (honoring any patching info
    // attached to the stub)
    ce->leal(stub->ref_addr(), stub->tmp(), stub->patch_code(), stub->patch_info());
    ref_addr = stub->tmp()->as_pointer_register();
  }

  // Also checks that ref_addr was actually assigned (is not noreg)
  assert_different_registers(ref, ref_addr, noreg);

  // Save rax unless it is the result register (the runtime stub
  // returns the healed oop in rax)
  if (ref != rax) {
    __ push(rax);
  }

  // Setup arguments and call runtime stub. The two stack slots hold
  // the parameters written by store_parameter below.
  __ subptr(rsp, 2 * BytesPerWord);
  ce->store_parameter(ref_addr, 1);
  ce->store_parameter(ref, 0);
  __ call(RuntimeAddress(stub->runtime_stub()));
  __ addptr(rsp, 2 * BytesPerWord);

  // Verify result
  __ verify_oop(rax, "Bad oop");

  // Restore rax unless it is the result register; otherwise the
  // result is already where it belongs
  if (ref != rax) {
    __ movptr(ref, rax);
    __ pop(rax);
  }

  // Stub exit
  __ jmp(*stub->continuation());
}
 311 
 312 #undef __
 313 #define __ sasm->
 314 
// Generates the shared C1 runtime stub that the out-of-line load
// barrier stubs call into. Saves all live registers, loads the two
// parameters written by store_parameter (0: bad oop, 1: field
// address), calls the runtime load barrier, and returns with the
// healed oop in rax (which is deliberately not restored).
void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,
                                                                 DecoratorSet decorators) const {
  // Enter and save registers
  __ enter();
  __ save_live_registers_no_oop_map(true /* save_fpu_registers */);

  // Setup arguments
  __ load_parameter(1, c_rarg1);
  __ load_parameter(0, c_rarg0);

  // Call VM
  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), c_rarg0, c_rarg1);

  // Restore registers and return (rax carries the healed oop)
  __ restore_live_registers_except_rax(true /* restore_fpu_registers */);
  __ leave();
  __ ret(0);
}
 333 
 334 #endif // COMPILER1
 335 
 336 #undef __
 337 #define __ cgen->assembler()->
 338 
 339 // Generates a register specific stub for calling
 340 // ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
 341 // ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
 342 //
 343 // The raddr register serves as both input and output for this stub. When the stub is
 344 // called the raddr register contains the object field address (oop*) where the bad oop
 345 // was loaded from, which caused the slow path to be taken. On return from the stub the
 346 // raddr register contains the good/healed oop returned from
 347 // ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
 348 // ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
 349 static address generate_load_barrier_stub(StubCodeGenerator* cgen, Register raddr, DecoratorSet decorators) {
 350   // Don't generate stub for invalid registers
 351   if (raddr == rsp || raddr == r12 || raddr == r15) {
 352     return NULL;
 353   }
 354 
 355   // Create stub name
 356   char name[64];
 357   const bool weak = (decorators & ON_WEAK_OOP_REF) != 0;
 358   os::snprintf(name, sizeof(name), "load_barrier%s_stub_%s", weak ? "_weak" : "", raddr->name());
 359 
 360   __ align(CodeEntryAlignment);
 361   StubCodeMark mark(cgen, "StubRoutines", os::strdup(name, mtCode));
 362   address start = __ pc();
 363 
 364   // Save live registers
 365   if (raddr != rax) {
 366     __ push(rax);
 367   }
 368   if (raddr != rcx) {
 369     __ push(rcx);
 370   }
 371   if (raddr != rdx) {
 372     __ push(rdx);
 373   }
 374   if (raddr != rsi) {
 375     __ push(rsi);
 376   }
 377   if (raddr != rdi) {
 378     __ push(rdi);
 379   }
 380   if (raddr != r8) {
 381     __ push(r8);
 382   }
 383   if (raddr != r9) {
 384     __ push(r9);
 385   }
 386   if (raddr != r10) {
 387     __ push(r10);
 388   }
 389   if (raddr != r11) {
 390     __ push(r11);
 391   }
 392 
 393   // Setup arguments
 394   if (c_rarg1 != raddr) {
 395     __ movq(c_rarg1, raddr);
 396   }
 397   __ movq(c_rarg0, Address(raddr, 0));
 398 
 399   // Call barrier function
 400   __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), c_rarg0, c_rarg1);
 401 
 402   // Move result returned in rax to raddr, if needed
 403   if (raddr != rax) {
 404     __ movq(raddr, rax);
 405   }
 406 
 407   // Restore saved registers
 408   if (raddr != r11) {
 409     __ pop(r11);
 410   }
 411   if (raddr != r10) {
 412     __ pop(r10);
 413   }
 414   if (raddr != r9) {
 415     __ pop(r9);
 416   }
 417   if (raddr != r8) {
 418     __ pop(r8);
 419   }
 420   if (raddr != rdi) {
 421     __ pop(rdi);
 422   }
 423   if (raddr != rsi) {
 424     __ pop(rsi);
 425   }
 426   if (raddr != rdx) {
 427     __ pop(rdx);
 428   }
 429   if (raddr != rcx) {
 430     __ pop(rcx);
 431   }
 432   if (raddr != rax) {
 433     __ pop(rax);
 434   }
 435 
 436   __ ret(0);
 437 
 438   return start;
 439 }
 440 
 441 #undef __
 442 
 443 void ZBarrierSetAssembler::barrier_stubs_init() {
 444   // Load barrier stubs
 445   int stub_code_size = 256 * 16; // Rough estimate of code size
 446 
 447   ResourceMark rm;
 448   BufferBlob* bb = BufferBlob::create("zgc_load_barrier_stubs", stub_code_size);
 449   CodeBuffer buf(bb);
 450   StubCodeGenerator cgen(&buf);
 451 
 452   Register rr = as_Register(0);
 453   for (int i = 0; i < RegisterImpl::number_of_registers; i++) {
 454     _load_barrier_slow_stub[i] = generate_load_barrier_stub(&cgen, rr, ON_STRONG_OOP_REF);
 455     _load_barrier_weak_slow_stub[i] = generate_load_barrier_stub(&cgen, rr, ON_WEAK_OOP_REF);
 456     rr = rr->successor();
 457   }
 458 }