1 /*
   2  * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 
  24 #include "precompiled.hpp"
  25 #include "asm/macroAssembler.inline.hpp"
  26 #include "code/codeBlob.hpp"
  27 #include "gc/z/zBarrier.inline.hpp"
  28 #include "gc/z/zBarrierSet.hpp"
  29 #include "gc/z/zBarrierSetAssembler.hpp"
  30 #include "gc/z/zBarrierSetRuntime.hpp"
  31 #include "memory/resourceArea.hpp"
  32 #include "runtime/stubCodeGenerator.hpp"
  33 #include "utilities/macros.hpp"
  34 #ifdef COMPILER1
  35 #include "c1/c1_LIRAssembler.hpp"
  36 #include "c1/c1_MacroAssembler.hpp"
  37 #include "gc/z/c1/zBarrierSetC1.hpp"
  38 #endif // COMPILER1
  39 
  40 #undef __
  41 #define __ masm->
  42 
  43 #ifdef PRODUCT
  44 #define BLOCK_COMMENT(str) /* nothing */
  45 #else
  46 #define BLOCK_COMMENT(str) __ block_comment(str)
  47 #endif
  48 
  49 static void call_vm(MacroAssembler* masm,
  50                     address entry_point,
  51                     Register arg0,
  52                     Register arg1) {
  53   // Setup arguments
  54   if (arg1 == c_rarg0) {
  55     if (arg0 == c_rarg1) {
  56       __ xchgptr(c_rarg1, c_rarg0);
  57     } else {
  58       __ movptr(c_rarg1, arg1);
  59       __ movptr(c_rarg0, arg0);
  60     }
  61   } else {
  62     if (arg0 != c_rarg0) {
  63       __ movptr(c_rarg0, arg0);
  64     }
  65     if (arg1 != c_rarg1) {
  66       __ movptr(c_rarg1, arg1);
  67     }
  68   }
  69 
  70   // Call VM
  71   __ MacroAssembler::call_VM_leaf_base(entry_point, 2);
  72 }
  73 
// Emits an oop load with a ZGC load barrier: a fast path that tests the
// loaded value against the per-thread address bad mask, and a slow path
// that saves live registers and calls the load-barrier runtime to heal
// the oop. dst receives the (healed) oop; tmp1 may be noreg, in which
// case r12 is temporarily spilled and used as the address scratch.
void ZBarrierSetAssembler::load_at(MacroAssembler* masm,
                                   DecoratorSet decorators,
                                   BasicType type,
                                   Register dst,
                                   Address src,
                                   Register tmp1,
                                   Register tmp_thread) {
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    // Barrier not needed
    BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::load_at {");

  // Allocate scratch register
  // No tmp was provided: borrow r12 and preserve its old value on the stack.
  Register scratch = tmp1;
  if (tmp1 == noreg) {
    scratch = r12;
    __ push(scratch);
  }

  assert_different_registers(dst, scratch);

  Label done;

  //
  // Fast Path
  //

  // Load address
  __ lea(scratch, src);

  // Load oop at address
  __ movptr(dst, Address(scratch, 0));

  // Test address bad mask
  // Zero means the oop is good (no barrier work needed) -> skip slow path.
  __ testptr(dst, address_bad_mask_from_thread(r15_thread));
  __ jcc(Assembler::zero, done);

  //
  // Slow path
  //

  // Save registers
  // All caller-saved GP registers except the argument/result shuffle
  // handled below; rax is pushed first so it is popped last (see the
  // dst == rax special case after the call).
  __ push(rax);
  __ push(rcx);
  __ push(rdx);
  __ push(rdi);
  __ push(rsi);
  __ push(r8);
  __ push(r9);
  __ push(r10);
  __ push(r11);

  // We may end up here from generate_native_wrapper, then the method may have
  // floats as arguments, and we must spill them before calling the VM runtime
  // leaf. From the interpreter all floats are passed on the stack.
  assert(Argument::n_float_register_parameters_j == 8, "Assumption");
  const int xmm_size = wordSize * 2;
  const int xmm_spill_size = xmm_size * Argument::n_float_register_parameters_j;
  __ subptr(rsp, xmm_spill_size);
  __ movdqu(Address(rsp, xmm_size * 7), xmm7);
  __ movdqu(Address(rsp, xmm_size * 6), xmm6);
  __ movdqu(Address(rsp, xmm_size * 5), xmm5);
  __ movdqu(Address(rsp, xmm_size * 4), xmm4);
  __ movdqu(Address(rsp, xmm_size * 3), xmm3);
  __ movdqu(Address(rsp, xmm_size * 2), xmm2);
  __ movdqu(Address(rsp, xmm_size * 1), xmm1);
  __ movdqu(Address(rsp, xmm_size * 0), xmm0);

  // Call VM
  // Arguments: the bad oop (dst) and its field address (scratch).
  // The healed oop is returned in rax.
  call_vm(masm, ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), dst, scratch);

  // Restore registers
  __ movdqu(xmm0, Address(rsp, xmm_size * 0));
  __ movdqu(xmm1, Address(rsp, xmm_size * 1));
  __ movdqu(xmm2, Address(rsp, xmm_size * 2));
  __ movdqu(xmm3, Address(rsp, xmm_size * 3));
  __ movdqu(xmm4, Address(rsp, xmm_size * 4));
  __ movdqu(xmm5, Address(rsp, xmm_size * 5));
  __ movdqu(xmm6, Address(rsp, xmm_size * 6));
  __ movdqu(xmm7, Address(rsp, xmm_size * 7));
  __ addptr(rsp, xmm_spill_size);

  __ pop(r11);
  __ pop(r10);
  __ pop(r9);
  __ pop(r8);
  __ pop(rsi);
  __ pop(rdi);
  __ pop(rdx);
  __ pop(rcx);

  if (dst == rax) {
    // Result is already in the destination; discard the saved rax slot.
    __ addptr(rsp, wordSize);
  } else {
    // Move the healed oop to dst, then restore the caller's rax.
    __ movptr(dst, rax);
    __ pop(rax);
  }

  __ bind(done);

  // Restore scratch register
  if (tmp1 == noreg) {
    __ pop(scratch);
  }

  BLOCK_COMMENT("} ZBarrierSetAssembler::load_at");
}
 184 
 185 #ifdef ASSERT
 186 
 187 void ZBarrierSetAssembler::store_at(MacroAssembler* masm,
 188                                     DecoratorSet decorators,
 189                                     BasicType type,
 190                                     Address dst,
 191                                     Register src,
 192                                     Register tmp1,
 193                                     Register tmp2) {
 194   BLOCK_COMMENT("ZBarrierSetAssembler::store_at {");
 195 
 196   // Verify oop store
 197   if (type == T_OBJECT || type == T_ARRAY) {
 198     // Note that src could be noreg, which means we
 199     // are storing null and can skip verification.
 200     if (src != noreg) {
 201       Label done;
 202       __ testptr(src, address_bad_mask_from_thread(r15_thread));
 203       __ jcc(Assembler::zero, done);
 204       __ stop("Verify oop store failed");
 205       __ should_not_reach_here();
 206       __ bind(done);
 207     }
 208   }
 209 
 210   // Store value
 211   BarrierSetAssembler::store_at(masm, decorators, type, dst, src, tmp1, tmp2);
 212 
 213   BLOCK_COMMENT("} ZBarrierSetAssembler::store_at");
 214 }
 215 
 216 #endif // ASSERT
 217 
 218 void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm,
 219                                               DecoratorSet decorators,
 220                                               BasicType type,
 221                                               Register src,
 222                                               Register dst,
 223                                               Register count) {
 224   if (!ZBarrierSet::barrier_needed(decorators, type)) {
 225     // Barrier not needed
 226     return;
 227   }
 228 
 229   BLOCK_COMMENT("ZBarrierSetAssembler::arraycopy_prologue {");
 230 
 231   // Save registers
 232   __ pusha();
 233 
 234   // Call VM
 235   call_vm(masm, ZBarrierSetRuntime::load_barrier_on_oop_array_addr(), src, count);
 236 
 237   // Restore registers
 238   __ popa();
 239 
 240   BLOCK_COMMENT("} ZBarrierSetAssembler::arraycopy_prologue");
 241 }
 242 
// Resolves a jobject from native code and branches to slowpath if the
// resolved oop has bad-mask bits set (i.e. it needs barrier healing that
// cannot be done here). On the fast path, obj holds the resolved good oop.
void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,
                                                         Register jni_env,
                                                         Register obj,
                                                         Register tmp,
                                                         Label& slowpath) {
  BLOCK_COMMENT("ZBarrierSetAssembler::try_resolve_jobject_in_native {");

  // Resolve jobject
  BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, obj, tmp, slowpath);

  // Test address bad mask
  // The mask is read via the JNIEnv (no JavaThread register available here);
  // any set bit means the oop is bad and must take the slow path.
  __ testptr(obj, address_bad_mask_from_jni_env(jni_env));
  __ jcc(Assembler::notZero, slowpath);

  BLOCK_COMMENT("} ZBarrierSetAssembler::try_resolve_jobject_in_native");
}
 259 
 260 #ifdef COMPILER1
 261 
 262 #undef __
 263 #define __ ce->masm()->
 264 
// C1: emit the load barrier fast-path test — sets the zero flag when ref
// is a good oop (no bad-mask bits); the caller emits the conditional branch.
void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                         LIR_Opr ref) const {
  __ testptr(ref->as_register(), address_bad_mask_from_thread(r15_thread));
}
 269 
// C1: emit the out-of-line slow-path stub for a load barrier. Passes the
// bad oop and its field address to the runtime stub and moves the healed
// oop (returned in rax) back into the reference register.
void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,
                                                         ZLoadBarrierStubC1* stub) const {
  // Stub entry
  __ bind(*stub->entry());

  Register ref = stub->ref();
  Register ref_addr = noreg;

  if (stub->ref_addr()->is_register()) {
    // Address already in register
    ref_addr = stub->ref_addr()->as_pointer_register();
  } else {
    // Load address into tmp register
    ce->leal(stub->ref_addr(), stub->tmp());
    ref_addr = stub->tmp()->as_pointer_register();
  }

  assert_different_registers(ref, ref_addr, noreg);

  // Save rax unless it is the result register
  // (the runtime stub returns the healed oop in rax)
  if (ref != rax) {
    __ push(rax);
  }

  // Setup arguments and call runtime stub
  // Reserve two stack slots for store_parameter, then release them after.
  __ subptr(rsp, 2 * BytesPerWord);
  ce->store_parameter(ref_addr, 1);
  ce->store_parameter(ref, 0);
  __ call(RuntimeAddress(stub->runtime_stub()));
  __ addptr(rsp, 2 * BytesPerWord);

  // Verify result
  __ verify_oop(rax, "Bad oop");

  // Restore rax unless it is the result register
  if (ref != rax) {
    __ movptr(ref, rax);
    __ pop(rax);
  }

  // Stub exit
  __ jmp(*stub->continuation());
}
 313 
 314 #undef __
 315 #define __ sasm->
 316 
// C1: generate the shared runtime stub body that the out-of-line slow
// path calls. Saves all live registers, loads the (oop, oop*) arguments
// from the parameter slots, calls the load-barrier runtime leaf, and
// returns the healed oop in rax (hence rax is excluded from the restore).
void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,
                                                                 DecoratorSet decorators) const {
  // Enter and save registers
  __ enter();
  __ save_live_registers_no_oop_map(true /* save_fpu_registers */);

  // Setup arguments
  // Parameter 0 is the bad oop, parameter 1 is its field address.
  __ load_parameter(1, c_rarg1);
  __ load_parameter(0, c_rarg0);

  // Call VM
  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), c_rarg0, c_rarg1);

  // Restore registers and return
  // rax carries the result and must not be clobbered by the restore.
  __ restore_live_registers_except_rax(true /* restore_fpu_registers */);
  __ leave();
  __ ret(0);
}
 335 
 336 #endif // COMPILER1
 337 
 338 #undef __
 339 #define __ cgen->assembler()->
 340 
 341 // Generates a register specific stub for calling
 342 // ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
 343 // ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
 344 //
 345 // The raddr register serves as both input and output for this stub. When the stub is
 346 // called the raddr register contains the object field address (oop*) where the bad oop
 347 // was loaded from, which caused the slow path to be taken. On return from the stub the
 348 // raddr register contains the good/healed oop returned from
 349 // ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
 350 // ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
 351 static address generate_load_barrier_stub(StubCodeGenerator* cgen, Register raddr, DecoratorSet decorators) {
 352   // Don't generate stub for invalid registers
 353   if (raddr == rsp || raddr == r12 || raddr == r15) {
 354     return NULL;
 355   }
 356 
 357   // Create stub name
 358   char name[64];
 359   const bool weak = (decorators & ON_WEAK_OOP_REF) != 0;
 360   os::snprintf(name, sizeof(name), "load_barrier%s_stub_%s", weak ? "_weak" : "", raddr->name());
 361 
 362   __ align(CodeEntryAlignment);
 363   StubCodeMark mark(cgen, "StubRoutines", os::strdup(name, mtCode));
 364   address start = __ pc();
 365 
 366   // Save live registers
 367   if (raddr != rax) {
 368     __ push(rax);
 369   }
 370   if (raddr != rcx) {
 371     __ push(rcx);
 372   }
 373   if (raddr != rdx) {
 374     __ push(rdx);
 375   }
 376   if (raddr != rsi) {
 377     __ push(rsi);
 378   }
 379   if (raddr != rdi) {
 380     __ push(rdi);
 381   }
 382   if (raddr != r8) {
 383     __ push(r8);
 384   }
 385   if (raddr != r9) {
 386     __ push(r9);
 387   }
 388   if (raddr != r10) {
 389     __ push(r10);
 390   }
 391   if (raddr != r11) {
 392     __ push(r11);
 393   }
 394 
 395   // Setup arguments
 396   if (c_rarg1 != raddr) {
 397     __ movq(c_rarg1, raddr);
 398   }
 399   __ movq(c_rarg0, Address(raddr, 0));
 400 
 401   // Call barrier function
 402   __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), c_rarg0, c_rarg1);
 403 
 404   // Move result returned in rax to raddr, if needed
 405   if (raddr != rax) {
 406     __ movq(raddr, rax);
 407   }
 408 
 409   // Restore saved registers
 410   if (raddr != r11) {
 411     __ pop(r11);
 412   }
 413   if (raddr != r10) {
 414     __ pop(r10);
 415   }
 416   if (raddr != r9) {
 417     __ pop(r9);
 418   }
 419   if (raddr != r8) {
 420     __ pop(r8);
 421   }
 422   if (raddr != rdi) {
 423     __ pop(rdi);
 424   }
 425   if (raddr != rsi) {
 426     __ pop(rsi);
 427   }
 428   if (raddr != rdx) {
 429     __ pop(rdx);
 430   }
 431   if (raddr != rcx) {
 432     __ pop(rcx);
 433   }
 434   if (raddr != rax) {
 435     __ pop(rax);
 436   }
 437 
 438   __ ret(0);
 439 
 440   return start;
 441 }
 442 
 443 #undef __
 444 
 445 void ZBarrierSetAssembler::barrier_stubs_init() {
 446   // Load barrier stubs
 447   int stub_code_size = 256 * 16; // Rough estimate of code size
 448 
 449   ResourceMark rm;
 450   BufferBlob* bb = BufferBlob::create("zgc_load_barrier_stubs", stub_code_size);
 451   CodeBuffer buf(bb);
 452   StubCodeGenerator cgen(&buf);
 453 
 454   Register rr = as_Register(0);
 455   for (int i = 0; i < RegisterImpl::number_of_registers; i++) {
 456     _load_barrier_slow_stub[i] = generate_load_barrier_stub(&cgen, rr, ON_STRONG_OOP_REF);
 457     _load_barrier_weak_slow_stub[i] = generate_load_barrier_stub(&cgen, rr, ON_WEAK_OOP_REF);
 458     rr = rr->successor();
 459   }
 460 }