1 /* 2 * Copyright (c) 2013, 2018, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 */ 23 24 25 package org.graalvm.compiler.lir.aarch64; 26 27 import static jdk.vm.ci.aarch64.AArch64.sp; 28 import static jdk.vm.ci.aarch64.AArch64.zr; 29 import static jdk.vm.ci.code.ValueUtil.asAllocatableValue; 30 import static jdk.vm.ci.code.ValueUtil.asRegister; 31 import static jdk.vm.ci.code.ValueUtil.asStackSlot; 32 import static jdk.vm.ci.code.ValueUtil.isRegister; 33 import static jdk.vm.ci.code.ValueUtil.isStackSlot; 34 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.COMPOSITE; 35 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.HINT; 36 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.REG; 37 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.STACK; 38 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.UNINITIALIZED; 39 import static org.graalvm.compiler.lir.LIRValueUtil.asJavaConstant; 40 import static org.graalvm.compiler.lir.LIRValueUtil.isJavaConstant; 41 42 import org.graalvm.compiler.asm.aarch64.AArch64Address; 43 import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler; 44 import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler.ScratchRegister; 45 import org.graalvm.compiler.core.common.LIRKind; 46 import org.graalvm.compiler.core.common.type.DataPointerConstant; 47 import org.graalvm.compiler.debug.GraalError; 48 import org.graalvm.compiler.lir.LIRFrameState; 49 import org.graalvm.compiler.lir.LIRInstructionClass; 50 import org.graalvm.compiler.lir.Opcode; 51 import org.graalvm.compiler.lir.StandardOp; 52 import org.graalvm.compiler.lir.StandardOp.LoadConstantOp; 53 import org.graalvm.compiler.lir.StandardOp.NullCheck; 54 import org.graalvm.compiler.lir.StandardOp.ValueMoveOp; 55 import org.graalvm.compiler.lir.VirtualStackSlot; 56 import org.graalvm.compiler.lir.asm.CompilationResultBuilder; 57 58 import jdk.vm.ci.aarch64.AArch64Kind; 59 import jdk.vm.ci.code.MemoryBarriers; 60 import jdk.vm.ci.code.Register; 61 import jdk.vm.ci.code.StackSlot; 62 
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.Constant;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.PlatformKind;
import jdk.vm.ci.meta.Value;

/**
 * AArch64 LIR instructions and helpers for moving values between registers, stack slots,
 * inline constants and memory, plus memory barriers and implicit null checks.
 */
public class AArch64Move {

    /**
     * Materializes a {@link JavaConstant} directly into a register or a stack slot.
     */
    public static class LoadInlineConstant extends AArch64LIRInstruction implements LoadConstantOp {
        public static final LIRInstructionClass<LoadInlineConstant> TYPE = LIRInstructionClass.create(LoadInlineConstant.class);

        private JavaConstant constant;
        @Def({REG, STACK}) AllocatableValue result;

        public LoadInlineConstant(JavaConstant constant, AllocatableValue result) {
            super(TYPE);
            this.constant = constant;
            this.result = result;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            if (isRegister(result)) {
                const2reg(crb, masm, result, constant);
            } else if (isStackSlot(result)) {
                StackSlot slot = asStackSlot(result);
                const2stack(crb, masm, slot, constant);
            }
        }

        @Override
        public Constant getConstant() {
            return constant;
        }

        @Override
        public AllocatableValue getResult() {
            return result;
        }
    }

    /**
     * General value move between register and/or stack operands. Direct stack-to-stack moves are
     * not permitted for this instruction (see the constructor assertion); such moves go through
     * {@link #move} which copies via a scratch register.
     */
    @Opcode("MOVE")
    public static class Move extends AArch64LIRInstruction implements ValueMoveOp {
        public static final LIRInstructionClass<Move> TYPE = LIRInstructionClass.create(Move.class);

        @Def({REG, STACK, HINT}) protected AllocatableValue result;
        @Use({REG, STACK}) protected AllocatableValue input;

        public Move(AllocatableValue result, AllocatableValue input) {
            super(TYPE);
            this.result = result;
            this.input = input;
            // At most one side may be a stack slot.
            assert !(isStackSlot(result) && isStackSlot(input));
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            move(crb, masm, getResult(), getInput());
        }

        @Override
        public AllocatableValue getInput() {
            return input;
        }

        @Override
        public AllocatableValue getResult() {
            return result;
        }
    }

    /**
     * Computes the effective address of an {@link AArch64AddressValue} into a register.
     */
    public static class LoadAddressOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<LoadAddressOp> TYPE = LIRInstructionClass.create(LoadAddressOp.class);

        @Def protected AllocatableValue result;
        @Use(COMPOSITE) protected AArch64AddressValue address;

        public LoadAddressOp(AllocatableValue result, AArch64AddressValue address) {
            super(TYPE);
            this.result = result;
            this.address = address;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            Register dst = asRegister(result);
            AArch64Address adr = address.toAddress();
            masm.loadAddress(dst, adr, address.getScaleFactor());
        }
    }

    /**
     * Loads the address of a {@link DataPointerConstant} recorded in the data section.
     */
    public static class LoadDataOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<LoadDataOp> TYPE = LIRInstructionClass.create(LoadDataOp.class);

        @Def protected AllocatableValue result;
        private final DataPointerConstant data;

        public LoadDataOp(AllocatableValue result, DataPointerConstant data) {
            super(TYPE);
            this.result = result;
            this.data = data;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            Register dst = asRegister(result);
            if (crb.compilationResult.isImmutablePIC()) {
                // Position-independent code: record the reference and emit a pc-relative address.
                crb.recordDataReferenceInCode(data);
                masm.addressOf(dst);
            } else {
                masm.loadAddress(dst, (AArch64Address) crb.recordDataReferenceInCode(data), data.getAlignment());
            }
        }
    }

    /**
     * Computes the address of a stack slot into a register. The slot may be uninitialized.
     */
    public static class StackLoadAddressOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<StackLoadAddressOp> TYPE = LIRInstructionClass.create(StackLoadAddressOp.class);

        @Def protected AllocatableValue result;
        @Use({STACK, UNINITIALIZED}) protected AllocatableValue slot;

        public StackLoadAddressOp(AllocatableValue result, AllocatableValue slot) {
            super(TYPE);
            assert slot instanceof VirtualStackSlot || slot instanceof StackSlot;
            this.result = result;
            this.slot = slot;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            try (ScratchRegister addrReg = masm.getScratchRegister()) {
                AArch64Address address = loadStackSlotAddress(crb, masm, (StackSlot) slot, addrReg.getRegister());
                PlatformKind kind = AArch64Kind.QWORD;
                masm.loadAddress(asRegister(result, kind), address, kind.getSizeInBytes());
            }
        }
    }

    /**
     * Emits a memory barrier (DMB). The {@code barriers} bit set (see
     * {@link MemoryBarriers}) is mapped onto the closest available AArch64 barrier kind;
     * anything not matching a pure store-store or load-anything pattern falls back to a
     * full {@code ANY_ANY} barrier.
     */
    public static class MembarOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<MembarOp> TYPE = LIRInstructionClass.create(MembarOp.class);

        // For future use.
        @SuppressWarnings("unused") private final int barriers;

        public MembarOp(int barriers) {
            super(TYPE);
            this.barriers = barriers;
        }

        @Override
        // The odd-looking @SuppressWarnings("all") is here because of
        // a compiler bug which warns that crb is unused, and also
        // warns that @SuppressWarnings("unused") is unnecessary.
        public void emitCode(@SuppressWarnings("all") CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            assert barriers >= MemoryBarriers.LOAD_LOAD && barriers <= (MemoryBarriers.STORE_STORE | MemoryBarriers.STORE_LOAD | MemoryBarriers.LOAD_STORE | MemoryBarriers.LOAD_LOAD);
            switch (barriers) {
                case MemoryBarriers.STORE_STORE:
                    masm.dmb(AArch64MacroAssembler.BarrierKind.STORE_STORE);
                    break;
                case MemoryBarriers.LOAD_LOAD:
                case MemoryBarriers.LOAD_STORE:
                case MemoryBarriers.LOAD_LOAD | MemoryBarriers.LOAD_STORE:
                    masm.dmb(AArch64MacroAssembler.BarrierKind.LOAD_LOAD);
                    break;
                default:
                    // Any combination involving STORE_LOAD requires the strongest barrier.
                    masm.dmb(AArch64MacroAssembler.BarrierKind.ANY_ANY);
                    break;
            }
        }
    }

    /**
     * Common base for memory accesses: holds the address, the access kind and an optional
     * {@link LIRFrameState} used when the access doubles as an implicit null check.
     */
    abstract static class MemOp extends AArch64LIRInstruction implements StandardOp.ImplicitNullCheck {

        protected final AArch64Kind kind;
        @Use({COMPOSITE}) protected AArch64AddressValue addressValue;
        @State protected LIRFrameState state;

        MemOp(LIRInstructionClass<? extends MemOp> c, AArch64Kind kind, AArch64AddressValue address, LIRFrameState state) {
            super(c);
            this.kind = kind;
            this.addressValue = address;
            this.state = state;
        }

        protected abstract void emitMemAccess(CompilationResultBuilder crb, AArch64MacroAssembler masm);

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            if (state != null) {
                // A fault on this access is turned into the recorded deopt/exception state.
                crb.recordImplicitException(masm.position(), state);
            }
            emitMemAccess(crb, masm);
        }

        @Override
        public boolean makeNullCheckFor(Value value, LIRFrameState nullCheckState, int implicitNullCheckLimit) {
            int displacement = addressValue.getDisplacement();
            // Only fold the null check in when this access dereferences exactly the checked base
            // register with a small non-negative displacement and no index/offset register.
            if (state == null && value.equals(addressValue.getBase()) && addressValue.getOffset().equals(Value.ILLEGAL) && displacement >= 0 && displacement < implicitNullCheckLimit) {
                state = nullCheckState;
                return true;
            }
            return false;
        }
    }

    /**
     * Loads a value of the given {@link AArch64Kind} from memory into a register.
     */
    public static final class LoadOp extends MemOp {
        public static final LIRInstructionClass<LoadOp> TYPE = LIRInstructionClass.create(LoadOp.class);

        @Def protected AllocatableValue result;

        public LoadOp(AArch64Kind kind, AllocatableValue result, AArch64AddressValue address, LIRFrameState state) {
            super(TYPE, kind, address, state);
            this.result = result;
        }

        @Override
        protected void emitMemAccess(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            AArch64Address address = addressValue.toAddress();
            Register dst = asRegister(result);

            int destSize = result.getPlatformKind().getSizeInBytes() * Byte.SIZE;
            int srcSize = kind.getSizeInBytes() * Byte.SIZE;
            if (kind.isInteger()) {
                masm.ldr(srcSize, dst, address);
            } else {
                // Floating-point loads must not implicitly widen/narrow.
                assert srcSize == destSize;
                masm.fldr(srcSize, dst, address);
            }
        }
    }

    /**
     * Stores a register value to memory.
     */
    public static class StoreOp extends MemOp {
        public static final LIRInstructionClass<StoreOp> TYPE = LIRInstructionClass.create(StoreOp.class);
        @Use protected AllocatableValue input;

        public StoreOp(AArch64Kind kind, AArch64AddressValue address, AllocatableValue input, LIRFrameState state) {
            super(TYPE, kind, address, state);
            this.input = input;
        }

        @Override
        protected void emitMemAccess(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            emitStore(crb, masm, kind, addressValue.toAddress(), input);
        }
    }

    /**
     * Stores a constant to memory. Only default (zero/null) constants are supported; the
     * value is emitted by storing the zero register.
     */
    public static final class StoreConstantOp extends MemOp {
        public static final LIRInstructionClass<StoreConstantOp> TYPE = LIRInstructionClass.create(StoreConstantOp.class);

        protected final JavaConstant input;

        public StoreConstantOp(AArch64Kind kind, AArch64AddressValue address, JavaConstant input, LIRFrameState state) {
            super(TYPE, kind, address, state);
            this.input = input;
            if (!input.isDefaultForKind()) {
                throw GraalError.shouldNotReachHere("Can only store null constants to memory");
            }
        }

        @Override
        public void emitMemAccess(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            emitStore(crb, masm, kind, addressValue.toAddress(), zr.asValue(LIRKind.combine(addressValue)));
        }
    }

    /**
     * Explicit null check: performs a 64-bit load into the zero register (discarding the
     * value) so that a null base faults, with the fault mapped to {@code state}.
     */
    public static final class NullCheckOp extends AArch64LIRInstruction implements NullCheck {
        public static final LIRInstructionClass<NullCheckOp> TYPE = LIRInstructionClass.create(NullCheckOp.class);

        @Use(COMPOSITE) protected AArch64AddressValue address;
        @State protected LIRFrameState state;

        public NullCheckOp(AArch64AddressValue address, LIRFrameState state) {
            super(TYPE);
            this.address = address;
            this.state = state;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            crb.recordImplicitException(masm.position(), state);
            masm.ldr(64, zr, address.toAddress());
        }

        @Override
        public Value getCheckedValue() {
            return address.base;
        }

        @Override
        public LIRFrameState getState() {
            return state;
        }
    }

    /**
     * Emits a store of {@code src} (integer or floating-point register) to {@code dst},
     * sized by {@code kind}.
     */
    private static void emitStore(@SuppressWarnings("unused") CompilationResultBuilder crb, AArch64MacroAssembler masm, AArch64Kind kind, AArch64Address dst, Value src) {
        int destSize = kind.getSizeInBytes() * Byte.SIZE;
        if (kind.isInteger()) {
            masm.str(destSize, asRegister(src), dst);
        } else {
            masm.fstr(destSize, asRegister(src), dst);
        }
    }

    /**
     * Dispatches a move between any supported combination of register, stack slot and
     * Java constant operands.
     */
    public static void move(CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, Value input) {
        if (isRegister(input)) {
            if (isRegister(result)) {
                reg2reg(crb, masm, result, asAllocatableValue(input));
            } else if (isStackSlot(result)) {
                reg2stack(crb, masm, result, asAllocatableValue(input));
            } else {
                throw GraalError.shouldNotReachHere();
            }
        } else if (isStackSlot(input)) {
            if (isRegister(result)) {
                stack2reg(crb, masm, result, asAllocatableValue(input));
            } else if (isStackSlot(result)) {
                emitStackMove(crb, masm, result, input);
            } else {
                throw GraalError.shouldNotReachHere();
            }
        } else if (isJavaConstant(input)) {
            if (isRegister(result)) {
                const2reg(crb, masm, result, asJavaConstant(input));
            } else {
                // Constant-to-stack is handled by LoadInlineConstant, not here.
                throw GraalError.shouldNotReachHere();
            }
        } else {
            throw GraalError.shouldNotReachHere();
        }
    }

    /**
     * Copies one stack slot to another via a scratch register (AArch64 has no direct
     * memory-to-memory move). A second scratch register is used for address formation.
     */
    private static void emitStackMove(CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, Value input) {
        try (ScratchRegister r1 = masm.getScratchRegister()) {
            try (ScratchRegister r2 = masm.getScratchRegister()) {
                Register rscratch1 = r1.getRegister();
                Register rscratch2 = r2.getRegister();
                // use the slot kind to define the operand size
                PlatformKind kind = input.getPlatformKind();
                final int size = kind.getSizeInBytes() * Byte.SIZE;

                // Always perform stack -> stack copies through integer registers
                crb.blockComment("[stack -> stack copy]");
                AArch64Address src = loadStackSlotAddress(crb, masm, asStackSlot(input), rscratch2);
                masm.ldr(size, rscratch1, src);
                AArch64Address dst = loadStackSlotAddress(crb, masm, asStackSlot(result), rscratch2);
                masm.str(size, rscratch1, dst);
            }
        }
    }

    /**
     * Register-to-register move; a no-op when source and destination are the same register.
     */
    private static void reg2reg(@SuppressWarnings("unused") CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, AllocatableValue input) {
        Register dst = asRegister(result);
        Register src = asRegister(input);
        if (src.equals(dst)) {
            return;
        }
        AArch64Kind kind = (AArch64Kind) input.getPlatformKind();
        int size = kind.getSizeInBytes() * Byte.SIZE;
        if (kind.isInteger()) {
            masm.mov(size, dst, src);
        } else {
            masm.fmov(size, dst, src);
        }
    }

    /**
     * Spills a register to a stack slot. No scratch register is available here, so the
     * slot must be addressable directly (see {@link #loadStackSlotAddress}).
     */
    static void reg2stack(CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, AllocatableValue input) {
        AArch64Address dest = loadStackSlotAddress(crb, masm, asStackSlot(result), Value.ILLEGAL);
        Register src = asRegister(input);
        // use the slot kind to define the operand size
        AArch64Kind kind = (AArch64Kind) result.getPlatformKind();
        final int size = kind.getSizeInBytes() * Byte.SIZE;
        if (kind.isInteger()) {
            masm.str(size, src, dest);
        } else {
            masm.fstr(size, src, dest);
        }
    }

    /**
     * Reloads a stack slot into a register. For integer loads the destination register
     * itself can serve as address scratch; floating-point destinations need a separate
     * integer scratch register.
     */
    static void stack2reg(CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, AllocatableValue input) {
        AArch64Kind kind = (AArch64Kind) input.getPlatformKind();
        // use the slot kind to define the operand size
        final int size = kind.getSizeInBytes() * Byte.SIZE;
        if (kind.isInteger()) {
            AArch64Address src = loadStackSlotAddress(crb, masm, asStackSlot(input), result);
            masm.ldr(size, asRegister(result), src);
        } else {
            try (ScratchRegister sc = masm.getScratchRegister()) {
                AllocatableValue scratchRegisterValue = sc.getRegister().asValue(LIRKind.combine(input));
                AArch64Address src = loadStackSlotAddress(crb, masm, asStackSlot(input), scratchRegisterValue);
                masm.fldr(size, asRegister(result), src);
            }
        }
    }

    /**
     * Materializes a {@link JavaConstant} into {@code result} (must be a register).
     * Sub-int integer kinds are masked to their width; float/double use immediate forms
     * when encodable, otherwise a constant-pool load (or a scratch-register move for
     * immutable PIC); objects are either null, inlined (patched later), or loaded from
     * the data section.
     */
    private static void const2reg(CompilationResultBuilder crb, AArch64MacroAssembler masm, Value result, JavaConstant input) {
        Register dst = asRegister(result);
        switch (input.getJavaKind().getStackKind()) {
            case Int:
                final int value = input.asInt();
                int maskedValue;
                switch (input.getJavaKind()) {
                    case Boolean:
                    case Byte:
                        maskedValue = value & 0xFF;
                        break;
                    case Char:
                    case Short:
                        maskedValue = value & 0xFFFF;
                        break;
                    case Int:
                        maskedValue = value;
                        break;
                    default:
                        throw GraalError.shouldNotReachHere();
                }
                masm.mov(dst, maskedValue);
                break;
            case Long:
                masm.mov(dst, input.asLong());
                break;
            case Float:
                if (AArch64MacroAssembler.isFloatImmediate(input.asFloat())) {
                    masm.fmov(32, dst, input.asFloat());
                } else if (crb.compilationResult.isImmutablePIC()) {
                    try (ScratchRegister scr = masm.getScratchRegister()) {
                        Register scratch = scr.getRegister();
                        masm.mov(scratch, Float.floatToRawIntBits(input.asFloat()));
                        masm.fmov(32, dst, scratch);
                    }
                } else {
                    masm.fldr(32, dst, (AArch64Address) crb.asFloatConstRef(input));
                }
                break;
            case Double:
                if (AArch64MacroAssembler.isDoubleImmediate(input.asDouble())) {
                    masm.fmov(64, dst, input.asDouble());
                } else if (crb.compilationResult.isImmutablePIC()) {
                    try (ScratchRegister scr = masm.getScratchRegister()) {
                        Register scratch = scr.getRegister();
                        masm.mov(scratch, Double.doubleToRawLongBits(input.asDouble()));
                        masm.fmov(64, dst, scratch);
                    }
                } else {
                    masm.fldr(64, dst, (AArch64Address) crb.asDoubleConstRef(input));
                }
                break;
            case Object:
                if (input.isNull()) {
                    masm.mov(dst, 0);
                } else if (crb.target.inlineObjects) {
                    crb.recordInlineDataInCode(input);
                    // Placeholder bits; presumably patched with the real object address at
                    // link/install time via the recorded inline data reference.
                    masm.movNativeAddress(dst, 0xDEADDEADDEADDEADL);
                } else {
                    masm.ldr(64, dst, (AArch64Address) crb.recordDataReferenceInCode(input, 8));
                }
                break;
            default:
                throw GraalError.shouldNotReachHere("kind=" + input.getJavaKind().getStackKind());
        }
    }

    /**
     * Stores a constant to a stack slot: zero/null constants store the zero register
     * directly; any other constant is first materialized into a scratch register.
     */
    private static void const2stack(CompilationResultBuilder crb, AArch64MacroAssembler masm, Value result, JavaConstant constant) {
        try (ScratchRegister addrReg = masm.getScratchRegister()) {
            StackSlot slot = (StackSlot) result;
            AArch64Address resultAddress = loadStackSlotAddress(crb, masm, slot, addrReg.getRegister());
            if (constant.isDefaultForKind() || constant.isNull()) {
                emitStore(crb, masm, (AArch64Kind) result.getPlatformKind(), resultAddress, zr.asValue(LIRKind.combine(result)));
            } else {
                try (ScratchRegister sc = masm.getScratchRegister()) {
                    Value scratchRegisterValue = sc.getRegister().asValue(LIRKind.combine(result));
                    const2reg(crb, masm, scratchRegisterValue, constant);
                    emitStore(crb, masm, (AArch64Kind) result.getPlatformKind(), resultAddress, scratchRegisterValue);
                }
            }
        }
    }

    /**
     * Returns AArch64Address of given StackSlot. We cannot use CompilationResultBuilder.asAddress
     * since this calls AArch64MacroAssembler.makeAddress with displacements that may be larger than
     * 9-bit signed, which cannot be handled by that method.
     *
     * Instead we create an address ourselves. We use scaled unsigned addressing since we know the
     * transfersize, which gives us a 15-bit address range (for longs/doubles) respectively a 14-bit
     * range (for everything else).
     *
     * @param scratch Scratch register that can be used to load address. If Value.ILLEGAL this
     *            instruction fails if we try to access a StackSlot that is too large to be loaded
     *            directly.
     * @return AArch64Address of given StackSlot. Uses scratch register if necessary to do so.
     */
    private static AArch64Address loadStackSlotAddress(CompilationResultBuilder crb, AArch64MacroAssembler masm, StackSlot slot, AllocatableValue scratch) {
        // Value.ILLEGAL means "no scratch available"; zr is passed as the no-scratch marker.
        Register scratchReg = Value.ILLEGAL.equals(scratch) ? zr : asRegister(scratch);
        return loadStackSlotAddress(crb, masm, slot, scratchReg);
    }

    private static AArch64Address loadStackSlotAddress(CompilationResultBuilder crb, AArch64MacroAssembler masm, StackSlot slot, Register scratchReg) {
        // Displacements are relative to sp; scaling by the transfer size extends the
        // directly-encodable range (see the javadoc on the overload above).
        int displacement = crb.frameMap.offsetForStackSlot(slot);
        int transferSize = slot.getPlatformKind().getSizeInBytes();
        return masm.makeAddress(sp, displacement, scratchReg, transferSize, /* allowOverwrite */false);
    }

}