/*
 * Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package org.graalvm.compiler.lir.aarch64;

import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.COMPOSITE;
import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.HINT;
import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.REG;
import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.STACK;
import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.UNINITIALIZED;
import static org.graalvm.compiler.lir.LIRValueUtil.asJavaConstant;
import static org.graalvm.compiler.lir.LIRValueUtil.isJavaConstant;
import static jdk.vm.ci.aarch64.AArch64.sp;
import static jdk.vm.ci.aarch64.AArch64.zr;
import static jdk.vm.ci.code.ValueUtil.asAllocatableValue;
import static jdk.vm.ci.code.ValueUtil.asRegister;
import static jdk.vm.ci.code.ValueUtil.asStackSlot;
import static jdk.vm.ci.code.ValueUtil.isRegister;
import static jdk.vm.ci.code.ValueUtil.isStackSlot;

import org.graalvm.compiler.asm.Label;
import org.graalvm.compiler.asm.aarch64.AArch64Address;
import org.graalvm.compiler.asm.aarch64.AArch64Assembler;
import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler;
import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler.ScratchRegister;
import org.graalvm.compiler.core.common.LIRKind;
import org.graalvm.compiler.core.common.type.DataPointerConstant;
import org.graalvm.compiler.debug.GraalError;
import org.graalvm.compiler.lir.LIRFrameState;
import org.graalvm.compiler.lir.LIRInstructionClass;
import org.graalvm.compiler.lir.Opcode;
import org.graalvm.compiler.lir.StandardOp;
import org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
import org.graalvm.compiler.lir.StandardOp.NullCheck;
import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
import org.graalvm.compiler.lir.VirtualStackSlot;
import org.graalvm.compiler.lir.asm.CompilationResultBuilder;

import jdk.vm.ci.aarch64.AArch64Kind;
import jdk.vm.ci.code.Register;
import jdk.vm.ci.code.StackSlot;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.Constant;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.PlatformKind;
import jdk.vm.ci.meta.Value;

public class AArch64Move {

    public static class LoadInlineConstant extends AArch64LIRInstruction implements LoadConstantOp {
        public static final LIRInstructionClass<LoadInlineConstant> TYPE = LIRInstructionClass.create(LoadInlineConstant.class);

        private JavaConstant constant;
        @Def({REG, STACK}) AllocatableValue result;

        public LoadInlineConstant(JavaConstant constant, AllocatableValue result) {
            super(TYPE);
            this.constant = constant;
            this.result = result;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            if (isRegister(result)) {
                const2reg(crb, masm, result, constant);
            } else if (isStackSlot(result)) {
                StackSlot slot = asStackSlot(result);
                const2stack(crb, masm, slot, constant);
            }
        }

        @Override
        public Constant getConstant() {
            return constant;
        }

        @Override
        public AllocatableValue getResult() {
            return result;
        }
    }

    @Opcode("MOVE")
    public static class Move extends AArch64LIRInstruction implements ValueMoveOp {
        public static final LIRInstructionClass<Move> TYPE = LIRInstructionClass.create(Move.class);

        @Def({REG, STACK, HINT}) protected AllocatableValue result;
        @Use({REG, STACK}) protected AllocatableValue input;

        public Move(AllocatableValue result, AllocatableValue input) {
            super(TYPE);
            this.result = result;
            this.input = input;
            assert !(isStackSlot(result) && isStackSlot(input));
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            move(crb, masm, getResult(), getInput());
        }

        @Override
        public AllocatableValue getInput() {
            return input;
        }

        @Override
        public AllocatableValue getResult() {
            return result;
        }
    }

    public static class LoadAddressOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<LoadAddressOp> TYPE = LIRInstructionClass.create(LoadAddressOp.class);

        @Def protected AllocatableValue result;
        @Use(COMPOSITE) protected AArch64AddressValue address;

        public LoadAddressOp(AllocatableValue result, AArch64AddressValue address) {
            super(TYPE);
            this.result = result;
            this.address = address;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            Register dst = asRegister(result);
            AArch64Address adr = address.toAddress();
            masm.loadAddress(dst, adr, address.getPlatformKind().getSizeInBytes());
        }
    }

    public static class LoadDataOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<LoadDataOp> TYPE = LIRInstructionClass.create(LoadDataOp.class);

        @Def protected AllocatableValue result;
        private final DataPointerConstant data;

        public LoadDataOp(AllocatableValue result, DataPointerConstant data) {
            super(TYPE);
            this.result = result;
            this.data = data;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            Register dst = asRegister(result);
            masm.loadAddress(dst, (AArch64Address) crb.recordDataReferenceInCode(data), data.getAlignment());
        }
    }

    public static class StackLoadAddressOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<StackLoadAddressOp> TYPE = LIRInstructionClass.create(StackLoadAddressOp.class);

        @Def protected AllocatableValue result;
        @Use({STACK, UNINITIALIZED}) protected AllocatableValue slot;

        public StackLoadAddressOp(AllocatableValue result, AllocatableValue slot) {
            super(TYPE);
            assert slot instanceof VirtualStackSlot || slot instanceof StackSlot;
            this.result = result;
            this.slot = slot;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            try (ScratchRegister addrReg = masm.getScratchRegister()) {
                AArch64Address address = loadStackSlotAddress(crb, masm, (StackSlot) slot, addrReg.getRegister());
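                // The address of a stack slot is always a full 64-bit value, regardless of the
                // kind of data stored in the slot, so the result is computed as a QWORD.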
                PlatformKind kind = AArch64Kind.QWORD;
                masm.loadAddress(asRegister(result, kind), address, kind.getSizeInBytes());
            }
        }
    }

    public static class MembarOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<MembarOp> TYPE = LIRInstructionClass.create(MembarOp.class);

        @SuppressWarnings("unused") private final int barriers;

        public MembarOp(int barriers) {
            super(TYPE);
            this.barriers = barriers;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            // Load-acquire/store-release have the same semantics as on IA64 and allow us to
            // handle LoadStore, LoadLoad and StoreStore without an explicit barrier. However,
            // Graal currently has no support for determining whether a load/store is volatile,
            // so for now we emit a full memory barrier everywhere.
            // if ((barrier & MemoryBarriers.STORE_LOAD) != 0) {
            masm.dmb(AArch64MacroAssembler.BarrierKind.ANY_ANY);
            // }
        }
    }

    abstract static class MemOp extends AArch64LIRInstruction implements StandardOp.ImplicitNullCheck {

        protected final AArch64Kind kind;
        @Use({COMPOSITE}) protected AArch64AddressValue addressValue;
        @State protected LIRFrameState state;

        MemOp(LIRInstructionClass<? extends MemOp> c, AArch64Kind kind, AArch64AddressValue address, LIRFrameState state) {
            super(c);
            this.kind = kind;
            this.addressValue = address;
            this.state = state;
        }

        protected abstract void emitMemAccess(CompilationResultBuilder crb, AArch64MacroAssembler masm);

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            if (state != null) {
                crb.recordImplicitException(masm.position(), state);
            }
            emitMemAccess(crb, masm);
        }

        @Override
        public boolean makeNullCheckFor(Value value, LIRFrameState nullCheckState, int implicitNullCheckLimit) {
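            // The null check can be folded into this access only if no frame state is attached
            // yet, the checked value is the base register, there is no index register, and the
            // constant displacement is non-negative and below the implicit null check limit.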
            int immediate = addressValue.getImmediate();
            if (state == null && value.equals(addressValue.getBase()) && addressValue.getOffset().equals(Value.ILLEGAL) && immediate >= 0 && immediate < implicitNullCheckLimit) {
                state = nullCheckState;
                return true;
            }
            return false;
        }
    }

    public static final class LoadOp extends MemOp {
        public static final LIRInstructionClass<LoadOp> TYPE = LIRInstructionClass.create(LoadOp.class);

        @Def protected AllocatableValue result;

        public LoadOp(AArch64Kind kind, AllocatableValue result, AArch64AddressValue address, LIRFrameState state) {
            super(TYPE, kind, address, state);
            this.result = result;
        }

        @Override
        protected void emitMemAccess(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            AArch64Address address = addressValue.toAddress();
            Register dst = asRegister(result);

            int destSize = result.getPlatformKind().getSizeInBytes() * Byte.SIZE;
            int srcSize = kind.getSizeInBytes() * Byte.SIZE;
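            // Integer loads may be narrower than the destination register; floating-point loads
            // must match the destination size exactly.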
            if (kind.isInteger()) {
                masm.ldr(srcSize, dst, address);
            } else {
                assert srcSize == destSize;
                masm.fldr(srcSize, dst, address);
            }
        }
    }

    public static class StoreOp extends MemOp {
        public static final LIRInstructionClass<StoreOp> TYPE = LIRInstructionClass.create(StoreOp.class);
        @Use protected AllocatableValue input;

        public StoreOp(AArch64Kind kind, AArch64AddressValue address, AllocatableValue input, LIRFrameState state) {
            super(TYPE, kind, address, state);
            this.input = input;
        }

        @Override
        protected void emitMemAccess(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            emitStore(crb, masm, kind, addressValue.toAddress(), input);
        }
    }

    public static final class StoreConstantOp extends MemOp {
        public static final LIRInstructionClass<StoreConstantOp> TYPE = LIRInstructionClass.create(StoreConstantOp.class);

        protected final JavaConstant input;

        public StoreConstantOp(AArch64Kind kind, AArch64AddressValue address, JavaConstant input, LIRFrameState state) {
            super(TYPE, kind, address, state);
            this.input = input;
            if (!input.isDefaultForKind()) {
                throw GraalError.shouldNotReachHere("Can only store null constants to memory");
            }
        }

        @Override
        public void emitMemAccess(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            emitStore(crb, masm, kind, addressValue.toAddress(), zr.asValue(LIRKind.combine(addressValue)));
        }
    }

    public static final class NullCheckOp extends AArch64LIRInstruction implements NullCheck {
        public static final LIRInstructionClass<NullCheckOp> TYPE = LIRInstructionClass.create(NullCheckOp.class);

        @Use(COMPOSITE) protected AArch64AddressValue address;
        @State protected LIRFrameState state;

        public NullCheckOp(AArch64AddressValue address, LIRFrameState state) {
            super(TYPE);
            this.address = address;
            this.state = state;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            crb.recordImplicitException(masm.position(), state);
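            // Loading into the zero register discards the value; the load exists only to fault
            // (and raise the implicit exception recorded above) if the address is invalid.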
            masm.ldr(64, zr, address.toAddress());
        }

        @Override
        public Value getCheckedValue() {
            return address.base;
        }

        @Override
        public LIRFrameState getState() {
            return state;
        }
    }

    /**
     * Compare and swap instruction. Does the following atomically: <code>
     *  CAS(newVal, expected, address):
     *    oldVal = *address
     *    if oldVal == expected:
     *        *address = newVal
     *    return oldVal
     * </code>
     */
    @Opcode("CAS")
    public static class CompareAndSwapOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<CompareAndSwapOp> TYPE = LIRInstructionClass.create(CompareAndSwapOp.class);

        @Def protected AllocatableValue resultValue;
        @Alive protected Value expectedValue;
        @Alive protected AllocatableValue newValue;
        @Alive protected AllocatableValue addressValue;
        @Temp protected AllocatableValue scratchValue;

        public CompareAndSwapOp(AllocatableValue result, Value expectedValue, AllocatableValue newValue, AllocatableValue addressValue, AllocatableValue scratch) {
            super(TYPE);
            this.resultValue = result;
            this.expectedValue = expectedValue;
            this.newValue = newValue;
            this.addressValue = addressValue;
            this.scratchValue = scratch;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            AArch64Kind kind = (AArch64Kind) expectedValue.getPlatformKind();
            assert kind.isInteger();
            final int size = kind.getSizeInBytes() * Byte.SIZE;

            Register address = asRegister(addressValue);
            Register result = asRegister(resultValue);
            Register newVal = asRegister(newValue);
            Register scratch = asRegister(scratchValue);
            // We could avoid using a scratch register here by reusing resultValue for the stlxr
            // success flag and issuing a mov resultValue, expectedValue in case of success before
            // returning.
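            // Load-exclusive/store-exclusive retry loop: ldaxr reads the current value with
            // acquire semantics, stlxr attempts the store with release semantics and writes a
            // non-zero status to scratch if the exclusive monitor was lost, in which case we
            // retry.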
            Label retry = new Label();
            Label fail = new Label();
            masm.bind(retry);
            masm.ldaxr(size, result, address);
            AArch64Compare.gpCompare(masm, resultValue, expectedValue);
            masm.branchConditionally(AArch64Assembler.ConditionFlag.NE, fail);
            masm.stlxr(size, scratch, newVal, address);
            // if scratch == 0 then write successful, else retry.
            masm.cbnz(32, scratch, retry);
            masm.bind(fail);
        }
    }

    private static void emitStore(@SuppressWarnings("unused") CompilationResultBuilder crb, AArch64MacroAssembler masm, AArch64Kind kind, AArch64Address dst, Value src) {
        int destSize = kind.getSizeInBytes() * Byte.SIZE;
        if (kind.isInteger()) {
            masm.str(destSize, asRegister(src), dst);
        } else {
            masm.fstr(destSize, asRegister(src), dst);
        }
    }

    public static void move(CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, Value input) {
        if (isRegister(input)) {
            if (isRegister(result)) {
                reg2reg(crb, masm, result, asAllocatableValue(input));
            } else if (isStackSlot(result)) {
                reg2stack(crb, masm, result, asAllocatableValue(input));
            } else {
                throw GraalError.shouldNotReachHere();
            }
        } else if (isStackSlot(input)) {
            if (isRegister(result)) {
                stack2reg(crb, masm, result, asAllocatableValue(input));
            } else if (isStackSlot(result)) {
                emitStackMove(crb, masm, result, input);
            } else {
                throw GraalError.shouldNotReachHere();
            }
        } else if (isJavaConstant(input)) {
            if (isRegister(result)) {
                const2reg(crb, masm, result, asJavaConstant(input));
            } else {
                throw GraalError.shouldNotReachHere();
            }
        } else {
            throw GraalError.shouldNotReachHere();
        }
    }

    private static void emitStackMove(CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, Value input) {
        try (ScratchRegister r1 = masm.getScratchRegister()) {
            try (ScratchRegister r2 = masm.getScratchRegister()) {
                Register rscratch1 = r1.getRegister();
                Register rscratch2 = r2.getRegister();
                PlatformKind kind = input.getPlatformKind();
                final int size = kind.getSizeInBytes() <= 4 ? 32 : 64;

                // Always perform stack -> stack copies through integer registers
                crb.blockComment("[stack -> stack copy]");
                AArch64Address src = loadStackSlotAddress(crb, masm, asStackSlot(input), rscratch2);
                masm.ldr(size, rscratch1, src);
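                // rscratch2 can be reused for the destination address because the load above has
                // already consumed the source address.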
                AArch64Address dst = loadStackSlotAddress(crb, masm, asStackSlot(result), rscratch2);
                masm.str(size, rscratch1, dst);
            }
        }
    }

    private static void reg2reg(@SuppressWarnings("unused") CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, AllocatableValue input) {
        Register dst = asRegister(result);
        Register src = asRegister(input);
        if (src.equals(dst)) {
            return;
        }
        AArch64Kind kind = (AArch64Kind) input.getPlatformKind();
        int size = kind.getSizeInBytes() * Byte.SIZE;
        if (kind.isInteger()) {
            masm.mov(size, dst, src);
        } else {
            masm.fmov(size, dst, src);
        }
    }

    private static void reg2stack(CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, AllocatableValue input) {
        AArch64Address dest = loadStackSlotAddress(crb, masm, asStackSlot(result), Value.ILLEGAL);
        Register src = asRegister(input);
        AArch64Kind kind = (AArch64Kind) input.getPlatformKind();
        int size = kind.getSizeInBytes() * Byte.SIZE;
        if (kind.isInteger()) {
            masm.str(size, src, dest);
        } else {
            masm.fstr(size, src, dest);
        }
    }

    private static void stack2reg(CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, AllocatableValue input) {
        AArch64Kind kind = (AArch64Kind) input.getPlatformKind();
        final int size = kind.getSizeInBytes() * Byte.SIZE;
        if (kind.isInteger()) {
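            // The result register is about to be overwritten by the load, so it can double as
            // the scratch register for computing the stack slot address.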
            AArch64Address src = loadStackSlotAddress(crb, masm, asStackSlot(input), result);
            masm.ldr(size, asRegister(result), src);
        } else {
            try (ScratchRegister sc = masm.getScratchRegister()) {
                AllocatableValue scratchRegisterValue = sc.getRegister().asValue(LIRKind.combine(input));
                AArch64Address src = loadStackSlotAddress(crb, masm, asStackSlot(input), scratchRegisterValue);
                masm.fldr(size, asRegister(result), src);
            }
        }
    }

    private static void const2reg(CompilationResultBuilder crb, AArch64MacroAssembler masm, Value result, JavaConstant input) {
        Register dst = asRegister(result);
        switch (input.getJavaKind().getStackKind()) {
            case Int:
                final int value = input.asInt();
                int maskedValue;
                switch (input.getJavaKind()) {
                    case Boolean:
                    case Byte:
                        maskedValue = value & 0xFF;
                        break;
                    case Char:
                    case Short:
                        maskedValue = value & 0xFFFF;
                        break;
                    case Int:
                        maskedValue = value;
                        break;
                    default:
                        throw GraalError.shouldNotReachHere();
                }
                masm.mov(dst, maskedValue);
                break;
            case Long:
                masm.mov(dst, input.asLong());
                break;
            case Float:
                if (AArch64MacroAssembler.isFloatImmediate(input.asFloat())) {
                    masm.fmov(32, dst, input.asFloat());
                } else {
                    masm.fldr(32, dst, (AArch64Address) crb.asFloatConstRef(input));
                }
                break;
            case Double:
                if (AArch64MacroAssembler.isDoubleImmediate(input.asDouble())) {
                    masm.fmov(64, dst, input.asDouble());
                } else {
                    masm.fldr(64, dst, (AArch64Address) crb.asDoubleConstRef(input));
                }
                break;
            case Object:
                if (input.isNull()) {
                    masm.mov(dst, 0);
                } else if (crb.target.inlineObjects) {
                    crb.recordInlineDataInCode(input);
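                    // The placeholder bit pattern below is patched with the actual object
                    // address when the code is installed; recordInlineDataInCode marks the site.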
                    masm.movNativeAddress(dst, 0xDEADDEADDEADDEADL);
                } else {
                    masm.ldr(64, dst, (AArch64Address) crb.recordDataReferenceInCode(input, 8));
                }
                break;
            default:
                throw GraalError.shouldNotReachHere("kind=" + input.getJavaKind().getStackKind());
        }
    }

    private static void const2stack(CompilationResultBuilder crb, AArch64MacroAssembler masm, Value result, JavaConstant constant) {
        try (ScratchRegister addrReg = masm.getScratchRegister()) {
            StackSlot slot = (StackSlot) result;
            AArch64Address resultAddress = loadStackSlotAddress(crb, masm, slot, addrReg.getRegister());
            if (constant.isDefaultForKind() || constant.isNull()) {
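                // Default values are all-zero bit patterns and can be stored directly from the
                // zero register without materializing the constant first.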
                emitStore(crb, masm, (AArch64Kind) result.getPlatformKind(), resultAddress, zr.asValue(LIRKind.combine(result)));
            } else {
                try (ScratchRegister sc = masm.getScratchRegister()) {
                    Value scratchRegisterValue = sc.getRegister().asValue(LIRKind.combine(result));
                    const2reg(crb, masm, scratchRegisterValue, constant);
                    emitStore(crb, masm, (AArch64Kind) result.getPlatformKind(), resultAddress, scratchRegisterValue);
                }
            }
        }
    }

    /**
     * Returns the AArch64Address of the given StackSlot. We cannot use
     * CompilationResultBuilder.asAddress since that calls AArch64MacroAssembler.makeAddress with
     * displacements that may be larger than 9-bit signed, which that method cannot handle.
     *
     * Instead we create the address ourselves. We use scaled unsigned addressing since we know the
     * transfer size, which gives us a 15-bit address range for longs/doubles and a 14-bit range
     * for everything else.
     *
     * @param scratch scratch register that can be used to load the address. If Value.ILLEGAL, this
     *            instruction fails if we try to access a StackSlot that is too large to be
     *            addressed directly.
     * @return AArch64Address of the given StackSlot. Uses the scratch register if necessary.
     */
    private static AArch64Address loadStackSlotAddress(CompilationResultBuilder crb, AArch64MacroAssembler masm, StackSlot slot, AllocatableValue scratch) {
        Register scratchReg = Value.ILLEGAL.equals(scratch) ? zr : asRegister(scratch);
        return loadStackSlotAddress(crb, masm, slot, scratchReg);
    }

    private static AArch64Address loadStackSlotAddress(CompilationResultBuilder crb, AArch64MacroAssembler masm, StackSlot slot, Register scratchReg) {
        int displacement = crb.frameMap.offsetForStackSlot(slot);
        int transferSize = slot.getPlatformKind().getSizeInBytes();
        return masm.makeAddress(sp, displacement, scratchReg, transferSize, /* allowOverwrite */false);
    }

}