1 /*
   2  * Copyright (c) 2013, 2019, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 
  24 
  25 package org.graalvm.compiler.lir.aarch64;
  26 
  27 import static jdk.vm.ci.aarch64.AArch64.sp;
  28 import static jdk.vm.ci.aarch64.AArch64.zr;
  29 import static jdk.vm.ci.code.ValueUtil.asAllocatableValue;
  30 import static jdk.vm.ci.code.ValueUtil.asRegister;
  31 import static jdk.vm.ci.code.ValueUtil.asStackSlot;
  32 import static jdk.vm.ci.code.ValueUtil.isRegister;
  33 import static jdk.vm.ci.code.ValueUtil.isStackSlot;
  34 import static org.graalvm.compiler.core.common.GraalOptions.GeneratePIC;
  35 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.COMPOSITE;
  36 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.CONST;
  37 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.HINT;
  38 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.ILLEGAL;
  39 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.REG;
  40 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.STACK;
  41 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.UNINITIALIZED;
  42 import static org.graalvm.compiler.lir.LIRValueUtil.asJavaConstant;
  43 import static org.graalvm.compiler.lir.LIRValueUtil.isJavaConstant;
  44 
  45 import org.graalvm.compiler.asm.Label;
  46 import org.graalvm.compiler.asm.aarch64.AArch64Address;
  47 import org.graalvm.compiler.asm.aarch64.AArch64Assembler;
  48 import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler;
  49 import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler.ScratchRegister;
  50 import org.graalvm.compiler.core.common.CompressEncoding;
  51 import org.graalvm.compiler.core.common.LIRKind;
  52 import org.graalvm.compiler.core.common.spi.LIRKindTool;
  53 import org.graalvm.compiler.core.common.type.DataPointerConstant;
  54 import org.graalvm.compiler.debug.GraalError;
  55 import org.graalvm.compiler.lir.LIRFrameState;
  56 import org.graalvm.compiler.lir.LIRInstructionClass;
  57 import org.graalvm.compiler.lir.Opcode;
  58 import org.graalvm.compiler.lir.StandardOp;
  59 import org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
  60 import org.graalvm.compiler.lir.StandardOp.NullCheck;
  61 import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
  62 import org.graalvm.compiler.lir.VirtualStackSlot;
  63 import org.graalvm.compiler.lir.asm.CompilationResultBuilder;
  64 import org.graalvm.compiler.options.OptionValues;
  65 
  66 import jdk.vm.ci.aarch64.AArch64Kind;
  67 import jdk.vm.ci.code.MemoryBarriers;
  68 import jdk.vm.ci.code.Register;
  69 import jdk.vm.ci.code.StackSlot;
  70 import jdk.vm.ci.meta.AllocatableValue;
  71 import jdk.vm.ci.meta.Constant;
  72 import jdk.vm.ci.meta.JavaConstant;
  73 import jdk.vm.ci.meta.PlatformKind;
  74 import jdk.vm.ci.meta.Value;
  75 
  76 public class AArch64Move {
  77 
  78     public static class LoadInlineConstant extends AArch64LIRInstruction implements LoadConstantOp {
  79         public static final LIRInstructionClass<LoadInlineConstant> TYPE = LIRInstructionClass.create(LoadInlineConstant.class);
  80 
  81         private JavaConstant constant;
  82         @Def({REG, STACK}) AllocatableValue result;
  83 
  84         public LoadInlineConstant(JavaConstant constant, AllocatableValue result) {
  85             super(TYPE);
  86             this.constant = constant;
  87             this.result = result;
  88         }
  89 
  90         @Override
  91         public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
  92             if (isRegister(result)) {
  93                 const2reg(crb, masm, result, constant);
  94             } else if (isStackSlot(result)) {
  95                 StackSlot slot = asStackSlot(result);
  96                 const2stack(crb, masm, slot, constant);
  97             }
  98         }
  99 
 100         @Override
 101         public Constant getConstant() {
 102             return constant;
 103         }
 104 
 105         @Override
 106         public AllocatableValue getResult() {
 107             return result;
 108         }
 109     }
 110 
 111     @Opcode("MOVE")
 112     public static class Move extends AArch64LIRInstruction implements ValueMoveOp {
 113         public static final LIRInstructionClass<Move> TYPE = LIRInstructionClass.create(Move.class);
 114 
 115         @Def({REG, STACK, HINT}) protected AllocatableValue result;
 116         @Use({REG, STACK}) protected AllocatableValue input;
 117 
 118         public Move(AllocatableValue result, AllocatableValue input) {
 119             super(TYPE);
 120             this.result = result;
 121             this.input = input;
 122             assert !(isStackSlot(result) && isStackSlot(input));
 123         }
 124 
 125         @Override
 126         public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
 127             move(crb, masm, getResult(), getInput());
 128         }
 129 
 130         @Override
 131         public AllocatableValue getInput() {
 132             return input;
 133         }
 134 
 135         @Override
 136         public AllocatableValue getResult() {
 137             return result;
 138         }
 139     }
 140 
 141     public static class LoadAddressOp extends AArch64LIRInstruction {
 142         public static final LIRInstructionClass<LoadAddressOp> TYPE = LIRInstructionClass.create(LoadAddressOp.class);
 143 
 144         @Def protected AllocatableValue result;
 145         @Use(COMPOSITE) protected AArch64AddressValue address;
 146 
 147         public LoadAddressOp(AllocatableValue result, AArch64AddressValue address) {
 148             super(TYPE);
 149             this.result = result;
 150             this.address = address;
 151         }
 152 
 153         @Override
 154         public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
 155             Register dst = asRegister(result);
 156             AArch64Address adr = address.toAddress();
 157             masm.loadAddress(dst, adr, address.getScaleFactor());
 158         }
 159     }
 160 
    /**
     * Loads the address of a {@link DataPointerConstant} that is emitted into the data section
     * of the compiled code.
     */
    public static class LoadDataOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<LoadDataOp> TYPE = LIRInstructionClass.create(LoadDataOp.class);

        @Def protected AllocatableValue result;
        private final DataPointerConstant data;

        public LoadDataOp(AllocatableValue result, DataPointerConstant data) {
            super(TYPE);
            this.result = result;
            this.data = data;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            Register dst = asRegister(result);
            if (crb.compilationResult.isImmutablePIC()) {
                // Position-independent code: record the data reference, then emit a
                // pc-relative address computation into dst — presumably patched against the
                // recorded reference later; confirm addressOf semantics.
                crb.recordDataReferenceInCode(data);
                masm.addressOf(dst);
            } else {
                // Non-PIC: load the (absolute) address of the recorded data section entry.
                masm.loadAddress(dst, (AArch64Address) crb.recordDataReferenceInCode(data), data.getAlignment());
            }
        }
    }
 184 
 185     public static class StackLoadAddressOp extends AArch64LIRInstruction {
 186         public static final LIRInstructionClass<StackLoadAddressOp> TYPE = LIRInstructionClass.create(StackLoadAddressOp.class);
 187 
 188         @Def protected AllocatableValue result;
 189         @Use({STACK, UNINITIALIZED}) protected AllocatableValue slot;
 190 
 191         public StackLoadAddressOp(AllocatableValue result, AllocatableValue slot) {
 192             super(TYPE);
 193             assert slot instanceof VirtualStackSlot || slot instanceof StackSlot;
 194             this.result = result;
 195             this.slot = slot;
 196         }
 197 
 198         @Override
 199         public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
 200             try (ScratchRegister addrReg = masm.getScratchRegister()) {
 201                 AArch64Address address = loadStackSlotAddress(crb, masm, (StackSlot) slot, addrReg.getRegister());
 202                 PlatformKind kind = AArch64Kind.QWORD;
 203                 masm.loadAddress(asRegister(result, kind), address, kind.getSizeInBytes());
 204             }
 205         }
 206     }
 207 
    /**
     * Emits a data memory barrier (DMB) for the given {@link MemoryBarriers} bit set, choosing
     * the cheapest barrier kind that covers the requested orderings.
     */
    public static class MembarOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<MembarOp> TYPE = LIRInstructionClass.create(MembarOp.class);

        // For future use.
        @SuppressWarnings("unused") private final int barriers;

        public MembarOp(int barriers) {
            super(TYPE);
            this.barriers = barriers;
        }

        @Override
        // The odd-looking @SuppressWarnings("all") is here because of
        // a compiler bug which warns that crb is unused, and also
        // warns that @SuppressWarnings("unused") is unnecessary.
        public void emitCode(@SuppressWarnings("all") CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            // barriers must be a non-empty combination of the four MemoryBarriers flags.
            assert barriers >= MemoryBarriers.LOAD_LOAD && barriers <= (MemoryBarriers.STORE_STORE | MemoryBarriers.STORE_LOAD | MemoryBarriers.LOAD_STORE | MemoryBarriers.LOAD_LOAD);
            switch (barriers) {
                case MemoryBarriers.STORE_STORE:
                    masm.dmb(AArch64MacroAssembler.BarrierKind.STORE_STORE);
                    break;
                case MemoryBarriers.LOAD_LOAD:
                case MemoryBarriers.LOAD_STORE:
                case MemoryBarriers.LOAD_LOAD | MemoryBarriers.LOAD_STORE:
                    // All pure load-ordering requests share one barrier kind; presumably the
                    // LOAD_LOAD kind also orders loads before subsequent stores — confirm
                    // against AArch64MacroAssembler.BarrierKind.
                    masm.dmb(AArch64MacroAssembler.BarrierKind.LOAD_LOAD);
                    break;
                default:
                    // Any other combination conservatively uses a full barrier.
                    masm.dmb(AArch64MacroAssembler.BarrierKind.ANY_ANY);
                    break;
            }
        }
    }
 240 
    /**
     * Base class for memory accesses that can act as implicit null checks. When a frame state
     * is attached, the instruction position is recorded so that a hardware fault on a null base
     * register can be mapped back to a NullPointerException.
     */
    abstract static class MemOp extends AArch64LIRInstruction implements StandardOp.ImplicitNullCheck {

        protected final AArch64Kind kind;
        @Use({COMPOSITE}) protected AArch64AddressValue addressValue;
        @State protected LIRFrameState state;

        MemOp(LIRInstructionClass<? extends MemOp> c, AArch64Kind kind, AArch64AddressValue address, LIRFrameState state) {
            super(c);
            this.kind = kind;
            this.addressValue = address;
            this.state = state;
        }

        /** Emits the actual memory access; implicit-exception recording is done by emitCode. */
        protected abstract void emitMemAccess(CompilationResultBuilder crb, AArch64MacroAssembler masm);

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            if (state != null) {
                crb.recordImplicitException(masm.position(), state);
            }
            emitMemAccess(crb, masm);
        }

        @Override
        public boolean makeNullCheckFor(Value value, LIRFrameState nullCheckState, int implicitNullCheckLimit) {
            int displacement = addressValue.getDisplacement();
            // This access can replace an explicit null check of `value` only if it does not
            // already carry a state, dereferences exactly that base with no index register,
            // and the displacement is non-negative and below the implicit null-check limit
            // (so a null base is guaranteed to fault).
            if (state == null && value.equals(addressValue.getBase()) && addressValue.getOffset().equals(Value.ILLEGAL) && displacement >= 0 && displacement < implicitNullCheckLimit) {
                state = nullCheckState;
                return true;
            }
            return false;
        }
    }
 274 
    /**
     * Loads a value of the given {@link AArch64Kind} from memory into a register.
     */
    public static final class LoadOp extends MemOp {
        public static final LIRInstructionClass<LoadOp> TYPE = LIRInstructionClass.create(LoadOp.class);

        @Def protected AllocatableValue result;

        public LoadOp(AArch64Kind kind, AllocatableValue result, AArch64AddressValue address, LIRFrameState state) {
            super(TYPE, kind, address, state);
            this.result = result;
        }

        @Override
        protected void emitMemAccess(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            AArch64Address address = addressValue.toAddress();
            Register dst = asRegister(result);

            int destSize = result.getPlatformKind().getSizeInBytes() * Byte.SIZE;
            int srcSize = kind.getSizeInBytes() * Byte.SIZE;
            if (kind.isInteger()) {
                // Integer loads may be narrower than the destination register; presumably ldr
                // zero-extends sub-word loads — NOTE(review): confirm no sign extension is
                // expected by callers.
                masm.ldr(srcSize, dst, address);
            } else {
                // Floating-point loads must match the destination register size exactly.
                assert srcSize == destSize;
                masm.fldr(srcSize, dst, address);
            }
        }
    }
 300 
 301     public static class StoreOp extends MemOp {
 302         public static final LIRInstructionClass<StoreOp> TYPE = LIRInstructionClass.create(StoreOp.class);
 303         @Use protected AllocatableValue input;
 304 
 305         public StoreOp(AArch64Kind kind, AArch64AddressValue address, AllocatableValue input, LIRFrameState state) {
 306             super(TYPE, kind, address, state);
 307             this.input = input;
 308         }
 309 
 310         @Override
 311         protected void emitMemAccess(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
 312             emitStore(crb, masm, kind, addressValue.toAddress(), input);
 313         }
 314     }
 315 
    /**
     * Stores a constant to memory. Only default-for-kind constants (zero / null) are supported,
     * which allows storing the zero register without materializing the value first.
     */
    public static final class StoreConstantOp extends MemOp {
        public static final LIRInstructionClass<StoreConstantOp> TYPE = LIRInstructionClass.create(StoreConstantOp.class);

        protected final JavaConstant input;

        public StoreConstantOp(AArch64Kind kind, AArch64AddressValue address, JavaConstant input, LIRFrameState state) {
            super(TYPE, kind, address, state);
            this.input = input;
            if (!input.isDefaultForKind()) {
                throw GraalError.shouldNotReachHere("Can only store null constants to memory");
            }
        }

        @Override
        public void emitMemAccess(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            // Default-for-kind values are all-zero bit patterns, so storing zr suffices.
            emitStore(crb, masm, kind, addressValue.toAddress(), zr.asValue(LIRKind.combine(addressValue)));
        }
    }
 334 
 335     public static final class NullCheckOp extends AArch64LIRInstruction implements NullCheck {
 336         public static final LIRInstructionClass<NullCheckOp> TYPE = LIRInstructionClass.create(NullCheckOp.class);
 337 
 338         @Use(COMPOSITE) protected AArch64AddressValue address;
 339         @State protected LIRFrameState state;
 340 
 341         public NullCheckOp(AArch64AddressValue address, LIRFrameState state) {
 342             super(TYPE);
 343             this.address = address;
 344             this.state = state;
 345         }
 346 
 347         @Override
 348         public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
 349             crb.recordImplicitException(masm.position(), state);
 350             masm.ldr(64, zr, address.toAddress());
 351         }
 352 
 353         @Override
 354         public Value getCheckedValue() {
 355             return address.base;
 356         }
 357 
 358         @Override
 359         public LIRFrameState getState() {
 360             return state;
 361         }
 362     }
 363 
 364     private static void emitStore(@SuppressWarnings("unused") CompilationResultBuilder crb, AArch64MacroAssembler masm, AArch64Kind kind, AArch64Address dst, Value src) {
 365         int destSize = kind.getSizeInBytes() * Byte.SIZE;
 366         if (kind.isInteger()) {
 367             masm.str(destSize, asRegister(src), dst);
 368         } else {
 369             masm.fstr(destSize, asRegister(src), dst);
 370         }
 371     }
 372 
 373     public static void move(CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, Value input) {
 374         if (isRegister(input)) {
 375             if (isRegister(result)) {
 376                 reg2reg(crb, masm, result, asAllocatableValue(input));
 377             } else if (isStackSlot(result)) {
 378                 reg2stack(crb, masm, result, asAllocatableValue(input));
 379             } else {
 380                 throw GraalError.shouldNotReachHere();
 381             }
 382         } else if (isStackSlot(input)) {
 383             if (isRegister(result)) {
 384                 stack2reg(crb, masm, result, asAllocatableValue(input));
 385             } else if (isStackSlot(result)) {
 386                 emitStackMove(crb, masm, result, input);
 387             } else {
 388                 throw GraalError.shouldNotReachHere();
 389             }
 390         } else if (isJavaConstant(input)) {
 391             if (isRegister(result)) {
 392                 const2reg(crb, masm, result, asJavaConstant(input));
 393             } else {
 394                 throw GraalError.shouldNotReachHere();
 395             }
 396         } else {
 397             throw GraalError.shouldNotReachHere();
 398         }
 399     }
 400 
    /**
     * Copies a value between two stack slots. AArch64 has no memory-to-memory move, so the
     * copy goes through an integer scratch register.
     */
    private static void emitStackMove(CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, Value input) {
        try (ScratchRegister r1 = masm.getScratchRegister()) {
            try (ScratchRegister r2 = masm.getScratchRegister()) {
                Register rscratch1 = r1.getRegister();
                Register rscratch2 = r2.getRegister();
                // use the slot kind to define the operand size
                PlatformKind kind = input.getPlatformKind();
                final int size = kind.getSizeInBytes() * Byte.SIZE;

                // Always perform stack -> stack copies through integer registers
                // rscratch1 carries the data; rscratch2 is reused as the address scratch for
                // both slots, so the load must complete before the destination address is
                // formed — the statement order below is load-bearing.
                crb.blockComment("[stack -> stack copy]");
                AArch64Address src = loadStackSlotAddress(crb, masm, asStackSlot(input), rscratch2);
                masm.ldr(size, rscratch1, src);
                AArch64Address dst = loadStackSlotAddress(crb, masm, asStackSlot(result), rscratch2);
                masm.str(size, rscratch1, dst);
            }
        }
    }
 419 
 420     private static void reg2reg(@SuppressWarnings("unused") CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, AllocatableValue input) {
 421         Register dst = asRegister(result);
 422         Register src = asRegister(input);
 423         if (src.equals(dst)) {
 424             return;
 425         }
 426         AArch64Kind kind = (AArch64Kind) input.getPlatformKind();
 427         int size = kind.getSizeInBytes() * Byte.SIZE;
 428         if (kind.isInteger()) {
 429             masm.mov(size, dst, src);
 430         } else {
 431             masm.fmov(size, dst, src);
 432         }
 433     }
 434 
    /**
     * Stores a register value into a stack slot.
     */
    static void reg2stack(CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, AllocatableValue input) {
        AArch64Address dest;
        try (ScratchRegister scratch = masm.getScratchRegister()) {
            dest = loadStackSlotAddress(crb, masm, asStackSlot(result), scratch.getRegister());
        }
        // NOTE(review): the scratch register is released above, yet `dest` may still refer to
        // it (if the slot displacement did not fit the addressing mode). This looks safe only
        // because no other scratch register is acquired before the store below — confirm.
        Register src = asRegister(input);
        // use the slot kind to define the operand size
        AArch64Kind kind = (AArch64Kind) result.getPlatformKind();
        final int size = kind.getSizeInBytes() * Byte.SIZE;
        if (kind.isInteger()) {
            masm.str(size, src, dest);
        } else {
            masm.fstr(size, src, dest);
        }
    }
 450 
    /**
     * Loads a stack slot into a register.
     */
    static void stack2reg(CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, AllocatableValue input) {
        AArch64Kind kind = (AArch64Kind) input.getPlatformKind();
        // use the slot kind to define the operand size
        final int size = kind.getSizeInBytes() * Byte.SIZE;
        if (kind.isInteger()) {
            // An integer destination can double as the address scratch register, since the
            // load overwrites it anyway.
            AArch64Address src = loadStackSlotAddress(crb, masm, asStackSlot(input), result);
            masm.ldr(size, asRegister(result), src);
        } else {
            // A floating-point destination cannot hold an address, so borrow an integer
            // scratch register for the address computation.
            try (ScratchRegister sc = masm.getScratchRegister()) {
                AllocatableValue scratchRegisterValue = sc.getRegister().asValue(LIRKind.combine(input));
                AArch64Address src = loadStackSlotAddress(crb, masm, asStackSlot(input), scratchRegisterValue);
                masm.fldr(size, asRegister(result), src);
            }
        }
    }
 466 
    /**
     * Materializes a {@link JavaConstant} into a register, dispatching on the constant's stack
     * kind (Int, Long, Float, Double, Object).
     */
    private static void const2reg(CompilationResultBuilder crb, AArch64MacroAssembler masm, Value result, JavaConstant input) {
        Register dst = asRegister(result);
        switch (input.getJavaKind().getStackKind()) {
            case Int:
                final int value = input.asInt();
                int maskedValue;
                // Mask sub-int kinds down to their unsigned value range so the register's
                // upper bits are deterministic.
                switch (input.getJavaKind()) {
                    case Boolean:
                    case Byte:
                        maskedValue = value & 0xFF;
                        break;
                    case Char:
                    case Short:
                        maskedValue = value & 0xFFFF;
                        break;
                    case Int:
                        maskedValue = value;
                        break;
                    default:
                        throw GraalError.shouldNotReachHere();
                }
                masm.mov(dst, maskedValue);
                break;
            case Long:
                masm.mov(dst, input.asLong());
                break;
            case Float:
                if (AArch64MacroAssembler.isFloatImmediate(input.asFloat())) {
                    // The constant fits the fmov immediate encoding.
                    masm.fmov(32, dst, input.asFloat());
                } else if (crb.compilationResult.isImmutablePIC()) {
                    // PIC: materialize the raw bits in an integer scratch, then move to FP,
                    // avoiding a data-section reference.
                    try (ScratchRegister scr = masm.getScratchRegister()) {
                        Register scratch = scr.getRegister();
                        masm.mov(scratch, Float.floatToRawIntBits(input.asFloat()));
                        masm.fmov(32, dst, scratch);
                    }
                } else {
                    // Otherwise load the value from the constant section; addressOf computes
                    // the (presumably patched) address of the recorded constant — confirm.
                    try (ScratchRegister scr = masm.getScratchRegister()) {
                        Register scratch = scr.getRegister();
                        crb.asFloatConstRef(input);
                        masm.addressOf(scratch);
                        masm.fldr(32, dst, AArch64Address.createBaseRegisterOnlyAddress(scratch));
                    }
                }
                break;
            case Double:
                // Mirrors the Float case at 64-bit width.
                if (AArch64MacroAssembler.isDoubleImmediate(input.asDouble())) {
                    masm.fmov(64, dst, input.asDouble());
                } else if (crb.compilationResult.isImmutablePIC()) {
                    try (ScratchRegister scr = masm.getScratchRegister()) {
                        Register scratch = scr.getRegister();
                        masm.mov(scratch, Double.doubleToRawLongBits(input.asDouble()));
                        masm.fmov(64, dst, scratch);
                    }
                } else {
                    try (ScratchRegister scr = masm.getScratchRegister()) {
                        Register scratch = scr.getRegister();
                        crb.asDoubleConstRef(input);
                        masm.addressOf(scratch);
                        masm.fldr(64, dst, AArch64Address.createBaseRegisterOnlyAddress(scratch));
                    }
                }
                break;
            case Object:
                if (input.isNull()) {
                    if (crb.mustReplaceWithUncompressedNullRegister(input)) {
                        // Null must be represented by the uncompressed-null register value.
                        masm.mov(64, dst, crb.uncompressedNullRegister);
                    } else {
                        masm.mov(dst, 0);
                    }
                } else if (crb.target.inlineObjects) {
                    // Record the object so the mov sequence can be patched; 0xDEADDEAD... is
                    // presumably a placeholder overwritten at link/patch time — confirm.
                    crb.recordInlineDataInCode(input);
                    masm.mov(dst, 0xDEADDEADDEADDEADL, true);
                } else {
                    // Load the object reference from the data section.
                    masm.ldr(64, dst, (AArch64Address) crb.recordDataReferenceInCode(input, 8));
                }
                break;
            default:
                throw GraalError.shouldNotReachHere("kind=" + input.getJavaKind().getStackKind());
        }
    }
 547 
    /**
     * Stores a constant into a stack slot, going through a scratch register unless the constant
     * is a plain null that can be written as the zero register.
     */
    private static void const2stack(CompilationResultBuilder crb, AArch64MacroAssembler masm, Value result, JavaConstant constant) {
        try (ScratchRegister addrReg = masm.getScratchRegister()) {
            StackSlot slot = (StackSlot) result;
            AArch64Address resultAddress = loadStackSlotAddress(crb, masm, slot, addrReg.getRegister());
            if (constant.isNull() && !crb.mustReplaceWithUncompressedNullRegister(constant)) {
                // Plain null (all-zero bits): store the zero register directly.
                emitStore(crb, masm, (AArch64Kind) result.getPlatformKind(), resultAddress, zr.asValue(LIRKind.combine(result)));
            } else {
                // Materialize the constant into a second scratch register, then store it.
                try (ScratchRegister sc = masm.getScratchRegister()) {
                    Value scratchRegisterValue = sc.getRegister().asValue(LIRKind.combine(result));
                    const2reg(crb, masm, scratchRegisterValue, constant);
                    emitStore(crb, masm, (AArch64Kind) result.getPlatformKind(), resultAddress, scratchRegisterValue);
                }
            }
        }
    }
 563 
 564     /**
 565      * Returns AArch64Address of given StackSlot. We cannot use CompilationResultBuilder.asAddress
 566      * since this calls AArch64MacroAssembler.makeAddress with displacements that may be larger than
 567      * 9-bit signed, which cannot be handled by that method.
 568      *
 569      * Instead we create an address ourselves. We use scaled unsigned addressing since we know the
 570      * transfersize, which gives us a 15-bit address range (for longs/doubles) respectively a 14-bit
 571      * range (for everything else).
 572      *
 573      * @param scratch Scratch register that can be used to load address. If Value.ILLEGAL this
 574      *            instruction fails if we try to access a StackSlot that is too large to be loaded
 575      *            directly.
 576      * @return AArch64Address of given StackSlot. Uses scratch register if necessary to do so.
 577      */
 578     private static AArch64Address loadStackSlotAddress(CompilationResultBuilder crb, AArch64MacroAssembler masm, StackSlot slot, AllocatableValue scratch) {
 579         Register scratchReg = Value.ILLEGAL.equals(scratch) ? zr : asRegister(scratch);
 580         return loadStackSlotAddress(crb, masm, slot, scratchReg);
 581     }
 582 
 583     private static AArch64Address loadStackSlotAddress(CompilationResultBuilder crb, AArch64MacroAssembler masm, StackSlot slot, Register scratchReg) {
 584         int displacement = crb.frameMap.offsetForStackSlot(slot);
 585         int transferSize = slot.getPlatformKind().getSizeInBytes();
 586         return masm.makeAddress(sp, displacement, scratchReg, transferSize, /* allowOverwrite */false);
 587     }
 588 
    /**
     * Common base for compressing and uncompressing pointers. Holds the operands plus the
     * {@link CompressEncoding} (base and shift) that describes the compression scheme.
     */
    public abstract static class PointerCompressionOp extends AArch64LIRInstruction {

        @Def({REG, HINT}) private AllocatableValue result;
        @Use({REG, CONST}) private Value input;
        // May be ILLEGAL when no base register is required; see getBaseRegister/hasBase.
        @Alive({REG, ILLEGAL, UNINITIALIZED}) private AllocatableValue baseRegister;

        protected final CompressEncoding encoding;
        // True if the input pointer is known non-null, enabling a shorter code sequence.
        protected final boolean nonNull;
        protected final LIRKindTool lirKindTool;

        protected PointerCompressionOp(LIRInstructionClass<? extends PointerCompressionOp> type, AllocatableValue result, Value input,
                        AllocatableValue baseRegister, CompressEncoding encoding, boolean nonNull, LIRKindTool lirKindTool) {

            super(type);
            this.result = result;
            this.input = input;
            this.baseRegister = baseRegister;
            this.encoding = encoding;
            this.nonNull = nonNull;
            this.lirKindTool = lirKindTool;
        }

        /** A base register is required if the encoding has one, or always when generating PIC. */
        public static boolean hasBase(OptionValues options, CompressEncoding encoding) {
            return GeneratePIC.getValue(options) || encoding.hasBase();
        }

        public final Value getInput() {
            return input;
        }

        public final AllocatableValue getResult() {
            return result;
        }

        protected final Register getResultRegister() {
            return asRegister(result);
        }

        /** Returns the base register, or {@code Register.None} when no base is used. */
        protected final Register getBaseRegister(CompilationResultBuilder crb) {
            return hasBase(crb.getOptions(), encoding) ? asRegister(baseRegister) : Register.None;
        }

        protected final int getShift() {
            return encoding.getShift();
        }

        /** Emits a plain move of the (possibly constant) input into the result. */
        protected final void move(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            AArch64Move.move(crb, masm, result, input);
        }
    }
 639 
 640     public static class CompressPointerOp extends PointerCompressionOp {
 641         public static final LIRInstructionClass<CompressPointerOp> TYPE = LIRInstructionClass.create(CompressPointerOp.class);
 642 
 643         public CompressPointerOp(AllocatableValue result, Value input, AllocatableValue baseRegister, CompressEncoding encoding, boolean nonNull, LIRKindTool lirKindTool) {
 644             this(TYPE, result, input, baseRegister, encoding, nonNull, lirKindTool);
 645         }
 646 
 647         private CompressPointerOp(LIRInstructionClass<? extends PointerCompressionOp> type, AllocatableValue result, Value input,
 648                         AllocatableValue baseRegister, CompressEncoding encoding, boolean nonNull, LIRKindTool lirKindTool) {
 649 
 650             super(type, result, input, baseRegister, encoding, nonNull, lirKindTool);
 651         }
 652 
 653         @Override
 654         protected void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
 655             Register resultRegister = getResultRegister();
 656             Register ptr = asRegister(getInput());
 657             Register base = getBaseRegister(crb);
 658             // result = (ptr - base) >> shift
 659             if (!encoding.hasBase()) {
 660                 if (encoding.hasShift()) {
 661                     masm.lshr(64, resultRegister, ptr, encoding.getShift());
 662                 } else {
 663                     masm.movx(resultRegister, ptr);
 664                 }
 665             } else if (nonNull) {
 666                 masm.sub(64, resultRegister, ptr, base);
 667                 if (encoding.hasShift()) {
 668                     masm.lshr(64, resultRegister, resultRegister, encoding.getShift());
 669                 }
 670             } else {
 671                 // if ptr is null it still has to be null after compression
 672                 masm.cmp(64, ptr, 0);
 673                 masm.cmov(64, resultRegister, ptr, base, AArch64Assembler.ConditionFlag.NE);
 674                 masm.sub(64, resultRegister, resultRegister, base);
 675                 if (encoding.hasShift()) {
 676                     masm.lshr(64, resultRegister, resultRegister, encoding.getShift());
 677                 }
 678             }
 679         }
 680     }
 681 
 682     public static class UncompressPointerOp extends PointerCompressionOp {
 683         public static final LIRInstructionClass<UncompressPointerOp> TYPE = LIRInstructionClass.create(UncompressPointerOp.class);
 684 
 685         public UncompressPointerOp(AllocatableValue result, Value input, AllocatableValue baseRegister, CompressEncoding encoding, boolean nonNull, LIRKindTool lirKindTool) {
 686             this(TYPE, result, input, baseRegister, encoding, nonNull, lirKindTool);
 687         }
 688 
 689         private UncompressPointerOp(LIRInstructionClass<? extends PointerCompressionOp> type, AllocatableValue result, Value input,
 690                         AllocatableValue baseRegister, CompressEncoding encoding, boolean nonNull, LIRKindTool lirKindTool) {
 691             super(type, result, input, baseRegister, encoding, nonNull, lirKindTool);
 692         }
 693 
 694         @Override
 695         protected void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
 696             Register inputRegister = asRegister(getInput());
 697             Register resultRegister = getResultRegister();
 698             Register base = encoding.hasBase() ? getBaseRegister(crb) : null;
 699 
 700             // result = base + (ptr << shift)
 701             if (nonNull || base == null) {
 702                 masm.add(64, resultRegister, base == null ? zr : base, inputRegister, AArch64Assembler.ShiftType.LSL, encoding.getShift());
 703             } else {
 704                 // if ptr is null it has to be null after decompression
 705                 Label done = new Label();
 706                 if (!resultRegister.equals(inputRegister)) {
 707                     masm.mov(32, resultRegister, inputRegister);
 708                 }
 709                 masm.cbz(32, resultRegister, done);
 710                 masm.add(64, resultRegister, base, resultRegister, AArch64Assembler.ShiftType.LSL, encoding.getShift());
 711                 masm.bind(done);
 712             }
 713         }
 714     }
 715 
 716     private abstract static class ZeroNullConversionOp extends AArch64LIRInstruction {
 717         @Def({REG, HINT}) protected AllocatableValue result;
 718         @Use({REG}) protected AllocatableValue input;
 719 
 720         protected ZeroNullConversionOp(LIRInstructionClass<? extends ZeroNullConversionOp> type, AllocatableValue result, AllocatableValue input) {
 721             super(type);
 722             this.result = result;
 723             this.input = input;
 724         }
 725 
 726         @Override
 727         public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
 728             Register nullRegister = crb.uncompressedNullRegister;
 729             if (!nullRegister.equals(Register.None)) {
 730                 emitConversion(asRegister(result), asRegister(input), nullRegister, masm);
 731             }
 732         }
 733 
 734         protected abstract void emitConversion(Register resultRegister, Register inputRegister, Register nullRegister, AArch64MacroAssembler masm);
 735     }
 736 
 737     public static class ConvertNullToZeroOp extends ZeroNullConversionOp {
 738         public static final LIRInstructionClass<ConvertNullToZeroOp> TYPE = LIRInstructionClass.create(ConvertNullToZeroOp.class);
 739 
 740         public ConvertNullToZeroOp(AllocatableValue result, AllocatableValue input) {
 741             super(TYPE, result, input);
 742         }
 743 
 744         @Override
 745         protected final void emitConversion(Register resultRegister, Register inputRegister, Register nullRegister, AArch64MacroAssembler masm) {
 746             if (inputRegister.equals(resultRegister)) {
 747                 masm.subs(64, inputRegister, inputRegister, nullRegister);
 748                 Label done = new Label();
 749                 masm.branchConditionally(AArch64Assembler.ConditionFlag.EQ, done);
 750                 masm.add(64, inputRegister, inputRegister, nullRegister);
 751                 masm.bind(done);
 752             } else {
 753                 masm.subs(64, resultRegister, resultRegister, resultRegister);
 754                 masm.cmp(64, inputRegister, nullRegister);
 755                 Label done = new Label();
 756                 masm.branchConditionally(AArch64Assembler.ConditionFlag.EQ, done);
 757                 masm.movx(resultRegister, inputRegister);
 758                 masm.bind(done);
 759             }
 760         }
 761     }
 762 
 763     public static class ConvertZeroToNullOp extends ZeroNullConversionOp {
 764         public static final LIRInstructionClass<ConvertZeroToNullOp> TYPE = LIRInstructionClass.create(ConvertZeroToNullOp.class);
 765 
 766         public ConvertZeroToNullOp(AllocatableValue result, AllocatableValue input) {
 767             super(TYPE, result, input);
 768         }
 769 
 770         @Override
 771         protected final void emitConversion(Register resultRegister, Register inputRegister, Register nullRegister, AArch64MacroAssembler masm) {
 772             if (!inputRegister.equals(resultRegister)) {
 773                 masm.movx(resultRegister, inputRegister);
 774             }
 775             Label done = new Label();
 776             masm.ands(64, zr, inputRegister, inputRegister);
 777             masm.branchConditionally(AArch64Assembler.ConditionFlag.NE, done);
 778             masm.movx(resultRegister, nullRegister);
 779             masm.bind(done);
 780         }
 781     }
 782 
 783 }