/*
 * Copyright (c) 2013, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */


package org.graalvm.compiler.core.aarch64;

import static jdk.vm.ci.aarch64.AArch64.sp;
import static org.graalvm.compiler.lir.LIRValueUtil.asJavaConstant;
import static org.graalvm.compiler.lir.LIRValueUtil.isIntConstant;
import static org.graalvm.compiler.lir.LIRValueUtil.isJavaConstant;

import java.util.function.Function;

import org.graalvm.compiler.asm.aarch64.AArch64Address.AddressingMode;
import org.graalvm.compiler.asm.aarch64.AArch64Assembler.ConditionFlag;
import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler;
import org.graalvm.compiler.core.common.LIRKind;
import org.graalvm.compiler.core.common.calc.Condition;
import org.graalvm.compiler.core.common.spi.LIRKindTool;
import org.graalvm.compiler.debug.GraalError;
import org.graalvm.compiler.lir.LIRFrameState;
import org.graalvm.compiler.lir.LIRValueUtil;
import org.graalvm.compiler.lir.LabelRef;
import org.graalvm.compiler.lir.StandardOp;
import org.graalvm.compiler.lir.SwitchStrategy;
import org.graalvm.compiler.lir.Variable;
import org.graalvm.compiler.lir.aarch64.AArch64AddressValue;
import org.graalvm.compiler.lir.aarch64.AArch64ArithmeticOp;
import org.graalvm.compiler.lir.aarch64.AArch64ArrayCompareToOp;
import org.graalvm.compiler.lir.aarch64.AArch64ArrayEqualsOp;
import org.graalvm.compiler.lir.aarch64.AArch64ByteSwapOp;
import org.graalvm.compiler.lir.aarch64.AArch64Compare;
import org.graalvm.compiler.lir.aarch64.AArch64ControlFlow;
import org.graalvm.compiler.lir.aarch64.AArch64ControlFlow.BranchOp;
import org.graalvm.compiler.lir.aarch64.AArch64ControlFlow.CompareBranchZeroOp;
import org.graalvm.compiler.lir.aarch64.AArch64ControlFlow.CondMoveOp;
import org.graalvm.compiler.lir.aarch64.AArch64ControlFlow.CondSetOp;
import org.graalvm.compiler.lir.aarch64.AArch64ControlFlow.StrategySwitchOp;
import org.graalvm.compiler.lir.aarch64.AArch64ControlFlow.TableSwitchOp;
import org.graalvm.compiler.lir.aarch64.AArch64AtomicMove.AtomicReadAndAddLSEOp;
import org.graalvm.compiler.lir.aarch64.AArch64AtomicMove.AtomicReadAndAddOp;
import org.graalvm.compiler.lir.aarch64.AArch64AtomicMove.AtomicReadAndWriteOp;
import org.graalvm.compiler.lir.aarch64.AArch64AtomicMove.CompareAndSwapOp;
import org.graalvm.compiler.lir.aarch64.AArch64LIRFlagsVersioned;
import org.graalvm.compiler.lir.aarch64.AArch64Move;
import org.graalvm.compiler.lir.aarch64.AArch64Move.MembarOp;
import org.graalvm.compiler.lir.aarch64.AArch64PauseOp;
import org.graalvm.compiler.lir.aarch64.AArch64SpeculativeBarrier;
import org.graalvm.compiler.lir.aarch64.AArch64ZeroMemoryOp;
import org.graalvm.compiler.lir.gen.LIRGenerationResult;
import org.graalvm.compiler.lir.gen.LIRGenerator;
import org.graalvm.compiler.phases.util.Providers;

import jdk.vm.ci.aarch64.AArch64;
import jdk.vm.ci.aarch64.AArch64Kind;
import jdk.vm.ci.code.CallingConvention;
import jdk.vm.ci.code.RegisterValue;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.JavaKind;
import jdk.vm.ci.meta.PlatformKind;
import jdk.vm.ci.meta.PrimitiveConstant;
import jdk.vm.ci.meta.Value;
import jdk.vm.ci.meta.ValueKind;

public abstract class AArch64LIRGenerator extends LIRGenerator {

    public AArch64LIRGenerator(LIRKindTool lirKindTool, AArch64ArithmeticLIRGenerator arithmeticLIRGen, MoveFactory moveFactory, Providers providers, LIRGenerationResult lirGenRes) {
        super(lirKindTool, arithmeticLIRGen, moveFactory, providers, lirGenRes);
    }

    /**
     * Checks whether the supplied constant can be used without loading it into a register for
     * store operations, i.e., on the right-hand side of a memory access.
     *
     * @param c The constant to check.
     * @return True if the constant can be used directly, false if the constant needs to be in a
     *         register.
     */
    protected static final boolean canStoreConstant(JavaConstant c) {
        // Our own code never calls this since we can't make a definite statement about whether or
        // not we can inline a constant without knowing what kind of operation we execute. Let's be
        // optimistic here and fix up mistakes later.
        return true;
    }

    /**
     * If val denotes the stack pointer, move it to another location. This is necessary since most
     * ops cannot handle the stack pointer as input or output.
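     * (In the A64 encoding, register number 31 names the zero register rather than sp for most
     * data-processing instructions, so sp generally has to be copied into an ordinary register
     * first.)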
     */
    public AllocatableValue moveSp(AllocatableValue val) {
        if (val instanceof RegisterValue && ((RegisterValue) val).getRegister().equals(sp)) {
            assert val.getPlatformKind() == AArch64Kind.QWORD : "Stack pointer must be long";
            return emitMove(val);
        }
        return val;
    }

    /**
     * AArch64 cannot use anything smaller than a word in any instruction other than load and store.
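     * Sub-word kinds are therefore widened for register use: BYTE and the 16-bit WORD kind become
     * DWORD (32 bit), while wider and SIMD kinds are returned unchanged.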
     */
    @Override
    public <K extends ValueKind<K>> K toRegisterKind(K kind) {
        switch ((AArch64Kind) kind.getPlatformKind()) {
            case BYTE:
            case WORD:
                return kind.changeType(AArch64Kind.DWORD);
            default:
                return kind;
        }
    }

    @Override
    public void emitNullCheck(Value address, LIRFrameState state) {
        append(new AArch64Move.NullCheckOp(asAddressValue(address), state));
    }

    @Override
    public Variable emitAddress(AllocatableValue stackslot) {
        Variable result = newVariable(LIRKind.value(target().arch.getWordKind()));
        append(new AArch64Move.StackLoadAddressOp(result, stackslot));
        return result;
    }

    public AArch64AddressValue asAddressValue(Value address) {
        if (address instanceof AArch64AddressValue) {
            return (AArch64AddressValue) address;
        } else {
            return new AArch64AddressValue(address.getValueKind(), asAllocatable(address), Value.ILLEGAL, 0, 1, AddressingMode.BASE_REGISTER_ONLY);
        }
    }

    @Override
    public Variable emitLogicCompareAndSwap(LIRKind accessKind, Value address, Value expectedValue, Value newValue, Value trueValue, Value falseValue) {
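        // Sketch of the intended instruction sequence (without LSE this is typically a
        // load-exclusive/store-exclusive retry loop; the exact code is chosen by CompareAndSwapOp):
        //   retry: ldaxr  prev, [addr]
        //          cmp    prev, expected
        //          b.ne   done
        //          stlxr  scratch, newValue, [addr]
        //          cbnz   scratch, retry
        //   done:                              ; EQ is set iff prev == expected
        // The CondMoveOp below selects trueValue/falseValue based on that EQ flag.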
        Variable prevValue = newVariable(expectedValue.getValueKind());
        Variable scratch = newVariable(LIRKind.value(AArch64Kind.DWORD));
        append(new CompareAndSwapOp(prevValue, loadReg(expectedValue), loadReg(newValue), asAllocatable(address), scratch));
        assert trueValue.getValueKind().equals(falseValue.getValueKind());
        Variable result = newVariable(trueValue.getValueKind());
        append(new CondMoveOp(result, ConditionFlag.EQ, asAllocatable(trueValue), asAllocatable(falseValue)));
        return result;
    }

    @Override
    public Variable emitValueCompareAndSwap(LIRKind accessKind, Value address, Value expectedValue, Value newValue) {
        Variable result = newVariable(newValue.getValueKind());
        Variable scratch = newVariable(LIRKind.value(AArch64Kind.WORD));
        append(new CompareAndSwapOp(result, loadReg(expectedValue), loadReg(newValue), asAllocatable(address), scratch));
        return result;
    }

    @Override
    public Value emitAtomicReadAndWrite(Value address, ValueKind<?> kind, Value newValue) {
        Variable result = newVariable(kind);
        Variable scratch = newVariable(kind);
        append(new AtomicReadAndWriteOp((AArch64Kind) kind.getPlatformKind(), asAllocatable(result), asAllocatable(address), asAllocatable(newValue), asAllocatable(scratch)));
        return result;
    }

    @Override
    public Value emitAtomicReadAndAdd(Value address, ValueKind<?> kind, Value delta) {
        Variable result = newVariable(kind);
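        // With the ARMv8.1 LSE atomics the read-and-add can be a single instruction (e.g. ldaddal);
        // without LSE a load-exclusive/store-exclusive retry loop is emitted instead. (Sketch of
        // intent; the concrete instructions are chosen by the two ops below.)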
        if (AArch64LIRFlagsVersioned.useLSE(target().arch)) {
            append(new AtomicReadAndAddLSEOp((AArch64Kind) kind.getPlatformKind(), asAllocatable(result), asAllocatable(address), asAllocatable(delta)));
        } else {
            append(new AtomicReadAndAddOp((AArch64Kind) kind.getPlatformKind(), asAllocatable(result), asAllocatable(address), delta));
        }
        return result;
    }

    @Override
    public void emitMembar(int barriers) {
        int necessaryBarriers = target().arch.requiredBarriers(barriers);
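        // On a multiprocessor the remaining barriers are realized with a data memory barrier (dmb);
        // on a uniprocessor no instruction is needed. (Sketch of intent; MembarOp chooses the
        // concrete barrier.)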
        if (target().isMP && necessaryBarriers != 0) {
            append(new MembarOp(necessaryBarriers));
        }
    }

    @Override
    public void emitJump(LabelRef label) {
        assert label != null;
        append(new StandardOp.JumpOp(label));
    }

    @Override
    public void emitOverflowCheckBranch(LabelRef overflow, LabelRef noOverflow, LIRKind cmpKind, double overflowProbability) {
        append(new AArch64ControlFlow.BranchOp(ConditionFlag.VS, overflow, noOverflow, overflowProbability));
    }

    /**
     * Branches to trueDestination if (left & right) == 0, otherwise to falseDestination.
     *
     * @param left Integer kind. Non null.
     * @param right Integer kind. Non null.
     * @param trueDestination destination if (left & right) == 0. Non null.
     * @param falseDestination destination if (left & right) != 0. Non null.
     * @param trueSuccessorProbability historic probability that the comparison is true.
     */
    @Override
    public void emitIntegerTestBranch(Value left, Value right, LabelRef trueDestination, LabelRef falseDestination, double trueSuccessorProbability) {
        assert ((AArch64Kind) left.getPlatformKind()).isInteger() && left.getPlatformKind() == right.getPlatformKind();
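        // The ANDS result is unused; only the flags matter. Roughly (register names illustrative):
        //   ands x_tmp, x_left, x_right
        //   b.eq trueDestination
        //   b    falseDestination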
        ((AArch64ArithmeticLIRGenerator) getArithmetic()).emitBinary(LIRKind.combine(left, right), AArch64ArithmeticOp.ANDS, true, left, right);
        append(new AArch64ControlFlow.BranchOp(ConditionFlag.EQ, trueDestination, falseDestination, trueSuccessorProbability));
    }

    /**
     * Conditionally move trueValue into a new variable if cond + unorderedIsTrue is true, else
     * falseValue.
     *
     * @param left Arbitrary value. Has to have same type as right. Non null.
     * @param right Arbitrary value. Has to have same type as left. Non null.
     * @param cond condition that decides whether to move trueValue or falseValue into result. Non
     *            null.
     * @param unorderedIsTrue defines whether floating-point comparisons consider unordered true or
     *            not. Ignored for integer comparisons.
     * @param trueValue arbitrary value same type as falseValue. Non null.
     * @param falseValue arbitrary value same type as trueValue. Non null.
     * @return value containing trueValue if cond + unorderedIsTrue is true, else falseValue. Non
     *         null.
     */
    @Override
    public Variable emitConditionalMove(PlatformKind cmpKind, Value left, final Value right, Condition cond, boolean unorderedIsTrue, Value trueValue, Value falseValue) {
        AArch64ArithmeticLIRGenerator arithLir = ((AArch64ArithmeticLIRGenerator) arithmeticLIRGen);
        Value actualRight = right;
        if (isJavaConstant(actualRight) && arithLir.mustReplaceNullWithNullRegister((asJavaConstant(actualRight)))) {
            actualRight = arithLir.getNullRegisterValue();
        }
        boolean mirrored = emitCompare(cmpKind, left, actualRight, cond, unorderedIsTrue);
        Condition finalCondition = mirrored ? cond.mirror() : cond;
        boolean finalUnorderedIsTrue = mirrored ? !unorderedIsTrue : unorderedIsTrue;
        ConditionFlag cmpCondition = toConditionFlag(((AArch64Kind) cmpKind).isInteger(), finalCondition, finalUnorderedIsTrue);
        Variable result = newVariable(trueValue.getValueKind());

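        // When trueValue/falseValue are the constants 1/0 (or 0/1) a single conditional set (cset)
        // suffices; otherwise both values are loaded and a conditional select (csel/fcsel) picks
        // between them.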
        if (isIntConstant(trueValue, 1) && isIntConstant(falseValue, 0)) {
            append(new CondSetOp(result, cmpCondition));
        } else if (isIntConstant(trueValue, 0) && isIntConstant(falseValue, 1)) {
            append(new CondSetOp(result, cmpCondition.negate()));
        } else {
            append(new CondMoveOp(result, cmpCondition, loadReg(trueValue), loadReg(falseValue)));
        }
        return result;
    }

    @Override
    public void emitCompareBranch(PlatformKind cmpKind, Value left, final Value right, Condition cond, boolean unorderedIsTrue, LabelRef trueDestination, LabelRef falseDestination,
                    double trueDestinationProbability) {
        Value actualRight = right;
        if (cond == Condition.EQ) {
            // emit cbz instruction for IsNullNode.
            assert !LIRValueUtil.isNullConstant(left) : "emitNullCheckBranch()'s null input should be in right.";
            AArch64ArithmeticLIRGenerator arithLir = ((AArch64ArithmeticLIRGenerator) arithmeticLIRGen);
            if (LIRValueUtil.isNullConstant(actualRight)) {
                JavaConstant rightConstant = asJavaConstant(actualRight);
                if (arithLir.mustReplaceNullWithNullRegister(rightConstant)) {
                    actualRight = arithLir.getNullRegisterValue();
                } else {
                    append(new CompareBranchZeroOp(asAllocatable(left), trueDestination, falseDestination,
                                    trueDestinationProbability));
                    return;
                }
            }

            // emit cbz instruction for IntegerEquals when any of the inputs is zero.
            AArch64Kind kind = (AArch64Kind) cmpKind;
            if (kind.isInteger()) {
                if (isIntConstant(left, 0)) {
                    append(new CompareBranchZeroOp(asAllocatable(actualRight), trueDestination, falseDestination, trueDestinationProbability));
                    return;
                } else if (isIntConstant(actualRight, 0)) {
                    append(new CompareBranchZeroOp(asAllocatable(left), trueDestination, falseDestination, trueDestinationProbability));
                    return;
                }
            }
        }

        boolean mirrored = emitCompare(cmpKind, left, actualRight, cond, unorderedIsTrue);
        Condition finalCondition = mirrored ? cond.mirror() : cond;
        boolean finalUnorderedIsTrue = mirrored ? !unorderedIsTrue : unorderedIsTrue;
        ConditionFlag cmpCondition = toConditionFlag(((AArch64Kind) cmpKind).isInteger(), finalCondition, finalUnorderedIsTrue);
        append(new BranchOp(cmpCondition, trueDestination, falseDestination, trueDestinationProbability));
    }

    private static ConditionFlag toConditionFlag(boolean isInt, Condition cond, boolean unorderedIsTrue) {
        return isInt ? toIntConditionFlag(cond) : toFloatConditionFlag(cond, unorderedIsTrue);
    }

    /**
     * Takes a Condition and an unorderedIsTrue flag and returns the correct AArch64-specific
     * ConditionFlag. Note: This is only correct if the emitCompare code for floats has correctly
     * handled the case of 'EQ && unorderedIsTrue' and 'NE && !unorderedIsTrue'!
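     *
     * For example, an unordered fcmp result sets the C and V flags (NZCV = 0011), so LO (C clear)
     * is false for unordered operands while LT (N != V) is true; Condition.LT therefore maps to LT
     * when unorderedIsTrue and to LO otherwise.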
     */
    private static ConditionFlag toFloatConditionFlag(Condition cond, boolean unorderedIsTrue) {
        switch (cond) {
            case LT:
                return unorderedIsTrue ? ConditionFlag.LT : ConditionFlag.LO;
            case LE:
                return unorderedIsTrue ? ConditionFlag.LE : ConditionFlag.LS;
            case GE:
                return unorderedIsTrue ? ConditionFlag.PL : ConditionFlag.GE;
            case GT:
                return unorderedIsTrue ? ConditionFlag.HI : ConditionFlag.GT;
            case EQ:
                return ConditionFlag.EQ;
            case NE:
                return ConditionFlag.NE;
            default:
                throw GraalError.shouldNotReachHere();
        }
    }

    /**
     * Takes a Condition and returns the correct AArch64-specific ConditionFlag.
     */
    private static ConditionFlag toIntConditionFlag(Condition cond) {
        switch (cond) {
            case EQ:
                return ConditionFlag.EQ;
            case NE:
                return ConditionFlag.NE;
            case LT:
                return ConditionFlag.LT;
            case LE:
                return ConditionFlag.LE;
            case GT:
                return ConditionFlag.GT;
            case GE:
                return ConditionFlag.GE;
            case AE:
                return ConditionFlag.HS;
            case BE:
                return ConditionFlag.LS;
            case AT:
                return ConditionFlag.HI;
            case BT:
                return ConditionFlag.LO;
            default:
                throw GraalError.shouldNotReachHere();
        }
    }

    /**
     * This method emits the compare instruction, and may reorder the operands. It returns true if
     * it did so.
     *
     * @param a the left operand of the comparison. Has to have same type as b. Non null.
     * @param b the right operand of the comparison. Has to have same type as a. Non null.
     * @return true if mirrored (i.e. "b cmp a" instead of "a cmp b" was done).
     */
    protected boolean emitCompare(PlatformKind cmpKind, Value a, Value b, Condition condition, boolean unorderedIsTrue) {
        Value left;
        Value right;
        boolean mirrored;
        AArch64Kind kind = (AArch64Kind) cmpKind;
        if (kind.isInteger()) {
            Value aExt = a;
            Value bExt = b;

            int compareBytes = cmpKind.getSizeInBytes();
            // AArch64 compares 32 or 64 bits: sign extend a and b as required.
            if (compareBytes < a.getPlatformKind().getSizeInBytes()) {
                aExt = arithmeticLIRGen.emitSignExtend(a, compareBytes * 8, 64);
            }
            if (compareBytes < b.getPlatformKind().getSizeInBytes()) {
                bExt = arithmeticLIRGen.emitSignExtend(b, compareBytes * 8, 64);
            }

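            // Keep a potential constant on the right-hand side: if b is a variable, swap the
            // operands and report the mirroring to the caller (who flips the condition), since
            // cmp can only encode an immediate as its second operand.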
            if (LIRValueUtil.isVariable(bExt)) {
                left = load(bExt);
                right = loadNonConst(aExt);
                mirrored = true;
            } else {
                left = load(aExt);
                right = loadNonConst(bExt);
                mirrored = false;
            }
            append(new AArch64Compare.CompareOp(left, loadNonCompareConst(right)));
        } else if (kind.isSIMD()) {
            if (AArch64Compare.FloatCompareOp.isFloatCmpConstant(a, condition, unorderedIsTrue)) {
                left = load(b);
                right = a;
                mirrored = true;
            } else if (AArch64Compare.FloatCompareOp.isFloatCmpConstant(b, condition, unorderedIsTrue)) {
                left = load(a);
                right = b;
                mirrored = false;
            } else {
                left = load(a);
                right = loadReg(b);
                mirrored = false;
            }
            append(new AArch64Compare.FloatCompareOp(left, asAllocatable(right), condition, unorderedIsTrue));
        } else {
            throw GraalError.shouldNotReachHere();
        }
        return mirrored;
    }

    /**
     * If value is a constant that cannot be used directly with a gpCompare instruction, load it
     * into a register and return the register; otherwise return the constant value unchanged.
     */
    protected Value loadNonCompareConst(Value value) {
        if (!isCompareConstant(value)) {
            return loadReg(value);
        }
        return value;
    }

    /**
     * Checks whether value can be used directly with a gpCompare instruction. This is <b>not</b>
     * the same as {@link AArch64ArithmeticLIRGenerator#isArithmeticConstant(JavaConstant)}, because
     * 0.0 is a valid compare constant for floats, while there are no arithmetic constants for
     * floats.
     *
     * @param value any type. Non null.
     * @return true if value can be used directly in comparison instruction, false otherwise.
     */
    public boolean isCompareConstant(Value value) {
        if (isJavaConstant(value)) {
            JavaConstant constant = asJavaConstant(value);
            if (constant instanceof PrimitiveConstant) {
                final long longValue = constant.asLong();
                long maskedValue;
                switch (constant.getJavaKind()) {
                    case Boolean:
                    case Byte:
                        maskedValue = longValue & 0xFF;
                        break;
                    case Char:
                    case Short:
                        maskedValue = longValue & 0xFFFF;
                        break;
                    case Int:
                        maskedValue = longValue & 0xFFFF_FFFFL;
                        break;
                    case Long:
                        maskedValue = longValue;
                        break;
                    default:
                        throw GraalError.shouldNotReachHere();
                }
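                // isArithmeticImmediate accepts (roughly) the add/sub immediate form: a 12-bit
                // unsigned value, optionally shifted left by 12, as used by cmp/cmn. (Sketch; the
                // precise rule lives in AArch64MacroAssembler.)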
                return AArch64MacroAssembler.isArithmeticImmediate(maskedValue);
            } else {
                return constant.isDefaultForKind();
            }
        }
        return false;
    }

    /**
     * Moves trueValue into result if (left & right) == 0, else falseValue.
     *
     * @param left Integer kind. Non null.
     * @param right Integer kind. Non null.
     * @param trueValue Integer kind. Non null.
     * @param falseValue Integer kind. Non null.
     * @return virtual register containing trueValue if (left & right) == 0, else falseValue.
     */
    @Override
    public Variable emitIntegerTestMove(Value left, Value right, Value trueValue, Value falseValue) {
        assert ((AArch64Kind) left.getPlatformKind()).isInteger() && ((AArch64Kind) right.getPlatformKind()).isInteger();
        assert ((AArch64Kind) trueValue.getPlatformKind()).isInteger() && ((AArch64Kind) falseValue.getPlatformKind()).isInteger();
        ((AArch64ArithmeticLIRGenerator) getArithmetic()).emitBinary(left.getValueKind(), AArch64ArithmeticOp.ANDS, true, left, right);
        Variable result = newVariable(trueValue.getValueKind());

        if (isIntConstant(trueValue, 1) && isIntConstant(falseValue, 0)) {
            append(new CondSetOp(result, ConditionFlag.EQ));
        } else if (isIntConstant(trueValue, 0) && isIntConstant(falseValue, 1)) {
            append(new CondSetOp(result, ConditionFlag.NE));
        } else {
            append(new CondMoveOp(result, ConditionFlag.EQ, load(trueValue), load(falseValue)));
        }
        return result;
    }

    @Override
    public void emitStrategySwitch(SwitchStrategy strategy, Variable key, LabelRef[] keyTargets, LabelRef defaultTarget) {
        append(createStrategySwitchOp(strategy, keyTargets, defaultTarget, key, newVariable(key.getValueKind()), AArch64LIRGenerator::toIntConditionFlag));
    }

    protected StrategySwitchOp createStrategySwitchOp(SwitchStrategy strategy, LabelRef[] keyTargets, LabelRef defaultTarget, Variable key, AllocatableValue scratchValue,
                    Function<Condition, ConditionFlag> converter) {
        return new StrategySwitchOp(strategy, keyTargets, defaultTarget, key, scratchValue, converter);
    }

    @Override
    protected void emitTableSwitch(int lowKey, LabelRef defaultTarget, LabelRef[] targets, Value key) {
        append(new TableSwitchOp(lowKey, defaultTarget, targets, key, newVariable(LIRKind.value(target().arch.getWordKind())), newVariable(key.getValueKind())));
    }

    @Override
    public Variable emitByteSwap(Value input) {
        Variable result = newVariable(LIRKind.combine(input));
        append(new AArch64ByteSwapOp(result, input));
        return result;
    }

    @Override
    public Variable emitArrayCompareTo(JavaKind kind1, JavaKind kind2, Value array1, Value array2, Value length1, Value length2) {
        LIRKind resultKind = LIRKind.value(AArch64Kind.DWORD);
        // DMS TODO: check calling convention and registers used
        RegisterValue res = AArch64.r0.asValue(resultKind);
        RegisterValue cnt1 = AArch64.r1.asValue(length1.getValueKind());
        RegisterValue cnt2 = AArch64.r2.asValue(length2.getValueKind());
        emitMove(cnt1, length1);
        emitMove(cnt2, length2);
        append(new AArch64ArrayCompareToOp(this, kind1, kind2, res, array1, array2, cnt1, cnt2));
        Variable result = newVariable(resultKind);
        emitMove(result, res);
        return result;
    }

    @Override
    public Variable emitArrayEquals(JavaKind kind, Value array1, Value array2, Value length, boolean directPointers) {
        Variable result = newVariable(LIRKind.value(AArch64Kind.DWORD));
        append(new AArch64ArrayEqualsOp(this, kind, result, array1, array2, asAllocatable(length), directPointers));
        return result;
    }

    @Override
    protected JavaConstant zapValueForKind(PlatformKind kind) {
        long dead = 0xDEADDEADDEADDEADL;
        switch ((AArch64Kind) kind) {
            case BYTE:
                return JavaConstant.forByte((byte) dead);
            case WORD:
                return JavaConstant.forShort((short) dead);
            case DWORD:
                return JavaConstant.forInt((int) dead);
            case QWORD:
                return JavaConstant.forLong(dead);
            case SINGLE:
                return JavaConstant.forFloat(Float.intBitsToFloat((int) dead));
            case DOUBLE:
                return JavaConstant.forDouble(Double.longBitsToDouble(dead));
            default:
                throw GraalError.shouldNotReachHere();
        }
    }

    /**
     * Loads value into a virtual register. Contrary to {@link #load(Value)} this handles
     * RegisterValues (i.e. values corresponding to fixed physical registers) correctly, by not
     * creating an unnecessary move into a virtual register.
     *
     * This avoids generating code like {@code mov x0, x19; ldr x0, [x0]} (where x19 is the fixed
     * thread register) when a single {@code ldr x0, [x19]} would do.
     */
    protected AllocatableValue loadReg(Value val) {
        if (!(val instanceof Variable || val instanceof RegisterValue)) {
            return emitMove(val);
        }
        return (AllocatableValue) val;
    }

    @Override
    public void emitPause() {
        append(new AArch64PauseOp());
    }

    public abstract void emitCCall(long address, CallingConvention nativeCallingConvention, Value[] args);

    @Override
    public void emitSpeculationFence() {
        append(new AArch64SpeculativeBarrier());
    }

    @Override
    public void emitZeroMemory(Value address, Value length) {
        // Value address is 8-byte aligned; value length is a multiple of 8.
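        // The trailing (false, -1) arguments are assumed to disable the DC ZVA fast path
        // (useDcZva, zvaLength); see AArch64ZeroMemoryOp for the authoritative meaning.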
        append(new AArch64ZeroMemoryOp(asAllocatable(address), asAllocatable(length), false, -1));
    }
}