/*
 * Copyright (c) 2013, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */


package org.graalvm.compiler.lir.aarch64;

import static jdk.vm.ci.aarch64.AArch64.lr;
import static jdk.vm.ci.code.ValueUtil.asAllocatableValue;
import static jdk.vm.ci.code.ValueUtil.asRegister;
import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.HINT;
import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.ILLEGAL;
import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.REG;

import java.util.function.Function;

import org.graalvm.compiler.asm.Label;
import org.graalvm.compiler.asm.aarch64.AArch64Assembler;
import org.graalvm.compiler.asm.aarch64.AArch64Assembler.ConditionFlag;
import org.graalvm.compiler.asm.aarch64.AArch64Assembler.ExtendType;
import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler;
import org.graalvm.compiler.code.CompilationResult.JumpTable;
import org.graalvm.compiler.core.common.LIRKind;
import org.graalvm.compiler.core.common.NumUtil;
import org.graalvm.compiler.core.common.calc.Condition;
import org.graalvm.compiler.debug.GraalError;
import org.graalvm.compiler.lir.ConstantValue;
import org.graalvm.compiler.lir.LIRInstructionClass;
import org.graalvm.compiler.lir.LabelRef;
import org.graalvm.compiler.lir.Opcode;
import org.graalvm.compiler.lir.StandardOp;
import org.graalvm.compiler.lir.StandardOp.BlockEndOp;
import org.graalvm.compiler.lir.SwitchStrategy;
import org.graalvm.compiler.lir.SwitchStrategy.BaseSwitchClosure;
import org.graalvm.compiler.lir.Variable;
import org.graalvm.compiler.lir.asm.CompilationResultBuilder;

import jdk.vm.ci.aarch64.AArch64Kind;
import jdk.vm.ci.code.Register;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.Constant;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.Value;

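/**
 * Container for the AArch64 LIR instructions that implement control flow: returns, conditional and
 * unconditional branches, conditional moves, and the strategy- and table-based switch lowerings.
 */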
public class AArch64ControlFlow {
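    /**
     * Returns from the current method by tearing down the frame and branching to the address in
     * the link register.
     */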
    public static final class ReturnOp extends AArch64BlockEndOp implements BlockEndOp {
        public static final LIRInstructionClass<ReturnOp> TYPE = LIRInstructionClass.create(ReturnOp.class);
        @Use({REG, ILLEGAL}) protected Value x;

        public ReturnOp(Value x) {
            super(TYPE);
            this.x = x;
        }

        @Override
        protected void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            crb.frameContext.leave(crb);
            masm.ret(lr);
        }
    }

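    /**
     * Common base class for two-way branches. Subclasses emit only the (possibly negated)
     * conditional jump; this class decides which destination gets the jump and which one can be
     * reached by falling through, based on the successor edges and the branch probability.
     */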
    public abstract static class AbstractBranchOp extends AArch64BlockEndOp implements StandardOp.BranchOp {
        private final LabelRef trueDestination;
        private final LabelRef falseDestination;

        private final double trueDestinationProbability;

        private AbstractBranchOp(LIRInstructionClass<? extends AbstractBranchOp> c, LabelRef trueDestination, LabelRef falseDestination, double trueDestinationProbability) {
            super(c);
            this.trueDestination = trueDestination;
            this.falseDestination = falseDestination;
            this.trueDestinationProbability = trueDestinationProbability;
        }

        protected abstract void emitBranch(CompilationResultBuilder crb, AArch64MacroAssembler masm, LabelRef target, boolean negate);

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            /*
             * If one of the destinations is the successor (fall-through) block, branch only on the
             * other edge and fall through to the successor. If neither destination is a successor
             * edge, use the branch probability so that the conditional jump is taken as often as
             * possible, avoiding the execution of two branch instructions on the common path.
             */
            if (crb.isSuccessorEdge(trueDestination)) {
                emitBranch(crb, masm, falseDestination, true);
            } else if (crb.isSuccessorEdge(falseDestination)) {
                emitBranch(crb, masm, trueDestination, false);
            } else if (trueDestinationProbability < 0.5) {
                emitBranch(crb, masm, falseDestination, true);
                masm.jmp(trueDestination.label());
            } else {
                emitBranch(crb, masm, trueDestination, false);
                masm.jmp(falseDestination.label());
            }
        }
    }

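    /**
     * Branches on a condition flag that was established by a preceding instruction (typically a
     * compare).
     */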
    public static class BranchOp extends AbstractBranchOp implements StandardOp.BranchOp {
        public static final LIRInstructionClass<BranchOp> TYPE = LIRInstructionClass.create(BranchOp.class);

        private final AArch64Assembler.ConditionFlag condition;

        public BranchOp(AArch64Assembler.ConditionFlag condition, LabelRef trueDestination, LabelRef falseDestination, double trueDestinationProbability) {
            super(TYPE, trueDestination, falseDestination, trueDestinationProbability);
            this.condition = condition;
        }

        @Override
        protected void emitBranch(CompilationResultBuilder crb, AArch64MacroAssembler masm, LabelRef target, boolean negate) {
            AArch64Assembler.ConditionFlag finalCond = negate ? condition.negate() : condition;
            masm.branchConditionally(finalCond, target.label());
        }
    }

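    /**
     * Compare-and-branch against zero, emitted as a single CBZ/CBNZ instruction so that no
     * explicit compare is needed.
     */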
    public static class CompareBranchZeroOp extends AbstractBranchOp implements StandardOp.BranchOp {
        public static final LIRInstructionClass<CompareBranchZeroOp> TYPE = LIRInstructionClass.create(CompareBranchZeroOp.class);

        @Use(REG) private AllocatableValue value;

        public CompareBranchZeroOp(AllocatableValue value, LabelRef trueDestination, LabelRef falseDestination, double trueDestinationProbability) {
            super(TYPE, trueDestination, falseDestination, trueDestinationProbability);
            this.value = value;
        }

        @Override
        protected void emitBranch(CompilationResultBuilder crb, AArch64MacroAssembler masm, LabelRef target, boolean negate) {
            AArch64Kind kind = (AArch64Kind) this.value.getPlatformKind();
            assert kind.isInteger();
            int size = kind.getSizeInBytes() * Byte.SIZE;

            if (negate) {
                masm.cbnz(size, asRegister(this.value), target.label());
            } else {
                masm.cbz(size, asRegister(this.value), target.label());
            }
        }
    }

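    /**
     * Tests a single bit of {@code value} and branches on the result using TBZ/TBNZ. Since these
     * instructions only encode a 16-bit signed byte offset, a far target is reached by inverting
     * the test and jumping over an unconditional branch, roughly:
     *
     * <pre>
     *       tbnz  value, #index, skip   // inverted test
     *       b     target                // unconditional far branch
     * skip:
     * </pre>
     */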
    public static class BitTestAndBranchOp extends AbstractBranchOp implements StandardOp.BranchOp {
        public static final LIRInstructionClass<BitTestAndBranchOp> TYPE = LIRInstructionClass.create(BitTestAndBranchOp.class);

        @Use protected AllocatableValue value;
        private final int index;

        public BitTestAndBranchOp(LabelRef trueDestination, LabelRef falseDestination, AllocatableValue value, double trueDestinationProbability, int index) {
            super(TYPE, trueDestination, falseDestination, trueDestinationProbability);
            this.value = value;
            this.index = index;
        }

        @Override
        protected void emitBranch(CompilationResultBuilder crb, AArch64MacroAssembler masm, LabelRef target, boolean negate) {
            ConditionFlag cond = negate ? ConditionFlag.NE : ConditionFlag.EQ;
            Label label = target.label();
            boolean isFarBranch;

            if (label.isBound()) {
                // label.position() is a byte-based index. TBZ/TBNZ encode a 14-bit signed offset
                // measured in instructions; since AArch64 instructions are 4 bytes long, this
                // corresponds to a 16-bit signed byte offset.
                isFarBranch = !NumUtil.isSignedNbit(16, masm.position() - label.position());
            } else {
                // The maximum range of TBZ/TBNZ is +-2^13 instructions. We estimate that each LIR
                // instruction emits two AArch64 instructions on average, so we test against a
                // maximum offset of 2^12 LIR instructions.
                int maxLIRDistance = (1 << 12);
                isFarBranch = !crb.labelWithinRange(this, label, maxLIRDistance);
            }

            if (isFarBranch) {
                cond = cond.negate();
                label = new Label();
            }

            if (cond == ConditionFlag.EQ) {
                masm.tbz(asRegister(value), index, label);
            } else {
                masm.tbnz(asRegister(value), index, label);
            }

            if (isFarBranch) {
                masm.jmp(target.label());
                masm.bind(label);
            }
        }
    }

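    /**
     * Conditional move: {@code result} receives {@code trueValue} if the condition holds and
     * {@code falseValue} otherwise, using the macro assembler's integer or floating-point
     * conditional-select support.
     */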
    @Opcode("CMOVE")
    public static class CondMoveOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<CondMoveOp> TYPE = LIRInstructionClass.create(CondMoveOp.class);

        @Def protected Value result;
        @Use protected Value trueValue;
        @Use protected Value falseValue;
        private final AArch64Assembler.ConditionFlag condition;

        public CondMoveOp(Variable result, AArch64Assembler.ConditionFlag condition, Value trueValue, Value falseValue) {
            super(TYPE);
            assert trueValue.getPlatformKind() == falseValue.getPlatformKind() && trueValue.getPlatformKind() == result.getPlatformKind();
            this.result = result;
            this.condition = condition;
            this.trueValue = trueValue;
            this.falseValue = falseValue;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            AArch64Kind kind = (AArch64Kind) trueValue.getPlatformKind();
            int size = kind.getSizeInBytes() * Byte.SIZE;
            if (kind.isInteger()) {
                masm.cmov(size, asRegister(result), asRegister(trueValue), asRegister(falseValue), condition);
            } else {
                masm.fcmov(size, asRegister(result), asRegister(trueValue), asRegister(falseValue), condition);
            }
        }
    }

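    /**
     * Sets {@code result} to 1 if the condition holds and to 0 otherwise.
     */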
    public static class CondSetOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<CondSetOp> TYPE = LIRInstructionClass.create(CondSetOp.class);

        @Def protected Value result;
        private final AArch64Assembler.ConditionFlag condition;

        public CondSetOp(Variable result, AArch64Assembler.ConditionFlag condition) {
            super(TYPE);
            this.result = result;
            this.condition = condition;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            int size = result.getPlatformKind().getSizeInBytes() * Byte.SIZE;
            masm.cset(size, asRegister(result), condition);
        }
    }

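    /**
     * Switch lowered to a sequence of compares and conditional branches, in the order chosen by
     * the given {@link SwitchStrategy}. A scratch register is reserved for key constants that
     * cannot be encoded as a comparison immediate.
     */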
    public static class StrategySwitchOp extends AArch64BlockEndOp implements StandardOp.BlockEndOp {
        public static final LIRInstructionClass<StrategySwitchOp> TYPE = LIRInstructionClass.create(StrategySwitchOp.class);

        private final Constant[] keyConstants;
        protected final SwitchStrategy strategy;
        private final Function<Condition, ConditionFlag> converter;
        private final LabelRef[] keyTargets;
        private final LabelRef defaultTarget;
        @Alive protected Value key;
        // TODO (das) This could be optimized: we only need the scratch register in the case of a
        // data patch or an immediate that is too large for the compare instruction.
        @Temp protected Value scratch;

        public StrategySwitchOp(SwitchStrategy strategy, LabelRef[] keyTargets, LabelRef defaultTarget, Value key, Value scratch,
                        Function<Condition, ConditionFlag> converter) {
            this(TYPE, strategy, keyTargets, defaultTarget, key, scratch, converter);
        }

        protected StrategySwitchOp(LIRInstructionClass<? extends StrategySwitchOp> c, SwitchStrategy strategy, LabelRef[] keyTargets, LabelRef defaultTarget, Value key, Value scratch,
                        Function<Condition, ConditionFlag> converter) {
            super(c);
            this.strategy = strategy;
            this.converter = converter;
            this.keyConstants = strategy.getKeyConstants();
            this.keyTargets = keyTargets;
            this.defaultTarget = defaultTarget;
            this.key = key;
            this.scratch = scratch;
            assert keyConstants.length == keyTargets.length;
            assert keyConstants.length == strategy.keyProbabilities.length;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            strategy.run(new SwitchClosure(asRegister(key), crb, masm));
        }

        public class SwitchClosure extends BaseSwitchClosure {

            protected final Register keyRegister;
            protected final CompilationResultBuilder crb;
            protected final AArch64MacroAssembler masm;

            protected SwitchClosure(Register keyRegister, CompilationResultBuilder crb, AArch64MacroAssembler masm) {
                super(crb, masm, keyTargets, defaultTarget);
                this.keyRegister = keyRegister;
                this.crb = crb;
                this.masm = masm;
            }

            protected void emitComparison(Constant c) {
                JavaConstant jc = (JavaConstant) c;
                ConstantValue constVal = new ConstantValue(LIRKind.value(key.getPlatformKind()), c);
                switch (jc.getJavaKind()) {
                    case Int:
                        long lc = jc.asLong();
                        assert NumUtil.isInt(lc);
                        emitCompare(crb, masm, key, scratch, constVal);
                        break;
                    case Long:
                        emitCompare(crb, masm, key, scratch, constVal);
                        break;
                    case Object:
                        emitCompare(crb, masm, key, scratch, constVal);
                        break;
                    default:
                        throw new GraalError("switch only supported for int, long and object");
                }
            }

            @Override
            protected void conditionalJump(int index, Condition condition, Label target) {
                emitComparison(keyConstants[index]);
                masm.branchConditionally(converter.apply(condition), target);
            }
        }
    }

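    /**
     * Switch lowered to a jump table. The rebased index is bounds-checked, falling back to the
     * default target if it lies outside the table, and is then used to dispatch into a table of
     * unconditional branches placed directly after the dispatch code; schematically:
     *
     * <pre>
     *        sub   w(idxScratch), w(index), #lowKey
     *        cmp   w(idxScratch), #(highKey - lowKey)
     *        b.hi  defaultTarget
     *        adr   x(scratch), table
     *        add   x(scratch), x(scratch), w(idxScratch), uxtw #2
     *        br    x(scratch)
     * table: b     target[0]
     *        b     target[1]
     *        ...
     * </pre>
     */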
    public static final class TableSwitchOp extends AArch64BlockEndOp {
        public static final LIRInstructionClass<TableSwitchOp> TYPE = LIRInstructionClass.create(TableSwitchOp.class);
        private final int lowKey;
        private final LabelRef defaultTarget;
        private final LabelRef[] targets;
        @Use protected Value index;
        @Temp({REG, HINT}) protected Value idxScratch;
        @Temp protected Value scratch;

        public TableSwitchOp(final int lowKey, final LabelRef defaultTarget, final LabelRef[] targets, Value index, Variable scratch, Variable idxScratch) {
            super(TYPE);
            this.lowKey = lowKey;
            this.defaultTarget = defaultTarget;
            this.targets = targets;
            this.index = index;
            this.scratch = scratch;
            this.idxScratch = idxScratch;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            Register indexReg = asRegister(index, AArch64Kind.DWORD);
            Register idxScratchReg = asRegister(idxScratch, AArch64Kind.DWORD);
            Register scratchReg = asRegister(scratch, AArch64Kind.QWORD);

            // Compare index against jump table bounds
            int highKey = lowKey + targets.length - 1;
            masm.sub(32, idxScratchReg, indexReg, lowKey);
            masm.cmp(32, idxScratchReg, highKey - lowKey);

            // Jump to default target if index is not within the jump table
            if (defaultTarget != null) {
                masm.branchConditionally(ConditionFlag.HI, defaultTarget.label());
            }

            Label jumpTable = new Label();
            masm.adr(scratchReg, jumpTable);
            masm.add(64, scratchReg, scratchReg, idxScratchReg, ExtendType.UXTW, 2);
            masm.jmp(scratchReg);
            masm.bind(jumpTable);
            // Emit jump table entries
            for (LabelRef target : targets) {
                masm.jmp(target.label());
            }
            JumpTable jt = new JumpTable(jumpTable.position(), lowKey, highKey - 1, 4);
            crb.compilationResult.addAnnotation(jt);
        }
    }

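    /**
     * Compares {@code key} against the constant {@code c}, encoding the constant as an immediate
     * when possible and otherwise materializing it into the scratch register first.
     */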
    private static void emitCompare(CompilationResultBuilder crb, AArch64MacroAssembler masm, Value key, Value scratchValue, ConstantValue c) {
        long imm = c.getJavaConstant().asLong();
        final int size = key.getPlatformKind().getSizeInBytes() * Byte.SIZE;
        if (AArch64MacroAssembler.isComparisonImmediate(imm)) {
            masm.cmp(size, asRegister(key), (int) imm);
        } else {
            AArch64Move.move(crb, masm, asAllocatableValue(scratchValue), c);
            masm.cmp(size, asRegister(key), asRegister(scratchValue));
        }
    }

}