1 /*
   2  * Copyright (c) 2013, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 
  24 
  25 package org.graalvm.compiler.lir.aarch64;
  26 
  27 import static jdk.vm.ci.code.ValueUtil.asAllocatableValue;
  28 import static jdk.vm.ci.code.ValueUtil.asRegister;
  29 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.HINT;
  30 import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.REG;
  31 
  32 import java.util.function.Function;
  33 
  34 import org.graalvm.compiler.asm.Label;
  35 import org.graalvm.compiler.core.common.NumUtil;
  36 import org.graalvm.compiler.asm.aarch64.AArch64Assembler;
  37 import org.graalvm.compiler.asm.aarch64.AArch64Assembler.ConditionFlag;
  38 import org.graalvm.compiler.asm.aarch64.AArch64Assembler.ExtendType;
  39 import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler;
  40 import org.graalvm.compiler.code.CompilationResult.JumpTable;
  41 import org.graalvm.compiler.core.common.LIRKind;
  42 import org.graalvm.compiler.core.common.calc.Condition;
  43 import org.graalvm.compiler.debug.GraalError;
  44 import org.graalvm.compiler.lir.ConstantValue;
  45 import org.graalvm.compiler.lir.LIRInstructionClass;
  46 import org.graalvm.compiler.lir.LabelRef;
  47 import org.graalvm.compiler.lir.Opcode;
  48 import org.graalvm.compiler.lir.StandardOp;
  49 import org.graalvm.compiler.lir.SwitchStrategy;
  50 import org.graalvm.compiler.lir.SwitchStrategy.BaseSwitchClosure;
  51 import org.graalvm.compiler.lir.Variable;
  52 import org.graalvm.compiler.lir.asm.CompilationResultBuilder;
  53 
  54 import jdk.vm.ci.aarch64.AArch64Kind;
  55 import jdk.vm.ci.code.Register;
  56 import jdk.vm.ci.meta.Constant;
  57 import jdk.vm.ci.meta.JavaConstant;
  58 import jdk.vm.ci.meta.Value;
  59 
  60 public class AArch64ControlFlow {
  61 
  62     /**
  63      * Compares integer register to 0 and branches if condition is true. Condition may only be equal
  64      * or non-equal.
  65      */
  66     // TODO (das) where do we need this?
  67     // public static class CompareAndBranchOp extends AArch64LIRInstruction implements
  68     // StandardOp.BranchOp {
  69     // private final ConditionFlag condition;
  70     // private final LabelRef destination;
  71     // @Use({REG}) private Value x;
  72     //
  73     // public CompareAndBranchOp(Condition condition, LabelRef destination, Value x) {
  74     // assert condition == Condition.EQ || condition == Condition.NE;
  75     // assert ARMv8.isGpKind(x.getKind());
  76     // this.condition = condition == Condition.EQ ? ConditionFlag.EQ : ConditionFlag.NE;
  77     // this.destination = destination;
  78     // this.x = x;
  79     // }
  80     //
  81     // @Override
  82     // public void emitCode(CompilationResultBuilder crb, ARMv8MacroAssembler masm) {
  83     // int size = ARMv8.bitsize(x.getKind());
  84     // if (condition == ConditionFlag.EQ) {
  85     // masm.cbz(size, asRegister(x), destination.label());
  86     // } else {
  87     // masm.cbnz(size, asRegister(x), destination.label());
  88     // }
  89     // }
  90     // }
  91 
  92     public static class BranchOp extends AArch64BlockEndOp implements StandardOp.BranchOp {
  93         public static final LIRInstructionClass<BranchOp> TYPE = LIRInstructionClass.create(BranchOp.class);
  94 
  95         private final AArch64Assembler.ConditionFlag condition;
  96         private final LabelRef trueDestination;
  97         private final LabelRef falseDestination;
  98 
  99         private final double trueDestinationProbability;
 100 
 101         public BranchOp(AArch64Assembler.ConditionFlag condition, LabelRef trueDestination, LabelRef falseDestination, double trueDestinationProbability) {
 102             super(TYPE);
 103             this.condition = condition;
 104             this.trueDestination = trueDestination;
 105             this.falseDestination = falseDestination;
 106             this.trueDestinationProbability = trueDestinationProbability;
 107         }
 108 
 109         @Override
 110         public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
 111             /*
 112              * Explanation: Depending on what the successor edge is, we can use the fall-through to
 113              * optimize the generated code. If neither is a successor edge, use the branch
 114              * probability to try to take the conditional jump as often as possible to avoid
 115              * executing two instructions instead of one.
 116              */
 117             if (crb.isSuccessorEdge(trueDestination)) {
 118                 masm.branchConditionally(condition.negate(), falseDestination.label());
 119             } else if (crb.isSuccessorEdge(falseDestination)) {
 120                 masm.branchConditionally(condition, trueDestination.label());
 121             } else if (trueDestinationProbability < 0.5) {
 122                 masm.branchConditionally(condition.negate(), falseDestination.label());
 123                 masm.jmp(trueDestination.label());
 124             } else {
 125                 masm.branchConditionally(condition, trueDestination.label());
 126                 masm.jmp(falseDestination.label());
 127             }
 128         }
 129 
 130     }
 131 
 132     @Opcode("CMOVE")
 133     public static class CondMoveOp extends AArch64LIRInstruction {
 134         public static final LIRInstructionClass<CondMoveOp> TYPE = LIRInstructionClass.create(CondMoveOp.class);
 135 
 136         @Def protected Value result;
 137         @Use protected Value trueValue;
 138         @Use protected Value falseValue;
 139         private final AArch64Assembler.ConditionFlag condition;
 140 
 141         public CondMoveOp(Variable result, AArch64Assembler.ConditionFlag condition, Value trueValue, Value falseValue) {
 142             super(TYPE);
 143             assert trueValue.getPlatformKind() == falseValue.getPlatformKind() && trueValue.getPlatformKind() == result.getPlatformKind();
 144             this.result = result;
 145             this.condition = condition;
 146             this.trueValue = trueValue;
 147             this.falseValue = falseValue;
 148         }
 149 
 150         @Override
 151         public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
 152             AArch64Kind kind = (AArch64Kind) trueValue.getPlatformKind();
 153             int size = kind.getSizeInBytes() * Byte.SIZE;
 154             if (kind.isInteger()) {
 155                 masm.cmov(size, asRegister(result), asRegister(trueValue), asRegister(falseValue), condition);
 156             } else {
 157                 masm.fcmov(size, asRegister(result), asRegister(trueValue), asRegister(falseValue), condition);
 158             }
 159         }
 160     }
 161 
 162     public static class CondSetOp extends AArch64LIRInstruction {
 163         public static final LIRInstructionClass<CondSetOp> TYPE = LIRInstructionClass.create(CondSetOp.class);
 164 
 165         @Def protected Value result;
 166         private final AArch64Assembler.ConditionFlag condition;
 167 
 168         public CondSetOp(Variable result, AArch64Assembler.ConditionFlag condition) {
 169             super(TYPE);
 170             this.result = result;
 171             this.condition = condition;
 172         }
 173 
 174         @Override
 175         public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
 176             int size = result.getPlatformKind().getSizeInBytes() * Byte.SIZE;
 177             masm.cset(size, asRegister(result), condition);
 178         }
 179     }
 180 
    /**
     * Emits a multi-way switch by delegating the branch structure (linear scan, binary search,
     * etc.) to a {@link SwitchStrategy}; this op only supplies the AArch64-specific compare and
     * conditional-branch primitives via {@link SwitchClosure}.
     */
    public static class StrategySwitchOp extends AArch64BlockEndOp implements StandardOp.BlockEndOp {
        public static final LIRInstructionClass<StrategySwitchOp> TYPE = LIRInstructionClass.create(StrategySwitchOp.class);

        // Constants compared against the key; taken from the strategy so indices line up
        // with strategy-chosen comparison order.
        private final Constant[] keyConstants;
        protected final SwitchStrategy strategy;
        // Maps platform-independent Condition to an AArch64 ConditionFlag.
        private final Function<Condition, ConditionFlag> converter;
        private final LabelRef[] keyTargets;
        private final LabelRef defaultTarget;
        @Alive protected Value key;
        // TODO (das) This could be optimized: We only need the scratch register in case of a
        // datapatch, or too large immediates.
        @Temp protected Value scratch;

        public StrategySwitchOp(SwitchStrategy strategy, LabelRef[] keyTargets, LabelRef defaultTarget, Value key, Value scratch,
                        Function<Condition, ConditionFlag> converter) {
            this(TYPE, strategy, keyTargets, defaultTarget, key, scratch, converter);
        }

        protected StrategySwitchOp(LIRInstructionClass<? extends StrategySwitchOp> c, SwitchStrategy strategy, LabelRef[] keyTargets, LabelRef defaultTarget, Value key, Value scratch,
                        Function<Condition, ConditionFlag> converter) {
            super(c);
            this.strategy = strategy;
            this.converter = converter;
            this.keyConstants = strategy.getKeyConstants();
            this.keyTargets = keyTargets;
            this.defaultTarget = defaultTarget;
            this.key = key;
            this.scratch = scratch;
            // One target per key constant, and the strategy's probability table must be parallel
            // to the key table.
            assert keyConstants.length == keyTargets.length;
            assert keyConstants.length == strategy.keyProbabilities.length;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            // The strategy drives code emission, calling back into SwitchClosure for each compare.
            strategy.run(new SwitchClosure(asRegister(key), crb, masm));
        }

        /**
         * Callback object used by the {@link SwitchStrategy}: emits one key comparison plus a
         * conditional jump per strategy step.
         */
        public class SwitchClosure extends BaseSwitchClosure {

            protected final Register keyRegister;
            protected final CompilationResultBuilder crb;
            protected final AArch64MacroAssembler masm;

            protected SwitchClosure(Register keyRegister, CompilationResultBuilder crb, AArch64MacroAssembler masm) {
                super(crb, masm, keyTargets, defaultTarget);
                this.keyRegister = keyRegister;
                this.crb = crb;
                this.masm = masm;
            }

            /**
             * Emits a compare of the switch key against the constant {@code c}. Only int, long
             * and object keys are supported; int constants are additionally checked to fit in
             * 32 bits.
             */
            protected void emitComparison(Constant c) {
                JavaConstant jc = (JavaConstant) c;
                ConstantValue constVal = new ConstantValue(LIRKind.value(key.getPlatformKind()), c);
                switch (jc.getJavaKind()) {
                    case Int:
                        long lc = jc.asLong();
                        assert NumUtil.isInt(lc);
                        emitCompare(crb, masm, key, scratch, constVal);
                        break;
                    case Long:
                        emitCompare(crb, masm, key, scratch, constVal);
                        break;
                    case Object:
                        emitCompare(crb, masm, key, scratch, constVal);
                        break;
                    default:
                        throw new GraalError("switch only supported for int, long and object");
                }
            }

            @Override
            protected void conditionalJump(int index, Condition condition, Label target) {
                emitComparison(keyConstants[index]);
                masm.branchConditionally(converter.apply(condition), target);
            }
        }
    }
 258 
 259     public static final class TableSwitchOp extends AArch64BlockEndOp {
 260         public static final LIRInstructionClass<TableSwitchOp> TYPE = LIRInstructionClass.create(TableSwitchOp.class);
 261         private final int lowKey;
 262         private final LabelRef defaultTarget;
 263         private final LabelRef[] targets;
 264         @Use protected Value index;
 265         @Temp({REG, HINT}) protected Value idxScratch;
 266         @Temp protected Value scratch;
 267 
 268         public TableSwitchOp(final int lowKey, final LabelRef defaultTarget, final LabelRef[] targets, Value index, Variable scratch, Variable idxScratch) {
 269             super(TYPE);
 270             this.lowKey = lowKey;
 271             this.defaultTarget = defaultTarget;
 272             this.targets = targets;
 273             this.index = index;
 274             this.scratch = scratch;
 275             this.idxScratch = idxScratch;
 276         }
 277 
 278         @Override
 279         public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
 280             Register indexReg = asRegister(index, AArch64Kind.DWORD);
 281             Register idxScratchReg = asRegister(idxScratch, AArch64Kind.DWORD);
 282             Register scratchReg = asRegister(scratch, AArch64Kind.QWORD);
 283 
 284             // Compare index against jump table bounds
 285             int highKey = lowKey + targets.length - 1;
 286             masm.sub(32, idxScratchReg, indexReg, lowKey);
 287             masm.cmp(32, idxScratchReg, highKey - lowKey);
 288 
 289             // Jump to default target if index is not within the jump table
 290             if (defaultTarget != null) {
 291                 masm.branchConditionally(ConditionFlag.HI, defaultTarget.label());
 292             }
 293 
 294             Label jumpTable = new Label();
 295             masm.adr(scratchReg, jumpTable);
 296             masm.add(64, scratchReg, scratchReg, idxScratchReg, ExtendType.UXTW, 2);
 297             masm.jmp(scratchReg);
 298             masm.bind(jumpTable);
 299             // emit jump table entries
 300             for (LabelRef target : targets) {
 301                 masm.jmp(target.label());
 302             }
 303             JumpTable jt = new JumpTable(jumpTable.position(), lowKey, highKey - 1, 4);
 304             crb.compilationResult.addAnnotation(jt);
 305         }
 306     }
 307 
 308     private static void emitCompare(CompilationResultBuilder crb, AArch64MacroAssembler masm, Value key, Value scratchValue, ConstantValue c) {
 309         long imm = c.getJavaConstant().asLong();
 310         final int size = key.getPlatformKind().getSizeInBytes() * Byte.SIZE;
 311         if (AArch64MacroAssembler.isComparisonImmediate(imm)) {
 312             masm.cmp(size, asRegister(key), (int) imm);
 313         } else {
 314             AArch64Move.move(crb, masm, asAllocatableValue(scratchValue), c);
 315             masm.cmp(size, asRegister(key), asRegister(scratchValue));
 316         }
 317     }
 318 
 319 }