1 /*
   2  * Copyright (c) 2015, 2017, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 package org.graalvm.compiler.lir.alloc.trace.lsra;
  24 
  25 import static jdk.vm.ci.code.ValueUtil.isRegister;
  26 import static org.graalvm.compiler.core.common.GraalOptions.DetailedAsserts;
  27 import static org.graalvm.compiler.lir.LIRValueUtil.asVariable;
  28 import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
  29 import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;
  30 
  31 import java.util.ArrayList;
  32 
  33 import org.graalvm.compiler.core.common.alloc.RegisterAllocationConfig;
  34 import org.graalvm.compiler.core.common.alloc.Trace;
  35 import org.graalvm.compiler.core.common.alloc.TraceBuilderResult;
  36 import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
  37 import org.graalvm.compiler.debug.DebugContext;
  38 import org.graalvm.compiler.debug.Indent;
  39 import org.graalvm.compiler.lir.LIRInsertionBuffer;
  40 import org.graalvm.compiler.lir.LIRInstruction;
  41 import org.graalvm.compiler.lir.LIRInstruction.OperandMode;
  42 import org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
  43 import org.graalvm.compiler.lir.StandardOp.MoveOp;
  44 import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
  45 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.SpillState;
  46 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.IntervalPredicate;
  47 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.TraceLinearScan;
  48 import org.graalvm.compiler.lir.gen.LIRGenerationResult;
  49 import org.graalvm.compiler.lir.gen.LIRGeneratorTool.MoveFactory;
  50 
  51 import jdk.vm.ci.code.TargetDescription;
  52 import jdk.vm.ci.meta.AllocatableValue;
  53 
/**
 * Post-allocation phase of the trace-based linear scan allocator that removes spill moves
 * (register-to-stack moves inserted by the allocator itself) when the target stack slot is
 * guaranteed to already contain the correct value, and inserts the spill store right after an
 * interval's definition where one is required.
 */
final class TraceLinearScanEliminateSpillMovePhase extends TraceLinearScanAllocationPhase {

    /**
     * Selects split parents whose spill state indicates the value lives in memory; only these
     * intervals need a spill store after their definition.
     */
    private static final IntervalPredicate spilledIntervals = new TraceLinearScanPhase.IntervalPredicate() {

        @Override
        public boolean apply(TraceInterval i) {
            return i.isSplitParent() && SpillState.IN_MEMORY.contains(i.spillState());
        }
    };

    @Override
    protected void run(TargetDescription target, LIRGenerationResult lirGenRes, Trace trace, MoveFactory spillMoveFactory, RegisterAllocationConfig registerAllocationConfig,
                    TraceBuilderResult traceBuilderResult, TraceLinearScan allocator) {
        boolean shouldEliminateSpillMoves = shouldEliminateSpillMoves(traceBuilderResult, allocator);
        eliminateSpillMoves(allocator, shouldEliminateSpillMoves, traceBuilderResult, lirGenRes);
    }

    /**
     * Spill-move elimination is only safe if no side edges enter this trace; otherwise a stack
     * slot might be observed on a path this trace does not see. (NOTE(review): inferred from the
     * {@code incomingSideEdges} query — confirm against {@link TraceBuilderResult}.)
     */
    private static boolean shouldEliminateSpillMoves(TraceBuilderResult traceBuilderResult, TraceLinearScan allocator) {
        return !traceBuilderResult.incomingSideEdges(traceBuilderResult.getTraceForBlock(allocator.blockAt(0)));
    }

    // called once before assignment of register numbers
    @SuppressWarnings("try")
    private static void eliminateSpillMoves(TraceLinearScan allocator, boolean shouldEliminateSpillMoves, TraceBuilderResult traceBuilderResult, LIRGenerationResult res) {
        DebugContext debug = allocator.getDebug();
        try (Indent indent = debug.logAndIndent("Eliminating unnecessary spill moves: Trace%d", traceBuilderResult.getTraceForBlock(allocator.blockAt(0)).getId())) {
            allocator.sortIntervalsBySpillPos();

            /*
             * collect all intervals that must be stored after their definition. The list is sorted
             * by Interval.spillDefinitionPos.
             */
            TraceInterval interval = allocator.createUnhandledListBySpillPos(spilledIntervals);
            if (DetailedAsserts.getValue(allocator.getOptions())) {
                checkIntervals(debug, interval);
            }
            if (debug.isLogEnabled()) {
                try (Indent indent2 = debug.logAndIndent("Sorted intervals")) {
                    for (TraceInterval i = interval; i != null; i = i.next) {
                        debug.log("%5d: %s", i.spillDefinitionPos(), i);
                    }
                }
            }

            LIRInsertionBuffer insertionBuffer = new LIRInsertionBuffer();
            for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
                try (Indent indent1 = debug.logAndIndent("Handle %s", block)) {
                    ArrayList<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
                    int numInst = instructions.size();

                    // id of the last instruction with a valid opId; spill moves (id == -1) are
                    // attributed to this position when querying interval state.
                    int lastOpId = -1;
                    // iterate all instructions of the block.
                    for (int j = 0; j < numInst; j++) {
                        LIRInstruction op = instructions.get(j);
                        int opId = op.id();
                        try (Indent indent2 = debug.logAndIndent("%5d %s", opId, op)) {

                            if (opId == -1) {
                                // id == -1 marks a move inserted by the allocator itself
                                MoveOp move = MoveOp.asMoveOp(op);
                                /*
                                 * Remove move from register to stack if the stack slot is
                                 * guaranteed to be correct. Only moves that have been inserted by
                                 * LinearScan can be removed.
                                 */
                                if (shouldEliminateSpillMoves && canEliminateSpillMove(allocator, block, move, lastOpId)) {
                                    /*
                                     * Move target is a stack slot that is always correct, so
                                     * eliminate instruction.
                                     */
                                    if (debug.isLogEnabled()) {
                                        if (ValueMoveOp.isValueMoveOp(op)) {
                                            ValueMoveOp vmove = ValueMoveOp.asValueMoveOp(op);
                                            debug.log("eliminating move from interval %s to %s in block %s", vmove.getInput(), vmove.getResult(), block);
                                        } else {
                                            LoadConstantOp load = LoadConstantOp.asLoadConstantOp(op);
                                            debug.log("eliminating constant load from %s to %s in block %s", load.getConstant(), load.getResult(),
                                                            block);
                                        }
                                    }

                                    // null-instructions are deleted by assignRegNum
                                    instructions.set(j, null);
                                }

                            } else {
                                lastOpId = opId;
                                /*
                                 * Insert move from register to stack just after the beginning of
                                 * the interval.
                                 */
                                // assert interval == TraceInterval.EndMarker ||
                                // interval.spillDefinitionPos() >= opId : "invalid order";
                                assert interval == TraceInterval.EndMarker || (interval.isSplitParent() && SpillState.IN_MEMORY.contains(interval.spillState())) : "invalid interval";

                                // consume every interval whose spill store belongs at this opId;
                                // the list is sorted by spillDefinitionPos, so this walk is linear
                                // over all blocks combined.
                                while (interval != TraceInterval.EndMarker && interval.spillDefinitionPos() == opId) {
                                    debug.log("handle %s", interval);
                                    if (!interval.canMaterialize() && interval.spillState() != SpillState.StartInMemory) {

                                        AllocatableValue fromLocation = interval.getSplitChildAtOpId(opId, OperandMode.DEF).location();
                                        AllocatableValue toLocation = allocator.canonicalSpillOpr(interval);
                                        if (!fromLocation.equals(toLocation)) {

                                            if (!insertionBuffer.initialized()) {
                                                /*
                                                 * prepare insertion buffer (appended when all
                                                 * instructions in the block are processed)
                                                 */
                                                insertionBuffer.init(instructions);
                                            }

                                            assert isRegister(fromLocation) : "from operand must be a register but is: " + fromLocation + " toLocation=" + toLocation + " spillState=" +
                                                            interval.spillState();
                                            assert isStackSlotValue(toLocation) : "to operand must be a stack slot";

                                            LIRInstruction move = allocator.getSpillMoveFactory().createMove(toLocation, fromLocation);
                                            insertionBuffer.append(j + 1, move);
                                            move.setComment(res, "TraceLSRAEliminateSpillMove: spill def pos");

                                            if (debug.isLogEnabled()) {
                                                debug.log("inserting move after definition of interval %d to stack slot %s at opId %d", interval.operandNumber, interval.spillSlot(), opId);
                                            }
                                        }
                                    }
                                    interval = interval.next;
                                }
                            }
                        }
                    }   // end of instruction iteration

                    if (insertionBuffer.initialized()) {
                        insertionBuffer.finish();
                    }
                }
            }   // end of block iteration

            assert interval == TraceInterval.EndMarker : "missed an interval";
        }
    }

    /**
     * Determines whether a spill move may be deleted because its destination stack slot already
     * holds the correct value at this position.
     *
     * @param allocator the per-trace linear scan allocator used to resolve the result variable's
     *            interval.
     * @param block The block {@code move} is located in.
     * @param move Spill move.
     * @param lastOpId The id of last "normal" instruction before the spill move. (Spill moves have
     *            no valid opId but -1.)
     * @return {@code true} if the move can be eliminated.
     */
    private static boolean canEliminateSpillMove(TraceLinearScan allocator, AbstractBlockBase<?> block, MoveOp move, int lastOpId) {
        assert ((LIRInstruction) move).id() == -1 : "Not a spill move: " + move;
        assert isVariable(move.getResult()) : "LinearScan inserts only moves to variables: " + move;
        assert lastOpId >= 0 : "Invalid lastOpId: " + lastOpId;

        TraceInterval curInterval = allocator.intervalFor(asVariable(move.getResult()));

        if (!isRegister(curInterval.location()) && curInterval.inMemoryAt(lastOpId) && !isPhiResolutionMove(allocator, move)) {
            /* Phi resolution moves cannot be removed because they define the value. */
            // TODO (je) check if the comment is still valid!
            assert isStackSlotValue(curInterval.location()) : "Not a stack slot: " + curInterval.location();
            return true;
        }
        return false;
    }

    /**
     * Checks if a (spill or split) move is a Phi resolution move.
     *
     * A spill or split move connects a split parent or a split child with another split child.
     * Therefore the destination of the move is always a split child. Phi resolution moves look like
     * spill moves (i.e. {@link LIRInstruction#id() id} is {@code -1}), but they define a new
     * variable. As a result the destination interval is a split parent.
     */
    private static boolean isPhiResolutionMove(TraceLinearScan allocator, MoveOp move) {
        assert ((LIRInstruction) move).id() == -1 : "Not a spill move: " + move;
        TraceInterval curInterval = allocator.intervalFor(asVariable(move.getResult()));
        return curInterval.isSplitParent();
    }

    /**
     * Debug-only sanity check (enabled via {@code DetailedAsserts}) that the unhandled interval
     * list is sorted by spill definition position and that every non-materializable interval has a
     * spill slot assigned.
     */
    private static void checkIntervals(DebugContext debug, TraceInterval interval) {
        TraceInterval prev = null;
        TraceInterval temp = interval;
        while (temp != TraceInterval.EndMarker) {
            assert temp.spillDefinitionPos() >= 0 : "invalid spill definition pos " + temp;
            if (prev != null) {
                // assert temp.from() >= prev.from() : "intervals not sorted";
                assert temp.spillDefinitionPos() >= prev.spillDefinitionPos() : "when intervals are sorted by from :  then they must also be sorted by spillDefinitionPos";
            }

            assert temp.spillSlot() != null || temp.canMaterialize() : "interval has no spill slot assigned";
            assert temp.spillDefinitionPos() >= temp.from() : "invalid order";
            // assert temp.spillDefinitionPos() <= temp.from() + 2 :
            // "only intervals defined once at their start-pos can be optimized";

            if (debug.isLogEnabled()) {
                debug.log("interval %d (from %d to %d) must be stored at %d", temp.operandNumber, temp.from(), temp.to(), temp.spillDefinitionPos());
            }

            prev = temp;
            temp = temp.next;
        }
    }

}