/*
 * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package org.graalvm.compiler.lir.alloc.lsra;

import static org.graalvm.compiler.core.common.GraalOptions.DetailedAsserts;
import static org.graalvm.compiler.lir.LIRValueUtil.asVariable;
import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;
import static org.graalvm.compiler.lir.debug.LIRGenerationDebugContext.getSourceForOperandFromDebugContext;
import static jdk.vm.ci.code.ValueUtil.asRegister;
import static jdk.vm.ci.code.ValueUtil.asStackSlot;
import static jdk.vm.ci.code.ValueUtil.isRegister;
import static jdk.vm.ci.code.ValueUtil.isStackSlot;

import java.util.ArrayDeque;
import java.util.BitSet;
import java.util.Deque;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;

import org.graalvm.compiler.common.PermanentBailoutException;
import org.graalvm.compiler.core.common.LIRKind;
import org.graalvm.compiler.core.common.alloc.ComputeBlockOrder;
import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
import org.graalvm.compiler.core.common.util.BitMap2D;
import org.graalvm.compiler.debug.Debug;
import org.graalvm.compiler.debug.Debug.Scope;
import org.graalvm.compiler.debug.GraalError;
import org.graalvm.compiler.debug.Indent;
import org.graalvm.compiler.lir.InstructionValueConsumer;
import org.graalvm.compiler.lir.LIRInstruction;
import org.graalvm.compiler.lir.LIRInstruction.OperandFlag;
import org.graalvm.compiler.lir.LIRInstruction.OperandMode;
import org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
import org.graalvm.compiler.lir.ValueConsumer;
import org.graalvm.compiler.lir.alloc.lsra.Interval.RegisterPriority;
import org.graalvm.compiler.lir.alloc.lsra.Interval.SpillState;
import org.graalvm.compiler.lir.alloc.lsra.LinearScan.BlockData;
import org.graalvm.compiler.lir.gen.LIRGenerationResult;
import org.graalvm.compiler.lir.phases.AllocationPhase;

import jdk.vm.ci.code.Register;
import jdk.vm.ci.code.RegisterArray;
import jdk.vm.ci.code.StackSlot;
import jdk.vm.ci.code.TargetDescription;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.Constant;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.Value;
import jdk.vm.ci.meta.ValueKind;

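/**
 * Performs the lifetime analysis for the linear scan register allocator: numbers all LIR
 * instructions in {@linkplain ComputeBlockOrder linear scan order}, computes local and global live
 * sets, and builds the {@link Interval intervals} that the subsequent allocation phases work on.
 */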
public class LinearScanLifetimeAnalysisPhase extends AllocationPhase {

    protected final LinearScan allocator;

    /**
     * @param linearScan the {@link LinearScan} allocator that this phase collects lifetime information for
     */
    protected LinearScanLifetimeAnalysisPhase(LinearScan linearScan) {
        allocator = linearScan;
    }

    @Override
    protected void run(TargetDescription target, LIRGenerationResult lirGenRes, AllocationContext context) {
        numberInstructions();
        allocator.printLir("Before register allocation", true);
        computeLocalLiveSets();
        computeGlobalLiveSets();
        buildIntervals();
    }

    /**
     * Bit set for each variable that is contained in each loop.
     */
    private BitMap2D intervalInLoop;

    boolean isIntervalInLoop(int interval, int loop) {
        return intervalInLoop.at(interval, loop);
    }

    /**
     * Numbers all instructions in all blocks. The numbering follows the
     * {@linkplain ComputeBlockOrder linear scan order}.
     */
    protected void numberInstructions() {

        allocator.initIntervals();

        ValueConsumer setVariableConsumer = (value, mode, flags) -> {
            if (isVariable(value)) {
                allocator.getOrCreateInterval(asVariable(value));
            }
        };

        // Assign IDs to LIR nodes and build a mapping, lirOps, from ID to LIRInstruction node.
        int numInstructions = 0;
        for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
            numInstructions += allocator.getLIR().getLIRforBlock(block).size();
        }

        // initialize with correct length
        allocator.initOpIdMaps(numInstructions);

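        /*
         * Instruction ids are assigned in linear scan block order and incremented by two, so every
         * instruction gets an even id. The odd ids in between denote positions between
         * instructions; they are used for block-boundary and loop-end use positions in
         * buildIntervals and for rounding use positions to instruction ids in addUse.
         */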
        int opId = 0;
        int index = 0;
        for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
            allocator.initBlockData(block);

            List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);

            int numInst = instructions.size();
            for (int j = 0; j < numInst; j++) {
                LIRInstruction op = instructions.get(j);
                op.setId(opId);

                allocator.putOpIdMaps(index, op, block);
                assert allocator.instructionForId(opId) == op : "must match";

                op.visitEachTemp(setVariableConsumer);
                op.visitEachOutput(setVariableConsumer);

                index++;
                opId += 2; // numbering of lirOps by two
            }
        }
        assert index == numInstructions : "must match";
        assert (index << 1) == opId : "must match: " + (index << 1);
    }

    /**
     * Computes local live sets (i.e. {@link BlockData#liveGen} and {@link BlockData#liveKill})
     * separately for each block.
     */
    @SuppressWarnings("try")
    void computeLocalLiveSets() {
        int liveSize = allocator.liveSetSize();

        intervalInLoop = new BitMap2D(allocator.operandSize(), allocator.numLoops());
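
        /*
         * liveGen(block) contains the operands that are used in the block before being defined
         * there (upward-exposed uses); liveKill(block) contains the operands that are defined in
         * the block. computeGlobalLiveSets later combines these local sets into liveIn and liveOut.
         */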

        // iterate all blocks
        for (final AbstractBlockBase<?> block : allocator.sortedBlocks()) {
            try (Indent indent = Debug.logAndIndent("compute local live sets for block %s", block)) {

                final BitSet liveGen = new BitSet(liveSize);
                final BitSet liveKill = new BitSet(liveSize);

                List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
                int numInst = instructions.size();

                ValueConsumer useConsumer = (operand, mode, flags) -> {
                    if (isVariable(operand)) {
                        int operandNum = allocator.operandNumber(operand);
                        if (!liveKill.get(operandNum)) {
                            liveGen.set(operandNum);
                            if (Debug.isLogEnabled()) {
                                Debug.log("liveGen for operand %d(%s)", operandNum, operand);
                            }
                        }
                        if (block.getLoop() != null) {
                            intervalInLoop.setBit(operandNum, block.getLoop().getIndex());
                        }
                    }

                    if (DetailedAsserts.getValue()) {
                        verifyInput(block, liveKill, operand);
                    }
                };
                ValueConsumer stateConsumer = (operand, mode, flags) -> {
                    if (LinearScan.isVariableOrRegister(operand)) {
                        int operandNum = allocator.operandNumber(operand);
                        if (!liveKill.get(operandNum)) {
                            liveGen.set(operandNum);
                            if (Debug.isLogEnabled()) {
                                Debug.log("liveGen in state for operand %d(%s)", operandNum, operand);
                            }
                        }
                    }
                };
                ValueConsumer defConsumer = (operand, mode, flags) -> {
                    if (isVariable(operand)) {
                        int varNum = allocator.operandNumber(operand);
                        liveKill.set(varNum);
                        if (Debug.isLogEnabled()) {
                            Debug.log("liveKill for operand %d(%s)", varNum, operand);
                        }
                        if (block.getLoop() != null) {
                            intervalInLoop.setBit(varNum, block.getLoop().getIndex());
                        }
                    }

                    if (DetailedAsserts.getValue()) {
                        /*
                         * Fixed intervals are never live at block boundaries, so they need not be
                         * processed in live sets. Process them only in debug mode so that this can
                         * be checked.
                         */
                        verifyTemp(liveKill, operand);
                    }
                };

                // iterate all instructions of the block
                for (int j = 0; j < numInst; j++) {
                    final LIRInstruction op = instructions.get(j);

                    try (Indent indent2 = Debug.logAndIndent("handle op %d: %s", op.id(), op)) {
                        op.visitEachInput(useConsumer);
                        op.visitEachAlive(useConsumer);
                        /*
                         * Add uses of live locals from interpreter's point of view for proper debug
                         * information generation.
                         */
                        op.visitEachState(stateConsumer);
                        op.visitEachTemp(defConsumer);
                        op.visitEachOutput(defConsumer);
                    }
                } // end of instruction iteration

                BlockData blockSets = allocator.getBlockData(block);
                blockSets.liveGen = liveGen;
                blockSets.liveKill = liveKill;
                blockSets.liveIn = new BitSet(liveSize);
                blockSets.liveOut = new BitSet(liveSize);

                if (Debug.isLogEnabled()) {
                    Debug.log("liveGen  B%d %s", block.getId(), blockSets.liveGen);
                    Debug.log("liveKill B%d %s", block.getId(), blockSets.liveKill);
                }

            }
        } // end of block iteration
    }

    private void verifyTemp(BitSet liveKill, Value operand) {
        /*
         * Fixed intervals are never live at block boundaries, so they need not be processed in live
         * sets. Process them only in debug mode so that this can be checked.
         */
        if (isRegister(operand)) {
            if (allocator.isProcessed(operand)) {
                liveKill.set(allocator.operandNumber(operand));
            }
        }
    }

    private void verifyInput(AbstractBlockBase<?> block, BitSet liveKill, Value operand) {
        /*
         * Fixed intervals are never live at block boundaries, so they need not be processed in live
         * sets. This is checked by these assertions to be sure about it. The entry block may have
         * incoming values in registers, which is ok.
         */
        if (isRegister(operand) && block != allocator.getLIR().getControlFlowGraph().getStartBlock()) {
            if (allocator.isProcessed(operand)) {
                assert liveKill.get(allocator.operandNumber(operand)) : "using fixed register " + asRegister(operand) + " that is not defined in this block " + block;
            }
        }
    }

    /**
     * Performs a backward dataflow analysis to compute global live sets (i.e.
     * {@link BlockData#liveIn} and {@link BlockData#liveOut}) for each block.
     */
    @SuppressWarnings("try")
    protected void computeGlobalLiveSets() {
        try (Indent indent = Debug.logAndIndent("compute global live sets")) {
            int numBlocks = allocator.blockCount();
            boolean changeOccurred;
            boolean changeOccurredInBlock;
            int iterationCount = 0;
            BitSet liveOut = new BitSet(allocator.liveSetSize()); // scratch set for calculations

            /*
             * Perform a backward dataflow analysis to compute liveOut and liveIn for each block.
             * The loop is executed until a fixpoint is reached (no changes in an iteration).
             */
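            /*
             * Per-block equations solved by the fixpoint iteration:
             *
             *   liveOut(b) = union of liveIn(s) for all successors s of b
             *   liveIn(b)  = liveGen(b) | (liveOut(b) & ~liveKill(b))
             */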
            do {
                changeOccurred = false;

                try (Indent indent2 = Debug.logAndIndent("new iteration %d", iterationCount)) {

                    // iterate all blocks in reverse order
                    for (int i = numBlocks - 1; i >= 0; i--) {
                        AbstractBlockBase<?> block = allocator.blockAt(i);
                        BlockData blockSets = allocator.getBlockData(block);

                        changeOccurredInBlock = false;

                        /*
                         * liveOut(block) is the union of liveIn(sux), for successors sux of block.
                         */
                        int n = block.getSuccessorCount();
                        if (n > 0) {
                            // block has successors
                            liveOut.clear();
                            for (AbstractBlockBase<?> successor : block.getSuccessors()) {
                                liveOut.or(allocator.getBlockData(successor).liveIn);
                            }

                            if (!blockSets.liveOut.equals(liveOut)) {
                                /*
                                 * A change occurred. Swap the old and new live out sets to avoid
                                 * copying.
                                 */
                                BitSet temp = blockSets.liveOut;
                                blockSets.liveOut = liveOut;
                                liveOut = temp;

                                changeOccurred = true;
                                changeOccurredInBlock = true;
                            }
                        }

                        if (iterationCount == 0 || changeOccurredInBlock) {
                            /*
                             * liveIn(block) is the union of liveGen(block) with (liveOut(block) &
                             * !liveKill(block)).
                             *
                             * Note: liveIn has to be computed only in first iteration or if liveOut
                             * has changed!
                             */
                            BitSet liveIn = blockSets.liveIn;
                            liveIn.clear();
                            liveIn.or(blockSets.liveOut);
                            liveIn.andNot(blockSets.liveKill);
                            liveIn.or(blockSets.liveGen);

                            if (Debug.isLogEnabled()) {
                                Debug.log("block %d: livein = %s,  liveout = %s", block.getId(), liveIn, blockSets.liveOut);
                            }
                        }
                    }
                    iterationCount++;

                    if (changeOccurred && iterationCount > 50) {
                        /*
                         * Very unlikely, should never happen: if it does happen, we cannot
                         * guarantee it won't happen again.
                         */
                        throw new PermanentBailoutException("too many iterations in computeGlobalLiveSets");
                    }
                }
            } while (changeOccurred);

            if (DetailedAsserts.getValue()) {
                verifyLiveness();
            }

            // check that the liveIn set of the first block is empty
            AbstractBlockBase<?> startBlock = allocator.getLIR().getControlFlowGraph().getStartBlock();
            if (allocator.getBlockData(startBlock).liveIn.cardinality() != 0) {
                if (DetailedAsserts.getValue()) {
                    reportFailure(numBlocks);
                }
                // bailout if this occurs in product mode.
                throw new GraalError("liveIn set of first block must be empty: " + allocator.getBlockData(startBlock).liveIn);
            }
        }
    }

    @SuppressWarnings("try")
    protected void reportFailure(int numBlocks) {
        try (Scope s = Debug.forceLog()) {
            try (Indent indent = Debug.logAndIndent("report failure")) {

                BitSet startBlockLiveIn = allocator.getBlockData(allocator.getLIR().getControlFlowGraph().getStartBlock()).liveIn;
                try (Indent indent2 = Debug.logAndIndent("Error: liveIn set of first block must be empty (when this fails, variables are used before they are defined):")) {
                    for (int operandNum = startBlockLiveIn.nextSetBit(0); operandNum >= 0; operandNum = startBlockLiveIn.nextSetBit(operandNum + 1)) {
                        Interval interval = allocator.intervalFor(operandNum);
                        if (interval != null) {
                            Value operand = interval.operand;
                            Debug.log("var %d; operand=%s; node=%s", operandNum, operand, getSourceForOperandFromDebugContext(operand));
                        } else {
                            Debug.log("var %d; missing operand", operandNum);
                        }
                    }
                }

                // print some additional information to simplify debugging
                for (int operandNum = startBlockLiveIn.nextSetBit(0); operandNum >= 0; operandNum = startBlockLiveIn.nextSetBit(operandNum + 1)) {
                    Interval interval = allocator.intervalFor(operandNum);
                    Value operand = null;
                    Object valueForOperandFromDebugContext = null;
                    if (interval != null) {
                        operand = interval.operand;
                        valueForOperandFromDebugContext = getSourceForOperandFromDebugContext(operand);
                    }
                    try (Indent indent2 = Debug.logAndIndent("---- Detailed information for var %d; operand=%s; node=%s ----", operandNum, operand, valueForOperandFromDebugContext)) {

                        Deque<AbstractBlockBase<?>> definedIn = new ArrayDeque<>();
                        HashSet<AbstractBlockBase<?>> usedIn = new HashSet<>();
                        for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
                            if (allocator.getBlockData(block).liveGen.get(operandNum)) {
                                usedIn.add(block);
                                try (Indent indent3 = Debug.logAndIndent("used in block B%d", block.getId())) {
                                    for (LIRInstruction ins : allocator.getLIR().getLIRforBlock(block)) {
                                        try (Indent indent4 = Debug.logAndIndent("%d: %s", ins.id(), ins)) {
                                            ins.forEachState((liveStateOperand, mode, flags) -> {
                                                Debug.log("operand=%s", liveStateOperand);
                                                return liveStateOperand;
                                            });
                                        }
                                    }
                                }
                            }
                            if (allocator.getBlockData(block).liveKill.get(operandNum)) {
                                definedIn.add(block);
                                try (Indent indent3 = Debug.logAndIndent("defined in block B%d", block.getId())) {
                                    for (LIRInstruction ins : allocator.getLIR().getLIRforBlock(block)) {
                                        Debug.log("%d: %s", ins.id(), ins);
                                    }
                                }
                            }
                        }

                        int[] hitCount = new int[numBlocks];

                        while (!definedIn.isEmpty()) {
                            AbstractBlockBase<?> block = definedIn.removeFirst();
                            usedIn.remove(block);
                            for (AbstractBlockBase<?> successor : block.getSuccessors()) {
                                if (successor.isLoopHeader()) {
                                    if (!block.isLoopEnd()) {
                                        definedIn.add(successor);
                                    }
                                } else {
                                    if (++hitCount[successor.getId()] == successor.getPredecessorCount()) {
                                        definedIn.add(successor);
                                    }
                                }
                            }
                        }
                        try (Indent indent3 = Debug.logAndIndent("**** offending usages are in: ")) {
                            for (AbstractBlockBase<?> block : usedIn) {
                                Debug.log("B%d", block.getId());
                            }
                        }
                    }
                }
            }
        } catch (Throwable e) {
            throw Debug.handle(e);
        }
    }

    protected void verifyLiveness() {
        /*
         * Check that fixed intervals are not live at block boundaries (live set must be empty at
         * fixed intervals).
         */
        for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
            for (int j = 0; j <= allocator.maxRegisterNumber(); j++) {
                assert !allocator.getBlockData(block).liveIn.get(j) : "liveIn  set of fixed register must be empty";
                assert !allocator.getBlockData(block).liveOut.get(j) : "liveOut set of fixed register must be empty";
                assert !allocator.getBlockData(block).liveGen.get(j) : "liveGen set of fixed register must be empty";
            }
        }
    }

    protected void addUse(AllocatableValue operand, int from, int to, RegisterPriority registerPriority, ValueKind<?> kind) {
        if (!allocator.isProcessed(operand)) {
            return;
        }

        Interval interval = allocator.getOrCreateInterval(operand);
        if (!kind.equals(LIRKind.Illegal)) {
            interval.setKind(kind);
        }

        interval.addRange(from, to);

        // Register use position at even instruction id.
        interval.addUsePos(to & ~1, registerPriority);

        if (Debug.isLogEnabled()) {
            Debug.log("add use: %s, from %d to %d (%s)", interval, from, to, registerPriority.name());
        }
    }

    protected void addTemp(AllocatableValue operand, int tempPos, RegisterPriority registerPriority, ValueKind<?> kind) {
        if (!allocator.isProcessed(operand)) {
            return;
        }

        Interval interval = allocator.getOrCreateInterval(operand);
        if (!kind.equals(LIRKind.Illegal)) {
            interval.setKind(kind);
        }

        interval.addRange(tempPos, tempPos + 1);
        interval.addUsePos(tempPos, registerPriority);
        interval.addMaterializationValue(null);

        if (Debug.isLogEnabled()) {
            Debug.log("add temp: %s tempPos %d (%s)", interval, tempPos, registerPriority.name());
        }
    }

    protected void addDef(AllocatableValue operand, LIRInstruction op, RegisterPriority registerPriority, ValueKind<?> kind) {
        if (!allocator.isProcessed(operand)) {
            return;
        }
        int defPos = op.id();

        Interval interval = allocator.getOrCreateInterval(operand);
        if (!kind.equals(LIRKind.Illegal)) {
            interval.setKind(kind);
        }

        Range r = interval.first();
        if (r.from <= defPos) {
            /*
             * Update the starting point (when a range is first created for a use, its start is the
             * beginning of the current block until a def is encountered).
             */
            r.from = defPos;
            interval.addUsePos(defPos, registerPriority);

        } else {
            /*
             * Dead value: make a vacuous interval, and also add the register priority for dead
             * intervals.
             */
            interval.addRange(defPos, defPos + 1);
            interval.addUsePos(defPos, registerPriority);
            if (Debug.isLogEnabled()) {
                Debug.log("Warning: def of operand %s at %d occurs without use", operand, defPos);
            }
        }

        changeSpillDefinitionPos(op, operand, interval, defPos);
        if (registerPriority == RegisterPriority.None && interval.spillState().ordinal() <= SpillState.StartInMemory.ordinal() && isStackSlot(operand)) {
            // detection of method-parameters and roundfp-results
            interval.setSpillState(SpillState.StartInMemory);
        }
        interval.addMaterializationValue(getMaterializedValue(op, operand, interval));

        if (Debug.isLogEnabled()) {
            Debug.log("add def: %s defPos %d (%s)", interval, defPos, registerPriority.name());
        }
    }

    /**
     * Optimizes moves related to incoming stack based arguments. The interval for the destination
     * of such moves is assigned the stack slot (which is in the caller's frame) as its spill slot.
     */
    protected void handleMethodArguments(LIRInstruction op) {
        if (op instanceof ValueMoveOp) {
            ValueMoveOp move = (ValueMoveOp) op;
            if (optimizeMethodArgument(move.getInput())) {
                StackSlot slot = asStackSlot(move.getInput());
                if (DetailedAsserts.getValue()) {
                    assert op.id() > 0 : "invalid id";
                    assert allocator.blockForId(op.id()).getPredecessorCount() == 0 : "move from stack must be in first block";
                    assert isVariable(move.getResult()) : "result of move must be a variable";

                    if (Debug.isLogEnabled()) {
                        Debug.log("found move from stack slot %s to %s", slot, move.getResult());
                    }
                }

                Interval interval = allocator.intervalFor(move.getResult());
                interval.setSpillSlot(slot);
                interval.assignLocation(slot);
            }
        }
    }

    protected void addRegisterHint(final LIRInstruction op, final Value targetValue, OperandMode mode, EnumSet<OperandFlag> flags, final boolean hintAtDef) {
        if (flags.contains(OperandFlag.HINT) && LinearScan.isVariableOrRegister(targetValue)) {

            op.forEachRegisterHint(targetValue, mode, (registerHint, valueMode, valueFlags) -> {
                if (LinearScan.isVariableOrRegister(registerHint)) {
                    Interval from = allocator.getOrCreateInterval((AllocatableValue) registerHint);
                    Interval to = allocator.getOrCreateInterval((AllocatableValue) targetValue);

                    /* hints always point from def to use */
                    if (hintAtDef) {
                        to.setLocationHint(from);
                    } else {
                        from.setLocationHint(to);
                    }
                    if (Debug.isLogEnabled()) {
                        Debug.log("operation at opId %d: added hint from interval %d to %d", op.id(), from.operandNumber, to.operandNumber);
                    }

                    return registerHint;
                }
                return null;
            });
        }
    }

    /**
     * Eliminates moves from register to stack if the stack slot is known to be correct.
     *
     * @param op the instruction that defines {@code operand}
     * @param operand the operand being defined
     */
    protected void changeSpillDefinitionPos(LIRInstruction op, AllocatableValue operand, Interval interval, int defPos) {
        assert interval.isSplitParent() : "can only be called for split parents";

        switch (interval.spillState()) {
            case NoDefinitionFound:
                assert interval.spillDefinitionPos() == -1 : "must not be set before";
                interval.setSpillDefinitionPos(defPos);
                interval.setSpillState(SpillState.NoSpillStore);
                break;

            case NoSpillStore:
                assert defPos <= interval.spillDefinitionPos() : "positions are processed in reverse order when intervals are created";
                if (defPos < interval.spillDefinitionPos() - 2) {
                    // second definition found, so no spill optimization possible for this interval
                    interval.setSpillState(SpillState.NoOptimization);
                } else {
                    // two consecutive definitions (because of two-operand LIR form)
                    assert allocator.blockForId(defPos) == allocator.blockForId(interval.spillDefinitionPos()) : "block must be equal";
                }
                break;

            case NoOptimization:
                // nothing to do
                break;

            default:
                throw GraalError.shouldNotReachHere("other states not allowed at this time");
        }
    }

    private static boolean optimizeMethodArgument(Value value) {
        /*
         * Object method arguments that are passed on the stack are currently not optimized because
         * this requires that the runtime visits method arguments during stack walking.
         */
        return isStackSlot(value) && asStackSlot(value).isInCallerFrame() && LIRKind.isValue(value);
    }

    /**
     * Determines the register priority for an instruction's output/result operand.
     */
    protected RegisterPriority registerPriorityOfOutputOperand(LIRInstruction op) {
        if (op instanceof ValueMoveOp) {
            ValueMoveOp move = (ValueMoveOp) op;
            if (optimizeMethodArgument(move.getInput())) {
                return RegisterPriority.None;
            }
        }

        // all other operands require a register
        return RegisterPriority.MustHaveRegister;
    }

    /**
     * Determines the priority with which an instruction's input operand will be allocated a
     * register.
     */
    protected static RegisterPriority registerPriorityOfInputOperand(EnumSet<OperandFlag> flags) {
        if (flags.contains(OperandFlag.STACK)) {
            return RegisterPriority.ShouldHaveRegister;
        }
        // all other operands require a register
        return RegisterPriority.MustHaveRegister;
    }

    @SuppressWarnings("try")
    protected void buildIntervals() {

        try (Indent indent = Debug.logAndIndent("build intervals")) {
            InstructionValueConsumer outputConsumer = (op, operand, mode, flags) -> {
                if (LinearScan.isVariableOrRegister(operand)) {
                    addDef((AllocatableValue) operand, op, registerPriorityOfOutputOperand(op), operand.getValueKind());
                    addRegisterHint(op, operand, mode, flags, true);
                }
            };

            InstructionValueConsumer tempConsumer = (op, operand, mode, flags) -> {
                if (LinearScan.isVariableOrRegister(operand)) {
                    addTemp((AllocatableValue) operand, op.id(), RegisterPriority.MustHaveRegister, operand.getValueKind());
                    addRegisterHint(op, operand, mode, flags, false);
                }
            };

            InstructionValueConsumer aliveConsumer = (op, operand, mode, flags) -> {
                if (LinearScan.isVariableOrRegister(operand)) {
                    RegisterPriority p = registerPriorityOfInputOperand(flags);
                    int opId = op.id();
                    int blockFrom = allocator.getFirstLirInstructionId(allocator.blockForId(opId));
                    addUse((AllocatableValue) operand, blockFrom, opId + 1, p, operand.getValueKind());
                    addRegisterHint(op, operand, mode, flags, false);
                }
            };

            InstructionValueConsumer inputConsumer = (op, operand, mode, flags) -> {
                if (LinearScan.isVariableOrRegister(operand)) {
                    int opId = op.id();
                    int blockFrom = allocator.getFirstLirInstructionId(allocator.blockForId(opId));
                    RegisterPriority p = registerPriorityOfInputOperand(flags);
                    addUse((AllocatableValue) operand, blockFrom, opId, p, operand.getValueKind());
                    addRegisterHint(op, operand, mode, flags, false);
                }
            };

            InstructionValueConsumer stateProc = (op, operand, mode, flags) -> {
                if (LinearScan.isVariableOrRegister(operand)) {
                    int opId = op.id();
                    int blockFrom = allocator.getFirstLirInstructionId(allocator.blockForId(opId));
                    addUse((AllocatableValue) operand, blockFrom, opId + 1, RegisterPriority.None, operand.getValueKind());
                }
            };

            // create a list with all caller-save registers (cpu, fpu, xmm)
            RegisterArray callerSaveRegs = allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters();

            // iterate all blocks in reverse order
            for (int i = allocator.blockCount() - 1; i >= 0; i--) {

                AbstractBlockBase<?> block = allocator.blockAt(i);
                try (Indent indent2 = Debug.logAndIndent("handle block %d", block.getId())) {

                    List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
                    final int blockFrom = allocator.getFirstLirInstructionId(block);
                    int blockTo = allocator.getLastLirInstructionId(block);

                    assert blockFrom == instructions.get(0).id();
                    assert blockTo == instructions.get(instructions.size() - 1).id();

                    // Update intervals for operands live at the end of this block.
                    BitSet live = allocator.getBlockData(block).liveOut;
                    for (int operandNum = live.nextSetBit(0); operandNum >= 0; operandNum = live.nextSetBit(operandNum + 1)) {
                        assert live.get(operandNum) : "should not stop here otherwise";
                        AllocatableValue operand = allocator.intervalFor(operandNum).operand;
                        if (Debug.isLogEnabled()) {
                            Debug.log("live in %d: %s", operandNum, operand);
                        }

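                        /*
                         * The operand is live at the end of this block, so give it a range that
                         * covers the whole block up to two positions past the last instruction
                         * (blockTo + 2). Priority None and LIRKind.Illegal only extend liveness;
                         * if the operand is defined inside this block, addDef later moves the
                         * range start to the definition position.
                         */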
                        addUse(operand, blockFrom, blockTo + 2, RegisterPriority.None, LIRKind.Illegal);

                        /*
                         * Add special use positions for loop-end blocks when the interval is used
                         * anywhere inside this loop. It's possible that the block was part of a
                         * non-natural loop, so it might have an invalid loop index.
                         */
                        if (block.isLoopEnd() && block.getLoop() != null && isIntervalInLoop(operandNum, block.getLoop().getIndex())) {
                            allocator.intervalFor(operandNum).addUsePos(blockTo + 1, RegisterPriority.LiveAtLoopEnd);
                        }
                    }

                    /*
                     * Iterate all instructions of the block in reverse order. Definitions of
                     * intervals are processed before uses.
                     */
                    for (int j = instructions.size() - 1; j >= 0; j--) {
                        final LIRInstruction op = instructions.get(j);
                        final int opId = op.id();

                        try (Indent indent3 = Debug.logAndIndent("handle inst %d: %s", opId, op)) {

                            // add a temp range for each register if operation destroys
                            // caller-save registers
                            if (op.destroysCallerSavedRegisters()) {
                                for (Register r : callerSaveRegs) {
                                    if (allocator.attributes(r).isAllocatable()) {
                                        addTemp(r.asValue(), opId, RegisterPriority.None, LIRKind.Illegal);
                                    }
                                }
                                if (Debug.isLogEnabled()) {
                                    Debug.log("operation destroys all caller-save registers");
                                }
                            }

                            op.visitEachOutput(outputConsumer);
                            op.visitEachTemp(tempConsumer);
                            op.visitEachAlive(aliveConsumer);
                            op.visitEachInput(inputConsumer);

                            /*
                             * Add uses of live locals from interpreter's point of view for proper
                             * debug information generation. Treat these operands as temp values (if
                             * the live range is extended to a call site, the value would be in a
                             * register at the call otherwise).
                             */
                            op.visitEachState(stateProc);

                            // special steps for some instructions (especially moves)
                            handleMethodArguments(op);

                        }

                    } // end of instruction iteration
                }
            } // end of block iteration

            /*
             * Add the range [0, 1] to all fixed intervals. The register allocator need not handle
             * unhandled fixed intervals.
             */
            for (Interval interval : allocator.intervals()) {
                if (interval != null && isRegister(interval.operand)) {
                    interval.addRange(0, 1);
                }
            }
        }
    }

    /**
     * Returns a value for an interval definition, which can be used for re-materialization.
     *
     * @param op An instruction which defines a value
     * @param operand The destination operand of the instruction
     * @param interval The interval for this defined value.
     * @return Returns the value which is moved to the instruction and which can be reused at all
     *         reload-locations in case the interval of this instruction is spilled. Currently this
     *         can only be a {@link JavaConstant}.
     */
    protected Constant getMaterializedValue(LIRInstruction op, Value operand, Interval interval) {
        if (op instanceof LoadConstantOp) {
            LoadConstantOp move = (LoadConstantOp) op;

            if (!allocator.neverSpillConstants()) {
                /*
                 * Check if the interval has any uses which would accept a stack location (priority
                 * == ShouldHaveRegister). Rematerialization of such intervals can result in a
                 * degradation, because rematerialization always inserts a constant load, even if
                 * the value is not needed in a register.
                 */
                Interval.UsePosList usePosList = interval.usePosList();
                int numUsePos = usePosList.size();
                for (int useIdx = 0; useIdx < numUsePos; useIdx++) {
                    Interval.RegisterPriority priority = usePosList.registerPriority(useIdx);
                    if (priority == Interval.RegisterPriority.ShouldHaveRegister) {
                        return null;
                    }
                }
            }
            return move.getConstant();
        }
        return null;
    }
}