1 /*
   2  * Copyright (c) 2015, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 
  24 
  25 package org.graalvm.compiler.lir.alloc.lsra;
  26 
  27 import static jdk.vm.ci.code.ValueUtil.asRegister;
  28 import static jdk.vm.ci.code.ValueUtil.asStackSlot;
  29 import static jdk.vm.ci.code.ValueUtil.isRegister;
  30 import static jdk.vm.ci.code.ValueUtil.isStackSlot;
  31 import static org.graalvm.compiler.lir.LIRValueUtil.asVariable;
  32 import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;
  33 import static org.graalvm.compiler.lir.debug.LIRGenerationDebugContext.getSourceForOperandFromDebugContext;
  34 
  35 import java.util.ArrayDeque;
  36 import java.util.ArrayList;
  37 import java.util.BitSet;
  38 import java.util.EnumSet;
  39 
  40 import jdk.internal.vm.compiler.collections.EconomicSet;
  41 import jdk.internal.vm.compiler.collections.Equivalence;
  42 import org.graalvm.compiler.core.common.LIRKind;
  43 import org.graalvm.compiler.core.common.PermanentBailoutException;
  44 import org.graalvm.compiler.core.common.alloc.ComputeBlockOrder;
  45 import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
  46 import org.graalvm.compiler.core.common.util.BitMap2D;
  47 import org.graalvm.compiler.debug.Assertions;
  48 import org.graalvm.compiler.debug.DebugContext;
  49 import org.graalvm.compiler.debug.GraalError;
  50 import org.graalvm.compiler.debug.Indent;
  51 import org.graalvm.compiler.lir.InstructionValueConsumer;
  52 import org.graalvm.compiler.lir.LIRInstruction;
  53 import org.graalvm.compiler.lir.LIRInstruction.OperandFlag;
  54 import org.graalvm.compiler.lir.LIRInstruction.OperandMode;
  55 import org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
  56 import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
  57 import org.graalvm.compiler.lir.ValueConsumer;
  58 import org.graalvm.compiler.lir.alloc.lsra.Interval.RegisterPriority;
  59 import org.graalvm.compiler.lir.alloc.lsra.Interval.SpillState;
  60 import org.graalvm.compiler.lir.alloc.lsra.LinearScan.BlockData;
  61 import org.graalvm.compiler.lir.gen.LIRGenerationResult;
  62 import org.graalvm.compiler.lir.phases.AllocationPhase.AllocationContext;
  63 
  64 import jdk.vm.ci.code.Register;
  65 import jdk.vm.ci.code.RegisterArray;
  66 import jdk.vm.ci.code.StackSlot;
  67 import jdk.vm.ci.code.TargetDescription;
  68 import jdk.vm.ci.meta.AllocatableValue;
  69 import jdk.vm.ci.meta.Constant;
  70 import jdk.vm.ci.meta.JavaConstant;
  71 import jdk.vm.ci.meta.Value;
  72 import jdk.vm.ci.meta.ValueKind;
  73 
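/**
 * Performs the lifetime analysis of the {@linkplain LinearScan linear scan register allocator}:
 * numbers all LIR instructions, computes local and global live sets and builds the lifetime
 * {@link Interval intervals} used by the subsequent allocation phases.
 */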
  74 public class LinearScanLifetimeAnalysisPhase extends LinearScanAllocationPhase {
  75 
  76     protected final LinearScan allocator;
  77     protected final DebugContext debug;
  78 
    /**
     * @param linearScan the {@link LinearScan} instance to perform lifetime analysis for
     */
  82     protected LinearScanLifetimeAnalysisPhase(LinearScan linearScan) {
  83         allocator = linearScan;
  84         debug = allocator.getDebug();
  85     }
  86 
  87     @Override
  88     protected void run(TargetDescription target, LIRGenerationResult lirGenRes, AllocationContext context) {
  89         numberInstructions();
  90         debug.dump(DebugContext.VERBOSE_LEVEL, lirGenRes.getLIR(), "Before register allocation");
  91         computeLocalLiveSets();
  92         computeGlobalLiveSets();
  93         buildIntervals(Assertions.detailedAssertionsEnabled(allocator.getOptions()));
  94     }
  95 
    /**
     * Bit map with one bit per (variable, loop) pair, set if the variable is used or defined
     * inside that loop.
     */
  99     private BitMap2D intervalInLoop;
 100 
 101     boolean isIntervalInLoop(int interval, int loop) {
 102         return intervalInLoop.at(interval, loop);
 103     }
 104 
 105     /**
 106      * Numbers all instructions in all blocks. The numbering follows the
 107      * {@linkplain ComputeBlockOrder linear scan order}.
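     * <p>
     * Instructions are assigned only even ids (the id is incremented by two per instruction); the
     * odd positions in between model positions between instructions, e.g. for interval ranges that
     * must extend past an instruction or across a block boundary.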
 108      */
 109     protected void numberInstructions() {
 110 
 111         allocator.initIntervals();
 112 
 113         ValueConsumer setVariableConsumer = (value, mode, flags) -> {
 114             if (isVariable(value)) {
 115                 allocator.getOrCreateInterval(asVariable(value));
 116             }
 117         };
 118 
        // Assign ids to the LIR instructions and build a mapping from id to instruction and block.
 120         int numInstructions = 0;
 121         for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
 122             numInstructions += allocator.getLIR().getLIRforBlock(block).size();
 123         }
 124 
 125         // initialize with correct length
 126         allocator.initOpIdMaps(numInstructions);
 127 
 128         int opId = 0;
 129         int index = 0;
 130         for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
 131             allocator.initBlockData(block);
 132 
 133             ArrayList<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
 134 
 135             int numInst = instructions.size();
 136             for (int j = 0; j < numInst; j++) {
 137                 LIRInstruction op = instructions.get(j);
 138                 op.setId(opId);
 139 
 140                 allocator.putOpIdMaps(index, op, block);
 141                 assert allocator.instructionForId(opId) == op : "must match";
 142 
 143                 op.visitEachTemp(setVariableConsumer);
 144                 op.visitEachOutput(setVariableConsumer);
 145 
 146                 index++;
                opId += 2; // instructions are numbered by two, so only even ids are used
 148             }
 149         }
 150         assert index == numInstructions : "must match";
 151         assert (index << 1) == opId : "must match: " + (index << 1);
 152     }
 153 
 154     /**
 155      * Computes local live sets (i.e. {@link BlockData#liveGen} and {@link BlockData#liveKill})
 156      * separately for each block.
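     * <p>
     * {@code liveGen} contains the operands that are used in a block before being defined there;
     * {@code liveKill} contains the operands that are defined in the block.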
 157      */
 158     @SuppressWarnings("try")
 159     void computeLocalLiveSets() {
 160         int liveSize = allocator.liveSetSize();
 161 
 162         intervalInLoop = new BitMap2D(allocator.operandSize(), allocator.numLoops());
 163 
 164         try {
 165             final BitSet liveGenScratch = new BitSet(liveSize);
 166             final BitSet liveKillScratch = new BitSet(liveSize);
 167             // iterate all blocks
 168             for (final AbstractBlockBase<?> block : allocator.sortedBlocks()) {
 169                 try (Indent indent = debug.logAndIndent("compute local live sets for block %s", block)) {
 170 
 171                     liveGenScratch.clear();
 172                     liveKillScratch.clear();
 173 
 174                     ArrayList<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
 175                     int numInst = instructions.size();
 176 
 177                     ValueConsumer useConsumer = (operand, mode, flags) -> {
 178                         if (isVariable(operand)) {
 179                             int operandNum = getOperandNumber(operand);
 180                             if (!liveKillScratch.get(operandNum)) {
 181                                 liveGenScratch.set(operandNum);
 182                                 if (debug.isLogEnabled()) {
 183                                     debug.log("liveGen for operand %d(%s)", operandNum, operand);
 184                                 }
 185                             }
 186                             if (block.getLoop() != null) {
 187                                 intervalInLoop.setBit(operandNum, block.getLoop().getIndex());
 188                             }
 189                         }
 190 
 191                         if (allocator.detailedAsserts) {
 192                             verifyInput(block, liveKillScratch, operand);
 193                         }
 194                     };
 195                     ValueConsumer stateConsumer = (operand, mode, flags) -> {
 196                         if (LinearScan.isVariableOrRegister(operand)) {
 197                             int operandNum = getOperandNumber(operand);
 198                             if (!liveKillScratch.get(operandNum)) {
 199                                 liveGenScratch.set(operandNum);
 200                                 if (debug.isLogEnabled()) {
 201                                     debug.log("liveGen in state for operand %d(%s)", operandNum, operand);
 202                                 }
 203                             }
 204                         }
 205                     };
 206                     ValueConsumer defConsumer = (operand, mode, flags) -> {
 207                         if (isVariable(operand)) {
 208                             int varNum = getOperandNumber(operand);
 209                             liveKillScratch.set(varNum);
 210                             if (debug.isLogEnabled()) {
 211                                 debug.log("liveKill for operand %d(%s)", varNum, operand);
 212                             }
 213                             if (block.getLoop() != null) {
 214                                 intervalInLoop.setBit(varNum, block.getLoop().getIndex());
 215                             }
 216                         }
 217 
 218                         if (allocator.detailedAsserts) {
 219                             /*
 220                              * Fixed intervals are never live at block boundaries, so they need not
 221                              * be processed in live sets. Process them only in debug mode so that
                             * this invariant can be checked.
 223                              */
 224                             verifyTemp(liveKillScratch, operand);
 225                         }
 226                     };
 227 
 228                     // iterate all instructions of the block
 229                     for (int j = 0; j < numInst; j++) {
 230                         final LIRInstruction op = instructions.get(j);
 231 
 232                         try (Indent indent2 = debug.logAndIndent("handle op %d: %s", op.id(), op)) {
 233                             op.visitEachInput(useConsumer);
 234                             op.visitEachAlive(useConsumer);
 235                             /*
 236                              * Add uses of live locals from interpreter's point of view for proper
 237                              * debug information generation.
 238                              */
 239                             op.visitEachState(stateConsumer);
 240                             op.visitEachTemp(defConsumer);
 241                             op.visitEachOutput(defConsumer);
 242                         }
 243                     } // end of instruction iteration
 244 
 245                     BlockData blockSets = allocator.getBlockData(block);
 246                     blockSets.liveGen = trimClone(liveGenScratch);
 247                     blockSets.liveKill = trimClone(liveKillScratch);
 248                     // sticky size, will get non-sticky in computeGlobalLiveSets
 249                     blockSets.liveIn = new BitSet(0);
 250                     blockSets.liveOut = new BitSet(0);
 251 
 252                     if (debug.isLogEnabled()) {
 253                         debug.log("liveGen  B%d %s", block.getId(), blockSets.liveGen);
 254                         debug.log("liveKill B%d %s", block.getId(), blockSets.liveKill);
 255                     }
 256 
 257                 }
 258             } // end of block iteration
 259         } catch (OutOfMemoryError oom) {
 260             throw new PermanentBailoutException(oom, "Out-of-memory during live set allocation of size %d", liveSize);
 261         }
 262     }
 263 
 264     private void verifyTemp(BitSet liveKill, Value operand) {
 265         /*
 266          * Fixed intervals are never live at block boundaries, so they need not be processed in live
         * sets. Process them only in debug mode so that this invariant can be checked.
 268          */
 269         if (isRegister(operand)) {
 270             if (allocator.isProcessed(operand)) {
 271                 liveKill.set(getOperandNumber(operand));
 272             }
 273         }
 274     }
 275 
 276     private void verifyInput(AbstractBlockBase<?> block, BitSet liveKill, Value operand) {
 277         /*
 278          * Fixed intervals are never live at block boundaries, so they need not be processed in live
 279          * sets. This is checked by these assertions to be sure about it. The entry block may have
 280          * incoming values in registers, which is ok.
 281          */
 282         if (isRegister(operand) && block != allocator.getLIR().getControlFlowGraph().getStartBlock()) {
 283             if (allocator.isProcessed(operand)) {
 284                 assert liveKill.get(getOperandNumber(operand)) : "using fixed register " + asRegister(operand) + " that is not defined in this block " + block;
 285             }
 286         }
 287     }
 288 
 289     protected int getOperandNumber(Value operand) {
 290         return allocator.operandNumber(operand);
 291     }
 292 
 293     /**
 294      * Performs a backward dataflow analysis to compute global live sets (i.e.
 295      * {@link BlockData#liveIn} and {@link BlockData#liveOut}) for each block.
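     * <p>
     * The standard liveness equations are used:
     *
     * <pre>
     * liveOut(B) = union of liveIn(S) over all successors S of B
     * liveIn(B)  = liveGen(B) | (liveOut(B) - liveKill(B))
     * </pre>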
 296      */
 297     @SuppressWarnings("try")
 298     protected void computeGlobalLiveSets() {
 299         try (Indent indent = debug.logAndIndent("compute global live sets")) {
 300             int numBlocks = allocator.blockCount();
 301             boolean changeOccurred;
 302             boolean changeOccurredInBlock;
 303             int iterationCount = 0;
 304             BitSet scratch = new BitSet(allocator.liveSetSize()); // scratch set for calculations
 305 
 306             /*
 307              * Perform a backward dataflow analysis to compute liveOut and liveIn for each block.
 308              * The loop is executed until a fixpoint is reached (no changes in an iteration).
 309              */
 310             do {
 311                 changeOccurred = false;
 312 
 313                 try (Indent indent2 = debug.logAndIndent("new iteration %d", iterationCount)) {
 314 
 315                     // iterate all blocks in reverse order
 316                     for (int i = numBlocks - 1; i >= 0; i--) {
 317                         AbstractBlockBase<?> block = allocator.blockAt(i);
 318                         BlockData blockSets = allocator.getBlockData(block);
 319 
 320                         changeOccurredInBlock = false;
 321 
 322                         /*
 323                          * liveOut(block) is the union of liveIn(sux), for successors sux of block.
 324                          */
 325                         int n = block.getSuccessorCount();
                        if (n > 0) {
                            // block has successors
                            scratch.clear();
                            for (AbstractBlockBase<?> successor : block.getSuccessors()) {
                                scratch.or(allocator.getBlockData(successor).liveIn);
                            }
 334 
 335                             if (!blockSets.liveOut.equals(scratch)) {
 336                                 blockSets.liveOut = trimClone(scratch);
 337 
 338                                 changeOccurred = true;
 339                                 changeOccurredInBlock = true;
 340                             }
 341                         }
 342 
 343                         if (iterationCount == 0 || changeOccurredInBlock) {
 344                             /*
 345                              * liveIn(block) is the union of liveGen(block) with (liveOut(block) &
 346                              * !liveKill(block)).
 347                              *
 348                              * Note: liveIn has to be computed only in first iteration or if liveOut
 349                              * has changed!
 350                              *
 351                              * Note: liveIn set can only grow, never shrink. No need to clear it.
 352                              */
 353                             BitSet liveIn = blockSets.liveIn;
 354                             /*
 355                              * BitSet#or will call BitSet#ensureSize (since the bit set is of length
 356                              * 0 initially) and set sticky to false
 357                              */
 358                             liveIn.or(blockSets.liveOut);
 359                             liveIn.andNot(blockSets.liveKill);
 360                             liveIn.or(blockSets.liveGen);
 361 
                            // BitSet#clone calls trimToSize() on the receiver before copying; the
                            // returned copy is discarded, the call only compacts liveIn's backing array.
                            liveIn.clone();
 363 
 364                             if (debug.isLogEnabled()) {
 365                                 debug.log("block %d: livein = %s,  liveout = %s", block.getId(), liveIn, blockSets.liveOut);
 366                             }
 367                         }
 368                     }
 369                     iterationCount++;
 370 
 371                     if (changeOccurred && iterationCount > 50) {
 372                         /*
 373                          * Very unlikely, should never happen: If it happens we cannot guarantee it
 374                          * won't happen again.
 375                          */
 376                         throw new PermanentBailoutException("too many iterations in computeGlobalLiveSets");
 377                     }
 378                 }
 379             } while (changeOccurred);
 380 
 381             if (Assertions.detailedAssertionsEnabled(allocator.getOptions())) {
 382                 verifyLiveness();
 383             }
 384 
 385             // check that the liveIn set of the first block is empty
 386             AbstractBlockBase<?> startBlock = allocator.getLIR().getControlFlowGraph().getStartBlock();
 387             if (allocator.getBlockData(startBlock).liveIn.cardinality() != 0) {
 388                 if (Assertions.detailedAssertionsEnabled(allocator.getOptions())) {
 389                     reportFailure(numBlocks);
 390                 }
 391                 // bailout if this occurs in product mode.
 392                 throw new GraalError("liveIn set of first block must be empty: " + allocator.getBlockData(startBlock).liveIn);
 393             }
 394         }
 395     }
 396 
 397     /**
     * Creates a trimmed copy of a bit set.
 399      *
 400      * {@link BitSet#clone()} cannot be used since it will not {@linkplain BitSet#trimToSize trim}
 401      * the array if the bit set is {@linkplain BitSet#sizeIsSticky sticky}.
 402      */
 403     @SuppressWarnings("javadoc")
 404     private static BitSet trimClone(BitSet set) {
 405         BitSet trimmedSet = new BitSet(0); // zero-length words array, sticky
 406         trimmedSet.or(set); // words size ensured to be words-in-use of set,
 407                             // also makes it non-sticky
 408         return trimmedSet;
 409     }
 410 
 411     @SuppressWarnings("try")
 412     protected void reportFailure(int numBlocks) {
 413         try (DebugContext.Scope s = debug.forceLog()) {
 414             try (Indent indent = debug.logAndIndent("report failure")) {
 415 
 416                 BitSet startBlockLiveIn = allocator.getBlockData(allocator.getLIR().getControlFlowGraph().getStartBlock()).liveIn;
 417                 try (Indent indent2 = debug.logAndIndent("Error: liveIn set of first block must be empty (when this fails, variables are used before they are defined):")) {
 418                     for (int operandNum = startBlockLiveIn.nextSetBit(0); operandNum >= 0; operandNum = startBlockLiveIn.nextSetBit(operandNum + 1)) {
 419                         Interval interval = allocator.intervalFor(operandNum);
 420                         if (interval != null) {
 421                             Value operand = interval.operand;
 422                             debug.log("var %d; operand=%s; node=%s", operandNum, operand, getSourceForOperandFromDebugContext(debug, operand));
 423                         } else {
 424                             debug.log("var %d; missing operand", operandNum);
 425                         }
 426                     }
 427                 }
 428 
 429                 // print some additional information to simplify debugging
 430                 for (int operandNum = startBlockLiveIn.nextSetBit(0); operandNum >= 0; operandNum = startBlockLiveIn.nextSetBit(operandNum + 1)) {
 431                     Interval interval = allocator.intervalFor(operandNum);
 432                     Value operand = null;
 433                     Object valueForOperandFromDebugContext = null;
 434                     if (interval != null) {
 435                         operand = interval.operand;
 436                         valueForOperandFromDebugContext = getSourceForOperandFromDebugContext(debug, operand);
 437                     }
 438                     try (Indent indent2 = debug.logAndIndent("---- Detailed information for var %d; operand=%s; node=%s ----", operandNum, operand, valueForOperandFromDebugContext)) {
 439 
 440                         ArrayDeque<AbstractBlockBase<?>> definedIn = new ArrayDeque<>();
 441                         EconomicSet<AbstractBlockBase<?>> usedIn = EconomicSet.create(Equivalence.IDENTITY);
 442                         for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
 443                             if (allocator.getBlockData(block).liveGen.get(operandNum)) {
 444                                 usedIn.add(block);
 445                                 try (Indent indent3 = debug.logAndIndent("used in block B%d", block.getId())) {
 446                                     for (LIRInstruction ins : allocator.getLIR().getLIRforBlock(block)) {
 447                                         try (Indent indent4 = debug.logAndIndent("%d: %s", ins.id(), ins)) {
 448                                             ins.forEachState((liveStateOperand, mode, flags) -> {
 449                                                 debug.log("operand=%s", liveStateOperand);
 450                                                 return liveStateOperand;
 451                                             });
 452                                         }
 453                                     }
 454                                 }
 455                             }
 456                             if (allocator.getBlockData(block).liveKill.get(operandNum)) {
 457                                 definedIn.add(block);
 458                                 try (Indent indent3 = debug.logAndIndent("defined in block B%d", block.getId())) {
 459                                     for (LIRInstruction ins : allocator.getLIR().getLIRforBlock(block)) {
 460                                         debug.log("%d: %s", ins.id(), ins);
 461                                     }
 462                                 }
 463                             }
 464                         }
 465 
 466                         int[] hitCount = new int[numBlocks];
 467 
 468                         while (!definedIn.isEmpty()) {
 469                             AbstractBlockBase<?> block = definedIn.removeFirst();
 470                             usedIn.remove(block);
 471                             for (AbstractBlockBase<?> successor : block.getSuccessors()) {
 472                                 if (successor.isLoopHeader()) {
 473                                     if (!block.isLoopEnd()) {
 474                                         definedIn.add(successor);
 475                                     }
 476                                 } else {
 477                                     if (++hitCount[successor.getId()] == successor.getPredecessorCount()) {
 478                                         definedIn.add(successor);
 479                                     }
 480                                 }
 481                             }
 482                         }
 483                         try (Indent indent3 = debug.logAndIndent("**** offending usages are in: ")) {
 484                             for (AbstractBlockBase<?> block : usedIn) {
 485                                 debug.log("B%d", block.getId());
 486                             }
 487                         }
 488                     }
 489                 }
 490             }
 491         } catch (Throwable e) {
 492             throw debug.handle(e);
 493         }
 494     }
 495 
 496     protected void verifyLiveness() {
 497         /*
 498          * Check that fixed intervals are not live at block boundaries (live set must be empty at
 499          * fixed intervals).
 500          */
 501         for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
 502             for (int j = 0; j <= allocator.maxRegisterNumber(); j++) {
 503                 assert !allocator.getBlockData(block).liveIn.get(j) : "liveIn  set of fixed register must be empty";
 504                 assert !allocator.getBlockData(block).liveOut.get(j) : "liveOut set of fixed register must be empty";
 505                 assert !allocator.getBlockData(block).liveGen.get(j) : "liveGen set of fixed register must be empty";
 506             }
 507         }
 508     }
 509 
 510     protected void addUse(AllocatableValue operand, int from, int to, RegisterPriority registerPriority, ValueKind<?> kind, boolean detailedAsserts) {
 511         if (!allocator.isProcessed(operand)) {
 512             return;
 513         }
 514 
 515         Interval interval = allocator.getOrCreateInterval(operand);
 516         if (!kind.equals(LIRKind.Illegal)) {
 517             interval.setKind(kind);
 518         }
 519 
 520         interval.addRange(from, to);
 521 
 522         // Register use position at even instruction id.
 523         interval.addUsePos(to & ~1, registerPriority, detailedAsserts);
 524 
 525         if (debug.isLogEnabled()) {
 526             debug.log("add use: %s, from %d to %d (%s)", interval, from, to, registerPriority.name());
 527         }
 528     }
 529 
 530     protected void addTemp(AllocatableValue operand, int tempPos, RegisterPriority registerPriority, ValueKind<?> kind, boolean detailedAsserts) {
 531         if (!allocator.isProcessed(operand)) {
 532             return;
 533         }
 534 
 535         Interval interval = allocator.getOrCreateInterval(operand);
 536         if (!kind.equals(LIRKind.Illegal)) {
 537             interval.setKind(kind);
 538         }
 539 
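        // A temp is live only at the defining instruction itself, so its range covers a single position.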
 540         interval.addRange(tempPos, tempPos + 1);
 541         interval.addUsePos(tempPos, registerPriority, detailedAsserts);
 542         interval.addMaterializationValue(null);
 543 
 544         if (debug.isLogEnabled()) {
 545             debug.log("add temp: %s tempPos %d (%s)", interval, tempPos, RegisterPriority.MustHaveRegister.name());
 546         }
 547     }
 548 
 549     protected void addDef(AllocatableValue operand, LIRInstruction op, RegisterPriority registerPriority, ValueKind<?> kind, boolean detailedAsserts) {
 550         if (!allocator.isProcessed(operand)) {
 551             return;
 552         }
 553         int defPos = op.id();
 554 
 555         Interval interval = allocator.getOrCreateInterval(operand);
 556         if (!kind.equals(LIRKind.Illegal)) {
 557             interval.setKind(kind);
 558         }
 559 
 560         Range r = interval.first();
 561         if (r.from <= defPos) {
 562             /*
 563              * Update the starting point (when a range is first created for a use, its start is the
 564              * beginning of the current block until a def is encountered).
 565              */
 566             r.from = defPos;
 567             interval.addUsePos(defPos, registerPriority, detailedAsserts);
 568 
 569         } else {
            /*
             * Dead value: the definition has no use. Create a vacuous one-position interval and
             * still record the register priority.
             */
 573             interval.addRange(defPos, defPos + 1);
 574             interval.addUsePos(defPos, registerPriority, detailedAsserts);
 575             if (debug.isLogEnabled()) {
 576                 debug.log("Warning: def of operand %s at %d occurs without use", operand, defPos);
 577             }
 578         }
 579 
 580         changeSpillDefinitionPos(op, operand, interval, defPos);
 581         if (registerPriority == RegisterPriority.None && interval.spillState().ordinal() <= SpillState.StartInMemory.ordinal() && isStackSlot(operand)) {
 582             // detection of method-parameters and roundfp-results
 583             interval.setSpillState(SpillState.StartInMemory);
 584         }
 585         interval.addMaterializationValue(getMaterializedValue(op, operand, interval));
 586 
 587         if (debug.isLogEnabled()) {
 588             debug.log("add def: %s defPos %d (%s)", interval, defPos, registerPriority.name());
 589         }
 590     }
 591 
 592     /**
 593      * Optimizes moves related to incoming stack based arguments. The interval for the destination
 594      * of such moves is assigned the stack slot (which is in the caller's frame) as its spill slot.
 595      */
 596     protected void handleMethodArguments(LIRInstruction op) {
 597         if (ValueMoveOp.isValueMoveOp(op)) {
 598             ValueMoveOp move = ValueMoveOp.asValueMoveOp(op);
 599             if (optimizeMethodArgument(move.getInput())) {
 600                 StackSlot slot = asStackSlot(move.getInput());
 601                 if (Assertions.detailedAssertionsEnabled(allocator.getOptions())) {
 602                     assert op.id() > 0 : "invalid id";
 603                     assert allocator.blockForId(op.id()).getPredecessorCount() == 0 : "move from stack must be in first block";
 604                     assert isVariable(move.getResult()) : "result of move must be a variable";
 605 
 606                     if (debug.isLogEnabled()) {
 607                         debug.log("found move from stack slot %s to %s", slot, move.getResult());
 608                     }
 609                 }
 610 
 611                 Interval interval = allocator.intervalFor(move.getResult());
 612                 interval.setSpillSlot(slot);
 613                 interval.assignLocation(slot);
 614             }
 615         }
 616     }
 617 
 618     protected void addRegisterHint(final LIRInstruction op, final Value targetValue, OperandMode mode, EnumSet<OperandFlag> flags, final boolean hintAtDef) {
 619         if (flags.contains(OperandFlag.HINT) && LinearScan.isVariableOrRegister(targetValue)) {
 620 
 621             op.forEachRegisterHint(targetValue, mode, (registerHint, valueMode, valueFlags) -> {
 622                 if (LinearScan.isVariableOrRegister(registerHint)) {
 623                     Interval from = allocator.getOrCreateInterval((AllocatableValue) registerHint);
 624                     Interval to = allocator.getOrCreateInterval((AllocatableValue) targetValue);
 625 
 626                     /* hints always point from def to use */
 627                     if (hintAtDef) {
 628                         to.setLocationHint(from);
 629                     } else {
 630                         from.setLocationHint(to);
 631                     }
 632                     if (debug.isLogEnabled()) {
 633                         debug.log("operation at opId %d: added hint from interval %d to %d", op.id(), from.operandNumber, to.operandNumber);
 634                     }
 635 
 636                     return registerHint;
 637                 }
 638                 return null;
 639             });
 640         }
 641     }
 642 
    /**
     * Eliminates moves from register to stack if the stack slot is known to be correct.
     *
     * @param op the instruction that defines {@code operand}
     * @param operand the operand being defined
     */
 649     protected void changeSpillDefinitionPos(LIRInstruction op, AllocatableValue operand, Interval interval, int defPos) {
 650         assert interval.isSplitParent() : "can only be called for split parents";
 651 
 652         switch (interval.spillState()) {
 653             case NoDefinitionFound:
                assert interval.spillDefinitionPos() == -1 : "must not be set before";
 655                 interval.setSpillDefinitionPos(defPos);
 656                 interval.setSpillState(SpillState.NoSpillStore);
 657                 break;
 658 
 659             case NoSpillStore:
 660                 assert defPos <= interval.spillDefinitionPos() : "positions are processed in reverse order when intervals are created";
 661                 if (defPos < interval.spillDefinitionPos() - 2) {
 662                     // second definition found, so no spill optimization possible for this interval
 663                     interval.setSpillState(SpillState.NoOptimization);
 664                 } else {
 665                     // two consecutive definitions (because of two-operand LIR form)
 666                     assert allocator.blockForId(defPos) == allocator.blockForId(interval.spillDefinitionPos()) : "block must be equal";
 667                 }
 668                 break;
 669 
 670             case NoOptimization:
 671                 // nothing to do
 672                 break;
 673 
 674             default:
 675                 throw GraalError.shouldNotReachHere("other states not allowed at this time");
 676         }
 677     }
 678 
 679     private static boolean optimizeMethodArgument(Value value) {
 680         /*
 681          * Object method arguments that are passed on the stack are currently not optimized because
 682          * this requires that the runtime visits method arguments during stack walking.
 683          */
 684         return isStackSlot(value) && asStackSlot(value).isInCallerFrame() && LIRKind.isValue(value);
 685     }
 686 
 687     /**
 688      * Determines the register priority for an instruction's output/result operand.
 689      */
 690     protected RegisterPriority registerPriorityOfOutputOperand(LIRInstruction op) {
 691         if (ValueMoveOp.isValueMoveOp(op)) {
 692             ValueMoveOp move = ValueMoveOp.asValueMoveOp(op);
 693             if (optimizeMethodArgument(move.getInput())) {
 694                 return RegisterPriority.None;
 695             }
 696         }
 697 
 698         // all other operands require a register
 699         return RegisterPriority.MustHaveRegister;
 700     }
 701 
 702     /**
     * Determines the priority with which an instruction's input operand will be allocated a
 704      * register.
 705      */
 706     protected static RegisterPriority registerPriorityOfInputOperand(EnumSet<OperandFlag> flags) {
 707         if (flags.contains(OperandFlag.STACK)) {
 708             return RegisterPriority.ShouldHaveRegister;
 709         }
 710         // all other operands require a register
 711         return RegisterPriority.MustHaveRegister;
 712     }
 713 
 714     @SuppressWarnings("try")
 715     protected void buildIntervals(boolean detailedAsserts) {
 716 
 717         try (Indent indent = debug.logAndIndent("build intervals")) {
 718             InstructionValueConsumer outputConsumer = (op, operand, mode, flags) -> {
 719                 if (LinearScan.isVariableOrRegister(operand)) {
 720                     addDef((AllocatableValue) operand, op, registerPriorityOfOutputOperand(op), operand.getValueKind(), detailedAsserts);
 721                     addRegisterHint(op, operand, mode, flags, true);
 722                 }
 723             };
 724 
 725             InstructionValueConsumer tempConsumer = (op, operand, mode, flags) -> {
 726                 if (LinearScan.isVariableOrRegister(operand)) {
 727                     addTemp((AllocatableValue) operand, op.id(), RegisterPriority.MustHaveRegister, operand.getValueKind(), detailedAsserts);
 728                     addRegisterHint(op, operand, mode, flags, false);
 729                 }
 730             };
 731 
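            /*
             * ALIVE operands must stay live throughout the instruction (they may not share a
             * register with its temps or outputs), so their use range extends to opId + 1, whereas
             * plain inputs end at opId.
             */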
 732             InstructionValueConsumer aliveConsumer = (op, operand, mode, flags) -> {
 733                 if (LinearScan.isVariableOrRegister(operand)) {
 734                     RegisterPriority p = registerPriorityOfInputOperand(flags);
 735                     int opId = op.id();
 736                     int blockFrom = allocator.getFirstLirInstructionId((allocator.blockForId(opId)));
 737                     addUse((AllocatableValue) operand, blockFrom, opId + 1, p, operand.getValueKind(), detailedAsserts);
 738                     addRegisterHint(op, operand, mode, flags, false);
 739                 }
 740             };
 741 
 742             InstructionValueConsumer inputConsumer = (op, operand, mode, flags) -> {
 743                 if (LinearScan.isVariableOrRegister(operand)) {
 744                     int opId = op.id();
 745                     int blockFrom = allocator.getFirstLirInstructionId((allocator.blockForId(opId)));
 746                     RegisterPriority p = registerPriorityOfInputOperand(flags);
 747                     addUse((AllocatableValue) operand, blockFrom, opId, p, operand.getValueKind(), detailedAsserts);
 748                     addRegisterHint(op, operand, mode, flags, false);
 749                 }
 750             };
 751 
 752             InstructionValueConsumer stateProc = (op, operand, mode, flags) -> {
 753                 if (LinearScan.isVariableOrRegister(operand)) {
 754                     int opId = op.id();
 755                     int blockFrom = allocator.getFirstLirInstructionId((allocator.blockForId(opId)));
 756                     addUse((AllocatableValue) operand, blockFrom, opId + 1, RegisterPriority.None, operand.getValueKind(), detailedAsserts);
 757                 }
 758             };
 759 
 760             // create a list with all caller-save registers (cpu, fpu, xmm)
 761             RegisterArray callerSaveRegs = allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters();
 762 
 763             // iterate all blocks in reverse order
 764             for (int i = allocator.blockCount() - 1; i >= 0; i--) {
 765 
 766                 AbstractBlockBase<?> block = allocator.blockAt(i);
 767                 try (Indent indent2 = debug.logAndIndent("handle block %d", block.getId())) {
 768 
 769                     ArrayList<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
 770                     final int blockFrom = allocator.getFirstLirInstructionId(block);
 771                     int blockTo = allocator.getLastLirInstructionId(block);
 772 
 773                     assert blockFrom == instructions.get(0).id();
 774                     assert blockTo == instructions.get(instructions.size() - 1).id();
 775 
 776                     // Update intervals for operands live at the end of this block;
 777                     BitSet live = allocator.getBlockData(block).liveOut;
 778                     for (int operandNum = live.nextSetBit(0); operandNum >= 0; operandNum = live.nextSetBit(operandNum + 1)) {
 779                         assert live.get(operandNum) : "should not stop here otherwise";
 780                         AllocatableValue operand = allocator.intervalFor(operandNum).operand;
 781                         if (debug.isLogEnabled()) {
 782                             debug.log("live in %d: %s", operandNum, operand);
 783                         }
 784 
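                        /*
                         * The range extends to blockTo + 2 (the id of the first instruction of the
                         * next block in linear-scan order), so the interval remains live across the
                         * end of this block.
                         */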
 785                         addUse(operand, blockFrom, blockTo + 2, RegisterPriority.None, LIRKind.Illegal, detailedAsserts);
 786 
 787                         /*
 788                          * Add special use positions for loop-end blocks when the interval is used
 789                          * anywhere inside this loop. It's possible that the block was part of a
 790                          * non-natural loop, so it might have an invalid loop index.
 791                          */
 792                         if (block.isLoopEnd() && block.getLoop() != null && isIntervalInLoop(operandNum, block.getLoop().getIndex())) {
 793                             allocator.intervalFor(operandNum).addUsePos(blockTo + 1, RegisterPriority.LiveAtLoopEnd, detailedAsserts);
 794                         }
 795                     }
 796 
 797                     /*
                     * Iterate all instructions of the block in reverse order. Definitions of
                     * intervals are processed before uses.
 800                      */
 801                     for (int j = instructions.size() - 1; j >= 0; j--) {
 802                         final LIRInstruction op = instructions.get(j);
 803                         final int opId = op.id();
 804 
 805                         try (Indent indent3 = debug.logAndIndent("handle inst %d: %s", opId, op)) {
 806 
 807                             // add a temp range for each register if operation destroys
 808                             // caller-save registers
 809                             if (op.destroysCallerSavedRegisters()) {
 810                                 for (Register r : callerSaveRegs) {
 811                                     if (allocator.attributes(r).isAllocatable()) {
 812                                         addTemp(r.asValue(), opId, RegisterPriority.None, LIRKind.Illegal, detailedAsserts);
 813                                     }
 814                                 }
 815                                 if (debug.isLogEnabled()) {
 816                                     debug.log("operation destroys all caller-save registers");
 817                                 }
 818                             }
 819 
 820                             op.visitEachOutput(outputConsumer);
 821                             op.visitEachTemp(tempConsumer);
 822                             op.visitEachAlive(aliveConsumer);
 823                             op.visitEachInput(inputConsumer);
 824 
 825                             /*
 826                              * Add uses of live locals from interpreter's point of view for proper
 827                              * debug information generation. Treat these operands as temp values (if
 828                              * the live range is extended to a call site, the value would be in a
 829                              * register at the call otherwise).
 830                              */
 831                             op.visitEachState(stateProc);
 832 
 833                             // special steps for some instructions (especially moves)
 834                             handleMethodArguments(op);
 835 
 836                         }
 837 
 838                     } // end of instruction iteration
 839                 }
 840             } // end of block iteration
 841 
            /*
             * Add the range [0, 1] to all fixed intervals so that the register allocator does not
             * have to deal with unhandled fixed intervals.
             */
 846             for (Interval interval : allocator.intervals()) {
 847                 if (interval != null && isRegister(interval.operand)) {
 848                     interval.addRange(0, 1);
 849                 }
 850             }
 851         }
 852     }
 853 
 854     /**
     * Returns a value for an interval definition, which can be used for re-materialization.
 856      *
 857      * @param op An instruction which defines a value
 858      * @param operand The destination operand of the instruction
 859      * @param interval The interval for this defined value.
 860      * @return Returns the value which is moved to the instruction and which can be reused at all
 861      *         reload-locations in case the interval of this instruction is spilled. Currently this
 862      *         can only be a {@link JavaConstant}.
 863      */
 864     protected Constant getMaterializedValue(LIRInstruction op, Value operand, Interval interval) {
 865         if (LoadConstantOp.isLoadConstantOp(op)) {
 866             LoadConstantOp move = LoadConstantOp.asLoadConstantOp(op);
 867 
 868             if (!allocator.neverSpillConstants()) {
 869                 /*
                 * Check if the interval has any uses which would accept a stack location (priority
 871                  * == ShouldHaveRegister). Rematerialization of such intervals can result in a
 872                  * degradation, because rematerialization always inserts a constant load, even if
 873                  * the value is not needed in a register.
 874                  */
 875                 Interval.UsePosList usePosList = interval.usePosList();
 876                 int numUsePos = usePosList.size();
 877                 for (int useIdx = 0; useIdx < numUsePos; useIdx++) {
 878                     Interval.RegisterPriority priority = usePosList.registerPriority(useIdx);
 879                     if (priority == Interval.RegisterPriority.ShouldHaveRegister) {
 880                         return null;
 881                     }
 882                 }
 883             }
 884             return move.getConstant();
 885         }
 886         return null;
 887     }
 888 }