src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.lir/src/org/graalvm/compiler/lir/alloc/lsra/LinearScanWalker.java

Old version of the file (the new version follows below):




   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 package org.graalvm.compiler.lir.alloc.lsra;
  24 
  25 import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
  26 import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;
  27 import static jdk.vm.ci.code.CodeUtil.isOdd;
  28 import static jdk.vm.ci.code.ValueUtil.asRegister;
  29 import static jdk.vm.ci.code.ValueUtil.isRegister;


  30 
  31 import java.util.ArrayList;
  32 import java.util.Arrays;

  33 import java.util.List;
  34 
  35 import org.graalvm.compiler.core.common.alloc.RegisterAllocationConfig.AllocatableRegisters;
  36 import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
  37 import org.graalvm.compiler.core.common.util.Util;
  38 import org.graalvm.compiler.debug.Debug;
  39 import org.graalvm.compiler.debug.GraalError;
  40 import org.graalvm.compiler.debug.Indent;
  41 import org.graalvm.compiler.lir.LIRInstruction;
  42 import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
  43 import org.graalvm.compiler.lir.alloc.OutOfRegistersException;
  44 import org.graalvm.compiler.lir.alloc.lsra.Interval.RegisterBinding;
  45 import org.graalvm.compiler.lir.alloc.lsra.Interval.RegisterPriority;
  46 import org.graalvm.compiler.lir.alloc.lsra.Interval.SpillState;
  47 import org.graalvm.compiler.lir.alloc.lsra.Interval.State;
  48 
  49 import jdk.vm.ci.code.Register;
  50 import jdk.vm.ci.meta.Value;
  51 
  52 /**


  55 
  56     protected Register[] availableRegs;
  57 
  58     protected final int[] usePos;
  59     protected final int[] blockPos;
  60 
  61     protected List<Interval>[] spillIntervals;
  62 
  63     private MoveResolver moveResolver; // for ordering spill moves
  64 
  65     private int minReg;
  66 
  67     private int maxReg;
  68 
  69     /**
  70      * Only 10% of the lists in {@link #spillIntervals} are actually used. But when they are used,
  71      * they can grow quite long. The maximum length observed was 45 (all numbers taken from a
  72      * bootstrap run of Graal). Therefore, we initialize {@link #spillIntervals} with this marker
  73      * value, and allocate a "real" list only on demand in {@link #setUsePos}.
  74      */
  75     private static final List<Interval> EMPTY_LIST = new ArrayList<>(0);
  76 
  77     // accessors mapped to same functions in class LinearScan
  78     int blockCount() {
  79         return allocator.blockCount();
  80     }
  81 
  82     AbstractBlockBase<?> blockAt(int idx) {
  83         return allocator.blockAt(idx);
  84     }
  85 
  86     AbstractBlockBase<?> blockOfOpWithId(int opId) {
  87         return allocator.blockForId(opId);
  88     }
  89 
  90     LinearScanWalker(LinearScan allocator, Interval unhandledFixedFirst, Interval unhandledAnyFirst) {
  91         super(allocator, unhandledFixedFirst, unhandledAnyFirst);
  92 
  93         moveResolver = allocator.createMoveResolver();
  94         spillIntervals = Util.uncheckedCast(new List<?>[allocator.getRegisters().size()]);
  95         for (int i = 0; i < allocator.getRegisters().size(); i++) {


 150             }
 151         }
 152     }
 153 
 154     void setBlockPos(Interval i, int blockPos) {
 155         if (blockPos != -1) {
 156             int reg = asRegister(i.location()).number;
 157             if (isRegisterInRange(reg)) {
 158                 if (this.blockPos[reg] > blockPos) {
 159                     this.blockPos[reg] = blockPos;
 160                 }
 161                 if (usePos[reg] > blockPos) {
 162                     usePos[reg] = blockPos;
 163                 }
 164             }
 165         }
 166     }
 167 
 168     void freeExcludeActiveFixed() {
 169         Interval interval = activeLists.get(RegisterBinding.Fixed);
 170         while (interval != Interval.EndMarker) {
 171             assert isRegister(interval.location()) : "active interval must have a register assigned";
 172             excludeFromUse(interval);
 173             interval = interval.next;
 174         }
 175     }
 176 
 177     void freeExcludeActiveAny() {
 178         Interval interval = activeLists.get(RegisterBinding.Any);
 179         while (interval != Interval.EndMarker) {
 180             assert isRegister(interval.location()) : "active interval must have a register assigned";
 181             excludeFromUse(interval);
 182             interval = interval.next;
 183         }
 184     }
 185 
 186     void freeCollectInactiveFixed(Interval current) {
 187         Interval interval = inactiveLists.get(RegisterBinding.Fixed);
 188         while (interval != Interval.EndMarker) {
 189             if (current.to() <= interval.currentFrom()) {
 190                 assert interval.currentIntersectsAt(current) == -1 : "must not intersect";
 191                 setUsePos(interval, interval.currentFrom(), true);
 192             } else {
 193                 setUsePos(interval, interval.currentIntersectsAt(current), true);
 194             }
 195             interval = interval.next;
 196         }
 197     }
 198 
 199     void freeCollectInactiveAny(Interval current) {
 200         Interval interval = inactiveLists.get(RegisterBinding.Any);
 201         while (interval != Interval.EndMarker) {
 202             setUsePos(interval, interval.currentIntersectsAt(current), true);
 203             interval = interval.next;
 204         }
 205     }
 206 
 207     void freeCollectUnhandled(RegisterBinding kind, Interval current) {
 208         Interval interval = unhandledLists.get(kind);
 209         while (interval != Interval.EndMarker) {
 210             setUsePos(interval, interval.intersectsAt(current), true);
 211             if (kind == RegisterBinding.Fixed && current.to() <= interval.from()) {
 212                 setUsePos(interval, interval.from(), true);
 213             }
 214             interval = interval.next;
 215         }
 216     }
 217 
 218     void spillExcludeActiveFixed() {
 219         Interval interval = activeLists.get(RegisterBinding.Fixed);
 220         while (interval != Interval.EndMarker) {
 221             excludeFromUse(interval);
 222             interval = interval.next;
 223         }
 224     }
 225 
 226     void spillBlockUnhandledFixed(Interval current) {
 227         Interval interval = unhandledLists.get(RegisterBinding.Fixed);
 228         while (interval != Interval.EndMarker) {
 229             setBlockPos(interval, interval.intersectsAt(current));
 230             interval = interval.next;
 231         }
 232     }
 233 
 234     void spillBlockInactiveFixed(Interval current) {
 235         Interval interval = inactiveLists.get(RegisterBinding.Fixed);
 236         while (interval != Interval.EndMarker) {
 237             if (current.to() > interval.currentFrom()) {
 238                 setBlockPos(interval, interval.currentIntersectsAt(current));
 239             } else {
 240                 assert interval.currentIntersectsAt(current) == -1 : "invalid optimization: intervals intersect";
 241             }
 242 
 243             interval = interval.next;
 244         }
 245     }
 246 
 247     void spillCollectActiveAny(RegisterPriority registerPriority) {
 248         Interval interval = activeLists.get(RegisterBinding.Any);
 249         while (interval != Interval.EndMarker) {
 250             setUsePos(interval, Math.min(interval.nextUsage(registerPriority, currentPosition), interval.to()), false);
 251             interval = interval.next;
 252         }
 253     }
 254 
 255     void spillCollectInactiveAny(Interval current) {
 256         Interval interval = inactiveLists.get(RegisterBinding.Any);
 257         while (interval != Interval.EndMarker) {
 258             if (interval.currentIntersects(current)) {
 259                 setUsePos(interval, Math.min(interval.nextUsage(RegisterPriority.LiveAtLoopEnd, currentPosition), interval.to()), false);
 260             }
 261             interval = interval.next;
 262         }
 263     }
 264 
 265     void insertMove(int operandId, Interval srcIt, Interval dstIt) {
 266         // output all moves here. When source and target are equal, the move is
 267         // optimized away later in assignRegNums
 268 
 269         int opId = (operandId + 1) & ~1;
 270         AbstractBlockBase<?> opBlock = allocator.blockForId(opId);
 271         assert opId > 0 && allocator.blockForId(opId - 2) == opBlock : "cannot insert move at block boundary";
 272 
 273         // calculate index of instruction inside instruction list of current block
 274         // the minimal index (for a block with no spill moves) can be calculated because the
 275         // numbering of instructions is known.
 276         // When the block already contains spill moves, the index must be increased until the
 277         // correct index is reached.
 278         List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(opBlock);
 279         int index = (opId - instructions.get(0).id()) >> 1;
 280         assert instructions.get(index).id() <= opId : "error in calculation";
 281 
 282         while (instructions.get(index).id() != opId) {
 283             index++;
 284             assert 0 <= index && index < instructions.size() : "index out of bounds";
 285         }
 286         assert 1 <= index && index < instructions.size() : "index out of bounds";
 287         assert instructions.get(index).id() == opId : "error in calculation";
 288 
 289         // insert new instruction before instruction at position index
 290         moveResolver.moveInsertPosition(instructions, index);
 291         moveResolver.addMapping(srcIt, dstIt);
 292     }
 293 
 294     int findOptimalSplitPos(AbstractBlockBase<?> minBlock, AbstractBlockBase<?> maxBlock, int maxSplitPos) {
 295         int fromBlockNr = minBlock.getLinearScanNumber();
 296         int toBlockNr = maxBlock.getLinearScanNumber();
 297 
 298         assert 0 <= fromBlockNr && fromBlockNr < blockCount() : "out of range";


 585                         Debug.log("left interval: %s", interval.logString(allocator));
 586                         Debug.log("spilled interval   : %s", spilledPart.logString(allocator));
 587                     }
 588                 }
 589             }
 590         }
 591     }
 592 
 593     // called during register allocation
 594     private void changeSpillState(Interval interval, int spillPos) {
 595         switch (interval.spillState()) {
 596             case NoSpillStore: {
 597                 int defLoopDepth = allocator.blockForId(interval.spillDefinitionPos()).getLoopDepth();
 598                 int spillLoopDepth = allocator.blockForId(spillPos).getLoopDepth();
 599 
 600                 if (defLoopDepth < spillLoopDepth) {
 601                     /*
  602                      * The loop depth of the spilling position is higher than the loop depth at the
  603                      * definition of the interval. Move the write to memory out of the loop.
 604                      */
 605                     if (LinearScan.Options.LIROptLSRAOptimizeSpillPosition.getValue()) {
  606                         // find the best spill position in the dominator tree
 607                         interval.setSpillState(SpillState.SpillInDominator);
 608                     } else {
 609                         // store at definition of the interval
 610                         interval.setSpillState(SpillState.StoreAtDefinition);
 611                     }
 612                 } else {
 613                     /*
 614                      * The interval is currently spilled only once, so for now there is no reason to
 615                      * store the interval at the definition.
 616                      */
 617                     interval.setSpillState(SpillState.OneSpillStore);
 618                 }
 619                 break;
 620             }
 621 
 622             case OneSpillStore: {
 623                 if (LinearScan.Options.LIROptLSRAOptimizeSpillPosition.getValue()) {




  624                     // the interval is spilled more than once
 625                     interval.setSpillState(SpillState.SpillInDominator);
 626                 } else {
 627                     // It is better to store it to memory at the definition.
 628                     interval.setSpillState(SpillState.StoreAtDefinition);
 629                 }

 630                 break;
 631             }
 632 
 633             case SpillInDominator:
 634             case StoreAtDefinition:
 635             case StartInMemory:
 636             case NoOptimization:
 637             case NoDefinitionFound:
 638                 // nothing to do
 639                 break;
 640 
 641             default:
 642                 throw GraalError.shouldNotReachHere("other states not allowed at this time");
 643         }
 644     }
 645 
 646     /**
 647      * This is called for every interval that is assigned to a stack slot.
 648      */
 649     protected void handleSpillSlot(Interval interval) {


 682             int minSplitPos = currentPos + 1;
 683             int maxSplitPos = Math.min(interval.nextUsage(RegisterPriority.MustHaveRegister, minSplitPos), interval.to());
 684 
 685             splitBeforeUsage(interval, minSplitPos, maxSplitPos);
 686 
 687             assert interval.nextUsage(RegisterPriority.MustHaveRegister, currentPos) == Integer.MAX_VALUE : "the remaining part is spilled to stack and therefore has no register";
 688             splitForSpilling(interval);
 689         }
 690     }
 691 
 692     @SuppressWarnings("try")
 693     boolean allocFreeRegister(Interval interval) {
 694         try (Indent indent = Debug.logAndIndent("trying to find free register for %s", interval)) {
 695 
 696             initUseLists(true);
 697             freeExcludeActiveFixed();
 698             freeExcludeActiveAny();
 699             freeCollectInactiveFixed(interval);
 700             freeCollectInactiveAny(interval);
 701             // freeCollectUnhandled(fixedKind, cur);
 702             assert unhandledLists.get(RegisterBinding.Fixed) == Interval.EndMarker : "must not have unhandled fixed intervals because all fixed intervals have a use at position 0";
 703 
 704             // usePos contains the start of the next interval that has this register assigned
  705             // (either as a fixed register or a normal allocated register in the past).
  706             // Only intervals overlapping with cur are processed; non-overlapping intervals can be
 707             // ignored safely
 708             if (Debug.isLogEnabled()) {
 709                 // Enable this logging to see all register states
 710                 try (Indent indent2 = Debug.logAndIndent("state of registers:")) {
 711                     for (Register register : availableRegs) {
 712                         int i = register.number;
 713                         Debug.log("reg %d: usePos: %d", register.number, usePos[i]);
 714                     }
 715                 }
 716             }
 717 
 718             Register hint = null;
 719             Interval locationHint = interval.locationHint(true);
 720             if (locationHint != null && locationHint.location() != null && isRegister(locationHint.location())) {
 721                 hint = asRegister(locationHint.location());
 722                 if (Debug.isLogEnabled()) {


 794             // the register must be free at least until this position
 795             int firstUsage = interval.firstUsage(RegisterPriority.MustHaveRegister);
 796             int firstShouldHaveUsage = interval.firstUsage(RegisterPriority.ShouldHaveRegister);
 797             int regNeededUntil = Math.min(firstUsage, interval.from() + 1);
 798             int intervalTo = interval.to();
 799             assert regNeededUntil >= 0 && regNeededUntil < Integer.MAX_VALUE : "interval has no use";
 800 
 801             Register reg;
 802             Register ignore;
 803             /*
 804              * In the common case we don't spill registers that have _any_ use position that is
 805              * closer than the next use of the current interval, but if we can't spill the current
  806              * interval we weaken this strategy and also allow spilling of intervals that have only
  807              * non-mandatory requirements (no MustHaveRegister use position).
 808              */
 809             for (RegisterPriority registerPriority = RegisterPriority.LiveAtLoopEnd; true; registerPriority = RegisterPriority.MustHaveRegister) {
 810                 // collect current usage of registers
 811                 initUseLists(false);
 812                 spillExcludeActiveFixed();
 813                 // spillBlockUnhandledFixed(cur);
 814                 assert unhandledLists.get(RegisterBinding.Fixed) == Interval.EndMarker : "must not have unhandled fixed intervals because all fixed intervals have a use at position 0";
 815                 spillBlockInactiveFixed(interval);
 816                 spillCollectActiveAny(registerPriority);
 817                 spillCollectInactiveAny(interval);
 818                 if (Debug.isLogEnabled()) {
 819                     printRegisterState();
 820                 }
 821 
 822                 reg = null;
 823                 ignore = interval.location() != null && isRegister(interval.location()) ? asRegister(interval.location()) : null;
 824 
 825                 for (Register availableReg : availableRegs) {
 826                     int number = availableReg.number;
 827                     if (availableReg.equals(ignore)) {
 828                         // this register must be ignored
 829                     } else if (usePos[number] > regNeededUntil) {
 830                         if (reg == null || (usePos[number] > usePos[reg.number])) {
 831                             reg = availableReg;
 832                         }
 833                     }
 834                 }




   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 package org.graalvm.compiler.lir.alloc.lsra;
  24 


  25 import static jdk.vm.ci.code.CodeUtil.isOdd;
  26 import static jdk.vm.ci.code.ValueUtil.asRegister;
  27 import static jdk.vm.ci.code.ValueUtil.isRegister;
  28 import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
  29 import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;
  30 
  31 import java.util.ArrayList;
  32 import java.util.Arrays;
  33 import java.util.Collections;
  34 import java.util.List;
  35 
  36 import org.graalvm.compiler.core.common.alloc.RegisterAllocationConfig.AllocatableRegisters;
  37 import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
  38 import org.graalvm.compiler.core.common.util.Util;
  39 import org.graalvm.compiler.debug.Debug;
  40 import org.graalvm.compiler.debug.GraalError;
  41 import org.graalvm.compiler.debug.Indent;
  42 import org.graalvm.compiler.lir.LIRInstruction;
  43 import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
  44 import org.graalvm.compiler.lir.alloc.OutOfRegistersException;
  45 import org.graalvm.compiler.lir.alloc.lsra.Interval.RegisterBinding;
  46 import org.graalvm.compiler.lir.alloc.lsra.Interval.RegisterPriority;
  47 import org.graalvm.compiler.lir.alloc.lsra.Interval.SpillState;
  48 import org.graalvm.compiler.lir.alloc.lsra.Interval.State;
  49 
  50 import jdk.vm.ci.code.Register;
  51 import jdk.vm.ci.meta.Value;
  52 
  53 /**


  56 
  57     protected Register[] availableRegs;
  58 
  59     protected final int[] usePos;
  60     protected final int[] blockPos;
  61 
  62     protected List<Interval>[] spillIntervals;
  63 
  64     private MoveResolver moveResolver; // for ordering spill moves
  65 
  66     private int minReg;
  67 
  68     private int maxReg;
  69 
  70     /**
  71      * Only 10% of the lists in {@link #spillIntervals} are actually used. But when they are used,
  72      * they can grow quite long. The maximum length observed was 45 (all numbers taken from a
  73      * bootstrap run of Graal). Therefore, we initialize {@link #spillIntervals} with this marker
  74      * value, and allocate a "real" list only on demand in {@link #setUsePos}.
  75      */
  76     private static final List<Interval> EMPTY_LIST = Collections.emptyList();
  77 
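
The field above replaces the old mutable sentinel (new ArrayList<>(0)) with Collections.emptyList(), so an accidental add() on the shared marker now fails fast instead of silently growing a list shared by every register slot. A minimal sketch of the allocate-on-demand pattern the javadoc describes (class and method names are illustrative, not the allocator's API):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    final class SpillListsSketch {
        // Shared immutable sentinel: calling add() on it throws immediately.
        private static final List<Integer> EMPTY = Collections.emptyList();

        private final List<Integer>[] perRegister;

        @SuppressWarnings("unchecked")
        SpillListsSketch(int registerCount) {
            perRegister = (List<Integer>[]) new List<?>[registerCount];
            Arrays.fill(perRegister, EMPTY);
        }

        // A real list is allocated only for registers that actually collect intervals.
        void record(int reg, int intervalId) {
            if (perRegister[reg] == EMPTY) {
                perRegister[reg] = new ArrayList<>(2);
            }
            perRegister[reg].add(intervalId);
        }
    }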
  78     // accessors mapped to same functions in class LinearScan
  79     int blockCount() {
  80         return allocator.blockCount();
  81     }
  82 
  83     AbstractBlockBase<?> blockAt(int idx) {
  84         return allocator.blockAt(idx);
  85     }
  86 
  87     AbstractBlockBase<?> blockOfOpWithId(int opId) {
  88         return allocator.blockForId(opId);
  89     }
  90 
  91     LinearScanWalker(LinearScan allocator, Interval unhandledFixedFirst, Interval unhandledAnyFirst) {
  92         super(allocator, unhandledFixedFirst, unhandledAnyFirst);
  93 
  94         moveResolver = allocator.createMoveResolver();
  95         spillIntervals = Util.uncheckedCast(new List<?>[allocator.getRegisters().size()]);
  96         for (int i = 0; i < allocator.getRegisters().size(); i++) {


 151             }
 152         }
 153     }
 154 
 155     void setBlockPos(Interval i, int blockPos) {
 156         if (blockPos != -1) {
 157             int reg = asRegister(i.location()).number;
 158             if (isRegisterInRange(reg)) {
 159                 if (this.blockPos[reg] > blockPos) {
 160                     this.blockPos[reg] = blockPos;
 161                 }
 162                 if (usePos[reg] > blockPos) {
 163                     usePos[reg] = blockPos;
 164                 }
 165             }
 166         }
 167     }
 168 
 169     void freeExcludeActiveFixed() {
 170         Interval interval = activeLists.get(RegisterBinding.Fixed);
 171         while (!interval.isEndMarker()) {
 172             assert isRegister(interval.location()) : "active interval must have a register assigned";
 173             excludeFromUse(interval);
 174             interval = interval.next;
 175         }
 176     }
 177 
 178     void freeExcludeActiveAny() {
 179         Interval interval = activeLists.get(RegisterBinding.Any);
 180         while (!interval.isEndMarker()) {
 181             assert isRegister(interval.location()) : "active interval must have a register assigned";
 182             excludeFromUse(interval);
 183             interval = interval.next;
 184         }
 185     }
 186 
 187     void freeCollectInactiveFixed(Interval current) {
 188         Interval interval = inactiveLists.get(RegisterBinding.Fixed);
 189         while (!interval.isEndMarker()) {
 190             if (current.to() <= interval.currentFrom()) {
 191                 assert interval.currentIntersectsAt(current) == -1 : "must not intersect";
 192                 setUsePos(interval, interval.currentFrom(), true);
 193             } else {
 194                 setUsePos(interval, interval.currentIntersectsAt(current), true);
 195             }
 196             interval = interval.next;
 197         }
 198     }
 199 
 200     void freeCollectInactiveAny(Interval current) {
 201         Interval interval = inactiveLists.get(RegisterBinding.Any);
 202         while (!interval.isEndMarker()) {
 203             setUsePos(interval, interval.currentIntersectsAt(current), true);
 204             interval = interval.next;
 205         }
 206     }
 207 
 208     void freeCollectUnhandled(RegisterBinding kind, Interval current) {
 209         Interval interval = unhandledLists.get(kind);
 210         while (!interval.isEndMarker()) {
 211             setUsePos(interval, interval.intersectsAt(current), true);
 212             if (kind == RegisterBinding.Fixed && current.to() <= interval.from()) {
 213                 setUsePos(interval, interval.from(), true);
 214             }
 215             interval = interval.next;
 216         }
 217     }
 218 
 219     void spillExcludeActiveFixed() {
 220         Interval interval = activeLists.get(RegisterBinding.Fixed);
 221         while (!interval.isEndMarker()) {
 222             excludeFromUse(interval);
 223             interval = interval.next;
 224         }
 225     }
 226 
 227     void spillBlockUnhandledFixed(Interval current) {
 228         Interval interval = unhandledLists.get(RegisterBinding.Fixed);
 229         while (!interval.isEndMarker()) {
 230             setBlockPos(interval, interval.intersectsAt(current));
 231             interval = interval.next;
 232         }
 233     }
 234 
 235     void spillBlockInactiveFixed(Interval current) {
 236         Interval interval = inactiveLists.get(RegisterBinding.Fixed);
 237         while (!interval.isEndMarker()) {
 238             if (current.to() > interval.currentFrom()) {
 239                 setBlockPos(interval, interval.currentIntersectsAt(current));
 240             } else {
 241                 assert interval.currentIntersectsAt(current) == -1 : "invalid optimization: intervals intersect";
 242             }
 243 
 244             interval = interval.next;
 245         }
 246     }
 247 
 248     void spillCollectActiveAny(RegisterPriority registerPriority) {
 249         Interval interval = activeLists.get(RegisterBinding.Any);
 250         while (!interval.isEndMarker()) {
 251             setUsePos(interval, Math.min(interval.nextUsage(registerPriority, currentPosition), interval.to()), false);
 252             interval = interval.next;
 253         }
 254     }
 255 
 256     void spillCollectInactiveAny(Interval current) {
 257         Interval interval = inactiveLists.get(RegisterBinding.Any);
 258         while (!interval.isEndMarker()) {
 259             if (interval.currentIntersects(current)) {
 260                 setUsePos(interval, Math.min(interval.nextUsage(RegisterPriority.LiveAtLoopEnd, currentPosition), interval.to()), false);
 261             }
 262             interval = interval.next;
 263         }
 264     }
 265 
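
All of the list walks above now terminate on interval.isEndMarker() instead of comparing against Interval.EndMarker directly. Assuming isEndMarker() simply encapsulates that identity test (its body is not part of this diff), the behaviour is unchanged; a self-contained sketch of the sentinel-terminated walk, using an illustrative class rather than the real Interval:

    final class EndMarkerSketch {
        // Illustrative stand-in for Interval; not the allocator's real class.
        static final EndMarkerSketch END = new EndMarkerSketch();

        EndMarkerSketch next;

        boolean isEndMarker() {
            // Encapsulates the identity test the old revision wrote inline
            // as "interval != Interval.EndMarker".
            return this == END;
        }

        // Walks a sentinel-terminated chain the same way the free*/spill* loops do.
        static int length(EndMarkerSketch head) {
            int n = 0;
            for (EndMarkerSketch i = head; !i.isEndMarker(); i = i.next) {
                n++;
            }
            return n;
        }
    }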
 266     void insertMove(int operandId, Interval srcIt, Interval dstIt) {
 267         // output all moves here. When source and target are equal, the move is
 268         // optimized away later in assignRegNums
 269 
 270         int opId = (operandId + 1) & ~1;
 271         AbstractBlockBase<?> opBlock = allocator.blockForId(opId);
 272         assert opId > 0 && allocator.blockForId(opId - 2) == opBlock : "cannot insert move at block boundary";
 273 
 274         // calculate index of instruction inside instruction list of current block
 275         // the minimal index (for a block with no spill moves) can be calculated because the
 276         // numbering of instructions is known.
 277         // When the block already contains spill moves, the index must be increased until the
 278         // correct index is reached.
 279         ArrayList<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(opBlock);
 280         int index = (opId - instructions.get(0).id()) >> 1;
 281         assert instructions.get(index).id() <= opId : "error in calculation";
 282 
 283         while (instructions.get(index).id() != opId) {
 284             index++;
 285             assert 0 <= index && index < instructions.size() : "index out of bounds";
 286         }
 287         assert 1 <= index && index < instructions.size() : "index out of bounds";
 288         assert instructions.get(index).id() == opId : "error in calculation";
 289 
 290         // insert new instruction before instruction at position index
 291         moveResolver.moveInsertPosition(instructions, index);
 292         moveResolver.addMapping(srcIt, dstIt);
 293     }
 294 
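
The index computation in insertMove above relies on LIR instruction ids within a block being assigned in even steps of two, so (opId - firstId) >> 1 is the exact list index when no spill moves have been inserted yet, and only a lower bound once they have (hence the while loop). A small worked example of that arithmetic, with made-up ids:

    final class InsertIndexSketch {
        // Rounds an operand position up to the even op id used for moves, e.g. 37 -> 38.
        static int roundUpToEven(int operandId) {
            return (operandId + 1) & ~1;
        }

        // With no spill moves inserted yet, op ids grow by 2 per instruction,
        // so halving the distance from the block's first id gives the list index.
        static int minimalIndex(int opId, int firstOpIdInBlock) {
            return (opId - firstOpIdInBlock) >> 1;
        }

        public static void main(String[] args) {
            int firstId = 30;                     // id of the block's first instruction (illustrative)
            int opId = roundUpToEven(37);         // 38
            System.out.println(minimalIndex(opId, firstId));  // prints 4
        }
    }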
 295     int findOptimalSplitPos(AbstractBlockBase<?> minBlock, AbstractBlockBase<?> maxBlock, int maxSplitPos) {
 296         int fromBlockNr = minBlock.getLinearScanNumber();
 297         int toBlockNr = maxBlock.getLinearScanNumber();
 298 
 299         assert 0 <= fromBlockNr && fromBlockNr < blockCount() : "out of range";


 586                         Debug.log("left interval: %s", interval.logString(allocator));
 587                         Debug.log("spilled interval   : %s", spilledPart.logString(allocator));
 588                     }
 589                 }
 590             }
 591         }
 592     }
 593 
 594     // called during register allocation
 595     private void changeSpillState(Interval interval, int spillPos) {
 596         switch (interval.spillState()) {
 597             case NoSpillStore: {
 598                 int defLoopDepth = allocator.blockForId(interval.spillDefinitionPos()).getLoopDepth();
 599                 int spillLoopDepth = allocator.blockForId(spillPos).getLoopDepth();
 600 
 601                 if (defLoopDepth < spillLoopDepth) {
 602                     /*
  603                      * The loop depth of the spilling position is higher than the loop depth at the
  604                      * definition of the interval. Move the write to memory out of the loop.
 605                      */
 606                     if (LinearScan.Options.LIROptLSRAOptimizeSpillPosition.getValue(allocator.getOptions())) {
  607                         // find the best spill position in the dominator tree
 608                         interval.setSpillState(SpillState.SpillInDominator);
 609                     } else {
 610                         // store at definition of the interval
 611                         interval.setSpillState(SpillState.StoreAtDefinition);
 612                     }
 613                 } else {
 614                     /*
 615                      * The interval is currently spilled only once, so for now there is no reason to
 616                      * store the interval at the definition.
 617                      */
 618                     interval.setSpillState(SpillState.OneSpillStore);
 619                 }
 620                 break;
 621             }
 622 
 623             case OneSpillStore: {
 624                 int defLoopDepth = allocator.blockForId(interval.spillDefinitionPos()).getLoopDepth();
 625                 int spillLoopDepth = allocator.blockForId(spillPos).getLoopDepth();
 626 
 627                 if (defLoopDepth <= spillLoopDepth) {
 628                     if (LinearScan.Options.LIROptLSRAOptimizeSpillPosition.getValue(allocator.getOptions())) {
  629                         // the interval is spilled more than once
 630                         interval.setSpillState(SpillState.SpillInDominator);
 631                     } else {
 632                         // It is better to store it to memory at the definition.
 633                         interval.setSpillState(SpillState.StoreAtDefinition);
 634                     }
 635                 }
 636                 break;
 637             }
 638 
 639             case SpillInDominator:
 640             case StoreAtDefinition:
 641             case StartInMemory:
 642             case NoOptimization:
 643             case NoDefinitionFound:
 644                 // nothing to do
 645                 break;
 646 
 647             default:
 648                 throw GraalError.shouldNotReachHere("other states not allowed at this time");
 649         }
 650     }
 651 
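
Two things change in changeSpillState above: option reads now go through LIROptLSRAOptimizeSpillPosition.getValue(allocator.getOptions()) instead of a no-argument getValue(), and the OneSpillStore case only escalates when the second spill sits at least as deep in the loop nest as the definition. A hedged sketch of that escalation decision (enum and class names here are illustrative, not the allocator's types):

    // Illustrative spill-state escalation for an interval that is spilled a second time.
    enum SpillStateSketch {
        ONE_SPILL_STORE, SPILL_IN_DOMINATOR, STORE_AT_DEFINITION
    }

    final class SpillDecisionSketch {
        static SpillStateSketch onSecondSpill(int defLoopDepth, int spillLoopDepth, boolean optimizeSpillPosition) {
            if (defLoopDepth <= spillLoopDepth) {
                return optimizeSpillPosition
                                ? SpillStateSketch.SPILL_IN_DOMINATOR    // pick a better store block in the dominator tree later
                                : SpillStateSketch.STORE_AT_DEFINITION;  // store once, at the definition
            }
            // The second spill is in a shallower loop than the definition: keep the current state.
            return SpillStateSketch.ONE_SPILL_STORE;
        }
    }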
 652     /**
 653      * This is called for every interval that is assigned to a stack slot.
 654      */
 655     protected void handleSpillSlot(Interval interval) {


 688             int minSplitPos = currentPos + 1;
 689             int maxSplitPos = Math.min(interval.nextUsage(RegisterPriority.MustHaveRegister, minSplitPos), interval.to());
 690 
 691             splitBeforeUsage(interval, minSplitPos, maxSplitPos);
 692 
 693             assert interval.nextUsage(RegisterPriority.MustHaveRegister, currentPos) == Integer.MAX_VALUE : "the remaining part is spilled to stack and therefore has no register";
 694             splitForSpilling(interval);
 695         }
 696     }
 697 
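
The fragment above picks the window in which the interval is split before being spilled: strictly after the current position and no later than its next use that must sit in a register (or its end). A tiny illustration of that window arithmetic, with made-up positions:

    final class SplitWindowSketch {
        static int minSplitPos(int currentPos) {
            return currentPos + 1;                               // must split after the current position
        }

        static int maxSplitPos(int nextMustHaveRegisterUse, int intervalTo) {
            return Math.min(nextMustHaveRegisterUse, intervalTo);
        }

        public static void main(String[] args) {
            // At op 40, with the next register-requiring use at 52 and the interval ending at 60,
            // the split has to land somewhere in [41, 52].
            System.out.println(minSplitPos(40) + " .. " + maxSplitPos(52, 60));
        }
    }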
 698     @SuppressWarnings("try")
 699     boolean allocFreeRegister(Interval interval) {
 700         try (Indent indent = Debug.logAndIndent("trying to find free register for %s", interval)) {
 701 
 702             initUseLists(true);
 703             freeExcludeActiveFixed();
 704             freeExcludeActiveAny();
 705             freeCollectInactiveFixed(interval);
 706             freeCollectInactiveAny(interval);
 707             // freeCollectUnhandled(fixedKind, cur);
 708             assert unhandledLists.get(RegisterBinding.Fixed).isEndMarker() : "must not have unhandled fixed intervals because all fixed intervals have a use at position 0";
 709 
 710             // usePos contains the start of the next interval that has this register assigned
  711             // (either as a fixed register or a normal allocated register in the past).
  712             // Only intervals overlapping with cur are processed; non-overlapping intervals can be
 713             // ignored safely
 714             if (Debug.isLogEnabled()) {
 715                 // Enable this logging to see all register states
 716                 try (Indent indent2 = Debug.logAndIndent("state of registers:")) {
 717                     for (Register register : availableRegs) {
 718                         int i = register.number;
 719                         Debug.log("reg %d: usePos: %d", register.number, usePos[i]);
 720                     }
 721                 }
 722             }
 723 
 724             Register hint = null;
 725             Interval locationHint = interval.locationHint(true);
 726             if (locationHint != null && locationHint.location() != null && isRegister(locationHint.location())) {
 727                 hint = asRegister(locationHint.location());
 728                 if (Debug.isLogEnabled()) {


 800             // the register must be free at least until this position
 801             int firstUsage = interval.firstUsage(RegisterPriority.MustHaveRegister);
 802             int firstShouldHaveUsage = interval.firstUsage(RegisterPriority.ShouldHaveRegister);
 803             int regNeededUntil = Math.min(firstUsage, interval.from() + 1);
 804             int intervalTo = interval.to();
 805             assert regNeededUntil >= 0 && regNeededUntil < Integer.MAX_VALUE : "interval has no use";
 806 
 807             Register reg;
 808             Register ignore;
 809             /*
 810              * In the common case we don't spill registers that have _any_ use position that is
 811              * closer than the next use of the current interval, but if we can't spill the current
 812              * interval we weaken this strategy and also allow spilling of intervals that have a
 813              * non-mandatory requirements (no MustHaveRegister use position).
 814              */
 815             for (RegisterPriority registerPriority = RegisterPriority.LiveAtLoopEnd; true; registerPriority = RegisterPriority.MustHaveRegister) {
 816                 // collect current usage of registers
 817                 initUseLists(false);
 818                 spillExcludeActiveFixed();
 819                 // spillBlockUnhandledFixed(cur);
 820                 assert unhandledLists.get(RegisterBinding.Fixed).isEndMarker() : "must not have unhandled fixed intervals because all fixed intervals have a use at position 0";
 821                 spillBlockInactiveFixed(interval);
 822                 spillCollectActiveAny(registerPriority);
 823                 spillCollectInactiveAny(interval);
 824                 if (Debug.isLogEnabled()) {
 825                     printRegisterState();
 826                 }
 827 
 828                 reg = null;
 829                 ignore = interval.location() != null && isRegister(interval.location()) ? asRegister(interval.location()) : null;
 830 
 831                 for (Register availableReg : availableRegs) {
 832                     int number = availableReg.number;
 833                     if (availableReg.equals(ignore)) {
 834                         // this register must be ignored
 835                     } else if (usePos[number] > regNeededUntil) {
 836                         if (reg == null || (usePos[number] > usePos[reg.number])) {
 837                             reg = availableReg;
 838                         }
 839                     }
 840                 }
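
The loop just above considers registers whose recorded usePos lies beyond regNeededUntil and keeps the one with the largest usePos; since usePos was filled from the next uses of the intervals currently occupying each register, that register's occupants are the cheapest to spill. A hedged, self-contained sketch of that choice (register numbers and the candidates array are illustrative):

    final class FurthestUseSketch {
        // usePos[r] plays the same role as in the walker: the position at which
        // register r is next needed by some other interval.
        static int pickRegister(int[] usePos, int[] candidates, int regNeededUntil, int ignore) {
            int best = -1;
            for (int reg : candidates) {
                if (reg == ignore) {
                    continue;                                   // the interval's current location is skipped
                }
                if (usePos[reg] > regNeededUntil && (best == -1 || usePos[reg] > usePos[best])) {
                    best = reg;                                 // next needed furthest in the future so far
                }
            }
            return best;                                        // -1 means no register qualifies
        }

        public static void main(String[] args) {
            int[] usePos = {12, 40, 18, 33};
            // Need a register free past position 16, ignoring register 1: picks register 3.
            System.out.println(pickRegister(usePos, new int[] {0, 1, 2, 3}, 16, 1));
        }
    }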

