src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.lir/src/org/graalvm/compiler/lir/alloc/lsra/LinearScanWalker.java
*** old/src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.lir/src/org/graalvm/compiler/lir/alloc/lsra/LinearScanWalker.java	Mon Mar 20 17:39:50 2017
--- new/src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.lir/src/org/graalvm/compiler/lir/alloc/lsra/LinearScanWalker.java	Mon Mar 20 17:39:50 2017

*** 20,37 **** --- 20,38 ----
   * or visit www.oracle.com if you need additional information or have any
   * questions.
   */
  package org.graalvm.compiler.lir.alloc.lsra;

- import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
- import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;
  import static jdk.vm.ci.code.CodeUtil.isOdd;
  import static jdk.vm.ci.code.ValueUtil.asRegister;
  import static jdk.vm.ci.code.ValueUtil.isRegister;
+ import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
+ import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;

  import java.util.ArrayList;
  import java.util.Arrays;
+ import java.util.Collections;
  import java.util.List;

  import org.graalvm.compiler.core.common.alloc.RegisterAllocationConfig.AllocatableRegisters;
  import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
  import org.graalvm.compiler.core.common.util.Util;
*** 70,80 **** --- 71,81 ----
       * Only 10% of the lists in {@link #spillIntervals} are actually used. But when they are used,
       * they can grow quite long. The maximum length observed was 45 (all numbers taken from a
       * bootstrap run of Graal). Therefore, we initialize {@link #spillIntervals} with this marker
       * value, and allocate a "real" list only on demand in {@link #setUsePos}.
       */
!     private static final List<Interval> EMPTY_LIST = new ArrayList<>(0);
!     private static final List<Interval> EMPTY_LIST = Collections.emptyList();

      // accessors mapped to same functions in class LinearScan
      int blockCount() {
          return allocator.blockCount();
      }
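Replacing new ArrayList<>(0) with Collections.emptyList() swaps a private, still-mutable placeholder for the JDK's shared immutable empty list: no per-field allocation, and any accidental write to the marker fails immediately with UnsupportedOperationException. The following is a minimal sketch of the marker-plus-lazy-allocation idiom that the surrounding comment describes; Slot and addUse are invented names, not Graal's API.

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    // Hypothetical sketch of the "marker value, allocate on demand" idiom.
    final class Slot {
        // Shared immutable placeholder: zero allocation per slot, and any
        // accidental add() on it throws UnsupportedOperationException.
        private static final List<Integer> EMPTY = Collections.emptyList();

        private List<Integer> uses = EMPTY;

        void addUse(int pos) {
            if (uses == EMPTY) {
                // allocate a "real" list only when this slot is actually used
                uses = new ArrayList<>(2);
            }
            uses.add(pos);
        }

        List<Integer> uses() {
            return uses;
        }
    }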
*** 165,193 **** --- 166,194 ----
          }
      }

      void freeExcludeActiveFixed() {
          Interval interval = activeLists.get(RegisterBinding.Fixed);
!         while (interval != Interval.EndMarker) {
!         while (!interval.isEndMarker()) {
              assert isRegister(interval.location()) : "active interval must have a register assigned";
              excludeFromUse(interval);
              interval = interval.next;
          }
      }

      void freeExcludeActiveAny() {
          Interval interval = activeLists.get(RegisterBinding.Any);
!         while (interval != Interval.EndMarker) {
!         while (!interval.isEndMarker()) {
              assert isRegister(interval.location()) : "active interval must have a register assigned";
              excludeFromUse(interval);
              interval = interval.next;
          }
      }

      void freeCollectInactiveFixed(Interval current) {
          Interval interval = inactiveLists.get(RegisterBinding.Fixed);
!         while (interval != Interval.EndMarker) {
!         while (!interval.isEndMarker()) {
              if (current.to() <= interval.currentFrom()) {
                  assert interval.currentIntersectsAt(current) == -1 : "must not intersect";
                  setUsePos(interval, interval.currentFrom(), true);
              } else {
                  setUsePos(interval, interval.currentIntersectsAt(current), true);
*** 196,241 **** --- 197,242 ----
          }
      }

      void freeCollectInactiveAny(Interval current) {
          Interval interval = inactiveLists.get(RegisterBinding.Any);
!         while (interval != Interval.EndMarker) {
!         while (!interval.isEndMarker()) {
              setUsePos(interval, interval.currentIntersectsAt(current), true);
              interval = interval.next;
          }
      }

      void freeCollectUnhandled(RegisterBinding kind, Interval current) {
          Interval interval = unhandledLists.get(kind);
!         while (interval != Interval.EndMarker) {
!         while (!interval.isEndMarker()) {
              setUsePos(interval, interval.intersectsAt(current), true);
              if (kind == RegisterBinding.Fixed && current.to() <= interval.from()) {
                  setUsePos(interval, interval.from(), true);
              }
              interval = interval.next;
          }
      }

      void spillExcludeActiveFixed() {
          Interval interval = activeLists.get(RegisterBinding.Fixed);
!         while (interval != Interval.EndMarker) {
!         while (!interval.isEndMarker()) {
              excludeFromUse(interval);
              interval = interval.next;
          }
      }

      void spillBlockUnhandledFixed(Interval current) {
          Interval interval = unhandledLists.get(RegisterBinding.Fixed);
!         while (interval != Interval.EndMarker) {
!         while (!interval.isEndMarker()) {
              setBlockPos(interval, interval.intersectsAt(current));
              interval = interval.next;
          }
      }

      void spillBlockInactiveFixed(Interval current) {
          Interval interval = inactiveLists.get(RegisterBinding.Fixed);
!         while (interval != Interval.EndMarker) {
!         while (!interval.isEndMarker()) {
              if (current.to() > interval.currentFrom()) {
                  setBlockPos(interval, interval.currentIntersectsAt(current));
              } else {
                  assert interval.currentIntersectsAt(current) == -1 : "invalid optimization: intervals intersect";
              }
*** 244,262 **** --- 245,263 ----
          }
      }

      void spillCollectActiveAny(RegisterPriority registerPriority) {
          Interval interval = activeLists.get(RegisterBinding.Any);
!         while (interval != Interval.EndMarker) {
!         while (!interval.isEndMarker()) {
              setUsePos(interval, Math.min(interval.nextUsage(registerPriority, currentPosition), interval.to()), false);
              interval = interval.next;
          }
      }

      void spillCollectInactiveAny(Interval current) {
          Interval interval = inactiveLists.get(RegisterBinding.Any);
!         while (interval != Interval.EndMarker) {
!         while (!interval.isEndMarker()) {
              if (interval.currentIntersects(current)) {
                  setUsePos(interval, Math.min(interval.nextUsage(RegisterPriority.LiveAtLoopEnd, currentPosition), interval.to()), false);
              }
              interval = interval.next;
          }
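The recurring edit in the hunks above replaces an identity comparison against the static sentinel Interval.EndMarker with a query on the interval itself, interval.isEndMarker(), so each walk no longer depends on a single shared static end-marker instance. The sketch below shows a sentinel-terminated intrusive-list walk in that style; Node and Walker are illustrative stand-ins that only loosely mirror Graal's Interval lists.

    // Illustrative sentinel-terminated intrusive list; not the actual Graal classes.
    class Node {
        final boolean endMarker;
        Node next;

        Node(boolean endMarker) {
            this.endMarker = endMarker;
        }

        boolean isEndMarker() {
            return endMarker;
        }
    }

    class Walker {
        static int count(Node head) {
            int n = 0;
            // Asking the node itself terminates the walk without referencing a
            // globally shared static end-marker instance.
            for (Node cur = head; !cur.isEndMarker(); cur = cur.next) {
                n++;
            }
            return n;
        }
    }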
*** 273,283 **** --- 274,284 ----
          // calculate index of instruction inside instruction list of current block
          // the minimal index (for a block with no spill moves) can be calculated because the
          // numbering of instructions is known.
          // When the block already contains spill moves, the index must be increased until the
          // correct index is reached.
!         List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(opBlock);
!         ArrayList<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(opBlock);
          int index = (opId - instructions.get(0).id()) >> 1;
          assert instructions.get(index).id() <= opId : "error in calculation";

          while (instructions.get(index).id() != opId) {
              index++;
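The index arithmetic in this hunk relies on the LIR instructions of a block being numbered with consecutive even ids, so (opId - firstId) >> 1 yields the minimal list index; spill moves inserted afterwards do not get their own ids, which is why the index may still have to be advanced. The following is a small self-contained sketch of that calculation under those assumptions; the id values and the -1 placeholder for inserted spill moves are made up.

    // Sketch of the id-to-index arithmetic, assuming instructions in a block are
    // numbered with consecutive even ids and inserted spill moves have no id (-1 here).
    final class OpIndexDemo {
        public static void main(String[] args) {
            int[] ids = {40, 42, -1, 44, 46, 48};    // -1 marks an inserted spill move
            int opId = 46;
            int index = (opId - ids[0]) >> 1;        // minimal index: (46 - 40) / 2 = 3
            while (ids[index] != opId) {
                index++;                             // skip over inserted spill moves
            }
            System.out.println(index);               // prints 4
        }
    }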
*** 600,610 **** --- 601,611 ----
                  if (defLoopDepth < spillLoopDepth) {
                      /*
                       * The loop depth of the spilling position is higher then the loop depth at the
                       * definition of the interval. Move write to memory out of loop.
                       */
!                     if (LinearScan.Options.LIROptLSRAOptimizeSpillPosition.getValue()) {
!                     if (LinearScan.Options.LIROptLSRAOptimizeSpillPosition.getValue(allocator.getOptions())) {
                          // find best spill position in dominator the tree
                          interval.setSpillState(SpillState.SpillInDominator);
                      } else {
                          // store at definition of the interval
                          interval.setSpillState(SpillState.StoreAtDefinition);
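The change in this hunk is that the option is now read as getValue(allocator.getOptions()) instead of the parameterless getValue(), i.e. against an explicitly supplied option context rather than global state. The sketch below illustrates that pattern with invented stand-ins (OptionContext, Flag); it is not Graal's option API, only the shape of the change.

    import java.util.HashMap;
    import java.util.Map;

    // Invented stand-ins for "read an option against an explicit context".
    final class OptionContext {
        private final Map<String, Boolean> values = new HashMap<>();

        OptionContext set(String name, boolean value) {
            values.put(name, value);
            return this;
        }

        boolean get(String name, boolean defaultValue) {
            return values.getOrDefault(name, defaultValue);
        }
    }

    final class Flag {
        private final String name;
        private final boolean defaultValue;

        Flag(String name, boolean defaultValue) {
            this.name = name;
            this.defaultValue = defaultValue;
        }

        // The value is always read against a caller-supplied context, mirroring
        // the move from getValue() to getValue(allocator.getOptions()).
        boolean getValue(OptionContext options) {
            return options.get(name, defaultValue);
        }
    }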
*** 618,634 **** --- 619,640 ----
                  }
                  break;
              }
              case OneSpillStore: {
!                 if (LinearScan.Options.LIROptLSRAOptimizeSpillPosition.getValue()) {
!                 int defLoopDepth = allocator.blockForId(interval.spillDefinitionPos()).getLoopDepth();
+                 int spillLoopDepth = allocator.blockForId(spillPos).getLoopDepth();
+
+                 if (defLoopDepth <= spillLoopDepth) {
+                     if (LinearScan.Options.LIROptLSRAOptimizeSpillPosition.getValue(allocator.getOptions())) {
                          // the interval is spilled more then once
                          interval.setSpillState(SpillState.SpillInDominator);
                      } else {
                          // It is better to store it to memory at the definition.
                          interval.setSpillState(SpillState.StoreAtDefinition);
                      }
+                 }
                  break;
              }
              case SpillInDominator:
              case StoreAtDefinition:
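Semantically, the new code escalates the spill state only when the definition is not in a deeper loop than the spill position (defLoopDepth <= spillLoopDepth); otherwise the interval stays in OneSpillStore. A hedged sketch of that decision as a standalone function follows; SpillChoice and SpillPolicy are invented names and the mapping to Graal's SpillState values is only approximate.

    // Decision logic mirroring the guarded escalation added in this hunk.
    enum SpillChoice { SPILL_IN_DOMINATOR, STORE_AT_DEFINITION, KEEP_ONE_SPILL_STORE }

    final class SpillPolicy {
        static SpillChoice choose(int defLoopDepth, int spillLoopDepth, boolean optimizeSpillPosition) {
            if (defLoopDepth <= spillLoopDepth) {
                // hoist the store: to a dominator block if the optimization is on,
                // otherwise to the definition of the interval
                return optimizeSpillPosition ? SpillChoice.SPILL_IN_DOMINATOR
                                             : SpillChoice.STORE_AT_DEFINITION;
            }
            // definition sits in a deeper loop: leave the single spill store alone
            return SpillChoice.KEEP_ONE_SPILL_STORE;
        }
    }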
*** 697,707 **** --- 703,713 ----
          freeExcludeActiveFixed();
          freeExcludeActiveAny();
          freeCollectInactiveFixed(interval);
          freeCollectInactiveAny(interval);
          // freeCollectUnhandled(fixedKind, cur);
!         assert unhandledLists.get(RegisterBinding.Fixed) == Interval.EndMarker : "must not have unhandled fixed intervals because all fixed intervals have a use at position 0";
!         assert unhandledLists.get(RegisterBinding.Fixed).isEndMarker() : "must not have unhandled fixed intervals because all fixed intervals have a use at position 0";

          // usePos contains the start of the next interval that has this register assigned
          // (either as a fixed register or a normal allocated register in the past)
          // only intervals overlapping with cur are processed, non-overlapping invervals can be
          // ignored safely
*** 809,819 **** --- 815,825 ----
          for (RegisterPriority registerPriority = RegisterPriority.LiveAtLoopEnd; true; registerPriority = RegisterPriority.MustHaveRegister) {
              // collect current usage of registers
              initUseLists(false);
              spillExcludeActiveFixed();
              // spillBlockUnhandledFixed(cur);
!             assert unhandledLists.get(RegisterBinding.Fixed) == Interval.EndMarker : "must not have unhandled fixed intervals because all fixed intervals have a use at position 0";
!             assert unhandledLists.get(RegisterBinding.Fixed).isEndMarker() : "must not have unhandled fixed intervals because all fixed intervals have a use at position 0";
              spillBlockInactiveFixed(interval);
              spillCollectActiveAny(registerPriority);
              spillCollectInactiveAny(interval);
              if (Debug.isLogEnabled()) {
                  printRegisterState();
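The assertion here gets the same isEndMarker() treatment as the walker loops above, and the enclosing for statement retries register selection with an escalating priority: the first pass treats uses at loop ends as spillable (LiveAtLoopEnd), later passes insist on a register (MustHaveRegister). Below is a simplified two-attempt version of that escalation, assuming a hypothetical tryAllocate step; the real method's control flow, which also splits and spills competing intervals, is more involved.

    // Simplified priority escalation; Priority and tryAllocate are stand-ins.
    enum Priority { LIVE_AT_LOOP_END, MUST_HAVE_REGISTER }

    final class EscalatingAllocator {
        boolean allocate() {
            for (Priority p = Priority.LIVE_AT_LOOP_END; ; p = Priority.MUST_HAVE_REGISTER) {
                if (tryAllocate(p)) {
                    return true;           // found a register at this priority
                }
                if (p == Priority.MUST_HAVE_REGISTER) {
                    return false;          // strictest attempt already failed
                }
            }
        }

        private boolean tryAllocate(Priority p) {
            // placeholder for collecting use positions and picking a register
            return p == Priority.MUST_HAVE_REGISTER;
        }
    }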
