1 /* 2 * Copyright (c) 2009, 2017, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 */ 23 package org.graalvm.compiler.lir.alloc.trace.lsra; 24 25 import static jdk.vm.ci.code.CodeUtil.isEven; 26 import static jdk.vm.ci.code.ValueUtil.asRegister; 27 import static jdk.vm.ci.code.ValueUtil.isIllegal; 28 import static jdk.vm.ci.code.ValueUtil.isLegal; 29 import static jdk.vm.ci.code.ValueUtil.isRegister; 30 import static org.graalvm.compiler.lir.LIRValueUtil.isVariable; 31 32 import java.util.ArrayList; 33 import java.util.Arrays; 34 import java.util.Comparator; 35 36 import org.graalvm.compiler.core.common.LIRKind; 37 import org.graalvm.compiler.core.common.alloc.RegisterAllocationConfig; 38 import org.graalvm.compiler.core.common.alloc.Trace; 39 import org.graalvm.compiler.core.common.alloc.TraceBuilderResult; 40 import org.graalvm.compiler.core.common.cfg.AbstractBlockBase; 41 import org.graalvm.compiler.debug.Assertions; 42 import org.graalvm.compiler.debug.DebugContext; 43 import org.graalvm.compiler.debug.GraalError; 44 import org.graalvm.compiler.debug.Indent; 45 import org.graalvm.compiler.lir.LIR; 46 import org.graalvm.compiler.lir.LIRInstruction; 47 import org.graalvm.compiler.lir.LIRInstruction.OperandMode; 48 import org.graalvm.compiler.lir.StandardOp.BlockEndOp; 49 import org.graalvm.compiler.lir.Variable; 50 import org.graalvm.compiler.lir.VirtualStackSlot; 51 import org.graalvm.compiler.lir.alloc.trace.GlobalLivenessInfo; 52 import org.graalvm.compiler.lir.alloc.trace.TraceAllocationPhase; 53 import org.graalvm.compiler.lir.alloc.trace.TraceAllocationPhase.TraceAllocationContext; 54 import org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase; 55 import org.graalvm.compiler.lir.alloc.trace.TraceUtil; 56 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.RegisterPriority; 57 import org.graalvm.compiler.lir.debug.IntervalDumper; 58 import org.graalvm.compiler.lir.debug.IntervalDumper.IntervalVisitor; 59 import org.graalvm.compiler.lir.framemap.FrameMapBuilder; 60 import 
org.graalvm.compiler.lir.gen.LIRGenerationResult; 61 import org.graalvm.compiler.lir.gen.LIRGeneratorTool.MoveFactory; 62 import org.graalvm.compiler.lir.phases.LIRPhase; 63 import org.graalvm.compiler.options.NestedBooleanOptionKey; 64 import org.graalvm.compiler.options.Option; 65 import org.graalvm.compiler.options.OptionKey; 66 import org.graalvm.compiler.options.OptionType; 67 import org.graalvm.compiler.options.OptionValues; 68 69 import jdk.vm.ci.code.Register; 70 import jdk.vm.ci.code.RegisterArray; 71 import jdk.vm.ci.code.RegisterAttributes; 72 import jdk.vm.ci.code.RegisterValue; 73 import jdk.vm.ci.code.TargetDescription; 74 import jdk.vm.ci.meta.AllocatableValue; 75 import jdk.vm.ci.meta.Value; 76 import jdk.vm.ci.meta.ValueKind; 77 78 /** 79 * Implementation of the Linear Scan allocation approach for traces described in 80 * <a href="http://dx.doi.org/10.1145/2972206.2972211">"Trace-based Register Allocation in a JIT 81 * Compiler"</a> by Josef Eisl et al. It is derived from 82 * <a href="http://doi.acm.org/10.1145/1064979.1064998" > "Optimized Interval Splitting in a Linear 83 * Scan Register Allocator"</a> by Christian Wimmer and Hanspeter Moessenboeck. 
 */
public final class TraceLinearScanPhase extends TraceAllocationPhase<TraceAllocationContext> {

    public static class Options {
        // @formatter:off
        @Option(help = "Enable spill position optimization", type = OptionType.Debug)
        public static final OptionKey<Boolean> LIROptTraceRAEliminateSpillMoves = new NestedBooleanOptionKey(LIRPhase.Options.LIROptimization, true);
        // @formatter:on
    }

    // The sub-phases of trace linear scan, stateless and therefore shared across all instances.
    private static final TraceLinearScanRegisterAllocationPhase TRACE_LINEAR_SCAN_REGISTER_ALLOCATION_PHASE = new TraceLinearScanRegisterAllocationPhase();
    private static final TraceLinearScanAssignLocationsPhase TRACE_LINEAR_SCAN_ASSIGN_LOCATIONS_PHASE = new TraceLinearScanAssignLocationsPhase();
    private static final TraceLinearScanEliminateSpillMovePhase TRACE_LINEAR_SCAN_ELIMINATE_SPILL_MOVE_PHASE = new TraceLinearScanEliminateSpillMovePhase();
    private static final TraceLinearScanResolveDataFlowPhase TRACE_LINEAR_SCAN_RESOLVE_DATA_FLOW_PHASE = new TraceLinearScanResolveDataFlowPhase();
    private static final TraceLinearScanLifetimeAnalysisPhase TRACE_LINEAR_SCAN_LIFETIME_ANALYSIS_PHASE = new TraceLinearScanLifetimeAnalysisPhase();

    // Sentinel instruction id used to mark spill moves inserted at dominator boundaries.
    public static final int DOMINATOR_SPILL_MOVE_ID = -2;

    private final FrameMapBuilder frameMapBuilder;
    // Per-register allocatability/caller-save attributes, indexed by Register#number.
    private final RegisterAttributes[] registerAttributes;
    private final RegisterArray registers;
    private final RegisterAllocationConfig regAllocConfig;
    private final MoveFactory moveFactory;

    protected final TraceBuilderResult traceBuilderResult;

    private final boolean neverSpillConstants;

    /**
     * Maps from {@link Variable#index} to a spill stack slot. If
     * {@linkplain org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options#TraceRACacheStackSlots
     * enabled} a {@link Variable} is always assigned to the same stack slot.
116 */ 117 private final AllocatableValue[] cachedStackSlots; 118 119 private final LIRGenerationResult res; 120 private final GlobalLivenessInfo livenessInfo; 121 122 public TraceLinearScanPhase(TargetDescription target, LIRGenerationResult res, MoveFactory spillMoveFactory, RegisterAllocationConfig regAllocConfig, TraceBuilderResult traceBuilderResult, 123 boolean neverSpillConstants, AllocatableValue[] cachedStackSlots, GlobalLivenessInfo livenessInfo) { 124 this.res = res; 125 this.moveFactory = spillMoveFactory; 126 this.frameMapBuilder = res.getFrameMapBuilder(); 127 this.registerAttributes = regAllocConfig.getRegisterConfig().getAttributesMap(); 128 this.regAllocConfig = regAllocConfig; 129 130 this.registers = target.arch.getRegisters(); 131 this.traceBuilderResult = traceBuilderResult; 132 this.neverSpillConstants = neverSpillConstants; 133 this.cachedStackSlots = cachedStackSlots; 134 this.livenessInfo = livenessInfo; 135 assert livenessInfo != null; 136 } 137 138 protected DebugContext getDebug() { 139 return res.getLIR().getDebug(); 140 } 141 142 public static boolean isVariableOrRegister(Value value) { 143 return isVariable(value) || isRegister(value); 144 } 145 146 abstract static class IntervalPredicate { 147 148 abstract boolean apply(TraceInterval i); 149 } 150 151 static final IntervalPredicate IS_VARIABLE_INTERVAL = new IntervalPredicate() { 152 153 @Override 154 public boolean apply(TraceInterval i) { 155 // all TraceIntervals are variable intervals 156 return true; 157 } 158 }; 159 private static final Comparator<TraceInterval> SORT_BY_FROM_COMP = new Comparator<TraceInterval>() { 160 161 @Override 162 public int compare(TraceInterval a, TraceInterval b) { 163 return a.from() - b.from(); 164 } 165 }; 166 private static final Comparator<TraceInterval> SORT_BY_SPILL_POS_COMP = new Comparator<TraceInterval>() { 167 168 @Override 169 public int compare(TraceInterval a, TraceInterval b) { 170 return a.spillDefinitionPos() - b.spillDefinitionPos(); 
        }
    };

    public TraceLinearScan createAllocator(Trace trace) {
        return new TraceLinearScan(trace);
    }

    @Override
    protected void run(TargetDescription target, LIRGenerationResult lirGenRes, Trace trace, TraceAllocationContext traceContext) {
        createAllocator(trace).allocate(target, lirGenRes, traceContext);
    }

    /**
     * Assertion helper: verifies (via {@code assert}) that the non-null entries of
     * {@code intervals} are in ascending {@code from()} order. Always returns {@code true} so it
     * can be used inside an {@code assert} statement; with assertions disabled it is a no-op.
     */
    private static <T extends IntervalHint> boolean isSortedByFrom(T[] intervals) {
        int from = -1;
        for (T interval : intervals) {
            if (interval == null) {
                continue;
            }
            assert from <= interval.from();
            from = interval.from();
        }
        return true;
    }

    /**
     * Assertion helper: verifies that {@code intervals} contains no nulls and is in ascending
     * {@code spillDefinitionPos()} order. Always returns {@code true} (for use in asserts).
     */
    private static boolean isSortedBySpillPos(TraceInterval[] intervals) {
        int from = -1;
        for (TraceInterval interval : intervals) {
            assert interval != null;
            assert from <= interval.spillDefinitionPos();
            from = interval.spillDefinitionPos();
        }
        return true;
    }

    /**
     * Copies the non-null entries of {@code intervals} into {@code sortedList}, sorted by
     * {@code from()}. {@code sortedList} must be exactly the size of the non-null entry count.
     *
     * @return {@code sortedList}, filled and sorted
     */
    private static TraceInterval[] sortIntervalsBeforeAllocation(TraceInterval[] intervals, TraceInterval[] sortedList) {
        int sortedIdx = 0;
        int sortedFromMax = -1;

        // special sorting algorithm: the original interval-list is almost sorted,
        // only some intervals are swapped. So this is much faster than a complete QuickSort
        for (TraceInterval interval : intervals) {
            if (interval != null) {
                int from = interval.from();

                if (sortedFromMax <= from) {
                    // still in order: append at the end
                    sortedList[sortedIdx++] = interval;
                    sortedFromMax = interval.from();
                } else {
                    // the assumption that the intervals are already sorted failed,
                    // so this interval must be sorted in manually (insertion sort step)
                    int j;
                    for (j = sortedIdx - 1; j >= 0 && from < sortedList[j].from(); j--) {
                        sortedList[j + 1] = sortedList[j];
                    }
                    sortedList[j + 1] = interval;
                    sortedIdx++;
                }
            }
        }
        return sortedList;
    }

    /**
     * Per-trace allocator state and driver; one instance is created per {@link Trace}.
     */
    public final class TraceLinearScan implements IntervalDumper {

        /**
         * Intervals sorted by {@link TraceInterval#from()}.
237 */ 238 private TraceInterval[] sortedIntervals; 239 240 private final Trace trace; 241 242 public TraceLinearScan(Trace trace) { 243 this.trace = trace; 244 this.fixedIntervals = new FixedInterval[registers.size()]; 245 } 246 247 GlobalLivenessInfo getGlobalLivenessInfo() { 248 return livenessInfo; 249 } 250 251 /** 252 * @return {@link Variable#index} 253 */ 254 int operandNumber(Variable operand) { 255 return operand.index; 256 } 257 258 OptionValues getOptions() { 259 return getLIR().getOptions(); 260 } 261 262 DebugContext getDebug() { 263 return getLIR().getDebug(); 264 } 265 266 /** 267 * Gets the number of operands. This value will increase by 1 for new variable. 268 */ 269 int operandSize() { 270 return getLIR().numVariables(); 271 } 272 273 /** 274 * Gets the number of registers. This value will never change. 275 */ 276 int numRegisters() { 277 return registers.size(); 278 } 279 280 public int getFirstLirInstructionId(AbstractBlockBase<?> block) { 281 int result = getLIR().getLIRforBlock(block).get(0).id(); 282 assert result >= 0; 283 return result; 284 } 285 286 public int getLastLirInstructionId(AbstractBlockBase<?> block) { 287 ArrayList<LIRInstruction> instructions = getLIR().getLIRforBlock(block); 288 int result = instructions.get(instructions.size() - 1).id(); 289 assert result >= 0; 290 return result; 291 } 292 293 /** 294 * Gets an object describing the attributes of a given register according to this register 295 * configuration. 
         */
        public RegisterAttributes attributes(Register reg) {
            return registerAttributes[reg.number];
        }

        public boolean isAllocatable(RegisterValue register) {
            return attributes(register.getRegister()).isAllocatable();
        }

        public MoveFactory getSpillMoveFactory() {
            return moveFactory;
        }

        protected TraceLocalMoveResolver createMoveResolver() {
            TraceLocalMoveResolver moveResolver = new TraceLocalMoveResolver(this);
            assert moveResolver.checkEmpty();
            return moveResolver;
        }

        /**
         * Assigns {@code interval} its spill location: {@link Value#ILLEGAL} for materializable
         * intervals, otherwise the existing or a freshly allocated spill slot.
         */
        void assignSpillSlot(TraceInterval interval) {
            /*
             * Assign the canonical spill slot of the parent (if a part of the interval is already
             * spilled) or allocate a new spill slot.
             */
            if (interval.canMaterialize()) {
                // the value can be rematerialized, no stack slot is needed
                interval.assignLocation(Value.ILLEGAL);
            } else if (interval.spillSlot() != null) {
                interval.assignLocation(interval.spillSlot());
            } else {
                AllocatableValue slot = allocateSpillSlot(interval);
                interval.setSpillSlot(slot);
                interval.assignLocation(slot);
            }
        }

        /**
         * Returns a new spill slot or a cached entry if there is already one for the
         * {@linkplain TraceInterval variable}. Caching is keyed by the split parent's operand
         * number so all split children of a variable share one slot (when TraceRACacheStackSlots
         * is enabled).
         */
        private AllocatableValue allocateSpillSlot(TraceInterval interval) {
            DebugContext debug = res.getLIR().getDebug();
            int variableIndex = interval.splitParent().operandNumber;
            OptionValues options = getOptions();
            if (TraceRegisterAllocationPhase.Options.TraceRACacheStackSlots.getValue(options)) {
                AllocatableValue cachedStackSlot = cachedStackSlots[variableIndex];
                if (cachedStackSlot != null) {
                    TraceRegisterAllocationPhase.globalStackSlots.increment(debug);
                    assert cachedStackSlot.getValueKind().equals(getKind(interval)) : "CachedStackSlot: kind mismatch? " + getKind(interval) + " vs. " + cachedStackSlot.getValueKind();
                    return cachedStackSlot;
                }
            }
            VirtualStackSlot slot = frameMapBuilder.allocateSpillSlot(getKind(interval));
            if (TraceRegisterAllocationPhase.Options.TraceRACacheStackSlots.getValue(options)) {
                // remember the slot so later traces reuse it for the same variable
                cachedStackSlots[variableIndex] = slot;
            }
            TraceRegisterAllocationPhase.allocatedStackSlots.increment(debug);
            return slot;
        }

        // access to block list (sorted in linear scan order)
        public int blockCount() {
            return sortedBlocks().length;
        }

        public AbstractBlockBase<?> blockAt(int index) {
            return sortedBlocks()[index];
        }

        int numLoops() {
            return getLIR().getControlFlowGraph().getLoops().size();
        }

        /**
         * Returns true if {@code opId} is the first instruction id of its block.
         */
        boolean isBlockBegin(int opId) {
            return opId == 0 || blockForId(opId) != blockForId(opId - 1);
        }

        /**
         * Returns true if {@code opId} is the last instruction id of its block.
         */
        boolean isBlockEnd(int opId) {
            // assumes instruction ids advance in steps of 2 (see the isEven assert in hasCall),
            // so opId + 2 is the next instruction slot — TODO confirm against the numbering phase
            boolean isBlockBegin = isBlockBegin(opId + 2);
            assert isBlockBegin == (instructionForId(opId & (~1)) instanceof BlockEndOp);
            return isBlockBegin;
        }

        boolean coversBlockBegin(int opId1, int opId2) {
            return blockForId(opId1) != blockForId(opId2);
        }

        /**
         * Determines if an {@link LIRInstruction} destroys all caller saved registers.
         *
         * @param opId an instruction {@linkplain LIRInstruction#id id}
         * @return {@code true} if the instruction denoted by {@code id} destroys all caller saved
         *         registers.
388 */ 389 boolean hasCall(int opId) { 390 assert isEven(opId) : "opId not even"; 391 return instructionForId(opId).destroysCallerSavedRegisters(); 392 } 393 394 public boolean isProcessed(Value operand) { 395 return !isRegister(operand) || attributes(asRegister(operand)).isAllocatable(); 396 } 397 398 // * Phase 5: actual register allocation 399 400 private TraceInterval addToList(TraceInterval first, TraceInterval prev, TraceInterval interval) { 401 TraceInterval newFirst = first; 402 if (prev != null) { 403 prev.next = interval; 404 } else { 405 newFirst = interval; 406 } 407 return newFirst; 408 } 409 410 TraceInterval createUnhandledListByFrom(IntervalPredicate isList1) { 411 assert isSortedByFrom(sortedIntervals) : "interval list is not sorted"; 412 return createUnhandledList(isList1); 413 } 414 415 TraceInterval createUnhandledListBySpillPos(IntervalPredicate isList1) { 416 assert isSortedBySpillPos(sortedIntervals) : "interval list is not sorted"; 417 return createUnhandledList(isList1); 418 } 419 420 private TraceInterval createUnhandledList(IntervalPredicate isList1) { 421 422 TraceInterval list1 = TraceInterval.EndMarker; 423 424 TraceInterval list1Prev = null; 425 TraceInterval v; 426 427 int n = sortedIntervals.length; 428 for (int i = 0; i < n; i++) { 429 v = sortedIntervals[i]; 430 if (v == null) { 431 continue; 432 } 433 434 if (isList1.apply(v)) { 435 list1 = addToList(list1, list1Prev, v); 436 list1Prev = v; 437 } 438 } 439 440 if (list1Prev != null) { 441 list1Prev.next = TraceInterval.EndMarker; 442 } 443 444 assert list1Prev == null || list1Prev.next == TraceInterval.EndMarker : "linear list ends not with sentinel"; 445 446 return list1; 447 } 448 449 private FixedInterval addToList(FixedInterval first, FixedInterval prev, FixedInterval interval) { 450 FixedInterval newFirst = first; 451 if (prev != null) { 452 prev.next = interval; 453 } else { 454 newFirst = interval; 455 } 456 return newFirst; 457 } 458 459 FixedInterval 
createFixedUnhandledList() { 460 assert isSortedByFrom(fixedIntervals) : "interval list is not sorted"; 461 462 FixedInterval list1 = FixedInterval.EndMarker; 463 464 FixedInterval list1Prev = null; 465 for (FixedInterval v : fixedIntervals) { 466 467 if (v == null) { 468 continue; 469 } 470 471 v.rewindRange(); 472 list1 = addToList(list1, list1Prev, v); 473 list1Prev = v; 474 } 475 476 if (list1Prev != null) { 477 list1Prev.next = FixedInterval.EndMarker; 478 } 479 480 assert list1Prev == null || list1Prev.next == FixedInterval.EndMarker : "linear list ends not with sentinel"; 481 482 return list1; 483 } 484 485 // SORTING 486 487 protected void sortIntervalsBeforeAllocation() { 488 int sortedLen = 0; 489 for (TraceInterval interval : intervals()) { 490 if (interval != null) { 491 sortedLen++; 492 } 493 } 494 sortedIntervals = TraceLinearScanPhase.sortIntervalsBeforeAllocation(intervals(), new TraceInterval[sortedLen]); 495 } 496 497 void sortIntervalsAfterAllocation() { 498 if (hasDerivedIntervals()) { 499 // no intervals have been added during allocation, so sorted list is already up to 500 // date 501 return; 502 } 503 504 TraceInterval[] oldList = sortedIntervals; 505 TraceInterval[] newList = Arrays.copyOfRange(intervals(), firstDerivedIntervalIndex(), intervalsSize()); 506 int oldLen = oldList.length; 507 int newLen = newList.length; 508 509 // conventional sort-algorithm for new intervals 510 Arrays.sort(newList, SORT_BY_FROM_COMP); 511 512 // merge old and new list (both already sorted) into one combined list 513 TraceInterval[] combinedList = new TraceInterval[oldLen + newLen]; 514 int oldIdx = 0; 515 int newIdx = 0; 516 517 while (oldIdx + newIdx < combinedList.length) { 518 if (newIdx >= newLen || (oldIdx < oldLen && oldList[oldIdx].from() <= newList[newIdx].from())) { 519 combinedList[oldIdx + newIdx] = oldList[oldIdx]; 520 oldIdx++; 521 } else { 522 combinedList[oldIdx + newIdx] = newList[newIdx]; 523 newIdx++; 524 } 525 } 526 527 sortedIntervals = 
combinedList; 528 } 529 530 void sortIntervalsBySpillPos() { 531 // TODO (JE): better algorithm? 532 // conventional sort-algorithm for new intervals 533 Arrays.sort(sortedIntervals, SORT_BY_SPILL_POS_COMP); 534 } 535 536 // wrapper for Interval.splitChildAtOpId that performs a bailout in product mode 537 // instead of returning null 538 public TraceInterval splitChildAtOpId(TraceInterval interval, int opId, LIRInstruction.OperandMode mode) { 539 TraceInterval result = interval.getSplitChildAtOpId(opId, mode); 540 541 if (result != null) { 542 if (getDebug().isLogEnabled()) { 543 getDebug().log("Split child at pos %d of interval %s is %s", opId, interval, result); 544 } 545 return result; 546 } 547 throw new GraalError("LinearScan: interval is null"); 548 } 549 550 AllocatableValue canonicalSpillOpr(TraceInterval interval) { 551 assert interval.spillSlot() != null : "canonical spill slot not set"; 552 return interval.spillSlot(); 553 } 554 555 boolean isMaterialized(Variable operand, int opId, OperandMode mode) { 556 TraceInterval interval = intervalFor(operand); 557 assert interval != null : "interval must exist"; 558 559 if (opId != -1) { 560 /* 561 * Operands are not changed when an interval is split during allocation, so search 562 * the right interval here. 563 */ 564 interval = splitChildAtOpId(interval, opId, mode); 565 } 566 567 return isIllegal(interval.location()) && interval.canMaterialize(); 568 } 569 570 boolean isCallerSave(Value operand) { 571 return attributes(asRegister(operand)).isCallerSave(); 572 } 573 574 @SuppressWarnings("try") 575 protected void allocate(TargetDescription target, LIRGenerationResult lirGenRes, TraceAllocationContext traceContext) { 576 MoveFactory spillMoveFactory = traceContext.spillMoveFactory; 577 RegisterAllocationConfig registerAllocationConfig = traceContext.registerAllocationConfig; 578 /* 579 * This is the point to enable debug logging for the whole register allocation. 
580 */ 581 DebugContext debug = res.getLIR().getDebug(); 582 try (Indent indent = debug.logAndIndent("LinearScan allocate")) { 583 TRACE_LINEAR_SCAN_LIFETIME_ANALYSIS_PHASE.apply(target, lirGenRes, trace, spillMoveFactory, registerAllocationConfig, traceBuilderResult, this, false); 584 585 try (DebugContext.Scope s = debug.scope("AfterLifetimeAnalysis", this)) { 586 587 printLir("After instruction numbering"); 588 printIntervals("Before register allocation"); 589 590 sortIntervalsBeforeAllocation(); 591 592 TRACE_LINEAR_SCAN_REGISTER_ALLOCATION_PHASE.apply(target, lirGenRes, trace, spillMoveFactory, registerAllocationConfig, traceBuilderResult, this, false); 593 printIntervals("After register allocation"); 594 595 // resolve intra-trace data-flow 596 TRACE_LINEAR_SCAN_RESOLVE_DATA_FLOW_PHASE.apply(target, lirGenRes, trace, spillMoveFactory, registerAllocationConfig, traceBuilderResult, this); 597 598 // eliminate spill moves 599 OptionValues options = getOptions(); 600 if (Options.LIROptTraceRAEliminateSpillMoves.getValue(options)) { 601 TRACE_LINEAR_SCAN_ELIMINATE_SPILL_MOVE_PHASE.apply(target, lirGenRes, trace, spillMoveFactory, registerAllocationConfig, traceBuilderResult, this); 602 } 603 604 TRACE_LINEAR_SCAN_ASSIGN_LOCATIONS_PHASE.apply(target, lirGenRes, trace, spillMoveFactory, registerAllocationConfig, traceBuilderResult, this, false); 605 606 if (Assertions.detailedAssertionsEnabled(options)) { 607 verifyIntervals(); 608 } 609 } catch (Throwable e) { 610 throw debug.handle(e); 611 } 612 } 613 } 614 615 public void printLir(String label) { 616 if (getDebug().isDumpEnabled(DebugContext.DETAILED_LEVEL)) { 617 getDebug().dump(DebugContext.DETAILED_LEVEL, sortedBlocks(), "%s (Trace%d)", label, trace.getId()); 618 } 619 } 620 621 boolean verify() { 622 // (check that all intervals have a correct register and that no registers are 623 // overwritten) 624 verifyIntervals(); 625 626 verifyRegisters(); 627 628 getDebug().log("no errors found"); 629 630 return true; 
631 } 632 633 @SuppressWarnings("try") 634 private void verifyRegisters() { 635 // Enable this logging to get output for the verification process. 636 try (Indent indent = getDebug().logAndIndent("verifying register allocation")) { 637 RegisterVerifier verifier = new RegisterVerifier(this); 638 verifier.verify(blockAt(0)); 639 } 640 } 641 642 @SuppressWarnings("try") 643 protected void verifyIntervals() { 644 DebugContext debug = getDebug(); 645 try (Indent indent = debug.logAndIndent("verifying intervals")) { 646 int len = intervalsSize(); 647 648 for (int i = 0; i < len; i++) { 649 final TraceInterval i1 = intervals()[i]; 650 if (i1 == null) { 651 continue; 652 } 653 654 i1.checkSplitChildren(); 655 656 if (i1.operandNumber != i) { 657 debug.log("Interval %d is on position %d in list", i1.operandNumber, i); 658 debug.log(i1.logString()); 659 throw new GraalError(""); 660 } 661 662 if (getKind(i1).equals(LIRKind.Illegal)) { 663 debug.log("Interval %d has no type assigned", i1.operandNumber); 664 debug.log(i1.logString()); 665 throw new GraalError(""); 666 } 667 668 if (i1.location() == null) { 669 debug.log("Interval %d has no register assigned", i1.operandNumber); 670 debug.log(i1.logString()); 671 throw new GraalError(""); 672 } 673 674 if (i1.isEmpty()) { 675 debug.log("Interval %d has no Range", i1.operandNumber); 676 debug.log(i1.logString()); 677 throw new GraalError(""); 678 } 679 680 if (i1.from() >= i1.to()) { 681 debug.log("Interval %d has zero length range", i1.operandNumber); 682 debug.log(i1.logString()); 683 throw new GraalError(""); 684 } 685 686 // special intervals that are created in MoveResolver 687 // . 
ignore them because the range information has no meaning there 688 if (i1.from() == 1 && i1.to() == 2) { 689 continue; 690 } 691 // check any intervals 692 for (int j = i + 1; j < len; j++) { 693 final TraceInterval i2 = intervals()[j]; 694 if (i2 == null) { 695 continue; 696 } 697 698 // special intervals that are created in MoveResolver 699 // . ignore them because the range information has no meaning there 700 if (i2.from() == 1 && i2.to() == 2) { 701 continue; 702 } 703 Value l1 = i1.location(); 704 Value l2 = i2.location(); 705 boolean intersects = i1.intersects(i2); 706 if (intersects && !isIllegal(l1) && (l1.equals(l2))) { 707 throw GraalError.shouldNotReachHere(String.format("Intervals %s and %s overlap and have the same register assigned\n%s\n%s", i1, i2, i1.logString(), i2.logString())); 708 } 709 } 710 // check fixed intervals 711 for (FixedInterval i2 : fixedIntervals()) { 712 if (i2 == null) { 713 continue; 714 } 715 716 Value l1 = i1.location(); 717 Value l2 = i2.location(); 718 boolean intersects = i2.intersects(i1); 719 if (intersects && !isIllegal(l1) && (l1.equals(l2))) { 720 throw GraalError.shouldNotReachHere(String.format("Intervals %s and %s overlap and have the same register assigned\n%s\n%s", i1, i2, i1.logString(), i2.logString())); 721 } 722 } 723 } 724 } 725 } 726 727 public LIR getLIR() { 728 return res.getLIR(); 729 } 730 731 public FrameMapBuilder getFrameMapBuilder() { 732 return frameMapBuilder; 733 } 734 735 public AbstractBlockBase<?>[] sortedBlocks() { 736 return trace.getBlocks(); 737 } 738 739 public RegisterArray getRegisters() { 740 return registers; 741 } 742 743 public RegisterAllocationConfig getRegisterAllocationConfig() { 744 return regAllocConfig; 745 } 746 747 public boolean callKillsRegisters() { 748 return regAllocConfig.getRegisterConfig().areAllAllocatableRegistersCallerSaved(); 749 } 750 751 boolean neverSpillConstants() { 752 return neverSpillConstants; 753 } 754 755 // IntervalData 756 757 private static final 
int SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT = 1;

        /**
         * The index of the first entry in {@link #intervals} for a
         * {@linkplain #createDerivedInterval(TraceInterval) derived interval}.
         */
        private int firstDerivedIntervalIndex = -1;

        /**
         * @see #fixedIntervals()
         */
        private final FixedInterval[] fixedIntervals;

        /**
         * @see #intervals()
         */
        private TraceInterval[] intervals;

        /**
         * The number of valid entries in {@link #intervals}.
         */
        private int intervalsSize;

        /**
         * Map from an instruction {@linkplain LIRInstruction#id id} to the instruction. Entries
         * should be retrieved with {@link #instructionForId(int)} as the id is not simply an index
         * into this array.
         */
        private LIRInstruction[] opIdToInstructionMap;

        /**
         * Map from an instruction {@linkplain LIRInstruction#id id} to the
         * {@linkplain AbstractBlockBase block} containing the instruction. Entries should be
         * retrieved with {@link #blockForId(int)} as the id is not simply an index into this array.
         */
        private AbstractBlockBase<?>[] opIdToBlockMap;

        /**
         * Map from {@linkplain #operandNumber operand numbers} to intervals.
         */
        TraceInterval[] intervals() {
            return intervals;
        }

        /**
         * Map from {@linkplain Register#number} to fixed intervals.
         */
        FixedInterval[] fixedIntervals() {
            return fixedIntervals;
        }

        /**
         * Allocates the interval table with extra head room (half of the operand count, see
         * {@code SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT}) for derived intervals created later.
         */
        void initIntervals() {
            intervalsSize = operandSize();
            intervals = new TraceInterval[intervalsSize + (intervalsSize >> SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT)];
        }

        /**
         * Creates a new fixed interval.
         *
         * @param reg the operand for the interval
         * @return the created interval
         */
        private FixedInterval createFixedInterval(RegisterValue reg) {
            FixedInterval interval = new FixedInterval(reg);
            int operandNumber = reg.getRegister().number;
            assert fixedIntervals[operandNumber] == null;
            fixedIntervals[operandNumber] = interval;
            return interval;
        }

        /**
         * Creates a new interval.
         *
         * @param operand the operand for the interval
         * @return the created interval
         */
        private TraceInterval createInterval(Variable operand) {
            assert isLegal(operand);
            int operandNumber = operandNumber(operand);
            assert operand.index == operandNumber;
            TraceInterval interval = new TraceInterval(operand);
            assert operandNumber < intervalsSize;
            assert intervals[operandNumber] == null;
            intervals[operandNumber] = interval;
            return interval;
        }

        /**
         * Creates an interval as a result of splitting or spilling another interval.
         *
         * @param source an interval being split or spilled
         * @return a new interval derived from {@code source}
         */
        TraceInterval createDerivedInterval(TraceInterval source) {
            if (firstDerivedIntervalIndex == -1) {
                firstDerivedIntervalIndex = intervalsSize;
            }
            if (intervalsSize == intervals.length) {
                // grow by ~50% (the "+ 1" guarantees progress even for tiny arrays)
                intervals = Arrays.copyOf(intervals, intervals.length + (intervals.length >> SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT) + 1);
            }
            // increments intervalsSize
            Variable variable = createVariable(getKind(source));

            assert intervalsSize <= intervals.length;

            TraceInterval interval = createInterval(variable);
            assert intervals[intervalsSize - 1] == interval;
            return interval;
        }

        /**
         * Creates a new variable for a derived interval. Note that the variable is not
         * {@linkplain LIR#numVariables() managed} so it must not be inserted into the {@link LIR}.
*/
        private Variable createVariable(ValueKind<?> kind) {
            // the new variable takes the next free slot; intervalsSize is bumped here
            return new Variable(kind, intervalsSize++);
        }

        /**
         * Returns {@code true} if at least one
         * {@linkplain #createDerivedInterval(TraceInterval) derived interval} has been created.
         */
        boolean hasDerivedIntervals() {
            return firstDerivedIntervalIndex != -1;
        }

        int firstDerivedIntervalIndex() {
            return firstDerivedIntervalIndex;
        }

        public int intervalsSize() {
            return intervalsSize;
        }

        FixedInterval fixedIntervalFor(RegisterValue reg) {
            return fixedIntervals[reg.getRegister().number];
        }

        FixedInterval getOrCreateFixedInterval(RegisterValue reg) {
            FixedInterval ret = fixedIntervalFor(reg);
            if (ret == null) {
                return createFixedInterval(reg);
            } else {
                return ret;
            }
        }

        TraceInterval intervalFor(Variable operand) {
            return intervalFor(operandNumber(operand));
        }

        TraceInterval intervalFor(int operandNumber) {
            assert operandNumber < intervalsSize;
            return intervals[operandNumber];
        }

        TraceInterval getOrCreateInterval(Variable operand) {
            TraceInterval ret = intervalFor(operand);
            if (ret == null) {
                return createInterval(operand);
            } else {
                return ret;
            }
        }

        void initOpIdMaps(int numInstructions) {
            opIdToInstructionMap = new LIRInstruction[numInstructions];
            opIdToBlockMap = new AbstractBlockBase<?>[numInstructions];
        }

        void putOpIdMaps(int index, LIRInstruction op, AbstractBlockBase<?> block) {
            opIdToInstructionMap[index] = op;
            opIdToBlockMap[index] = block;
        }

        /**
         * Gets the highest instruction id allocated by this object.
         */
        int maxOpId() {
            assert opIdToInstructionMap.length > 0 : "no operations";
            // ids are even and spaced two apart (see opIdToIndex/instructionForId),
            // so the highest id is (entries - 1) * 2
            return (opIdToInstructionMap.length - 1) << 1;
        }

        /**
         * Converts an {@linkplain LIRInstruction#id instruction id} to an instruction index. All
         * LIR instructions in a method have an index one greater than their linear-scan order
         * predecessor with the first instruction having an index of 0.
         */
        private int opIdToIndex(int opId) {
            return opId >> 1;
        }

        /**
         * Retrieves the {@link LIRInstruction} based on its {@linkplain LIRInstruction#id id}.
         *
         * @param opId an instruction {@linkplain LIRInstruction#id id}
         * @return the instruction whose {@linkplain LIRInstruction#id} {@code == id}
         */
        LIRInstruction instructionForId(int opId) {
            assert isEven(opId) : "opId not even";
            LIRInstruction instr = opIdToInstructionMap[opIdToIndex(opId)];
            assert instr.id() == opId;
            return instr;
        }

        /**
         * Gets the block containing a given instruction.
         *
         * @param opId an instruction {@linkplain LIRInstruction#id id}
         * @return the block containing the instruction denoted by {@code opId}
         */
        AbstractBlockBase<?> blockForId(int opId) {
            assert opIdToBlockMap.length > 0 && opId >= 0 && opId <= maxOpId() + 1 : "opId out of range: " + opId;
            return opIdToBlockMap[opIdToIndex(opId)];
        }

        /**
         * Logs and dumps all fixed and variable intervals plus the trace's basic blocks, guarded
         * by {@link DebugContext#DETAILED_LEVEL}.
         *
         * @param label tag included in the log/dump output
         */
        @SuppressWarnings("try")
        public void printIntervals(String label) {
            DebugContext debug = getDebug();
            if (debug.isDumpEnabled(DebugContext.DETAILED_LEVEL)) {
                if (debug.isLogEnabled()) {
                    try (Indent indent = debug.logAndIndent("intervals %s", label)) {
                        for (FixedInterval interval : fixedIntervals) {
                            if (interval != null) {
                                debug.log("%s", interval.logString());
                            }
                        }

                        for (TraceInterval interval : intervals) {
                            if (interval != null) {
                                debug.log("%s", interval.logString());
                            }
                        }

                        try (Indent indent2 = debug.logAndIndent("Basic Blocks")) {
                            for (AbstractBlockBase<?> block : trace.getBlocks()) {
                                debug.log("B%d [%d, %d, %s] ", block.getId(), getFirstLirInstructionId(block), getLastLirInstructionId(block), block.getLoop());
                            }
                        }
                    }
                }
                debug.dump(DebugContext.DETAILED_LEVEL, this, "%s (Trace%d)", label, trace.getId());
            }
        }

        @Override
        public void visitIntervals(IntervalVisitor visitor) {
            for
(FixedInterval interval : fixedIntervals) { 1001 if (interval != null) { 1002 printFixedInterval(interval, visitor); 1003 } 1004 } 1005 for (TraceInterval interval : intervals) { 1006 if (interval != null) { 1007 printInterval(interval, visitor); 1008 } 1009 } 1010 } 1011 1012 boolean hasInterTracePredecessor(AbstractBlockBase<?> block) { 1013 return TraceUtil.hasInterTracePredecessor(traceBuilderResult, trace, block); 1014 } 1015 1016 boolean hasInterTraceSuccessor(AbstractBlockBase<?> block) { 1017 return TraceUtil.hasInterTraceSuccessor(traceBuilderResult, trace, block); 1018 } 1019 1020 private void printInterval(TraceInterval interval, IntervalVisitor visitor) { 1021 Value hint = interval.locationHint(false) != null ? interval.locationHint(false).location() : null; 1022 AllocatableValue operand = getOperand(interval); 1023 String type = getKind(interval).getPlatformKind().toString(); 1024 visitor.visitIntervalStart(getOperand(interval.splitParent()), operand, interval.location(), hint, type); 1025 1026 // print ranges 1027 visitor.visitRange(interval.from(), interval.to()); 1028 1029 // print use positions 1030 int prev = -1; 1031 for (int i = interval.numUsePos() - 1; i >= 0; --i) { 1032 assert prev < interval.getUsePos(i) : "use positions not sorted"; 1033 visitor.visitUsePos(interval.getUsePos(i), interval.getUsePosRegisterPriority(i)); 1034 prev = interval.getUsePos(i); 1035 } 1036 1037 visitor.visitIntervalEnd(interval.spillState()); 1038 } 1039 1040 AllocatableValue getOperand(TraceInterval interval) { 1041 return interval.operand; 1042 } 1043 1044 ValueKind<?> getKind(TraceInterval interval) { 1045 return getOperand(interval).getValueKind(); 1046 } 1047 1048 } 1049 1050 public static boolean verifyEquals(TraceLinearScan a, TraceLinearScan b) { 1051 assert compareFixed(a.fixedIntervals(), b.fixedIntervals()); 1052 assert compareIntervals(a.intervals(), b.intervals()); 1053 return true; 1054 } 1055 1056 private static boolean 
compareIntervals(TraceInterval[] a, TraceInterval[] b) { 1057 for (int i = 0; i < Math.max(a.length, b.length); i++) { 1058 if (i >= a.length) { 1059 assert b[i] == null : "missing a interval: " + i + " b: " + b[i]; 1060 continue; 1061 } 1062 if (i >= b.length) { 1063 assert a[i] == null : "missing b interval: " + i + " a: " + a[i]; 1064 continue; 1065 } 1066 compareInterval(a[i], b[i]); 1067 } 1068 return true; 1069 } 1070 1071 private static void compareInterval(TraceInterval a, TraceInterval b) { 1072 if (a == null) { 1073 assert b == null : "First interval is null but second is: " + b; 1074 return; 1075 } 1076 assert b != null : "Second interval is null but forst is: " + a; 1077 assert a.operandNumber == b.operandNumber : "Operand mismatch: " + a + " vs. " + b; 1078 assert a.from() == b.from() : "From mismatch: " + a + " vs. " + b; 1079 assert a.to() == b.to() : "To mismatch: " + a + " vs. " + b; 1080 assert verifyIntervalsEquals(a, b); 1081 } 1082 1083 private static boolean verifyIntervalsEquals(TraceInterval a, TraceInterval b) { 1084 for (int i = 0; i < Math.max(a.numUsePos(), b.numUsePos()); i++) { 1085 assert i < a.numUsePos() : "missing a usepos: " + i + " b: " + b; 1086 assert i < b.numUsePos() : "missing b usepos: " + i + " a: " + a; 1087 int aPos = a.getUsePos(i); 1088 int bPos = b.getUsePos(i); 1089 assert aPos == bPos : "Use Positions differ: " + aPos + " vs. " + bPos; 1090 RegisterPriority aReg = a.getUsePosRegisterPriority(i); 1091 RegisterPriority bReg = b.getUsePosRegisterPriority(i); 1092 assert aReg == bReg : "Register priority differ: " + aReg + " vs. 
" + bReg; 1093 } 1094 return true; 1095 } 1096 1097 private static boolean compareFixed(FixedInterval[] a, FixedInterval[] b) { 1098 for (int i = 0; i < Math.max(a.length, b.length); i++) { 1099 if (i >= a.length) { 1100 assert b[i] == null : "missing a interval: " + i + " b: " + b[i]; 1101 continue; 1102 } 1103 if (i >= b.length) { 1104 assert a[i] == null : "missing b interval: " + i + " a: " + a[i]; 1105 continue; 1106 } 1107 compareFixedInterval(a[i], b[i]); 1108 } 1109 return true; 1110 } 1111 1112 private static void compareFixedInterval(FixedInterval a, FixedInterval b) { 1113 if (a == null) { 1114 assert b == null || isEmptyInterval(b) : "First interval is null but second is: " + b; 1115 return; 1116 } 1117 if (b == null) { 1118 assert isEmptyInterval(a) : "Second interval is null but first is: " + a; 1119 return; 1120 } 1121 assert a.operand.equals(b.operand) : "Operand mismatch: " + a + " vs. " + b; 1122 assert a.from() == b.from() : "From mismatch: " + a + " vs. " + b; 1123 assert a.to() == b.to() : "To mismatch: " + a + " vs. " + b; 1124 assert verifyFixeEquas(a, b); 1125 } 1126 1127 private static boolean verifyFixeEquas(FixedInterval a, FixedInterval b) { 1128 a.rewindRange(); 1129 b.rewindRange(); 1130 while (!a.currentAtEnd()) { 1131 assert !b.currentAtEnd() : "Fixed range mismatch: " + a + " vs. " + b; 1132 assert a.currentFrom() == b.currentFrom() : "From range mismatch: " + a + " vs. " + b + " from: " + a.currentFrom() + " vs. " + b.currentFrom(); 1133 assert a.currentTo() == b.currentTo() : "To range mismatch: " + a + " vs. " + b + " from: " + a.currentTo() + " vs. " + b.currentTo(); 1134 a.nextRange(); 1135 b.nextRange(); 1136 } 1137 assert b.currentAtEnd() : "Fixed range mismatch: " + a + " vs. 
" + b; 1138 return true; 1139 } 1140 1141 private static boolean isEmptyInterval(FixedInterval fixed) { 1142 return fixed.from() == -1 && fixed.to() == 0; 1143 } 1144 1145 private static void printFixedInterval(FixedInterval interval, IntervalVisitor visitor) { 1146 Value hint = null; 1147 AllocatableValue operand = interval.operand; 1148 String type = "fixed"; 1149 visitor.visitIntervalStart(operand, operand, operand, hint, type); 1150 1151 // print ranges 1152 for (FixedRange range = interval.first(); range != FixedRange.EndMarker; range = range.next) { 1153 visitor.visitRange(range.from, range.to); 1154 } 1155 1156 // no use positions 1157 1158 visitor.visitIntervalEnd("NOT_SUPPORTED"); 1159 1160 } 1161 1162 }