1 /* 2 * Copyright (c) 2009, 2017, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 */ 23 package org.graalvm.compiler.lir.alloc.trace.lsra; 24 25 import static jdk.vm.ci.code.CodeUtil.isEven; 26 import static jdk.vm.ci.code.ValueUtil.asRegister; 27 import static jdk.vm.ci.code.ValueUtil.isIllegal; 28 import static jdk.vm.ci.code.ValueUtil.isLegal; 29 import static jdk.vm.ci.code.ValueUtil.isRegister; 30 import static org.graalvm.compiler.core.common.GraalOptions.DetailedAsserts; 31 import static org.graalvm.compiler.lir.LIRValueUtil.isVariable; 32 33 import java.util.ArrayList; 34 import java.util.Arrays; 35 import java.util.Comparator; 36 37 import org.graalvm.compiler.core.common.LIRKind; 38 import org.graalvm.compiler.core.common.alloc.RegisterAllocationConfig; 39 import org.graalvm.compiler.core.common.alloc.Trace; 40 import org.graalvm.compiler.core.common.alloc.TraceBuilderResult; 41 import org.graalvm.compiler.core.common.cfg.AbstractBlockBase; 42 import org.graalvm.compiler.debug.Debug; 43 import org.graalvm.compiler.debug.Debug.Scope; 44 import org.graalvm.compiler.debug.GraalError; 45 import org.graalvm.compiler.debug.Indent; 46 import org.graalvm.compiler.lir.LIR; 47 import org.graalvm.compiler.lir.LIRInstruction; 48 import org.graalvm.compiler.lir.LIRInstruction.OperandMode; 49 import org.graalvm.compiler.lir.StandardOp.BlockEndOp; 50 import org.graalvm.compiler.lir.Variable; 51 import org.graalvm.compiler.lir.VirtualStackSlot; 52 import org.graalvm.compiler.lir.alloc.trace.GlobalLivenessInfo; 53 import org.graalvm.compiler.lir.alloc.trace.TraceAllocationPhase; 54 import org.graalvm.compiler.lir.alloc.trace.TraceAllocationPhase.TraceAllocationContext; 55 import org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase; 56 import org.graalvm.compiler.lir.alloc.trace.TraceUtil; 57 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.RegisterPriority; 58 import org.graalvm.compiler.lir.debug.IntervalDumper; 59 import org.graalvm.compiler.lir.debug.IntervalDumper.IntervalVisitor; 60 import org.graalvm.compiler.lir.framemap.FrameMapBuilder; 61 import org.graalvm.compiler.lir.gen.LIRGenerationResult; 62 import org.graalvm.compiler.lir.gen.LIRGeneratorTool.MoveFactory; 63 import org.graalvm.compiler.lir.phases.LIRPhase; 64 import org.graalvm.compiler.options.NestedBooleanOptionKey; 65 import org.graalvm.compiler.options.Option; 66 import org.graalvm.compiler.options.OptionKey; 67 import org.graalvm.compiler.options.OptionType; 68 import org.graalvm.compiler.options.OptionValues; 69 70 import jdk.vm.ci.code.Register; 71 import jdk.vm.ci.code.RegisterArray; 72 import jdk.vm.ci.code.RegisterAttributes; 73 import jdk.vm.ci.code.RegisterValue; 74 import jdk.vm.ci.code.TargetDescription; 75 import jdk.vm.ci.meta.AllocatableValue; 76 import jdk.vm.ci.meta.Value; 77 import jdk.vm.ci.meta.ValueKind; 78 79 /** 80 * Implementation of the Linear Scan allocation approach for traces described in 81 * <a href="http://dx.doi.org/10.1145/2972206.2972211">"Trace-based Register Allocation in a JIT 82 * Compiler"</a> by Josef Eisl et al. It is derived from 83 * <a href="http://doi.acm.org/10.1145/1064979.1064998" > "Optimized Interval Splitting in a Linear 84 * Scan Register Allocator"</a> by Christian Wimmer and Hanspeter Moessenboeck. 85 */ 86 public final class TraceLinearScanPhase extends TraceAllocationPhase<TraceAllocationContext> { 87 88 public static class Options { 89 // @formatter:off 90 @Option(help = "Enable spill position optimization", type = OptionType.Debug) 91 public static final OptionKey<Boolean> LIROptTraceRAEliminateSpillMoves = new NestedBooleanOptionKey(LIRPhase.Options.LIROptimization, true); 92 // @formatter:on 93 } 94 95 private static final TraceLinearScanRegisterAllocationPhase TRACE_LINEAR_SCAN_REGISTER_ALLOCATION_PHASE = new TraceLinearScanRegisterAllocationPhase(); 96 private static final TraceLinearScanAssignLocationsPhase TRACE_LINEAR_SCAN_ASSIGN_LOCATIONS_PHASE = new TraceLinearScanAssignLocationsPhase(); 97 private static final TraceLinearScanEliminateSpillMovePhase TRACE_LINEAR_SCAN_ELIMINATE_SPILL_MOVE_PHASE = new TraceLinearScanEliminateSpillMovePhase(); 98 private static final TraceLinearScanResolveDataFlowPhase TRACE_LINEAR_SCAN_RESOLVE_DATA_FLOW_PHASE = new TraceLinearScanResolveDataFlowPhase(); 99 private static final TraceLinearScanLifetimeAnalysisPhase TRACE_LINEAR_SCAN_LIFETIME_ANALYSIS_PHASE = new TraceLinearScanLifetimeAnalysisPhase(); 100 101 public static final int DOMINATOR_SPILL_MOVE_ID = -2; 102 103 private final FrameMapBuilder frameMapBuilder; 104 private final RegisterAttributes[] registerAttributes; 105 private final RegisterArray registers; 106 private final RegisterAllocationConfig regAllocConfig; 107 private final MoveFactory moveFactory; 108 109 protected final TraceBuilderResult traceBuilderResult; 110 111 private final boolean neverSpillConstants; 112 113 /** 114 * Maps from {@link Variable#index} to a spill stack slot. If 115 * {@linkplain org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options#TraceRACacheStackSlots 116 * enabled} a {@link Variable} is always assigned to the same stack slot. 117 */ 118 private final AllocatableValue[] cachedStackSlots; 119 120 private final LIRGenerationResult res; 121 private final GlobalLivenessInfo livenessInfo; 122 123 public TraceLinearScanPhase(TargetDescription target, LIRGenerationResult res, MoveFactory spillMoveFactory, RegisterAllocationConfig regAllocConfig, TraceBuilderResult traceBuilderResult, 124 boolean neverSpillConstants, AllocatableValue[] cachedStackSlots, GlobalLivenessInfo livenessInfo) { 125 this.res = res; 126 this.moveFactory = spillMoveFactory; 127 this.frameMapBuilder = res.getFrameMapBuilder(); 128 this.registerAttributes = regAllocConfig.getRegisterConfig().getAttributesMap(); 129 this.regAllocConfig = regAllocConfig; 130 131 this.registers = target.arch.getRegisters(); 132 this.traceBuilderResult = traceBuilderResult; 133 this.neverSpillConstants = neverSpillConstants; 134 this.cachedStackSlots = cachedStackSlots; 135 this.livenessInfo = livenessInfo; 136 assert livenessInfo != null; 137 } 138 139 public static boolean isVariableOrRegister(Value value) { 140 return isVariable(value) || isRegister(value); 141 } 142 143 abstract static class IntervalPredicate { 144 145 abstract boolean apply(TraceInterval i); 146 } 147 148 static final IntervalPredicate IS_VARIABLE_INTERVAL = new IntervalPredicate() { 149 150 @Override 151 public boolean apply(TraceInterval i) { 152 // all TraceIntervals are variable intervals 153 return true; 154 } 155 }; 156 private static final Comparator<TraceInterval> SORT_BY_FROM_COMP = new Comparator<TraceInterval>() { 157 158 @Override 159 public int compare(TraceInterval a, TraceInterval b) { 160 return a.from() - b.from(); 161 } 162 }; 163 private static final Comparator<TraceInterval> SORT_BY_SPILL_POS_COMP = new Comparator<TraceInterval>() { 164 165 @Override 166 public int compare(TraceInterval a, TraceInterval b) { 167 return a.spillDefinitionPos() - b.spillDefinitionPos(); 168 } 169 }; 170 171 public TraceLinearScan createAllocator(Trace trace) { 172 return new TraceLinearScan(trace); 173 } 174 175 @Override 176 protected void run(TargetDescription target, LIRGenerationResult lirGenRes, Trace trace, TraceAllocationContext traceContext) { 177 createAllocator(trace).allocate(target, lirGenRes, traceContext); 178 } 179 180 private static <T extends IntervalHint> boolean isSortedByFrom(T[] intervals) { 181 int from = -1; 182 for (T interval : intervals) { 183 if (interval == null) { 184 continue; 185 } 186 assert from <= interval.from(); 187 from = interval.from(); 188 } 189 return true; 190 } 191 192 private static boolean isSortedBySpillPos(TraceInterval[] intervals) { 193 int from = -1; 194 for (TraceInterval interval : intervals) { 195 assert interval != null; 196 assert from <= interval.spillDefinitionPos(); 197 from = interval.spillDefinitionPos(); 198 } 199 return true; 200 } 201 202 private static TraceInterval[] sortIntervalsBeforeAllocation(TraceInterval[] intervals, TraceInterval[] sortedList) { 203 int sortedIdx = 0; 204 int sortedFromMax = -1; 205 206 // special sorting algorithm: the original interval-list is almost sorted, 207 // only some intervals are swapped. So this is much faster than a complete QuickSort 208 for (TraceInterval interval : intervals) { 209 if (interval != null) { 210 int from = interval.from(); 211 212 if (sortedFromMax <= from) { 213 sortedList[sortedIdx++] = interval; 214 sortedFromMax = interval.from(); 215 } else { 216 // the assumption that the intervals are already sorted failed, 217 // so this interval must be sorted in manually 218 int j; 219 for (j = sortedIdx - 1; j >= 0 && from < sortedList[j].from(); j--) { 220 sortedList[j + 1] = sortedList[j]; 221 } 222 sortedList[j + 1] = interval; 223 sortedIdx++; 224 } 225 } 226 } 227 return sortedList; 228 } 229 230 public final class TraceLinearScan implements IntervalDumper { 231 232 /** 233 * Intervals sorted by {@link TraceInterval#from()}. 234 */ 235 private TraceInterval[] sortedIntervals; 236 237 private final Trace trace; 238 239 public TraceLinearScan(Trace trace) { 240 this.trace = trace; 241 this.fixedIntervals = new FixedInterval[registers.size()]; 242 } 243 244 GlobalLivenessInfo getGlobalLivenessInfo() { 245 return livenessInfo; 246 } 247 248 /** 249 * @return {@link Variable#index} 250 */ 251 int operandNumber(Variable operand) { 252 return operand.index; 253 } 254 255 OptionValues getOptions() { 256 return getLIR().getOptions(); 257 } 258 259 /** 260 * Gets the number of operands. This value will increase by 1 for new variable. 261 */ 262 int operandSize() { 263 return getLIR().numVariables(); 264 } 265 266 /** 267 * Gets the number of registers. This value will never change. 268 */ 269 int numRegisters() { 270 return registers.size(); 271 } 272 273 public int getFirstLirInstructionId(AbstractBlockBase<?> block) { 274 int result = getLIR().getLIRforBlock(block).get(0).id(); 275 assert result >= 0; 276 return result; 277 } 278 279 public int getLastLirInstructionId(AbstractBlockBase<?> block) { 280 ArrayList<LIRInstruction> instructions = getLIR().getLIRforBlock(block); 281 int result = instructions.get(instructions.size() - 1).id(); 282 assert result >= 0; 283 return result; 284 } 285 286 /** 287 * Gets an object describing the attributes of a given register according to this register 288 * configuration. 289 */ 290 public RegisterAttributes attributes(Register reg) { 291 return registerAttributes[reg.number]; 292 } 293 294 public boolean isAllocatable(RegisterValue register) { 295 return attributes(register.getRegister()).isAllocatable(); 296 } 297 298 public MoveFactory getSpillMoveFactory() { 299 return moveFactory; 300 } 301 302 protected TraceLocalMoveResolver createMoveResolver() { 303 TraceLocalMoveResolver moveResolver = new TraceLocalMoveResolver(this); 304 assert moveResolver.checkEmpty(); 305 return moveResolver; 306 } 307 308 void assignSpillSlot(TraceInterval interval) { 309 /* 310 * Assign the canonical spill slot of the parent (if a part of the interval is already 311 * spilled) or allocate a new spill slot. 312 */ 313 if (interval.canMaterialize()) { 314 interval.assignLocation(Value.ILLEGAL); 315 } else if (interval.spillSlot() != null) { 316 interval.assignLocation(interval.spillSlot()); 317 } else { 318 AllocatableValue slot = allocateSpillSlot(interval); 319 interval.setSpillSlot(slot); 320 interval.assignLocation(slot); 321 } 322 } 323 324 /** 325 * Returns a new spill slot or a cached entry if there is already one for the 326 * {@linkplain TraceInterval variable}. 327 */ 328 private AllocatableValue allocateSpillSlot(TraceInterval interval) { 329 int variableIndex = interval.splitParent().operandNumber; 330 OptionValues options = getOptions(); 331 if (TraceRegisterAllocationPhase.Options.TraceRACacheStackSlots.getValue(options)) { 332 AllocatableValue cachedStackSlot = cachedStackSlots[variableIndex]; 333 if (cachedStackSlot != null) { 334 TraceRegisterAllocationPhase.globalStackSlots.increment(); 335 assert cachedStackSlot.getValueKind().equals(getKind(interval)) : "CachedStackSlot: kind mismatch? " + getKind(interval) + " vs. " + cachedStackSlot.getValueKind(); 336 return cachedStackSlot; 337 } 338 } 339 VirtualStackSlot slot = frameMapBuilder.allocateSpillSlot(getKind(interval)); 340 if (TraceRegisterAllocationPhase.Options.TraceRACacheStackSlots.getValue(options)) { 341 cachedStackSlots[variableIndex] = slot; 342 } 343 TraceRegisterAllocationPhase.allocatedStackSlots.increment(); 344 return slot; 345 } 346 347 // access to block list (sorted in linear scan order) 348 public int blockCount() { 349 return sortedBlocks().length; 350 } 351 352 public AbstractBlockBase<?> blockAt(int index) { 353 return sortedBlocks()[index]; 354 } 355 356 int numLoops() { 357 return getLIR().getControlFlowGraph().getLoops().size(); 358 } 359 360 boolean isBlockBegin(int opId) { 361 return opId == 0 || blockForId(opId) != blockForId(opId - 1); 362 } 363 364 boolean isBlockEnd(int opId) { 365 boolean isBlockBegin = isBlockBegin(opId + 2); 366 assert isBlockBegin == (instructionForId(opId & (~1)) instanceof BlockEndOp); 367 return isBlockBegin; 368 } 369 370 boolean coversBlockBegin(int opId1, int opId2) { 371 return blockForId(opId1) != blockForId(opId2); 372 } 373 374 /** 375 * Determines if an {@link LIRInstruction} destroys all caller saved registers. 376 * 377 * @param opId an instruction {@linkplain LIRInstruction#id id} 378 * @return {@code true} if the instruction denoted by {@code id} destroys all caller saved 379 * registers. 380 */ 381 boolean hasCall(int opId) { 382 assert isEven(opId) : "opId not even"; 383 return instructionForId(opId).destroysCallerSavedRegisters(); 384 } 385 386 public boolean isProcessed(Value operand) { 387 return !isRegister(operand) || attributes(asRegister(operand)).isAllocatable(); 388 } 389 390 // * Phase 5: actual register allocation 391 392 private TraceInterval addToList(TraceInterval first, TraceInterval prev, TraceInterval interval) { 393 TraceInterval newFirst = first; 394 if (prev != null) { 395 prev.next = interval; 396 } else { 397 newFirst = interval; 398 } 399 return newFirst; 400 } 401 402 TraceInterval createUnhandledListByFrom(IntervalPredicate isList1) { 403 assert isSortedByFrom(sortedIntervals) : "interval list is not sorted"; 404 return createUnhandledList(isList1); 405 } 406 407 TraceInterval createUnhandledListBySpillPos(IntervalPredicate isList1) { 408 assert isSortedBySpillPos(sortedIntervals) : "interval list is not sorted"; 409 return createUnhandledList(isList1); 410 } 411 412 private TraceInterval createUnhandledList(IntervalPredicate isList1) { 413 414 TraceInterval list1 = TraceInterval.EndMarker; 415 416 TraceInterval list1Prev = null; 417 TraceInterval v; 418 419 int n = sortedIntervals.length; 420 for (int i = 0; i < n; i++) { 421 v = sortedIntervals[i]; 422 if (v == null) { 423 continue; 424 } 425 426 if (isList1.apply(v)) { 427 list1 = addToList(list1, list1Prev, v); 428 list1Prev = v; 429 } 430 } 431 432 if (list1Prev != null) { 433 list1Prev.next = TraceInterval.EndMarker; 434 } 435 436 assert list1Prev == null || list1Prev.next == TraceInterval.EndMarker : "linear list ends not with sentinel"; 437 438 return list1; 439 } 440 441 private FixedInterval addToList(FixedInterval first, FixedInterval prev, FixedInterval interval) { 442 FixedInterval newFirst = first; 443 if (prev != null) { 444 prev.next = interval; 445 } else { 446 newFirst = interval; 447 } 448 return newFirst; 449 } 450 451 FixedInterval createFixedUnhandledList() { 452 assert isSortedByFrom(fixedIntervals) : "interval list is not sorted"; 453 454 FixedInterval list1 = FixedInterval.EndMarker; 455 456 FixedInterval list1Prev = null; 457 for (FixedInterval v : fixedIntervals) { 458 459 if (v == null) { 460 continue; 461 } 462 463 v.rewindRange(); 464 list1 = addToList(list1, list1Prev, v); 465 list1Prev = v; 466 } 467 468 if (list1Prev != null) { 469 list1Prev.next = FixedInterval.EndMarker; 470 } 471 472 assert list1Prev == null || list1Prev.next == FixedInterval.EndMarker : "linear list ends not with sentinel"; 473 474 return list1; 475 } 476 477 // SORTING 478 479 protected void sortIntervalsBeforeAllocation() { 480 int sortedLen = 0; 481 for (TraceInterval interval : intervals()) { 482 if (interval != null) { 483 sortedLen++; 484 } 485 } 486 sortedIntervals = TraceLinearScanPhase.sortIntervalsBeforeAllocation(intervals(), new TraceInterval[sortedLen]); 487 } 488 489 void sortIntervalsAfterAllocation() { 490 if (hasDerivedIntervals()) { 491 // no intervals have been added during allocation, so sorted list is already up to 492 // date 493 return; 494 } 495 496 TraceInterval[] oldList = sortedIntervals; 497 TraceInterval[] newList = Arrays.copyOfRange(intervals(), firstDerivedIntervalIndex(), intervalsSize()); 498 int oldLen = oldList.length; 499 int newLen = newList.length; 500 501 // conventional sort-algorithm for new intervals 502 Arrays.sort(newList, SORT_BY_FROM_COMP); 503 504 // merge old and new list (both already sorted) into one combined list 505 TraceInterval[] combinedList = new TraceInterval[oldLen + newLen]; 506 int oldIdx = 0; 507 int newIdx = 0; 508 509 while (oldIdx + newIdx < combinedList.length) { 510 if (newIdx >= newLen || (oldIdx < oldLen && oldList[oldIdx].from() <= newList[newIdx].from())) { 511 combinedList[oldIdx + newIdx] = oldList[oldIdx]; 512 oldIdx++; 513 } else { 514 combinedList[oldIdx + newIdx] = newList[newIdx]; 515 newIdx++; 516 } 517 } 518 519 sortedIntervals = combinedList; 520 } 521 522 void sortIntervalsBySpillPos() { 523 // TODO (JE): better algorithm? 524 // conventional sort-algorithm for new intervals 525 Arrays.sort(sortedIntervals, SORT_BY_SPILL_POS_COMP); 526 } 527 528 // wrapper for Interval.splitChildAtOpId that performs a bailout in product mode 529 // instead of returning null 530 public TraceInterval splitChildAtOpId(TraceInterval interval, int opId, LIRInstruction.OperandMode mode) { 531 TraceInterval result = interval.getSplitChildAtOpId(opId, mode); 532 533 if (result != null) { 534 if (Debug.isLogEnabled()) { 535 Debug.log("Split child at pos %d of interval %s is %s", opId, interval, result); 536 } 537 return result; 538 } 539 throw new GraalError("LinearScan: interval is null"); 540 } 541 542 AllocatableValue canonicalSpillOpr(TraceInterval interval) { 543 assert interval.spillSlot() != null : "canonical spill slot not set"; 544 return interval.spillSlot(); 545 } 546 547 boolean isMaterialized(Variable operand, int opId, OperandMode mode) { 548 TraceInterval interval = intervalFor(operand); 549 assert interval != null : "interval must exist"; 550 551 if (opId != -1) { 552 /* 553 * Operands are not changed when an interval is split during allocation, so search 554 * the right interval here. 555 */ 556 interval = splitChildAtOpId(interval, opId, mode); 557 } 558 559 return isIllegal(interval.location()) && interval.canMaterialize(); 560 } 561 562 boolean isCallerSave(Value operand) { 563 return attributes(asRegister(operand)).isCallerSave(); 564 } 565 566 @SuppressWarnings("try") 567 protected void allocate(TargetDescription target, LIRGenerationResult lirGenRes, TraceAllocationContext traceContext) { 568 MoveFactory spillMoveFactory = traceContext.spillMoveFactory; 569 RegisterAllocationConfig registerAllocationConfig = traceContext.registerAllocationConfig; 570 /* 571 * This is the point to enable debug logging for the whole register allocation. 572 */ 573 try (Indent indent = Debug.logAndIndent("LinearScan allocate")) { 574 TRACE_LINEAR_SCAN_LIFETIME_ANALYSIS_PHASE.apply(target, lirGenRes, trace, spillMoveFactory, registerAllocationConfig, traceBuilderResult, this, false); 575 576 try (Scope s = Debug.scope("AfterLifetimeAnalysis", this)) { 577 578 printLir("After instruction numbering"); 579 printIntervals("Before register allocation"); 580 581 sortIntervalsBeforeAllocation(); 582 583 TRACE_LINEAR_SCAN_REGISTER_ALLOCATION_PHASE.apply(target, lirGenRes, trace, spillMoveFactory, registerAllocationConfig, traceBuilderResult, this, false); 584 printIntervals("After register allocation"); 585 586 // resolve intra-trace data-flow 587 TRACE_LINEAR_SCAN_RESOLVE_DATA_FLOW_PHASE.apply(target, lirGenRes, trace, spillMoveFactory, registerAllocationConfig, traceBuilderResult, this); 588 589 // eliminate spill moves 590 OptionValues options = getOptions(); 591 if (Options.LIROptTraceRAEliminateSpillMoves.getValue(options)) { 592 TRACE_LINEAR_SCAN_ELIMINATE_SPILL_MOVE_PHASE.apply(target, lirGenRes, trace, spillMoveFactory, registerAllocationConfig, traceBuilderResult, this); 593 } 594 595 TRACE_LINEAR_SCAN_ASSIGN_LOCATIONS_PHASE.apply(target, lirGenRes, trace, spillMoveFactory, registerAllocationConfig, traceBuilderResult, this, false); 596 597 if (DetailedAsserts.getValue(options)) { 598 verifyIntervals(); 599 } 600 } catch (Throwable e) { 601 throw Debug.handle(e); 602 } 603 } 604 } 605 606 public void printLir(String label) { 607 if (Debug.isDumpEnabled(Debug.DETAILED_LEVEL)) { 608 Debug.dump(Debug.DETAILED_LEVEL, sortedBlocks(), "%s (Trace%d)", label, trace.getId()); 609 } 610 } 611 612 boolean verify() { 613 // (check that all intervals have a correct register and that no registers are 614 // overwritten) 615 verifyIntervals(); 616 617 verifyRegisters(); 618 619 Debug.log("no errors found"); 620 621 return true; 622 } 623 624 @SuppressWarnings("try") 625 private void verifyRegisters() { 626 // Enable this logging to get output for the verification process. 627 try (Indent indent = Debug.logAndIndent("verifying register allocation")) { 628 RegisterVerifier verifier = new RegisterVerifier(this); 629 verifier.verify(blockAt(0)); 630 } 631 } 632 633 @SuppressWarnings("try") 634 protected void verifyIntervals() { 635 try (Indent indent = Debug.logAndIndent("verifying intervals")) { 636 int len = intervalsSize(); 637 638 for (int i = 0; i < len; i++) { 639 final TraceInterval i1 = intervals()[i]; 640 if (i1 == null) { 641 continue; 642 } 643 644 i1.checkSplitChildren(); 645 646 if (i1.operandNumber != i) { 647 Debug.log("Interval %d is on position %d in list", i1.operandNumber, i); 648 Debug.log(i1.logString()); 649 throw new GraalError(""); 650 } 651 652 if (getKind(i1).equals(LIRKind.Illegal)) { 653 Debug.log("Interval %d has no type assigned", i1.operandNumber); 654 Debug.log(i1.logString()); 655 throw new GraalError(""); 656 } 657 658 if (i1.location() == null) { 659 Debug.log("Interval %d has no register assigned", i1.operandNumber); 660 Debug.log(i1.logString()); 661 throw new GraalError(""); 662 } 663 664 if (i1.isEmpty()) { 665 Debug.log("Interval %d has no Range", i1.operandNumber); 666 Debug.log(i1.logString()); 667 throw new GraalError(""); 668 } 669 670 if (i1.from() >= i1.to()) { 671 Debug.log("Interval %d has zero length range", i1.operandNumber); 672 Debug.log(i1.logString()); 673 throw new GraalError(""); 674 } 675 676 // special intervals that are created in MoveResolver 677 // . ignore them because the range information has no meaning there 678 if (i1.from() == 1 && i1.to() == 2) { 679 continue; 680 } 681 // check any intervals 682 for (int j = i + 1; j < len; j++) { 683 final TraceInterval i2 = intervals()[j]; 684 if (i2 == null) { 685 continue; 686 } 687 688 // special intervals that are created in MoveResolver 689 // . ignore them because the range information has no meaning there 690 if (i2.from() == 1 && i2.to() == 2) { 691 continue; 692 } 693 Value l1 = i1.location(); 694 Value l2 = i2.location(); 695 boolean intersects = i1.intersects(i2); 696 if (intersects && !isIllegal(l1) && (l1.equals(l2))) { 697 throw GraalError.shouldNotReachHere(String.format("Intervals %s and %s overlap and have the same register assigned\n%s\n%s", i1, i2, i1.logString(), i2.logString())); 698 } 699 } 700 // check fixed intervals 701 for (FixedInterval i2 : fixedIntervals()) { 702 if (i2 == null) { 703 continue; 704 } 705 706 Value l1 = i1.location(); 707 Value l2 = i2.location(); 708 boolean intersects = i2.intersects(i1); 709 if (intersects && !isIllegal(l1) && (l1.equals(l2))) { 710 throw GraalError.shouldNotReachHere(String.format("Intervals %s and %s overlap and have the same register assigned\n%s\n%s", i1, i2, i1.logString(), i2.logString())); 711 } 712 } 713 } 714 } 715 } 716 717 public LIR getLIR() { 718 return res.getLIR(); 719 } 720 721 public FrameMapBuilder getFrameMapBuilder() { 722 return frameMapBuilder; 723 } 724 725 public AbstractBlockBase<?>[] sortedBlocks() { 726 return trace.getBlocks(); 727 } 728 729 public RegisterArray getRegisters() { 730 return registers; 731 } 732 733 public RegisterAllocationConfig getRegisterAllocationConfig() { 734 return regAllocConfig; 735 } 736 737 public boolean callKillsRegisters() { 738 return regAllocConfig.getRegisterConfig().areAllAllocatableRegistersCallerSaved(); 739 } 740 741 boolean neverSpillConstants() { 742 return neverSpillConstants; 743 } 744 745 // IntervalData 746 747 private static final int SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT = 1; 748 749 /** 750 * The index of the first entry in {@link #intervals} for a 751 * {@linkplain #createDerivedInterval(TraceInterval) derived interval}. 752 */ 753 private int firstDerivedIntervalIndex = -1; 754 755 /** 756 * @see #fixedIntervals() 757 */ 758 private final FixedInterval[] fixedIntervals; 759 760 /** 761 * @see #intervals() 762 */ 763 private TraceInterval[] intervals; 764 765 /** 766 * The number of valid entries in {@link #intervals}. 767 */ 768 private int intervalsSize; 769 770 /** 771 * Map from an instruction {@linkplain LIRInstruction#id id} to the instruction. Entries 772 * should be retrieved with {@link #instructionForId(int)} as the id is not simply an index 773 * into this array. 774 */ 775 private LIRInstruction[] opIdToInstructionMap; 776 777 /** 778 * Map from an instruction {@linkplain LIRInstruction#id id} to the 779 * {@linkplain AbstractBlockBase block} containing the instruction. Entries should be 780 * retrieved with {@link #blockForId(int)} as the id is not simply an index into this array. 781 */ 782 private AbstractBlockBase<?>[] opIdToBlockMap; 783 784 /** 785 * Map from {@linkplain #operandNumber operand numbers} to intervals. 786 */ 787 TraceInterval[] intervals() { 788 return intervals; 789 } 790 791 /** 792 * Map from {@linkplain Register#number} to fixed intervals. 793 */ 794 FixedInterval[] fixedIntervals() { 795 return fixedIntervals; 796 } 797 798 void initIntervals() { 799 intervalsSize = operandSize(); 800 intervals = new TraceInterval[intervalsSize + (intervalsSize >> SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT)]; 801 } 802 803 /** 804 * Creates a new fixed interval. 805 * 806 * @param reg the operand for the interval 807 * @return the created interval 808 */ 809 private FixedInterval createFixedInterval(RegisterValue reg) { 810 FixedInterval interval = new FixedInterval(reg); 811 int operandNumber = reg.getRegister().number; 812 assert fixedIntervals[operandNumber] == null; 813 fixedIntervals[operandNumber] = interval; 814 return interval; 815 } 816 817 /** 818 * Creates a new interval. 819 * 820 * @param operand the operand for the interval 821 * @return the created interval 822 */ 823 private TraceInterval createInterval(Variable operand) { 824 assert isLegal(operand); 825 int operandNumber = operandNumber(operand); 826 assert operand.index == operandNumber; 827 TraceInterval interval = new TraceInterval(operand); 828 assert operandNumber < intervalsSize; 829 assert intervals[operandNumber] == null; 830 intervals[operandNumber] = interval; 831 return interval; 832 } 833 834 /** 835 * Creates an interval as a result of splitting or spilling another interval. 836 * 837 * @param source an interval being split of spilled 838 * @return a new interval derived from {@code source} 839 */ 840 TraceInterval createDerivedInterval(TraceInterval source) { 841 if (firstDerivedIntervalIndex == -1) { 842 firstDerivedIntervalIndex = intervalsSize; 843 } 844 if (intervalsSize == intervals.length) { 845 intervals = Arrays.copyOf(intervals, intervals.length + (intervals.length >> SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT) + 1); 846 } 847 // increments intervalsSize 848 Variable variable = createVariable(getKind(source)); 849 850 assert intervalsSize <= intervals.length; 851 852 TraceInterval interval = createInterval(variable); 853 assert intervals[intervalsSize - 1] == interval; 854 return interval; 855 } 856 857 /** 858 * Creates a new variable for a derived interval. Note that the variable is not 859 * {@linkplain LIR#numVariables() managed} so it must not be inserted into the {@link LIR}. 860 */ 861 private Variable createVariable(ValueKind<?> kind) { 862 return new Variable(kind, intervalsSize++); 863 } 864 865 boolean hasDerivedIntervals() { 866 return firstDerivedIntervalIndex != -1; 867 } 868 869 int firstDerivedIntervalIndex() { 870 return firstDerivedIntervalIndex; 871 } 872 873 public int intervalsSize() { 874 return intervalsSize; 875 } 876 877 FixedInterval fixedIntervalFor(RegisterValue reg) { 878 return fixedIntervals[reg.getRegister().number]; 879 } 880 881 FixedInterval getOrCreateFixedInterval(RegisterValue reg) { 882 FixedInterval ret = fixedIntervalFor(reg); 883 if (ret == null) { 884 return createFixedInterval(reg); 885 } else { 886 return ret; 887 } 888 } 889 890 TraceInterval intervalFor(Variable operand) { 891 return intervalFor(operandNumber(operand)); 892 } 893 894 TraceInterval intervalFor(int operandNumber) { 895 assert operandNumber < intervalsSize; 896 return intervals[operandNumber]; 897 } 898 899 TraceInterval getOrCreateInterval(Variable operand) { 900 TraceInterval ret = intervalFor(operand); 901 if (ret == null) { 902 return createInterval(operand); 903 } else { 904 return ret; 905 } 906 } 907 908 void initOpIdMaps(int numInstructions) { 909 opIdToInstructionMap = new LIRInstruction[numInstructions]; 910 opIdToBlockMap = new AbstractBlockBase<?>[numInstructions]; 911 } 912 913 void putOpIdMaps(int index, LIRInstruction op, AbstractBlockBase<?> block) { 914 opIdToInstructionMap[index] = op; 915 opIdToBlockMap[index] = block; 916 } 917 918 /** 919 * Gets the highest instruction id allocated by this object. 920 */ 921 int maxOpId() { 922 assert opIdToInstructionMap.length > 0 : "no operations"; 923 return (opIdToInstructionMap.length - 1) << 1; 924 } 925 926 /** 927 * Converts an {@linkplain LIRInstruction#id instruction id} to an instruction index. All 928 * LIR instructions in a method have an index one greater than their linear-scan order 929 * predecessor with the first instruction having an index of 0. 930 */ 931 private int opIdToIndex(int opId) { 932 return opId >> 1; 933 } 934 935 /** 936 * Retrieves the {@link LIRInstruction} based on its {@linkplain LIRInstruction#id id}. 937 * 938 * @param opId an instruction {@linkplain LIRInstruction#id id} 939 * @return the instruction whose {@linkplain LIRInstruction#id} {@code == id} 940 */ 941 LIRInstruction instructionForId(int opId) { 942 assert isEven(opId) : "opId not even"; 943 LIRInstruction instr = opIdToInstructionMap[opIdToIndex(opId)]; 944 assert instr.id() == opId; 945 return instr; 946 } 947 948 /** 949 * Gets the block containing a given instruction. 950 * 951 * @param opId an instruction {@linkplain LIRInstruction#id id} 952 * @return the block containing the instruction denoted by {@code opId} 953 */ 954 AbstractBlockBase<?> blockForId(int opId) { 955 assert opIdToBlockMap.length > 0 && opId >= 0 && opId <= maxOpId() + 1 : "opId out of range: " + opId; 956 return opIdToBlockMap[opIdToIndex(opId)]; 957 } 958 959 @SuppressWarnings("try") 960 public void printIntervals(String label) { 961 if (Debug.isDumpEnabled(Debug.DETAILED_LEVEL)) { 962 if (Debug.isLogEnabled()) { 963 try (Indent indent = Debug.logAndIndent("intervals %s", label)) { 964 for (FixedInterval interval : fixedIntervals) { 965 if (interval != null) { 966 Debug.log("%s", interval.logString()); 967 } 968 } 969 970 for (TraceInterval interval : intervals) { 971 if (interval != null) { 972 Debug.log("%s", interval.logString()); 973 } 974 } 975 976 try (Indent indent2 = Debug.logAndIndent("Basic Blocks")) { 977 for (AbstractBlockBase<?> block : trace.getBlocks()) { 978 Debug.log("B%d [%d, %d, %s] ", block.getId(), getFirstLirInstructionId(block), getLastLirInstructionId(block), block.getLoop()); 979 } 980 } 981 } 982 } 983 Debug.dump(Debug.DETAILED_LEVEL, this, "%s (Trace%d)", label, trace.getId()); 984 } 985 } 986 987 @Override 988 public void visitIntervals(IntervalVisitor visitor) { 989 for (FixedInterval interval : fixedIntervals) { 990 if (interval != null) { 991 printFixedInterval(interval, visitor); 992 } 993 } 994 for (TraceInterval interval : intervals) { 995 if (interval != null) { 996 printInterval(interval, visitor); 997 } 998 } 999 } 1000 1001 boolean hasInterTracePredecessor(AbstractBlockBase<?> block) { 1002 return TraceUtil.hasInterTracePredecessor(traceBuilderResult, trace, block); 1003 } 1004 1005 boolean hasInterTraceSuccessor(AbstractBlockBase<?> block) { 1006 return TraceUtil.hasInterTraceSuccessor(traceBuilderResult, trace, block); 1007 } 1008 1009 private void printInterval(TraceInterval interval, IntervalVisitor visitor) { 1010 Value hint = interval.locationHint(false) != null ? interval.locationHint(false).location() : null; 1011 AllocatableValue operand = getOperand(interval); 1012 String type = getKind(interval).getPlatformKind().toString(); 1013 visitor.visitIntervalStart(getOperand(interval.splitParent()), operand, interval.location(), hint, type); 1014 1015 // print ranges 1016 visitor.visitRange(interval.from(), interval.to()); 1017 1018 // print use positions 1019 int prev = -1; 1020 for (int i = interval.numUsePos() - 1; i >= 0; --i) { 1021 assert prev < interval.getUsePos(i) : "use positions not sorted"; 1022 visitor.visitUsePos(interval.getUsePos(i), interval.getUsePosRegisterPriority(i)); 1023 prev = interval.getUsePos(i); 1024 } 1025 1026 visitor.visitIntervalEnd(interval.spillState()); 1027 } 1028 1029 AllocatableValue getOperand(TraceInterval interval) { 1030 return interval.operand; 1031 } 1032 1033 ValueKind<?> getKind(TraceInterval interval) { 1034 return getOperand(interval).getValueKind(); 1035 } 1036 1037 } 1038 1039 public static boolean verifyEquals(TraceLinearScan a, TraceLinearScan b) { 1040 assert compareFixed(a.fixedIntervals(), b.fixedIntervals()); 1041 assert compareIntervals(a.intervals(), b.intervals()); 1042 return true; 1043 } 1044 1045 private static boolean compareIntervals(TraceInterval[] a, TraceInterval[] b) { 1046 for (int i = 0; i < Math.max(a.length, b.length); i++) { 1047 if (i >= a.length) { 1048 assert b[i] == null : "missing a interval: " + i + " b: " + b[i]; 1049 continue; 1050 } 1051 if (i >= b.length) { 1052 assert a[i] == null : "missing b interval: " + i + " a: " + a[i]; 1053 continue; 1054 } 1055 compareInterval(a[i], b[i]); 1056 } 1057 return true; 1058 } 1059 1060 private static void compareInterval(TraceInterval a, TraceInterval b) { 1061 if (a == null) { 1062 assert b == null : "First interval is null but second is: " + b; 1063 return; 1064 } 1065 assert b != null : "Second interval is null but forst is: " + a; 1066 assert a.operandNumber == b.operandNumber : "Operand mismatch: " + a + " vs. " + b; 1067 assert a.from() == b.from() : "From mismatch: " + a + " vs. " + b; 1068 assert a.to() == b.to() : "To mismatch: " + a + " vs. " + b; 1069 assert verifyIntervalsEquals(a, b); 1070 } 1071 1072 private static boolean verifyIntervalsEquals(TraceInterval a, TraceInterval b) { 1073 for (int i = 0; i < Math.max(a.numUsePos(), b.numUsePos()); i++) { 1074 assert i < a.numUsePos() : "missing a usepos: " + i + " b: " + b; 1075 assert i < b.numUsePos() : "missing b usepos: " + i + " a: " + a; 1076 int aPos = a.getUsePos(i); 1077 int bPos = b.getUsePos(i); 1078 assert aPos == bPos : "Use Positions differ: " + aPos + " vs. " + bPos; 1079 RegisterPriority aReg = a.getUsePosRegisterPriority(i); 1080 RegisterPriority bReg = b.getUsePosRegisterPriority(i); 1081 assert aReg == bReg : "Register priority differ: " + aReg + " vs. " + bReg; 1082 } 1083 return true; 1084 } 1085 1086 private static boolean compareFixed(FixedInterval[] a, FixedInterval[] b) { 1087 for (int i = 0; i < Math.max(a.length, b.length); i++) { 1088 if (i >= a.length) { 1089 assert b[i] == null : "missing a interval: " + i + " b: " + b[i]; 1090 continue; 1091 } 1092 if (i >= b.length) { 1093 assert a[i] == null : "missing b interval: " + i + " a: " + a[i]; 1094 continue; 1095 } 1096 compareFixedInterval(a[i], b[i]); 1097 } 1098 return true; 1099 } 1100 1101 private static void compareFixedInterval(FixedInterval a, FixedInterval b) { 1102 if (a == null) { 1103 assert b == null || isEmptyInterval(b) : "First interval is null but second is: " + b; 1104 return; 1105 } 1106 if (b == null) { 1107 assert isEmptyInterval(a) : "Second interval is null but first is: " + a; 1108 return; 1109 } 1110 assert a.operand.equals(b.operand) : "Operand mismatch: " + a + " vs. " + b; 1111 assert a.from() == b.from() : "From mismatch: " + a + " vs. " + b; 1112 assert a.to() == b.to() : "To mismatch: " + a + " vs. " + b; 1113 assert verifyFixeEquas(a, b); 1114 } 1115 1116 private static boolean verifyFixeEquas(FixedInterval a, FixedInterval b) { 1117 a.rewindRange(); 1118 b.rewindRange(); 1119 while (!a.currentAtEnd()) { 1120 assert !b.currentAtEnd() : "Fixed range mismatch: " + a + " vs. " + b; 1121 assert a.currentFrom() == b.currentFrom() : "From range mismatch: " + a + " vs. " + b + " from: " + a.currentFrom() + " vs. " + b.currentFrom(); 1122 assert a.currentTo() == b.currentTo() : "To range mismatch: " + a + " vs. " + b + " from: " + a.currentTo() + " vs. " + b.currentTo(); 1123 a.nextRange(); 1124 b.nextRange(); 1125 } 1126 assert b.currentAtEnd() : "Fixed range mismatch: " + a + " vs. " + b; 1127 return true; 1128 } 1129 1130 private static boolean isEmptyInterval(FixedInterval fixed) { 1131 return fixed.from() == -1 && fixed.to() == 0; 1132 } 1133 1134 private static void printFixedInterval(FixedInterval interval, IntervalVisitor visitor) { 1135 Value hint = null; 1136 AllocatableValue operand = interval.operand; 1137 String type = "fixed"; 1138 visitor.visitIntervalStart(operand, operand, operand, hint, type); 1139 1140 // print ranges 1141 for (FixedRange range = interval.first(); range != FixedRange.EndMarker; range = range.next) { 1142 visitor.visitRange(range.from, range.to); 1143 } 1144 1145 // no use positions 1146 1147 visitor.visitIntervalEnd("NOT_SUPPORTED"); 1148 1149 } 1150 1151 }