1 /* 2 * Copyright (c) 2009, 2016, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 */ 23 package org.graalvm.compiler.lir.alloc.trace.lsra; 24 25 import static org.graalvm.compiler.core.common.GraalOptions.DetailedAsserts; 26 import static org.graalvm.compiler.lir.LIRValueUtil.isVariable; 27 import static jdk.vm.ci.code.CodeUtil.isEven; 28 import static jdk.vm.ci.code.ValueUtil.asRegister; 29 import static jdk.vm.ci.code.ValueUtil.asRegisterValue; 30 import static jdk.vm.ci.code.ValueUtil.isIllegal; 31 import static jdk.vm.ci.code.ValueUtil.isLegal; 32 import static jdk.vm.ci.code.ValueUtil.isRegister; 33 34 import java.util.Arrays; 35 import java.util.EnumSet; 36 import java.util.List; 37 38 import org.graalvm.compiler.core.common.LIRKind; 39 import org.graalvm.compiler.core.common.alloc.RegisterAllocationConfig; 40 import org.graalvm.compiler.core.common.alloc.Trace; 41 import org.graalvm.compiler.core.common.alloc.TraceBuilderResult; 42 import org.graalvm.compiler.core.common.cfg.AbstractBlockBase; 43 import org.graalvm.compiler.debug.Debug; 44 import org.graalvm.compiler.debug.Debug.Scope; 45 import org.graalvm.compiler.debug.GraalError; 46 import org.graalvm.compiler.debug.Indent; 47 import org.graalvm.compiler.lir.LIR; 48 import org.graalvm.compiler.lir.LIRInstruction; 49 import org.graalvm.compiler.lir.LIRInstruction.OperandFlag; 50 import org.graalvm.compiler.lir.LIRInstruction.OperandMode; 51 import org.graalvm.compiler.lir.LIRValueUtil; 52 import org.graalvm.compiler.lir.StandardOp.BlockEndOp; 53 import org.graalvm.compiler.lir.ValueConsumer; 54 import org.graalvm.compiler.lir.Variable; 55 import org.graalvm.compiler.lir.VirtualStackSlot; 56 import org.graalvm.compiler.lir.alloc.trace.TraceAllocationPhase; 57 import org.graalvm.compiler.lir.alloc.trace.TraceAllocationPhase.TraceAllocationContext; 58 import org.graalvm.compiler.lir.alloc.trace.TraceBuilderPhase; 59 import org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase; 60 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.RegisterPriority; 61 import 
org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanAllocationPhase.TraceLinearScanAllocationContext; 62 import org.graalvm.compiler.lir.debug.IntervalDumper; 63 import org.graalvm.compiler.lir.debug.IntervalDumper.IntervalVisitor; 64 import org.graalvm.compiler.lir.framemap.FrameMapBuilder; 65 import org.graalvm.compiler.lir.gen.LIRGenerationResult; 66 import org.graalvm.compiler.lir.gen.LIRGeneratorTool.MoveFactory; 67 import org.graalvm.compiler.lir.phases.LIRPhase; 68 import org.graalvm.compiler.options.NestedBooleanOptionValue; 69 import org.graalvm.compiler.options.Option; 70 import org.graalvm.compiler.options.OptionType; 71 import org.graalvm.compiler.options.OptionValue; 72 73 import jdk.vm.ci.code.Register; 74 import jdk.vm.ci.code.RegisterArray; 75 import jdk.vm.ci.code.RegisterAttributes; 76 import jdk.vm.ci.code.RegisterValue; 77 import jdk.vm.ci.code.TargetDescription; 78 import jdk.vm.ci.meta.AllocatableValue; 79 import jdk.vm.ci.meta.Value; 80 import jdk.vm.ci.meta.ValueKind; 81 82 /** 83 * Implementation of the Linear Scan allocation approach for traces described in 84 * <a href="http://dx.doi.org/10.1145/2972206.2972211">"Trace-based Register Allocation in a JIT 85 * Compiler"</a> by Josef Eisl et al. It is derived from 86 * <a href="http://doi.acm.org/10.1145/1064979.1064998" > "Optimized Interval Splitting in a Linear 87 * Scan Register Allocator"</a> by Christian Wimmer and Hanspeter Moessenboeck. 
 */
public final class TraceLinearScanPhase extends TraceAllocationPhase<TraceAllocationContext> {

    public static class Options {
        // @formatter:off
        @Option(help = "Enable spill position optimization", type = OptionType.Debug)
        public static final OptionValue<Boolean> LIROptTraceRAEliminateSpillMoves = new NestedBooleanOptionValue(LIRPhase.Options.LIROptimization, true);
        // @formatter:on
    }

    /* The sub-phases of trace linear scan, shared by all allocator instances. */
    private static final TraceLinearScanRegisterAllocationPhase TRACE_LINEAR_SCAN_REGISTER_ALLOCATION_PHASE = new TraceLinearScanRegisterAllocationPhase();
    private static final TraceLinearScanAssignLocationsPhase TRACE_LINEAR_SCAN_ASSIGN_LOCATIONS_PHASE = new TraceLinearScanAssignLocationsPhase();
    private static final TraceLinearScanEliminateSpillMovePhase TRACE_LINEAR_SCAN_ELIMINATE_SPILL_MOVE_PHASE = new TraceLinearScanEliminateSpillMovePhase();
    private static final TraceLinearScanResolveDataFlowPhase TRACE_LINEAR_SCAN_RESOLVE_DATA_FLOW_PHASE = new TraceLinearScanResolveDataFlowPhase();
    private static final TraceLinearScanLifetimeAnalysisPhase TRACE_LINEAR_SCAN_LIFETIME_ANALYSIS_PHASE = new TraceLinearScanLifetimeAnalysisPhase();

    public static final int DOMINATOR_SPILL_MOVE_ID = -2;

    private final FrameMapBuilder frameMapBuilder;
    private final RegisterAttributes[] registerAttributes;
    private final RegisterArray registers;
    private final RegisterAllocationConfig regAllocConfig;
    private final MoveFactory moveFactory;

    protected final TraceBuilderResult traceBuilderResult;

    private final boolean neverSpillConstants;

    /**
     * Maps from {@link Variable#index} to a spill stack slot. If
     * {@linkplain org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options#TraceRACacheStackSlots
     * enabled} a {@link Variable} is always assigned to the same stack slot.
     */
    private final AllocatableValue[] cachedStackSlots;

    private final LIRGenerationResult res;

    public TraceLinearScanPhase(TargetDescription target, LIRGenerationResult res, MoveFactory spillMoveFactory, RegisterAllocationConfig regAllocConfig, TraceBuilderResult traceBuilderResult,
                    boolean neverSpillConstants, AllocatableValue[] cachedStackSlots) {
        this.res = res;
        this.moveFactory = spillMoveFactory;
        this.frameMapBuilder = res.getFrameMapBuilder();
        this.registerAttributes = regAllocConfig.getRegisterConfig().getAttributesMap();
        this.regAllocConfig = regAllocConfig;

        this.registers = target.arch.getRegisters();
        this.traceBuilderResult = traceBuilderResult;
        this.neverSpillConstants = neverSpillConstants;
        this.cachedStackSlots = cachedStackSlots;

    }

    public static boolean isVariableOrRegister(Value value) {
        return isVariable(value) || isRegister(value);
    }

    /** Filter over {@link TraceInterval}s, used to build sub-lists of the sorted interval list. */
    abstract static class IntervalPredicate {

        abstract boolean apply(TraceInterval i);
    }

    /** Matches intervals whose operand is a (pre-colored) register. */
    static final IntervalPredicate IS_PRECOLORED_INTERVAL = new IntervalPredicate() {

        @Override
        public boolean apply(TraceInterval i) {
            return isRegister(i.operand);
        }
    };

    /** Matches intervals whose operand is a {@link Variable}. */
    static final IntervalPredicate IS_VARIABLE_INTERVAL = new IntervalPredicate() {

        @Override
        public boolean apply(TraceInterval i) {
            return isVariable(i.operand);
        }
    };

    /** Matches intervals whose operand is not a register (i.e. variables and stack values). */
    static final IntervalPredicate IS_STACK_INTERVAL = new IntervalPredicate() {

        @Override
        public boolean apply(TraceInterval i) {
            return !isRegister(i.operand);
        }
    };

    public TraceLinearScan createAllocator(Trace trace) {
        return new TraceLinearScan(trace);
    }

    @Override
    protected void run(TargetDescription target, LIRGenerationResult lirGenRes, Trace trace, TraceAllocationContext traceContext) {
        createAllocator(trace).allocate(target, lirGenRes, traceContext);
    }

    /**
     * Assertion helper: checks that {@code intervals} is non-null-element and sorted by
     * {@link IntervalHint#from()}. Always returns {@code true} so it can be used in asserts.
     */
    private static <T extends IntervalHint> boolean isSortedByFrom(T[] intervals) {
        int from = -1;
        for (T interval : intervals) {
            assert interval != null;
            assert from <= interval.from();
            from = interval.from();
        }
        return true;
    }

    /**
     * Assertion helper: checks that {@code intervals} is sorted by
     * {@link TraceInterval#spillDefinitionPos()}. Always returns {@code true}.
     */
    private static boolean isSortedBySpillPos(TraceInterval[] intervals) {
        int from = -1;
        for (TraceInterval interval : intervals) {
            assert interval != null;
            assert from <= interval.spillDefinitionPos();
            from = interval.spillDefinitionPos();
        }
        return true;
    }

    /**
     * Copies the non-null entries of {@code intervals} into {@code sortedList}, sorted by
     * {@link IntervalHint#from()}. {@code sortedList} must have exactly as many slots as there are
     * non-null entries.
     */
    private static <T extends IntervalHint> T[] sortIntervalsBeforeAllocation(T[] intervals, T[] sortedList) {
        int sortedIdx = 0;
        int sortedFromMax = -1;

        // special sorting algorithm: the original interval-list is almost sorted,
        // only some intervals are swapped. So this is much faster than a complete QuickSort
        for (T interval : intervals) {
            if (interval != null) {
                int from = interval.from();

                if (sortedFromMax <= from) {
                    sortedList[sortedIdx++] = interval;
                    sortedFromMax = interval.from();
                } else {
                    // the assumption that the intervals are already sorted failed,
                    // so this interval must be sorted in manually
                    int j;
                    for (j = sortedIdx - 1; j >= 0 && from < sortedList[j].from(); j--) {
                        sortedList[j + 1] = sortedList[j];
                    }
                    sortedList[j + 1] = interval;
                    sortedIdx++;
                }
            }
        }
        return sortedList;
    }

    /** Per-trace allocator instance; holds the intervals and id maps for one trace. */
    public final class TraceLinearScan implements IntervalDumper {

        /**
         * Intervals sorted by {@link TraceInterval#from()}.
         */
        private TraceInterval[] sortedIntervals;

        /**
         * Fixed intervals sorted by {@link FixedInterval#from()}.
         */
        private FixedInterval[] sortedFixedIntervals;

        private final Trace trace;

        public TraceLinearScan(Trace trace) {
            this.trace = trace;
            // one fixed-interval slot per physical register, indexed by register number
            this.fixedIntervals = new FixedInterval[registers.size()];
        }

        /**
         * Converts an operand (variable or register) to an index in a flat address space covering
         * all the {@linkplain Variable variables} and {@linkplain RegisterValue registers} being
         * processed by this allocator.
         */
        int operandNumber(Value operand) {
            assert !isRegister(operand) : "Register do not have operand numbers: " + operand;
            assert isVariable(operand) : "Unsupported Value " + operand;
            return ((Variable) operand).index;
        }

        /**
         * Gets the number of operands. This value will increase by 1 for new variable.
         */
        int operandSize() {
            return getLIR().numVariables();
        }

        /**
         * Gets the number of registers. This value will never change.
         */
        int numRegisters() {
            return registers.size();
        }

        /** Returns the id of the first LIR instruction of {@code block}. */
        public int getFirstLirInstructionId(AbstractBlockBase<?> block) {
            int result = getLIR().getLIRforBlock(block).get(0).id();
            assert result >= 0;
            return result;
        }

        /** Returns the id of the last LIR instruction of {@code block}. */
        public int getLastLirInstructionId(AbstractBlockBase<?> block) {
            List<LIRInstruction> instructions = getLIR().getLIRforBlock(block);
            int result = instructions.get(instructions.size() - 1).id();
            assert result >= 0;
            return result;
        }

        /**
         * Gets an object describing the attributes of a given register according to this register
         * configuration.
         */
        public RegisterAttributes attributes(Register reg) {
            return registerAttributes[reg.number];
        }

        public MoveFactory getSpillMoveFactory() {
            return moveFactory;
        }

        protected TraceLocalMoveResolver createMoveResolver() {
            TraceLocalMoveResolver moveResolver = new TraceLocalMoveResolver(this);
            assert moveResolver.checkEmpty();
            return moveResolver;
        }

        /**
         * Assigns {@code interval} its spill location: {@link Value#ILLEGAL} for materializable
         * intervals, otherwise the (possibly newly allocated) canonical spill slot.
         */
        void assignSpillSlot(TraceInterval interval) {
            /*
             * Assign the canonical spill slot of the parent (if a part of the interval is already
             * spilled) or allocate a new spill slot.
             */
            if (interval.canMaterialize()) {
                interval.assignLocation(Value.ILLEGAL);
            } else if (interval.spillSlot() != null) {
                interval.assignLocation(interval.spillSlot());
            } else {
                AllocatableValue slot = allocateSpillSlot(interval);
                interval.setSpillSlot(slot);
                interval.assignLocation(slot);
            }
        }

        /**
         * Returns a new spill slot or a cached entry if there is already one for the
         * {@linkplain TraceInterval#operand variable}.
         */
        private AllocatableValue allocateSpillSlot(TraceInterval interval) {
            // the cache is keyed by the split parent's variable index so all split children share
            // the same slot
            int variableIndex = LIRValueUtil.asVariable(interval.splitParent().operand).index;
            if (TraceRegisterAllocationPhase.Options.TraceRACacheStackSlots.getValue()) {
                AllocatableValue cachedStackSlot = cachedStackSlots[variableIndex];
                if (cachedStackSlot != null) {
                    TraceRegisterAllocationPhase.globalStackSlots.increment();
                    assert cachedStackSlot.getValueKind().equals(interval.kind()) : "CachedStackSlot: kind mismatch? " + interval.kind() + " vs. " + cachedStackSlot.getValueKind();
                    return cachedStackSlot;
                }
            }
            VirtualStackSlot slot = frameMapBuilder.allocateSpillSlot(interval.kind());
            if (TraceRegisterAllocationPhase.Options.TraceRACacheStackSlots.getValue()) {
                cachedStackSlots[variableIndex] = slot;
            }
            TraceRegisterAllocationPhase.allocatedStackSlots.increment();
            return slot;
        }

        // access to block list (sorted in linear scan order)
        public int blockCount() {
            return sortedBlocks().length;
        }

        public AbstractBlockBase<?> blockAt(int index) {
            return sortedBlocks()[index];
        }

        int numLoops() {
            return getLIR().getControlFlowGraph().getLoops().size();
        }

        /** Returns {@code true} if {@code opId} is the first instruction id of its block. */
        boolean isBlockBegin(int opId) {
            return opId == 0 || blockForId(opId) != blockForId(opId - 1);
        }

        /** Returns {@code true} if {@code opId} denotes the last instruction of its block. */
        boolean isBlockEnd(int opId) {
            // opId + 2 is the next instruction id; if it starts a new block, opId ends this one
            boolean isBlockBegin = isBlockBegin(opId + 2);
            assert isBlockBegin == (instructionForId(opId & (~1)) instanceof BlockEndOp);
            return isBlockBegin;
        }

        /** Returns {@code true} if {@code opId1} and {@code opId2} lie in different blocks. */
        boolean coversBlockBegin(int opId1, int opId2) {
            return blockForId(opId1) != blockForId(opId2);
        }

        /**
         * Determines if an {@link LIRInstruction} destroys all caller saved registers.
         *
         * @param opId an instruction {@linkplain LIRInstruction#id id}
         * @return {@code true} if the instruction denoted by {@code id} destroys all caller saved
         *         registers.
376 */ 377 boolean hasCall(int opId) { 378 assert isEven(opId) : "opId not even"; 379 return instructionForId(opId).destroysCallerSavedRegisters(); 380 } 381 382 public boolean isProcessed(Value operand) { 383 return !isRegister(operand) || attributes(asRegister(operand)).isAllocatable(); 384 } 385 386 // * Phase 5: actual register allocation 387 388 private TraceInterval addToList(TraceInterval first, TraceInterval prev, TraceInterval interval) { 389 TraceInterval newFirst = first; 390 if (prev != null) { 391 prev.next = interval; 392 } else { 393 newFirst = interval; 394 } 395 return newFirst; 396 } 397 398 TraceInterval createUnhandledListByFrom(IntervalPredicate isList1) { 399 assert isSortedByFrom(sortedIntervals) : "interval list is not sorted"; 400 return createUnhandledList(isList1); 401 } 402 403 TraceInterval createUnhandledListBySpillPos(IntervalPredicate isList1) { 404 assert isSortedBySpillPos(sortedIntervals) : "interval list is not sorted"; 405 return createUnhandledList(isList1); 406 } 407 408 private TraceInterval createUnhandledList(IntervalPredicate isList1) { 409 410 TraceInterval list1 = TraceInterval.EndMarker; 411 412 TraceInterval list1Prev = null; 413 TraceInterval v; 414 415 int n = sortedIntervals.length; 416 for (int i = 0; i < n; i++) { 417 v = sortedIntervals[i]; 418 if (v == null) { 419 continue; 420 } 421 422 if (isList1.apply(v)) { 423 list1 = addToList(list1, list1Prev, v); 424 list1Prev = v; 425 } 426 } 427 428 if (list1Prev != null) { 429 list1Prev.next = TraceInterval.EndMarker; 430 } 431 432 assert list1Prev == null || list1Prev.next == TraceInterval.EndMarker : "linear list ends not with sentinel"; 433 434 return list1; 435 } 436 437 private FixedInterval addToList(FixedInterval first, FixedInterval prev, FixedInterval interval) { 438 FixedInterval newFirst = first; 439 if (prev != null) { 440 prev.next = interval; 441 } else { 442 newFirst = interval; 443 } 444 return newFirst; 445 } 446 447 FixedInterval 
createFixedUnhandledList() { 448 assert isSortedByFrom(sortedFixedIntervals) : "interval list is not sorted"; 449 450 FixedInterval list1 = FixedInterval.EndMarker; 451 452 FixedInterval list1Prev = null; 453 FixedInterval v; 454 455 int n = sortedFixedIntervals.length; 456 for (int i = 0; i < n; i++) { 457 v = sortedFixedIntervals[i]; 458 if (v == null) { 459 continue; 460 } 461 462 v.rewindRange(); 463 list1 = addToList(list1, list1Prev, v); 464 list1Prev = v; 465 } 466 467 if (list1Prev != null) { 468 list1Prev.next = FixedInterval.EndMarker; 469 } 470 471 assert list1Prev == null || list1Prev.next == FixedInterval.EndMarker : "linear list ends not with sentinel"; 472 473 return list1; 474 } 475 476 // SORTING 477 478 protected void sortIntervalsBeforeAllocation() { 479 int sortedLen = 0; 480 for (TraceInterval interval : intervals()) { 481 if (interval != null) { 482 sortedLen++; 483 } 484 } 485 sortedIntervals = TraceLinearScanPhase.sortIntervalsBeforeAllocation(intervals(), new TraceInterval[sortedLen]); 486 } 487 488 protected void sortFixedIntervalsBeforeAllocation() { 489 int sortedLen = 0; 490 for (FixedInterval interval : fixedIntervals()) { 491 if (interval != null) { 492 sortedLen++; 493 } 494 } 495 sortedFixedIntervals = TraceLinearScanPhase.sortIntervalsBeforeAllocation(fixedIntervals(), new FixedInterval[sortedLen]); 496 } 497 498 void sortIntervalsAfterAllocation() { 499 if (hasDerivedIntervals()) { 500 // no intervals have been added during allocation, so sorted list is already up to 501 // date 502 return; 503 } 504 505 TraceInterval[] oldList = sortedIntervals; 506 TraceInterval[] newList = Arrays.copyOfRange(intervals(), firstDerivedIntervalIndex(), intervalsSize()); 507 int oldLen = oldList.length; 508 int newLen = newList.length; 509 510 // conventional sort-algorithm for new intervals 511 Arrays.sort(newList, (TraceInterval a, TraceInterval b) -> a.from() - b.from()); 512 513 // merge old and new list (both already sorted) into one combined 
list 514 TraceInterval[] combinedList = new TraceInterval[oldLen + newLen]; 515 int oldIdx = 0; 516 int newIdx = 0; 517 518 while (oldIdx + newIdx < combinedList.length) { 519 if (newIdx >= newLen || (oldIdx < oldLen && oldList[oldIdx].from() <= newList[newIdx].from())) { 520 combinedList[oldIdx + newIdx] = oldList[oldIdx]; 521 oldIdx++; 522 } else { 523 combinedList[oldIdx + newIdx] = newList[newIdx]; 524 newIdx++; 525 } 526 } 527 528 sortedIntervals = combinedList; 529 } 530 531 void sortIntervalsBySpillPos() { 532 // TODO (JE): better algorithm? 533 // conventional sort-algorithm for new intervals 534 Arrays.sort(sortedIntervals, (TraceInterval a, TraceInterval b) -> a.spillDefinitionPos() - b.spillDefinitionPos()); 535 } 536 537 // wrapper for Interval.splitChildAtOpId that performs a bailout in product mode 538 // instead of returning null 539 public TraceInterval splitChildAtOpId(TraceInterval interval, int opId, LIRInstruction.OperandMode mode) { 540 TraceInterval result = interval.getSplitChildAtOpId(opId, mode); 541 542 if (result != null) { 543 if (Debug.isLogEnabled()) { 544 Debug.log("Split child at pos %d of interval %s is %s", opId, interval, result); 545 } 546 return result; 547 } 548 throw new GraalError("LinearScan: interval is null"); 549 } 550 551 AllocatableValue canonicalSpillOpr(TraceInterval interval) { 552 assert interval.spillSlot() != null : "canonical spill slot not set"; 553 return interval.spillSlot(); 554 } 555 556 boolean isMaterialized(AllocatableValue operand, int opId, OperandMode mode) { 557 TraceInterval interval = intervalFor(operand); 558 assert interval != null : "interval must exist"; 559 560 if (opId != -1) { 561 /* 562 * Operands are not changed when an interval is split during allocation, so search 563 * the right interval here. 
                 */
                interval = splitChildAtOpId(interval, opId, mode);
            }

            return isIllegal(interval.location()) && interval.canMaterialize();
        }

        boolean isCallerSave(Value operand) {
            return attributes(asRegister(operand)).isCallerSave();
        }

        /**
         * Runs the full linear-scan pipeline on this trace: lifetime analysis, register
         * allocation, data-flow resolution, optional spill-move elimination and location
         * assignment.
         */
        @SuppressWarnings("try")
        protected void allocate(TargetDescription target, LIRGenerationResult lirGenRes, TraceAllocationContext traceContext) {
            /*
             * This is the point to enable debug logging for the whole register allocation.
             */
            try (Indent indent = Debug.logAndIndent("LinearScan allocate")) {
                TraceLinearScanAllocationContext context = new TraceLinearScanAllocationContext(traceContext.spillMoveFactory, traceContext.registerAllocationConfig, traceBuilderResult, this);

                TRACE_LINEAR_SCAN_LIFETIME_ANALYSIS_PHASE.apply(target, lirGenRes, trace, context, false);

                try (Scope s = Debug.scope("AfterLifetimeAnalysis", this)) {

                    printLir("Before register allocation", true);
                    printIntervals("Before register allocation");

                    sortIntervalsBeforeAllocation();
                    sortFixedIntervalsBeforeAllocation();

                    TRACE_LINEAR_SCAN_REGISTER_ALLOCATION_PHASE.apply(target, lirGenRes, trace, context, false);
                    printIntervals("After register allocation");

                    // resolve intra-trace data-flow
                    TRACE_LINEAR_SCAN_RESOLVE_DATA_FLOW_PHASE.apply(target, lirGenRes, trace, context, false);
                    Debug.dump(TraceBuilderPhase.TRACE_DUMP_LEVEL, sortedBlocks(), "%s", TRACE_LINEAR_SCAN_RESOLVE_DATA_FLOW_PHASE.getName());

                    // eliminate spill moves
                    if (Options.LIROptTraceRAEliminateSpillMoves.getValue()) {
                        TRACE_LINEAR_SCAN_ELIMINATE_SPILL_MOVE_PHASE.apply(target, lirGenRes, trace, context, false);
                        Debug.dump(TraceBuilderPhase.TRACE_DUMP_LEVEL, sortedBlocks(), "%s", TRACE_LINEAR_SCAN_ELIMINATE_SPILL_MOVE_PHASE.getName());
                    }

                    TRACE_LINEAR_SCAN_ASSIGN_LOCATIONS_PHASE.apply(target, lirGenRes, trace, context, false);

                    if (DetailedAsserts.getValue()) {
                        verifyIntervals();
                    }
                } catch (Throwable e) {
                    throw Debug.handle(e);
                }
            }
        }

        public void printLir(String label, @SuppressWarnings("unused") boolean hirValid) {
            if (Debug.isDumpEnabled(TraceBuilderPhase.TRACE_DUMP_LEVEL)) {
                Debug.dump(TraceBuilderPhase.TRACE_DUMP_LEVEL, sortedBlocks(), label);
            }
        }

        boolean verify() {
            // (check that all intervals have a correct register and that no registers are
            // overwritten)
            verifyIntervals();

            verifyRegisters();

            Debug.log("no errors found");

            return true;
        }

        @SuppressWarnings("try")
        private void verifyRegisters() {
            // Enable this logging to get output for the verification process.
            try (Indent indent = Debug.logAndIndent("verifying register allocation")) {
                RegisterVerifier verifier = new RegisterVerifier(this);
                verifier.verify(blockAt(0));
            }
        }

        /**
         * Checks basic interval invariants (operand number, kind, location, non-empty non-zero
         * range) and that no two overlapping intervals share a location. Throws {@link GraalError}
         * on violation.
         */
        @SuppressWarnings("try")
        protected void verifyIntervals() {
            try (Indent indent = Debug.logAndIndent("verifying intervals")) {
                int len = intervalsSize();

                for (int i = 0; i < len; i++) {
                    final TraceInterval i1 = intervals()[i];
                    if (i1 == null) {
                        continue;
                    }

                    i1.checkSplitChildren();

                    if (i1.operandNumber != i) {
                        Debug.log("Interval %d is on position %d in list", i1.operandNumber, i);
                        Debug.log(i1.logString());
                        throw new GraalError("");
                    }

                    if (isVariable(i1.operand) && i1.kind().equals(LIRKind.Illegal)) {
                        Debug.log("Interval %d has no type assigned", i1.operandNumber);
                        Debug.log(i1.logString());
                        throw new GraalError("");
                    }

                    if (i1.location() == null) {
                        Debug.log("Interval %d has no register assigned", i1.operandNumber);
                        Debug.log(i1.logString());
                        throw new GraalError("");
                    }

                    if (i1.isEmpty()) {
                        Debug.log("Interval %d has no Range", i1.operandNumber);
                        Debug.log(i1.logString());
                        throw new GraalError("");
                    }

                    if (i1.from() >= i1.to()) {
                        Debug.log("Interval %d has zero length range", i1.operandNumber);
                        Debug.log(i1.logString());
                        throw new GraalError("");
                    }

                    // special intervals that are created in MoveResolver
                    // . ignore them because the range information has no meaning there
                    if (i1.from() == 1 && i1.to() == 2) {
                        continue;
                    }
                    // check any intervals
                    for (int j = i + 1; j < len; j++) {
                        final TraceInterval i2 = intervals()[j];
                        if (i2 == null) {
                            continue;
                        }

                        // special intervals that are created in MoveResolver
                        // . ignore them because the range information has no meaning there
                        if (i2.from() == 1 && i2.to() == 2) {
                            continue;
                        }
                        Value l1 = i1.location();
                        Value l2 = i2.location();
                        boolean intersects = i1.intersects(i2);
                        if (intersects && !isIllegal(l1) && (l1.equals(l2))) {
                            throw GraalError.shouldNotReachHere(String.format("Intervals %s and %s overlap and have the same register assigned\n%s\n%s", i1, i2, i1.logString(), i2.logString()));
                        }
                    }
                    // check fixed intervals
                    for (FixedInterval i2 : fixedIntervals()) {
                        if (i2 == null) {
                            continue;
                        }

                        Value l1 = i1.location();
                        Value l2 = i2.location();
                        boolean intersects = i2.intersects(i1);
                        if (intersects && !isIllegal(l1) && (l1.equals(l2))) {
                            throw GraalError.shouldNotReachHere(String.format("Intervals %s and %s overlap and have the same register assigned\n%s\n%s", i1, i2, i1.logString(), i2.logString()));
                        }
                    }
                }
            }
        }

        /**
         * Sets {@link #ok} when it visits a register operand whose fixed interval is
         * {@link #curInterval}. Used by {@link #verifyNoOopsInFixedIntervals}.
         */
        class CheckConsumer implements ValueConsumer {

            boolean ok;
            FixedInterval curInterval;

            @Override
            public void visitValue(Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
                if (isRegister(operand)) {
                    if (fixedIntervalFor(asRegisterValue(operand)) == curInterval) {
                        ok = true;
                    }
                }
            }
        }

        @SuppressWarnings("try")
        void verifyNoOopsInFixedIntervals()
        {
            try (Indent indent = Debug.logAndIndent("verifying that no oops are in fixed intervals *")) {
                CheckConsumer checkConsumer = new CheckConsumer();

                TraceInterval otherIntervals;
                FixedInterval fixedInts = createFixedUnhandledList();
                // to ensure a walking until the last instruction id, add a dummy interval
                // with a high operation id
                otherIntervals = new TraceInterval(Value.ILLEGAL, -1);
                otherIntervals.addRange(Integer.MAX_VALUE - 2, Integer.MAX_VALUE - 1);
                TraceIntervalWalker iw = new TraceIntervalWalker(this, fixedInts, otherIntervals);

                for (AbstractBlockBase<?> block : sortedBlocks()) {
                    List<LIRInstruction> instructions = getLIR().getLIRforBlock(block);

                    for (int j = 0; j < instructions.size(); j++) {
                        LIRInstruction op = instructions.get(j);

                        if (op.hasState()) {
                            iw.walkBefore(op.id());
                            boolean checkLive = true;

                            /*
                             * Make sure none of the fixed registers is live across an oopmap since
                             * we can't handle that correctly.
                             */
                            if (checkLive) {
                                for (FixedInterval interval = iw.activeFixedList.getFixed(); interval != FixedInterval.EndMarker; interval = interval.next) {
                                    if (interval.to() > op.id() + 1) {
                                        /*
                                         * This interval is live out of this op so make sure that
                                         * this interval represents some value that's referenced by
                                         * this op either as an input or output.
                                         */
                                        checkConsumer.curInterval = interval;
                                        checkConsumer.ok = false;

                                        op.visitEachInput(checkConsumer);
                                        op.visitEachAlive(checkConsumer);
                                        op.visitEachTemp(checkConsumer);
                                        op.visitEachOutput(checkConsumer);

                                        assert checkConsumer.ok : "fixed intervals should never be live across an oopmap point";
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        public LIR getLIR() {
            return res.getLIR();
        }

        public FrameMapBuilder getFrameMapBuilder() {
            return frameMapBuilder;
        }

        /** Returns the blocks of this trace in linear-scan order. */
        public AbstractBlockBase<?>[] sortedBlocks() {
            return trace.getBlocks();
        }

        public RegisterArray getRegisters() {
            return registers;
        }

        public RegisterAllocationConfig getRegisterAllocationConfig() {
            return regAllocConfig;
        }

        public boolean callKillsRegisters() {
            return regAllocConfig.getRegisterConfig().areAllAllocatableRegistersCallerSaved();
        }

        boolean neverSpillConstants() {
            return neverSpillConstants;
        }

        // IntervalData

        // extra capacity reserved for derived intervals: intervalsSize >> 1 additional slots
        private static final int SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT = 1;

        /**
         * The index of the first entry in {@link #intervals} for a
         * {@linkplain #createDerivedInterval(TraceInterval) derived interval}.
         */
        private int firstDerivedIntervalIndex = -1;

        /**
         * @see #fixedIntervals()
         */
        private final FixedInterval[] fixedIntervals;

        /**
         * @see #intervals()
         */
        private TraceInterval[] intervals;

        /**
         * The number of valid entries in {@link #intervals}.
         */
        private int intervalsSize;

        /**
         * Map from an instruction {@linkplain LIRInstruction#id id} to the instruction. Entries
         * should be retrieved with {@link #instructionForId(int)} as the id is not simply an index
         * into this array.
         */
        private LIRInstruction[] opIdToInstructionMap;

        /**
         * Map from an instruction {@linkplain LIRInstruction#id id} to the
         * {@linkplain AbstractBlockBase block} containing the instruction. Entries should be
         * retrieved with {@link #blockForId(int)} as the id is not simply an index into this array.
         */
        private AbstractBlockBase<?>[] opIdToBlockMap;

        /**
         * Map from {@linkplain #operandNumber(Value) operand numbers} to intervals.
         */
        TraceInterval[] intervals() {
            return intervals;
        }

        /**
         * Map from {@linkplain #operandNumber(Value) operand numbers} to intervals.
         */
        FixedInterval[] fixedIntervals() {
            return fixedIntervals;
        }

        void initIntervals() {
            intervalsSize = operandSize();
            // reserve headroom for derived intervals created by splitting/spilling
            intervals = new TraceInterval[intervalsSize + (intervalsSize >> SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT)];
        }

        /**
         * Creates a new fixed interval.
         *
         * @param reg the operand for the interval
         * @return the created interval
         */
        private FixedInterval createFixedInterval(RegisterValue reg) {
            FixedInterval interval = new FixedInterval(reg);
            int operandNumber = reg.getRegister().number;
            assert fixedIntervals[operandNumber] == null;
            fixedIntervals[operandNumber] = interval;
            return interval;
        }

        /**
         * Creates a new interval.
         *
         * @param operand the operand for the interval
         * @return the created interval
         */
        private TraceInterval createInterval(AllocatableValue operand) {
            assert isLegal(operand);
            int operandNumber = operandNumber(operand);
            TraceInterval interval = new TraceInterval(operand, operandNumber);
            assert operandNumber < intervalsSize;
            assert intervals[operandNumber] == null;
            intervals[operandNumber] = interval;
            return interval;
        }

        /**
         * Creates an interval as a result of splitting or spilling another interval.
         *
         * @param source an interval being split of spilled
         * @return a new interval derived from {@code source}
         */
        TraceInterval createDerivedInterval(TraceInterval source) {
            if (firstDerivedIntervalIndex == -1) {
                firstDerivedIntervalIndex = intervalsSize;
            }
            if (intervalsSize == intervals.length) {
                intervals = Arrays.copyOf(intervals, intervals.length + (intervals.length >> SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT) + 1);
            }
            // increments intervalsSize
            Variable variable = createVariable(source.kind());

            assert intervalsSize <= intervals.length;

            TraceInterval interval = createInterval(variable);
            assert intervals[intervalsSize - 1] == interval;
            return interval;
        }

        /**
         * Creates a new variable for a derived interval. Note that the variable is not
         * {@linkplain LIR#numVariables() managed} so it must not be inserted into the {@link LIR}.
         */
        private Variable createVariable(ValueKind<?> kind) {
            return new Variable(kind, intervalsSize++);
        }

        boolean hasDerivedIntervals() {
            return firstDerivedIntervalIndex != -1;
        }

        int firstDerivedIntervalIndex() {
            return firstDerivedIntervalIndex;
        }

        public int intervalsSize() {
            return intervalsSize;
        }

        FixedInterval fixedIntervalFor(RegisterValue reg) {
            return fixedIntervals[reg.getRegister().number];
        }

        FixedInterval getOrCreateFixedInterval(RegisterValue reg) {
            FixedInterval ret = fixedIntervalFor(reg);
            if (ret == null) {
                return createFixedInterval(reg);
            } else {
                return ret;
            }
        }

        TraceInterval intervalFor(Value operand) {
            int operandNumber = operandNumber(operand);
            assert operandNumber < intervalsSize;
            return intervals[operandNumber];
        }

        TraceInterval getOrCreateInterval(AllocatableValue operand) {
            TraceInterval ret = intervalFor(operand);
            if (ret == null) {
                return createInterval(operand);
            } else {
                return ret;
            }
        }

        void initOpIdMaps(int numInstructions) {
            opIdToInstructionMap = new LIRInstruction[numInstructions];
            opIdToBlockMap = new AbstractBlockBase<?>[numInstructions];
        }

        void putOpIdMaps(int index, LIRInstruction op, AbstractBlockBase<?> block) {
            opIdToInstructionMap[index] = op;
            opIdToBlockMap[index] = block;
        }

        /**
         * Gets the highest instruction id allocated by this object.
         */
        int maxOpId() {
            assert opIdToInstructionMap.length > 0 : "no operations";
            return (opIdToInstructionMap.length - 1) << 1;
        }

        /**
         * Converts an {@linkplain LIRInstruction#id instruction id} to an instruction index. All
         * LIR instructions in a method have an index one greater than their linear-scan order
         * predecessor with the first instruction having an index of 0.
         */
        private int opIdToIndex(int opId) {
            return opId >> 1;
        }

        /**
         * Retrieves the {@link LIRInstruction} based on its {@linkplain LIRInstruction#id id}.
         *
         * @param opId an instruction {@linkplain LIRInstruction#id id}
         * @return the instruction whose {@linkplain LIRInstruction#id} {@code == id}
         */
        LIRInstruction instructionForId(int opId) {
            assert isEven(opId) : "opId not even";
            LIRInstruction instr = opIdToInstructionMap[opIdToIndex(opId)];
            assert instr.id() == opId;
            return instr;
        }

        /**
         * Gets the block containing a given instruction.
1025 * 1026 * @param opId an instruction {@linkplain LIRInstruction#id id} 1027 * @return the block containing the instruction denoted by {@code opId} 1028 */ 1029 AbstractBlockBase<?> blockForId(int opId) { 1030 assert opIdToBlockMap.length > 0 && opId >= 0 && opId <= maxOpId() + 1 : "opId out of range: " + opId; 1031 return opIdToBlockMap[opIdToIndex(opId)]; 1032 } 1033 1034 @SuppressWarnings("try") 1035 public void printIntervals(String label) { 1036 if (Debug.isDumpEnabled(TraceBuilderPhase.TRACE_DUMP_LEVEL)) { 1037 if (Debug.isLogEnabled()) { 1038 try (Indent indent = Debug.logAndIndent("intervals %s", label)) { 1039 for (FixedInterval interval : fixedIntervals) { 1040 if (interval != null) { 1041 Debug.log("%s", interval.logString()); 1042 } 1043 } 1044 1045 for (TraceInterval interval : intervals) { 1046 if (interval != null) { 1047 Debug.log("%s", interval.logString()); 1048 } 1049 } 1050 1051 try (Indent indent2 = Debug.logAndIndent("Basic Blocks")) { 1052 for (AbstractBlockBase<?> block : trace.getBlocks()) { 1053 Debug.log("B%d [%d, %d, %s] ", block.getId(), getFirstLirInstructionId(block), getLastLirInstructionId(block), block.getLoop()); 1054 } 1055 } 1056 } 1057 } 1058 Debug.dump(Debug.INFO_LOG_LEVEL, this, label); 1059 } 1060 } 1061 1062 @Override 1063 public void visitIntervals(IntervalVisitor visitor) { 1064 for (FixedInterval interval : fixedIntervals) { 1065 if (interval != null) { 1066 printFixedInterval(interval, visitor); 1067 } 1068 } 1069 for (TraceInterval interval : intervals) { 1070 if (interval != null) { 1071 printInterval(interval, visitor); 1072 } 1073 } 1074 } 1075 1076 } 1077 1078 public static boolean verifyEquals(TraceLinearScan a, TraceLinearScan b) { 1079 assert compareFixed(a.fixedIntervals(), b.fixedIntervals()); 1080 assert compareIntervals(a.intervals(), b.intervals()); 1081 return true; 1082 } 1083 1084 private static boolean compareIntervals(TraceInterval[] a, TraceInterval[] b) { 1085 for (int i = 0; i < 
Math.max(a.length, b.length); i++) { 1086 if (i >= a.length) { 1087 assert b[i] == null : "missing a interval: " + i + " b: " + b[i]; 1088 continue; 1089 } 1090 if (i >= b.length) { 1091 assert a[i] == null : "missing b interval: " + i + " a: " + a[i]; 1092 continue; 1093 } 1094 compareInterval(a[i], b[i]); 1095 } 1096 return true; 1097 } 1098 1099 private static void compareInterval(TraceInterval a, TraceInterval b) { 1100 if (a == null) { 1101 assert b == null : "First interval is null but second is: " + b; 1102 return; 1103 } 1104 assert b != null : "Second interval is null but forst is: " + a; 1105 assert a.operand.equals(b.operand) : "Operand mismatch: " + a + " vs. " + b; 1106 assert a.from() == b.from() : "From mismatch: " + a + " vs. " + b; 1107 assert a.to() == b.to() : "To mismatch: " + a + " vs. " + b; 1108 assert verifyIntervalsEquals(a, b); 1109 } 1110 1111 private static boolean verifyIntervalsEquals(TraceInterval a, TraceInterval b) { 1112 for (int i = 0; i < Math.max(a.numUsePos(), b.numUsePos()); i++) { 1113 assert i < a.numUsePos() : "missing a usepos: " + i + " b: " + b; 1114 assert i < b.numUsePos() : "missing b usepos: " + i + " a: " + a; 1115 int aPos = a.getUsePos(i); 1116 int bPos = b.getUsePos(i); 1117 assert aPos == bPos : "Use Positions differ: " + aPos + " vs. " + bPos; 1118 RegisterPriority aReg = a.getUsePosRegisterPriority(i); 1119 RegisterPriority bReg = b.getUsePosRegisterPriority(i); 1120 assert aReg == bReg : "Register priority differ: " + aReg + " vs. 
" + bReg; 1121 } 1122 return true; 1123 } 1124 1125 private static boolean compareFixed(FixedInterval[] a, FixedInterval[] b) { 1126 for (int i = 0; i < Math.max(a.length, b.length); i++) { 1127 if (i >= a.length) { 1128 assert b[i] == null : "missing a interval: " + i + " b: " + b[i]; 1129 continue; 1130 } 1131 if (i >= b.length) { 1132 assert a[i] == null : "missing b interval: " + i + " a: " + a[i]; 1133 continue; 1134 } 1135 compareFixedInterval(a[i], b[i]); 1136 } 1137 return true; 1138 } 1139 1140 private static void compareFixedInterval(FixedInterval a, FixedInterval b) { 1141 if (a == null) { 1142 assert b == null || isEmptyInterval(b) : "First interval is null but second is: " + b; 1143 return; 1144 } 1145 if (b == null) { 1146 assert isEmptyInterval(a) : "Second interval is null but first is: " + a; 1147 return; 1148 } 1149 assert a.operand.equals(b.operand) : "Operand mismatch: " + a + " vs. " + b; 1150 assert a.from() == b.from() : "From mismatch: " + a + " vs. " + b; 1151 assert a.to() == b.to() : "To mismatch: " + a + " vs. " + b; 1152 assert verifyFixeEquas(a, b); 1153 } 1154 1155 private static boolean verifyFixeEquas(FixedInterval a, FixedInterval b) { 1156 a.rewindRange(); 1157 b.rewindRange(); 1158 while (!a.currentAtEnd()) { 1159 assert !b.currentAtEnd() : "Fixed range mismatch: " + a + " vs. " + b; 1160 assert a.currentFrom() == b.currentFrom() : "From range mismatch: " + a + " vs. " + b + " from: " + a.currentFrom() + " vs. " + b.currentFrom(); 1161 assert a.currentTo() == b.currentTo() : "To range mismatch: " + a + " vs. " + b + " from: " + a.currentTo() + " vs. " + b.currentTo(); 1162 a.nextRange(); 1163 b.nextRange(); 1164 } 1165 assert b.currentAtEnd() : "Fixed range mismatch: " + a + " vs. 
" + b; 1166 return true; 1167 } 1168 1169 private static boolean isEmptyInterval(FixedInterval fixed) { 1170 return fixed.from() == -1 && fixed.to() == 0; 1171 } 1172 1173 private static void printFixedInterval(FixedInterval interval, IntervalVisitor visitor) { 1174 Value hint = null; 1175 AllocatableValue operand = interval.operand; 1176 String type = "fixed"; 1177 visitor.visitIntervalStart(operand, operand, operand, hint, type); 1178 1179 // print ranges 1180 for (FixedRange range = interval.first(); range != FixedRange.EndMarker; range = range.next) { 1181 visitor.visitRange(range.from, range.to); 1182 } 1183 1184 // no use positions 1185 1186 visitor.visitIntervalEnd("NOT_SUPPORTED"); 1187 1188 } 1189 1190 private static void printInterval(TraceInterval interval, IntervalVisitor visitor) { 1191 Value hint = interval.locationHint(false) != null ? interval.locationHint(false).location() : null; 1192 AllocatableValue operand = interval.operand; 1193 String type = isRegister(operand) ? "fixed" : operand.getValueKind().getPlatformKind().toString(); 1194 visitor.visitIntervalStart(interval.splitParent().operand, operand, interval.location(), hint, type); 1195 1196 // print ranges 1197 visitor.visitRange(interval.from(), interval.to()); 1198 1199 // print use positions 1200 int prev = -1; 1201 for (int i = interval.numUsePos() - 1; i >= 0; --i) { 1202 assert prev < interval.getUsePos(i) : "use positions not sorted"; 1203 visitor.visitUsePos(interval.getUsePos(i), interval.getUsePosRegisterPriority(i)); 1204 prev = interval.getUsePos(i); 1205 } 1206 1207 visitor.visitIntervalEnd(interval.spillState()); 1208 } 1209 }