/*
 * Copyright (c) 2015, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package org.graalvm.compiler.lir.alloc.trace.lsra;

import static jdk.vm.ci.code.ValueUtil.asRegisterValue;
import static jdk.vm.ci.code.ValueUtil.asStackSlot;
import static jdk.vm.ci.code.ValueUtil.isRegister;
import static jdk.vm.ci.code.ValueUtil.isStackSlot;
import static org.graalvm.compiler.lir.LIRValueUtil.asVariable;
import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;
import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAshareSpillInformation;
import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAuseInterTraceHints;
import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.asShadowedRegisterValue;
import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.isShadowedRegisterValue;
import static org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.isVariableOrRegister;

import java.util.ArrayList;
import java.util.EnumSet;

import org.graalvm.compiler.core.common.LIRKind;
import org.graalvm.compiler.core.common.alloc.RegisterAllocationConfig;
import org.graalvm.compiler.core.common.alloc.Trace;
import org.graalvm.compiler.core.common.alloc.TraceBuilderResult;
import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
import org.graalvm.compiler.debug.Debug;
import org.graalvm.compiler.debug.Debug.Scope;
import org.graalvm.compiler.debug.GraalError;
import org.graalvm.compiler.debug.Indent;
import org.graalvm.compiler.lir.InstructionValueConsumer;
import org.graalvm.compiler.lir.LIR;
import org.graalvm.compiler.lir.LIRInstruction;
import org.graalvm.compiler.lir.LIRInstruction.OperandFlag;
import org.graalvm.compiler.lir.LIRInstruction.OperandMode;
import org.graalvm.compiler.lir.LIRValueUtil;
import org.graalvm.compiler.lir.StandardOp.JumpOp;
import org.graalvm.compiler.lir.StandardOp.LabelOp;
import org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
import org.graalvm.compiler.lir.ValueProcedure;
import org.graalvm.compiler.lir.Variable;
import org.graalvm.compiler.lir.alloc.trace.GlobalLivenessInfo;
import org.graalvm.compiler.lir.alloc.trace.ShadowedRegisterValue;
import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.RegisterPriority;
import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.SpillState;
import org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.TraceLinearScan;
import org.graalvm.compiler.lir.gen.LIRGenerationResult;
import org.graalvm.compiler.lir.gen.LIRGeneratorTool.MoveFactory;
import org.graalvm.compiler.lir.ssa.SSAUtil;

import jdk.vm.ci.code.Register;
import jdk.vm.ci.code.RegisterArray;
import jdk.vm.ci.code.RegisterValue;
import jdk.vm.ci.code.TargetDescription;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.Value;

/**
 * Lifetime analysis step of the trace-based linear scan register allocator.
 *
 * <p>
 * The phase numbers all LIR instructions of the trace (even op ids, see
 * {@link Analyser#numberInstruction}) and builds the {@link TraceInterval variable} and
 * {@link FixedInterval fixed (register)} lifetime intervals, including use positions, register
 * hints, spill state and re-materialization information, by walking the blocks and instructions of
 * the trace in reverse order.
 */
public final class TraceLinearScanLifetimeAnalysisPhase extends TraceLinearScanAllocationPhase {

    @Override
    protected void run(TargetDescription target, LIRGenerationResult lirGenRes, Trace trace, MoveFactory spillMoveFactory, RegisterAllocationConfig registerAllocationConfig,
                    TraceBuilderResult traceBuilderResult, TraceLinearScan allocator) {
        new Analyser(allocator, traceBuilderResult).analyze();
    }

    /**
     * Stateful worker that performs the actual analysis for one trace. All results are recorded in
     * the {@link TraceLinearScan allocator} passed to the constructor.
     */
    public static final class Analyser {
        private final TraceLinearScan allocator;
        private final TraceBuilderResult traceBuilderResult;
        /** Total number of LIR instructions in the trace; set by {@link #countInstructions}. */
        private int numInstructions;

        public Analyser(TraceLinearScan allocator, TraceBuilderResult traceBuilderResult) {
            this.allocator = allocator;
            this.traceBuilderResult = traceBuilderResult;
        }

        private AbstractBlockBase<?>[] sortedBlocks() {
            return allocator.sortedBlocks();
        }

        private LIR getLIR() {
            return allocator.getLIR();
        }

        private RegisterArray getCallerSavedRegisters() {
            return allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters();
        }

        /** Entry point: first numbers the instructions, then builds the intervals. */
        public void analyze() {
            countInstructions();
            buildIntervals();
        }

        /**
         * Returns {@code true} if {@code other}'s trace has already been allocated (its trace id is
         * smaller than the trace id of {@code currentBlock}'s trace).
         */
        private boolean isAllocated(AbstractBlockBase<?> currentBlock, AbstractBlockBase<?> other) {
            return traceBuilderResult.getTraceForBlock(other).getId() < traceBuilderResult.getTraceForBlock(currentBlock).getId();
        }

        /**
         * Count instructions in all blocks. The numbering follows the
         * {@linkplain TraceLinearScan#sortedBlocks() register allocation order}.
         */
        private void countInstructions() {

            allocator.initIntervals();

            int numberInstructions = 0;
            for (AbstractBlockBase<?> block : sortedBlocks()) {
                numberInstructions += getLIR().getLIRforBlock(block).size();
            }
            numInstructions = numberInstructions;

            // initialize with correct length
            allocator.initOpIdMaps(numberInstructions);
        }

        /** Records a definition (and a possible register hint) for each output operand. */
        private final InstructionValueConsumer outputConsumer = new InstructionValueConsumer() {
            @Override
            public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
                if (isVariableOrRegister(operand)) {
                    addDef((AllocatableValue) operand, op, registerPriorityOfOutputOperand(op));
                    addRegisterHint(op, operand, mode, flags, true);
                }
            }
        };

        /** Records a one-position temp range for each temp operand. */
        private final InstructionValueConsumer tempConsumer = new InstructionValueConsumer() {
            @Override
            public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
                if (isVariableOrRegister(operand)) {
                    addTemp((AllocatableValue) operand, op.id(), RegisterPriority.MustHaveRegister);
                    addRegisterHint(op, operand, mode, flags, false);
                }
            }
        };

        /**
         * Records a use for alive operands. The use extends to {@code opId + 1} so that the value
         * stays live across the instruction (it must not be overwritten by the instruction's own
         * outputs/temps).
         */
        private final InstructionValueConsumer aliveConsumer = new InstructionValueConsumer() {
            @Override
            public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
                if (isVariableOrRegister(operand)) {
                    RegisterPriority p = registerPriorityOfInputOperand(flags);
                    int opId = op.id();
                    // range start 0: the start is tightened to the def position when the def is
                    // encountered (instructions are visited in reverse order, see addVariableDef)
                    int blockFrom = 0;
                    addUse((AllocatableValue) operand, blockFrom, opId + 1, p);
                    addRegisterHint(op, operand, mode, flags, false);
                }
            }
        };

        /** Records a use ending at the instruction itself for plain input operands. */
        private final InstructionValueConsumer inputConsumer = new InstructionValueConsumer() {
            @Override
            public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
                if (isVariableOrRegister(operand)) {
                    int opId = op.id();
                    RegisterPriority p = registerPriorityOfInputOperand(flags);
                    // see aliveConsumer: start is tightened once the def is found
                    int blockFrom = 0;
                    addUse((AllocatableValue) operand, blockFrom, opId, p);
                    addRegisterHint(op, operand, mode, flags, false);
                }
            }

        };

        /**
         * Records a use with {@link RegisterPriority#None} for values referenced only by debug
         * state; such values need to be alive but do not require a register.
         */
        private final InstructionValueConsumer stateProc = new InstructionValueConsumer() {
            @Override
            public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
                if (isVariableOrRegister(operand)) {
                    int opId = op.id();
                    int blockFrom = 0;
                    addUse((AllocatableValue) operand, blockFrom, opId + 1, RegisterPriority.None);
                }
            }
        };

        /** Dispatches a use to the fixed (register) or variable interval of {@code operand}. */
        private void addUse(AllocatableValue operand, int from, int to, RegisterPriority registerPriority) {
            if (isRegister(operand)) {
                RegisterValue reg = asRegisterValue(operand);
                if (allocator.isAllocatable(reg)) {
                    addFixedUse(reg, from, to);
                }
            } else {
                assert isVariable(operand) : operand;
                addVariableUse(asVariable(operand), from, to, registerPriority);
            }
        }

        private void addFixedUse(RegisterValue reg, int from, int to) {
            FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
            interval.addRange(from, to);
            if (Debug.isLogEnabled()) {
                Debug.log("add fixed use: %s, at %d", interval, to);
            }
        }

        private void addVariableUse(Variable operand, int from, int to, RegisterPriority registerPriority) {
            TraceInterval interval = allocator.getOrCreateInterval(operand);
            interval.addRange(from, to);

            // Register use position at even instruction id.
            interval.addUsePos(to & ~1, registerPriority, allocator.getOptions());

            if (Debug.isLogEnabled()) {
                Debug.log("add use: %s, at %d (%s)", interval, to, registerPriority.name());
            }
        }

        /** Dispatches a definition to the fixed (register) or variable interval of {@code operand}. */
        private void addDef(AllocatableValue operand, LIRInstruction op, RegisterPriority registerPriority) {
            if (isRegister(operand)) {
                RegisterValue reg = asRegisterValue(operand);
                if (allocator.isAllocatable(reg)) {
                    addFixedDef(reg, op);
                }
            } else {
                assert isVariable(operand) : operand;
                addVariableDef(asVariable(operand), op, registerPriority);
            }
        }

        private void addFixedDef(RegisterValue reg, LIRInstruction op) {
            FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
            int defPos = op.id();
            if (interval.from() <= defPos) {
                /*
                 * Update the starting point (when a range is first created for a use, its start is
                 * the beginning of the current block until a def is encountered).
                 */
                interval.setFrom(defPos);

            } else {
                /*
                 * Dead value - make vacuous interval also add register priority for dead intervals
                 */
                interval.addRange(defPos, defPos + 1);
                if (Debug.isLogEnabled()) {
                    Debug.log("Warning: def of operand %s at %d occurs without use", reg, defPos);
                }
            }
            if (Debug.isLogEnabled()) {
                Debug.log("add fixed def: %s, at %d", interval, defPos);
            }
        }

        /**
         * Records a definition of {@code operand} at {@code op} and updates spill state and
         * re-materialization info of the interval.
         *
         * @return the interval of {@code operand}
         */
        private TraceInterval addVariableDef(Variable operand, LIRInstruction op, RegisterPriority registerPriority) {
            int defPos = op.id();

            TraceInterval interval = allocator.getOrCreateInterval(operand);

            if (interval.isEmpty()) {
                /*
                 * Dead value - make vacuous interval also add register priority for dead intervals
                 */
                interval.addRange(defPos, defPos + 1);
                if (Debug.isLogEnabled()) {
                    Debug.log("Warning: def of operand %s at %d occurs without use", operand, defPos);
                }
            } else {
                /*
                 * Update the starting point (when a range is first created for a use, its start is
                 * the beginning of the current block until a def is encountered).
                 */
                interval.setFrom(defPos);
            }
            if (!(op instanceof LabelOp)) {
                // no use positions for labels
                interval.addUsePos(defPos, registerPriority, allocator.getOptions());
            }

            changeSpillDefinitionPos(op, operand, interval, defPos);
            if (registerPriority == RegisterPriority.None && interval.spillState().ordinal() <= SpillState.StartInMemory.ordinal() && isStackSlot(operand)) {
                // detection of method-parameters and roundfp-results
                interval.setSpillState(SpillState.StartInMemory);
            }
            interval.addMaterializationValue(getMaterializedValue(op, operand, interval, allocator.neverSpillConstants(), allocator.getSpillMoveFactory()));

            if (Debug.isLogEnabled()) {
                Debug.log("add def: %s defPos %d (%s)", interval, defPos, registerPriority.name());
            }
            return interval;
        }

        /** Dispatches a temp to the fixed (register) or variable interval of {@code operand}. */
        private void addTemp(AllocatableValue operand, int tempPos, RegisterPriority registerPriority) {
            if (isRegister(operand)) {
                RegisterValue reg = asRegisterValue(operand);
                if (allocator.isAllocatable(reg)) {
                    addFixedTemp(reg, tempPos);
                }
            } else {
                assert isVariable(operand) : operand;
                addVariableTemp(asVariable(operand), tempPos, registerPriority);
            }
        }

        private void addFixedTemp(RegisterValue reg, int tempPos) {
            FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
            interval.addRange(tempPos, tempPos + 1);
            if (Debug.isLogEnabled()) {
                Debug.log("add fixed temp: %s, at %d", interval, tempPos);
            }
        }

        private void addVariableTemp(Variable operand, int tempPos, RegisterPriority registerPriority) {
            TraceInterval interval = allocator.getOrCreateInterval(operand);

            if (interval.isEmpty()) {
                interval.addRange(tempPos, tempPos + 1);
            } else if (interval.from() > tempPos) {
                interval.setFrom(tempPos);
            }

            interval.addUsePos(tempPos, registerPriority, allocator.getOptions());
            // temps are never re-materializable
            interval.addMaterializationValue(null);

            if (Debug.isLogEnabled()) {
                Debug.log("add temp: %s tempPos %d (%s)", interval, tempPos, RegisterPriority.MustHaveRegister.name());
            }
        }

        /**
         * Eliminates moves from register to stack if the stack slot is known to be correct.
         *
         * @param op
         * @param operand
         */
        private void changeSpillDefinitionPos(LIRInstruction op, AllocatableValue operand, TraceInterval interval, int defPos) {
            assert interval.isSplitParent() : "can only be called for split parents";

            switch (interval.spillState()) {
                case NoDefinitionFound:
                    // assert interval.spillDefinitionPos() == -1 : "must no be set before";
                    interval.setSpillDefinitionPos(defPos);
                    if (!(op instanceof LabelOp)) {
                        // Do not update state for labels. This will be done afterwards.
                        interval.setSpillState(SpillState.NoSpillStore);
                    }
                    break;

                case NoSpillStore:
                    assert defPos <= interval.spillDefinitionPos() : "positions are processed in reverse order when intervals are created";
                    if (defPos < interval.spillDefinitionPos() - 2) {
                        /*
                         * Second definition found, so no spill optimization possible for this
                         * interval.
                         */
                        interval.setSpillState(SpillState.NoOptimization);
                    } else {
                        // two consecutive definitions (because of two-operand LIR form)
                        assert allocator.blockForId(defPos) == allocator.blockForId(interval.spillDefinitionPos()) : "block must be equal";
                    }
                    break;

                case NoOptimization:
                    // nothing to do
                    break;

                default:
                    throw GraalError.shouldNotReachHere("other states not allowed at this time");
            }
        }

        /**
         * Records a location hint between {@code targetValue} and the operand(s) the instruction
         * reports via {@link LIRInstruction#forEachRegisterHint}, so the allocator can try to
         * assign both the same location and elide the move.
         *
         * @param hintAtDef {@code true} if {@code targetValue} is the defined (output) operand
         */
        private void addRegisterHint(final LIRInstruction op, final Value targetValue, OperandMode mode, EnumSet<OperandFlag> flags, final boolean hintAtDef) {
            if (flags.contains(OperandFlag.HINT) && isVariableOrRegister(targetValue)) {

                ValueProcedure registerHintProc = new ValueProcedure() {
                    @Override
                    public Value doValue(Value registerHint, OperandMode valueMode, EnumSet<OperandFlag> valueFlags) {
                        if (isVariableOrRegister(registerHint)) {
                            /*
                             * TODO (je): clean up
                             */
                            final AllocatableValue fromValue;
                            final AllocatableValue toValue;
                            /* hints always point from def to use */
                            if (hintAtDef) {
                                fromValue = (AllocatableValue) registerHint;
                                toValue = (AllocatableValue) targetValue;
                            } else {
                                fromValue = (AllocatableValue) targetValue;
                                toValue = (AllocatableValue) registerHint;
                            }
                            Debug.log("addRegisterHint %s to %s", fromValue, toValue);
                            final TraceInterval to;
                            final IntervalHint from;
                            if (isRegister(toValue)) {
                                if (isRegister(fromValue)) {
                                    // fixed to fixed move
                                    return null;
                                }
                                from = getIntervalHint(toValue);
                                to = allocator.getOrCreateInterval(asVariable(fromValue));
                            } else {
                                to = allocator.getOrCreateInterval(asVariable(toValue));
                                from = getIntervalHint(fromValue);
                            }

                            to.setLocationHint(from);
                            if (Debug.isLogEnabled()) {
                                Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
                            }

                            return registerHint;
                        }
                        return null;
                    }
                };
                op.forEachRegisterHint(targetValue, mode, registerHintProc);
            }
        }

        private static boolean optimizeMethodArgument(Value value) {
            /*
             * Object method arguments that are passed on the stack are currently not optimized
             * because this requires that the runtime visits method arguments during stack walking.
             */
            return isStackSlot(value) && asStackSlot(value).isInCallerFrame() && LIRKind.isValue(value);
        }

        /**
         * Determines the register priority for an instruction's output/result operand.
         */
        private static RegisterPriority registerPriorityOfOutputOperand(LIRInstruction op) {
            if (op instanceof LabelOp) {
                // skip method header
                return RegisterPriority.None;
            }
            if (ValueMoveOp.isValueMoveOp(op)) {
                ValueMoveOp move = ValueMoveOp.asValueMoveOp(op);
                if (optimizeMethodArgument(move.getInput())) {
                    return RegisterPriority.None;
                }
            }

            // all other operands require a register
            return RegisterPriority.MustHaveRegister;
        }

        /**
         * Determines the priority which with an instruction's input operand will be allocated a
         * register.
         */
        private static RegisterPriority registerPriorityOfInputOperand(EnumSet<OperandFlag> flags) {
            if (flags.contains(OperandFlag.OUTGOING)) {
                return RegisterPriority.None;
            }
            if (flags.contains(OperandFlag.STACK)) {
                return RegisterPriority.ShouldHaveRegister;
            }
            // all other operands require a register
            return RegisterPriority.MustHaveRegister;
        }

        /**
         * Builds all intervals: iterates the blocks of the trace (and the instructions within each
         * block) in reverse order so definitions are seen before uses, numbers the instructions,
         * and feeds every operand through the consumer callbacks above. Afterwards it fixes up the
         * spill state of phi/incoming intervals, adds inter-trace hints (if enabled) and pads all
         * fixed intervals with a [-1, 0] range.
         */
        @SuppressWarnings("try")
        private void buildIntervals() {

            try (Indent indent = Debug.logAndIndent("build intervals")) {

                // create a list with all caller-save registers (cpu, fpu, xmm)
                RegisterArray callerSaveRegs = getCallerSavedRegisters();
                int instructionIndex = numInstructions;

                // iterate all blocks in reverse order
                AbstractBlockBase<?>[] blocks = sortedBlocks();
                for (int blockId = blocks.length - 1; blockId >= 0; blockId--) {
                    final AbstractBlockBase<?> block = blocks[blockId];

                    try (Indent indent2 = Debug.logAndIndent("handle block %d", block.getId())) {
                        handleBlockEnd(block, (instructionIndex - 1) << 1);

                        /*
                         * Iterate all instructions of the block in reverse order. definitions of
                         * intervals are processed before uses.
                         */
                        ArrayList<LIRInstruction> instructions = getLIR().getLIRforBlock(block);
                        // instIdx stops at 1: the label at index 0 is numbered separately below
                        for (int instIdx = instructions.size() - 1; instIdx >= 1; instIdx--) {
                            final LIRInstruction op = instructions.get(instIdx);
                            // number instruction
                            instructionIndex--;
                            numberInstruction(block, op, instructionIndex);
                            final int opId = op.id();

                            try (Indent indent3 = Debug.logAndIndent("handle inst %d: %s", opId, op)) {

                                /*
                                 * Add a temp range for each register if operation destroys
                                 * caller-save registers.
                                 */
                                if (op.destroysCallerSavedRegisters()) {
                                    for (Register r : callerSaveRegs) {
                                        if (allocator.attributes(r).isAllocatable()) {
                                            addTemp(r.asValue(), opId, RegisterPriority.None);
                                        }
                                    }
                                    if (Debug.isLogEnabled()) {
                                        Debug.log("operation destroys all caller-save registers");
                                    }
                                }

                                op.visitEachOutput(outputConsumer);
                                op.visitEachTemp(tempConsumer);
                                op.visitEachAlive(aliveConsumer);
                                op.visitEachInput(inputConsumer);

                                /*
                                 * Add uses of live locals from interpreter's point of view for
                                 * proper debug information generation. Treat these operands as temp
                                 * values (if the live range is extended to a call site, the value
                                 * would be in a register at the call otherwise).
                                 */
                                op.visitEachState(stateProc);
                            }

                        } // end of instruction iteration
                        // number label instruction
                        instructionIndex--;
                        numberInstruction(block, instructions.get(0), instructionIndex);
                        AbstractBlockBase<?> pred = blockId == 0 ? null : blocks[blockId - 1];
                        handleBlockBegin(block, pred);
                    }
                    if (Debug.isDumpEnabled(Debug.VERY_DETAILED_LEVEL)) {
                        allocator.printIntervals("After Block " + block);
                    }
                } // end of block iteration
                assert instructionIndex == 0 : "not at start?" + instructionIndex;
                handleTraceBegin(blocks[0]);

                // fix spill state for phi/incoming intervals
                for (TraceInterval interval : allocator.intervals()) {
                    if (interval != null && interval.spillState().equals(SpillState.NoDefinitionFound) && interval.spillDefinitionPos() != -1) {
                        // there was a definition in a phi/incoming
                        interval.setSpillState(SpillState.NoSpillStore);
                    }
                }
                if (TraceRAuseInterTraceHints.getValue(allocator.getLIR().getOptions())) {
                    addInterTraceHints();
                }
                for (FixedInterval interval1 : allocator.fixedIntervals()) {
                    if (interval1 != null) {
                        /* We use [-1, 0] to avoid intersection with incoming values. */
                        interval1.addRange(-1, 0);
                    }
                }
            }
        }

        /**
         * Adds a definition at the trace's first instruction for every variable that is live-in to
         * the trace, so all live-in intervals start at the trace entry.
         */
        private void handleTraceBegin(AbstractBlockBase<?> block) {
            LIRInstruction op = getLIR().getLIRforBlock(block).get(0);
            GlobalLivenessInfo livenessInfo = allocator.getGlobalLivenessInfo();
            for (int varNum : livenessInfo.getBlockIn(block)) {
                if (isAliveAtBlockBegin(varNum)) {
                    addVariableDef(livenessInfo.getVariable(varNum), op, RegisterPriority.None);
                }
            }
        }

        /** A variable is alive at the block begin iff an interval has been created for it. */
        private boolean isAliveAtBlockBegin(int varNum) {
            return allocator.intervalFor(varNum) != null;
        }

        /**
         * For merge blocks, adds definitions for incoming phi values and sets location hints from
         * the corresponding outgoing phi values of the predecessor.
         */
        private void handleBlockBegin(AbstractBlockBase<?> block, AbstractBlockBase<?> pred) {
            if (SSAUtil.isMerge(block)) {
                // handle phis
                // method parameters are fixed later on (see end of #buildIntervals)
                LabelOp label = SSAUtil.phiIn(getLIR(), block);
                JumpOp jump = pred == null ? null : SSAUtil.phiOut(getLIR(), pred);
                for (int i = 0; i < label.getPhiSize(); i++) {
                    Variable var = asVariable(label.getIncomingValue(i));
                    TraceInterval toInterval = addVariableDef(var, label, RegisterPriority.ShouldHaveRegister);
                    // set hint for phis
                    if (jump != null) {
                        Value out = jump.getOutgoingValue(i);
                        if (isVariable(out)) {
                            TraceInterval fromInterval = allocator.getOrCreateInterval(asVariable(out));
                            toInterval.setLocationHint(fromInterval);
                        }
                    }
                }
            }
        }

        /**
         * Extends the interval of every live-out variable to the end of {@code block} (unless an
         * interval, i.e. a later use, already exists for it).
         */
        private void handleBlockEnd(AbstractBlockBase<?> block, int opId) {
            // always alive until the end of the block
            int aliveOpId = opId + 1;
            GlobalLivenessInfo livenessInfo = allocator.getGlobalLivenessInfo();
            for (int varNum : livenessInfo.getBlockOut(block)) {
                if (allocator.intervalFor(varNum) == null) {
                    addVariableUse(livenessInfo.getVariable(varNum), 0, aliveOpId, RegisterPriority.None);
                }
            }
        }

        /**
         * Assigns op id {@code index << 1} to {@code op} (ids are even; odd positions denote the
         * gaps between instructions) and registers the op in the allocator's id maps.
         */
        private void numberInstruction(AbstractBlockBase<?> block, LIRInstruction op, int index) {
            int opId = index << 1;
            assert op.id() == -1 || op.id() == opId : "must match";
            op.setId(opId);
            allocator.putOpIdMaps(index, op, block);
            assert allocator.instructionForId(opId) == op : "must match";
        }

        /**
         * Sets hints and spill slots on phi/incoming intervals based on the out-locations of
         * predecessor blocks that belong to already-allocated traces.
         */
        @SuppressWarnings("try")
        private void addInterTraceHints() {
            try (Scope s = Debug.scope("InterTraceHints", allocator)) {
                GlobalLivenessInfo livenessInfo = allocator.getGlobalLivenessInfo();
                // set hints for phi/incoming intervals
                for (AbstractBlockBase<?> block : sortedBlocks()) {
                    LabelOp label = (LabelOp) getLIR().getLIRforBlock(block).get(0);
                    for (AbstractBlockBase<?> pred : block.getPredecessors()) {
                        addInterTraceHints(livenessInfo, pred, block, label);
                    }
                }
            } catch (Throwable e) {
                throw Debug.handle(e);
            }
        }

        private void addInterTraceHints(GlobalLivenessInfo livenessInfo, AbstractBlockBase<?> from, AbstractBlockBase<?> to, LabelOp label) {
            // only predecessors from already-allocated traces carry useful locations
            if (isAllocated(to, from)) {
                int[] liveVars = livenessInfo.getBlockIn(to);
                Value[] outLocation = livenessInfo.getOutLocation(from);

                for (int i = 0; i < liveVars.length; i++) {
                    int varNum = liveVars[i];
                    TraceInterval toInterval = allocator.intervalFor(varNum);
                    if (toInterval != null && !toInterval.hasHint()) {
                        Value fromValue = outLocation[i];
                        if (!LIRValueUtil.isConstantValue(fromValue)) {
                            addInterTraceHint(label, varNum, fromValue);
                        }
                    }
                }
            }
        }

        private void addInterTraceHint(LabelOp label, int varNum, Value fromValue) {
            assert isRegister(fromValue) || isVariable(fromValue) || isStackSlotValue(fromValue) || isShadowedRegisterValue(fromValue) : "Wrong fromValue: " + fromValue;
            TraceInterval to = allocator.intervalFor(varNum);
            if (to == null) {
                // variable not live -> do nothing
                return;
            }
            if (isVariableOrRegister(fromValue)) {
                IntervalHint from = getIntervalHint((AllocatableValue) fromValue);
                setHint(label, to, from);
            } else if (isStackSlotValue(fromValue)) {
                setSpillSlot(label, to, (AllocatableValue) fromValue);
            } else if (TraceRAshareSpillInformation.getValue(allocator.getLIR().getOptions()) && isShadowedRegisterValue(fromValue)) {
                // value lives in a register but also has a valid stack copy: use both pieces
                ShadowedRegisterValue shadowedRegisterValue = asShadowedRegisterValue(fromValue);
                IntervalHint from = getIntervalHint(shadowedRegisterValue.getRegister());
                setHint(label, to, from);
                setSpillSlot(label, to, shadowedRegisterValue.getStackSlot());
            } else {
                throw GraalError.shouldNotReachHere();
            }
        }

        private static void setHint(final LIRInstruction op, TraceInterval to, IntervalHint from) {
            IntervalHint currentHint = to.locationHint(false);
            if (currentHint == null) {
                /*
                 * Update hint if there was none or if the hint interval starts after the hinted
                 * interval.
                 */
                to.setLocationHint(from);
                if (Debug.isLogEnabled()) {
                    Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
                }
            }
        }

        private static void setSpillSlot(LIRInstruction op, TraceInterval interval, AllocatableValue spillSlot) {
            if (interval.spillSlot() == null) {
                interval.setSpillSlot(spillSlot);
                interval.setSpillState(SpillState.StartInMemory);
                if (Debug.isLogEnabled()) {
                    Debug.log("operation at opId %d: added spill slot %s to interval %s", op.id(), spillSlot, interval);
                }
            } else if (Debug.isLogEnabled()) {
                Debug.log("operation at opId %d: has already a slot assigned %s", op.id(), interval.spillSlot());
            }
        }

        /** Returns the fixed interval for a register, or the variable interval otherwise. */
        private IntervalHint getIntervalHint(AllocatableValue from) {
            if (isRegister(from)) {
                return allocator.getOrCreateFixedInterval(asRegisterValue(from));
            }
            return allocator.getOrCreateInterval(asVariable(from));
        }

    }

    /**
     * Returns a value for a interval definition, which can be used for re-materialization.
     *
     * @param op An instruction which defines a value
     * @param operand The destination operand of the instruction
     * @param interval The interval for this defined value.
     * @return Returns the value which is moved to the instruction and which can be reused at all
     *         reload-locations in case the interval of this instruction is spilled. Currently this
     *         can only be a {@link JavaConstant}.
     */
    private static JavaConstant getMaterializedValue(LIRInstruction op, Value operand, TraceInterval interval, boolean neverSpillConstants, MoveFactory spillMoveFactory) {
        if (LoadConstantOp.isLoadConstantOp(op)) {
            LoadConstantOp move = LoadConstantOp.asLoadConstantOp(op);
            if (move.getConstant() instanceof JavaConstant) {
                if (!neverSpillConstants) {
                    if (!spillMoveFactory.allowConstantToStackMove(move.getConstant())) {
                        return null;
                    }
                    /*
                     * Check if the interval has any uses which would accept an stack location
                     * (priority == ShouldHaveRegister). Rematerialization of such intervals can
                     * result in a degradation, because rematerialization always inserts a constant
                     * load, even if the value is not needed in a register.
                     */
                    int numUsePos = interval.numUsePos();
                    for (int useIdx = 0; useIdx < numUsePos; useIdx++) {
                        TraceInterval.RegisterPriority priority = interval.getUsePosRegisterPriority(useIdx);
                        if (priority == TraceInterval.RegisterPriority.ShouldHaveRegister) {
                            return null;
                        }
                    }
                }
                return (JavaConstant) move.getConstant();
            }
        }
        return null;
    }

}