1 /* 2 * Copyright (c) 2015, 2016, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 */ 23 package org.graalvm.compiler.lir.alloc.trace.lsra; 24 25 import static jdk.vm.ci.code.ValueUtil.asRegisterValue; 26 import static jdk.vm.ci.code.ValueUtil.asStackSlot; 27 import static jdk.vm.ci.code.ValueUtil.isRegister; 28 import static jdk.vm.ci.code.ValueUtil.isStackSlot; 29 import static org.graalvm.compiler.lir.LIRValueUtil.asVariable; 30 import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue; 31 import static org.graalvm.compiler.lir.LIRValueUtil.isVariable; 32 import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAshareSpillInformation; 33 import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAuseInterTraceHints; 34 import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.asShadowedRegisterValue; 35 import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.isShadowedRegisterValue; 36 import static org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.isVariableOrRegister; 37 38 import java.util.ArrayList; 39 import java.util.EnumSet; 40 41 import org.graalvm.compiler.core.common.LIRKind; 42 import org.graalvm.compiler.core.common.alloc.Trace; 43 import org.graalvm.compiler.core.common.alloc.TraceBuilderResult; 44 import org.graalvm.compiler.core.common.cfg.AbstractBlockBase; 45 import org.graalvm.compiler.debug.Debug; 46 import org.graalvm.compiler.debug.Debug.Scope; 47 import org.graalvm.compiler.debug.GraalError; 48 import org.graalvm.compiler.debug.Indent; 49 import org.graalvm.compiler.lir.InstructionValueConsumer; 50 import org.graalvm.compiler.lir.LIR; 51 import org.graalvm.compiler.lir.LIRInstruction; 52 import org.graalvm.compiler.lir.LIRInstruction.OperandFlag; 53 import org.graalvm.compiler.lir.LIRInstruction.OperandMode; 54 import org.graalvm.compiler.lir.LIRValueUtil; 55 import org.graalvm.compiler.lir.StandardOp.JumpOp; 56 import org.graalvm.compiler.lir.StandardOp.LabelOp; 57 import 
org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
import org.graalvm.compiler.lir.ValueProcedure;
import org.graalvm.compiler.lir.Variable;
import org.graalvm.compiler.lir.alloc.trace.GlobalLivenessInfo;
import org.graalvm.compiler.lir.alloc.trace.ShadowedRegisterValue;
import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.RegisterPriority;
import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.SpillState;
import org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.TraceLinearScan;
import org.graalvm.compiler.lir.gen.LIRGenerationResult;
import org.graalvm.compiler.lir.gen.LIRGeneratorTool.MoveFactory;
import org.graalvm.compiler.lir.ssa.SSAUtil;

import jdk.vm.ci.code.Register;
import jdk.vm.ci.code.RegisterArray;
import jdk.vm.ci.code.RegisterValue;
import jdk.vm.ci.code.TargetDescription;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.Value;

/**
 * Lifetime analysis phase of the trace-based linear scan register allocator. For a single
 * {@link Trace} it numbers all LIR instructions and builds the fixed and variable intervals
 * (see {@code Analyser#analyze()}) consumed by the subsequent allocation phases.
 */
public final class TraceLinearScanLifetimeAnalysisPhase extends TraceLinearScanAllocationPhase {

    @Override
    protected void run(TargetDescription target, LIRGenerationResult lirGenRes, Trace trace, TraceLinearScanAllocationContext context) {
        TraceBuilderResult traceBuilderResult = context.resultTraces;
        TraceLinearScan allocator = context.allocator;
        new Analyser(allocator, traceBuilderResult).analyze();
    }

    /**
     * Performs the analysis for one trace: instruction numbering followed by interval
     * construction.
     */
    public static final class Analyser {
        // Debug dump level at which per-block interval dumps are emitted (see buildIntervals).
        private static final int DUMP_DURING_ANALYSIS_LEVEL = 4;
        private final TraceLinearScan allocator;
        private final TraceBuilderResult traceBuilderResult;
        // Total number of LIR instructions in the trace; set by countInstructions().
        private int numInstructions;

        public Analyser(TraceLinearScan allocator, TraceBuilderResult traceBuilderResult) {
            this.allocator = allocator;
            this.traceBuilderResult = traceBuilderResult;
        }

        // Blocks of the trace in register allocation order.
        private AbstractBlockBase<?>[] sortedBlocks() {
            return allocator.sortedBlocks();
        }
private LIR getLIR() { 103 return allocator.getLIR(); 104 } 105 106 private RegisterArray getCallerSavedRegisters() { 107 return allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters(); 108 } 109 110 public void analyze() { 111 countInstructions(); 112 buildIntervals(); 113 } 114 115 private boolean isAllocated(AbstractBlockBase<?> currentBlock, AbstractBlockBase<?> other) { 116 return traceBuilderResult.getTraceForBlock(other).getId() < traceBuilderResult.getTraceForBlock(currentBlock).getId(); 117 } 118 119 /** 120 * Count instructions in all blocks. The numbering follows the 121 * {@linkplain TraceLinearScan#sortedBlocks() register allocation order}. 122 */ 123 private void countInstructions() { 124 125 allocator.initIntervals(); 126 127 int numberInstructions = 0; 128 for (AbstractBlockBase<?> block : sortedBlocks()) { 129 numberInstructions += getLIR().getLIRforBlock(block).size(); 130 } 131 numInstructions = numberInstructions; 132 133 // initialize with correct length 134 allocator.initOpIdMaps(numberInstructions); 135 } 136 137 private final InstructionValueConsumer outputConsumer = new InstructionValueConsumer() { 138 @Override 139 public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) { 140 if (isVariableOrRegister(operand)) { 141 addDef((AllocatableValue) operand, op, registerPriorityOfOutputOperand(op)); 142 addRegisterHint(op, operand, mode, flags, true); 143 } 144 } 145 }; 146 147 private final InstructionValueConsumer tempConsumer = new InstructionValueConsumer() { 148 @Override 149 public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) { 150 if (isVariableOrRegister(operand)) { 151 addTemp((AllocatableValue) operand, op.id(), RegisterPriority.MustHaveRegister); 152 addRegisterHint(op, operand, mode, flags, false); 153 } 154 } 155 }; 156 private final InstructionValueConsumer aliveConsumer = new InstructionValueConsumer() { 157 
@Override 158 public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) { 159 if (isVariableOrRegister(operand)) { 160 RegisterPriority p = registerPriorityOfInputOperand(flags); 161 int opId = op.id(); 162 int blockFrom = 0; 163 addUse((AllocatableValue) operand, blockFrom, opId + 1, p); 164 addRegisterHint(op, operand, mode, flags, false); 165 } 166 } 167 }; 168 169 private final InstructionValueConsumer inputConsumer = new InstructionValueConsumer() { 170 @Override 171 public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) { 172 if (isVariableOrRegister(operand)) { 173 int opId = op.id(); 174 RegisterPriority p = registerPriorityOfInputOperand(flags); 175 int blockFrom = 0; 176 addUse((AllocatableValue) operand, blockFrom, opId, p); 177 addRegisterHint(op, operand, mode, flags, false); 178 } 179 } 180 181 }; 182 183 private final InstructionValueConsumer stateProc = new InstructionValueConsumer() { 184 @Override 185 public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) { 186 if (isVariableOrRegister(operand)) { 187 int opId = op.id(); 188 int blockFrom = 0; 189 addUse((AllocatableValue) operand, blockFrom, opId + 1, RegisterPriority.None); 190 } 191 } 192 }; 193 194 private void addUse(AllocatableValue operand, int from, int to, RegisterPriority registerPriority) { 195 if (!allocator.isProcessed(operand)) { 196 return; 197 } 198 if (isRegister(operand)) { 199 addFixedUse(asRegisterValue(operand), from, to); 200 } else { 201 assert isVariable(operand) : operand; 202 addVariableUse(asVariable(operand), from, to, registerPriority); 203 } 204 } 205 206 private void addFixedUse(RegisterValue reg, int from, int to) { 207 FixedInterval interval = allocator.getOrCreateFixedInterval(reg); 208 interval.addRange(from, to); 209 if (Debug.isLogEnabled()) { 210 Debug.log("add fixed use: %s, at %d", interval, to); 211 } 212 } 213 214 
private void addVariableUse(Variable operand, int from, int to, RegisterPriority registerPriority) { 215 TraceInterval interval = allocator.getOrCreateInterval(operand); 216 interval.addRange(from, to); 217 218 // Register use position at even instruction id. 219 interval.addUsePos(to & ~1, registerPriority); 220 221 if (Debug.isLogEnabled()) { 222 Debug.log("add use: %s, at %d (%s)", interval, to, registerPriority.name()); 223 } 224 } 225 226 private void addDef(AllocatableValue operand, LIRInstruction op, RegisterPriority registerPriority) { 227 if (!allocator.isProcessed(operand)) { 228 return; 229 } 230 if (isRegister(operand)) { 231 addFixedDef(asRegisterValue(operand), op); 232 } else { 233 assert isVariable(operand) : operand; 234 addVariableDef(asVariable(operand), op, registerPriority); 235 } 236 } 237 238 private void addFixedDef(RegisterValue reg, LIRInstruction op) { 239 FixedInterval interval = allocator.getOrCreateFixedInterval(reg); 240 int defPos = op.id(); 241 if (interval.from() <= defPos) { 242 /* 243 * Update the starting point (when a range is first created for a use, its start is 244 * the beginning of the current block until a def is encountered). 
245 */ 246 interval.setFrom(defPos); 247 248 } else { 249 /* 250 * Dead value - make vacuous interval also add register priority for dead intervals 251 */ 252 interval.addRange(defPos, defPos + 1); 253 if (Debug.isLogEnabled()) { 254 Debug.log("Warning: def of operand %s at %d occurs without use", reg, defPos); 255 } 256 } 257 if (Debug.isLogEnabled()) { 258 Debug.log("add fixed def: %s, at %d", interval, defPos); 259 } 260 } 261 262 private TraceInterval addVariableDef(Variable operand, LIRInstruction op, RegisterPriority registerPriority) { 263 int defPos = op.id(); 264 265 TraceInterval interval = allocator.getOrCreateInterval(operand); 266 267 if (interval.isEmpty()) { 268 /* 269 * Dead value - make vacuous interval also add register priority for dead intervals 270 */ 271 interval.addRange(defPos, defPos + 1); 272 if (Debug.isLogEnabled()) { 273 Debug.log("Warning: def of operand %s at %d occurs without use", operand, defPos); 274 } 275 } else { 276 /* 277 * Update the starting point (when a range is first created for a use, its start is 278 * the beginning of the current block until a def is encountered). 
279 */ 280 interval.setFrom(defPos); 281 } 282 if (!(op instanceof LabelOp)) { 283 // no use positions for labels 284 interval.addUsePos(defPos, registerPriority); 285 } 286 287 changeSpillDefinitionPos(op, operand, interval, defPos); 288 if (registerPriority == RegisterPriority.None && interval.spillState().ordinal() <= SpillState.StartInMemory.ordinal() && isStackSlot(operand)) { 289 // detection of method-parameters and roundfp-results 290 interval.setSpillState(SpillState.StartInMemory); 291 } 292 interval.addMaterializationValue(getMaterializedValue(op, operand, interval, allocator.neverSpillConstants(), allocator.getSpillMoveFactory())); 293 294 if (Debug.isLogEnabled()) { 295 Debug.log("add def: %s defPos %d (%s)", interval, defPos, registerPriority.name()); 296 } 297 return interval; 298 } 299 300 private void addTemp(AllocatableValue operand, int tempPos, RegisterPriority registerPriority) { 301 if (!allocator.isProcessed(operand)) { 302 return; 303 } 304 if (isRegister(operand)) { 305 addFixedTemp(asRegisterValue(operand), tempPos); 306 } else { 307 assert isVariable(operand) : operand; 308 addVariableTemp(asVariable(operand), tempPos, registerPriority); 309 } 310 } 311 312 private void addFixedTemp(RegisterValue reg, int tempPos) { 313 FixedInterval interval = allocator.getOrCreateFixedInterval(reg); 314 interval.addRange(tempPos, tempPos + 1); 315 if (Debug.isLogEnabled()) { 316 Debug.log("add fixed temp: %s, at %d", interval, tempPos); 317 } 318 } 319 320 private void addVariableTemp(Variable operand, int tempPos, RegisterPriority registerPriority) { 321 TraceInterval interval = allocator.getOrCreateInterval(operand); 322 323 if (interval.isEmpty()) { 324 interval.addRange(tempPos, tempPos + 1); 325 } else if (interval.from() > tempPos) { 326 interval.setFrom(tempPos); 327 } 328 329 interval.addUsePos(tempPos, registerPriority); 330 interval.addMaterializationValue(null); 331 332 if (Debug.isLogEnabled()) { 333 Debug.log("add temp: %s tempPos %d (%s)", 
interval, tempPos, RegisterPriority.MustHaveRegister.name()); 334 } 335 } 336 337 /** 338 * Eliminates moves from register to stack if the stack slot is known to be correct. 339 * 340 * @param op 341 * @param operand 342 */ 343 private void changeSpillDefinitionPos(LIRInstruction op, AllocatableValue operand, TraceInterval interval, int defPos) { 344 assert interval.isSplitParent() : "can only be called for split parents"; 345 346 switch (interval.spillState()) { 347 case NoDefinitionFound: 348 // assert interval.spillDefinitionPos() == -1 : "must no be set before"; 349 interval.setSpillDefinitionPos(defPos); 350 if (!(op instanceof LabelOp)) { 351 // Do not update state for labels. This will be done afterwards. 352 interval.setSpillState(SpillState.NoSpillStore); 353 } 354 break; 355 356 case NoSpillStore: 357 assert defPos <= interval.spillDefinitionPos() : "positions are processed in reverse order when intervals are created"; 358 if (defPos < interval.spillDefinitionPos() - 2) { 359 /* 360 * Second definition found, so no spill optimization possible for this 361 * interval. 
362 */ 363 interval.setSpillState(SpillState.NoOptimization); 364 } else { 365 // two consecutive definitions (because of two-operand LIR form) 366 assert allocator.blockForId(defPos) == allocator.blockForId(interval.spillDefinitionPos()) : "block must be equal"; 367 } 368 break; 369 370 case NoOptimization: 371 // nothing to do 372 break; 373 374 default: 375 throw GraalError.shouldNotReachHere("other states not allowed at this time"); 376 } 377 } 378 379 private void addRegisterHint(final LIRInstruction op, final Value targetValue, OperandMode mode, EnumSet<OperandFlag> flags, final boolean hintAtDef) { 380 if (flags.contains(OperandFlag.HINT) && isVariableOrRegister(targetValue)) { 381 382 ValueProcedure registerHintProc = new ValueProcedure() { 383 @Override 384 public Value doValue(Value registerHint, OperandMode valueMode, EnumSet<OperandFlag> valueFlags) { 385 if (isVariableOrRegister(registerHint)) { 386 /* 387 * TODO (je): clean up 388 */ 389 final AllocatableValue fromValue; 390 final AllocatableValue toValue; 391 /* hints always point from def to use */ 392 if (hintAtDef) { 393 fromValue = (AllocatableValue) registerHint; 394 toValue = (AllocatableValue) targetValue; 395 } else { 396 fromValue = (AllocatableValue) targetValue; 397 toValue = (AllocatableValue) registerHint; 398 } 399 Debug.log("addRegisterHint %s to %s", fromValue, toValue); 400 final TraceInterval to; 401 final IntervalHint from; 402 if (isRegister(toValue)) { 403 if (isRegister(fromValue)) { 404 // fixed to fixed move 405 return null; 406 } 407 from = getIntervalHint(toValue); 408 to = allocator.getOrCreateInterval(fromValue); 409 } else { 410 to = allocator.getOrCreateInterval(toValue); 411 from = getIntervalHint(fromValue); 412 } 413 414 to.setLocationHint(from); 415 if (Debug.isLogEnabled()) { 416 Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to); 417 } 418 419 return registerHint; 420 } 421 return null; 422 } 423 }; 424 
op.forEachRegisterHint(targetValue, mode, registerHintProc); 425 } 426 } 427 428 private static boolean optimizeMethodArgument(Value value) { 429 /* 430 * Object method arguments that are passed on the stack are currently not optimized 431 * because this requires that the runtime visits method arguments during stack walking. 432 */ 433 return isStackSlot(value) && asStackSlot(value).isInCallerFrame() && LIRKind.isValue(value); 434 } 435 436 /** 437 * Determines the register priority for an instruction's output/result operand. 438 */ 439 private static RegisterPriority registerPriorityOfOutputOperand(LIRInstruction op) { 440 if (op instanceof LabelOp) { 441 // skip method header 442 return RegisterPriority.None; 443 } 444 if (op instanceof ValueMoveOp) { 445 ValueMoveOp move = (ValueMoveOp) op; 446 if (optimizeMethodArgument(move.getInput())) { 447 return RegisterPriority.None; 448 } 449 } 450 451 // all other operands require a register 452 return RegisterPriority.MustHaveRegister; 453 } 454 455 /** 456 * Determines the priority which with an instruction's input operand will be allocated a 457 * register. 
458 */ 459 private static RegisterPriority registerPriorityOfInputOperand(EnumSet<OperandFlag> flags) { 460 if (flags.contains(OperandFlag.OUTGOING)) { 461 return RegisterPriority.None; 462 } 463 if (flags.contains(OperandFlag.STACK)) { 464 return RegisterPriority.ShouldHaveRegister; 465 } 466 // all other operands require a register 467 return RegisterPriority.MustHaveRegister; 468 } 469 470 @SuppressWarnings("try") 471 private void buildIntervals() { 472 473 try (Indent indent = Debug.logAndIndent("build intervals")) { 474 475 // create a list with all caller-save registers (cpu, fpu, xmm) 476 RegisterArray callerSaveRegs = getCallerSavedRegisters(); 477 int instructionIndex = numInstructions; 478 479 // iterate all blocks in reverse order 480 AbstractBlockBase<?>[] blocks = sortedBlocks(); 481 for (int blockId = blocks.length - 1; blockId >= 0; blockId--) { 482 final AbstractBlockBase<?> block = blocks[blockId]; 483 484 try (Indent indent2 = Debug.logAndIndent("handle block %d", block.getId())) { 485 handleBlockEnd(block, (instructionIndex - 1) << 1); 486 487 /* 488 * Iterate all instructions of the block in reverse order. definitions of 489 * intervals are processed before uses. 490 */ 491 ArrayList<LIRInstruction> instructions = getLIR().getLIRforBlock(block); 492 for (int instIdx = instructions.size() - 1; instIdx >= 1; instIdx--) { 493 final LIRInstruction op = instructions.get(instIdx); 494 // number instruction 495 instructionIndex--; 496 numberInstruction(block, op, instructionIndex); 497 final int opId = op.id(); 498 499 try (Indent indent3 = Debug.logAndIndent("handle inst %d: %s", opId, op)) { 500 501 /* 502 * Add a temp range for each register if operation destroys 503 * caller-save registers. 
504 */ 505 if (op.destroysCallerSavedRegisters()) { 506 for (Register r : callerSaveRegs) { 507 if (allocator.attributes(r).isAllocatable()) { 508 addTemp(r.asValue(), opId, RegisterPriority.None); 509 } 510 } 511 if (Debug.isLogEnabled()) { 512 Debug.log("operation destroys all caller-save registers"); 513 } 514 } 515 516 op.visitEachOutput(outputConsumer); 517 op.visitEachTemp(tempConsumer); 518 op.visitEachAlive(aliveConsumer); 519 op.visitEachInput(inputConsumer); 520 521 /* 522 * Add uses of live locals from interpreter's point of view for 523 * proper debug information generation. Treat these operands as temp 524 * values (if the live range is extended to a call site, the value 525 * would be in a register at the call otherwise). 526 */ 527 op.visitEachState(stateProc); 528 } 529 530 } // end of instruction iteration 531 // number label instruction 532 instructionIndex--; 533 numberInstruction(block, instructions.get(0), instructionIndex); 534 AbstractBlockBase<?> pred = blockId == 0 ? null : blocks[blockId - 1]; 535 handleBlockBegin(block, pred); 536 } 537 if (Debug.isDumpEnabled(DUMP_DURING_ANALYSIS_LEVEL)) { 538 allocator.printIntervals("After Block " + block); 539 } 540 } // end of block iteration 541 assert instructionIndex == 0 : "not at start?" + instructionIndex; 542 handleTraceBegin(blocks[0]); 543 544 // fix spill state for phi/incoming intervals 545 for (TraceInterval interval : allocator.intervals()) { 546 if (interval != null && interval.spillState().equals(SpillState.NoDefinitionFound) && interval.spillDefinitionPos() != -1) { 547 // there was a definition in a phi/incoming 548 interval.setSpillState(SpillState.NoSpillStore); 549 } 550 } 551 if (TraceRAuseInterTraceHints.getValue(allocator.getLIR().getOptions())) { 552 addInterTraceHints(); 553 } 554 for (FixedInterval interval1 : allocator.fixedIntervals()) { 555 if (interval1 != null) { 556 /* We use [-1, 0] to avoid intersection with incoming values. 
*/ 557 interval1.addRange(-1, 0); 558 } 559 } 560 } 561 } 562 563 private void handleTraceBegin(AbstractBlockBase<?> block) { 564 LIRInstruction op = getLIR().getLIRforBlock(block).get(0); 565 GlobalLivenessInfo livenessInfo = allocator.getGlobalLivenessInfo(); 566 for (int varNum : livenessInfo.getBlockIn(block)) { 567 if (isAliveAtBlockBegin(varNum)) { 568 addVariableDef(livenessInfo.getVariable(varNum), op, RegisterPriority.None); 569 } 570 } 571 } 572 573 private boolean isAliveAtBlockBegin(int varNum) { 574 return allocator.intervalFor(varNum) != null; 575 } 576 577 private void handleBlockBegin(AbstractBlockBase<?> block, AbstractBlockBase<?> pred) { 578 if (SSAUtil.isMerge(block)) { 579 // handle phis 580 // method parameters are fixed later on (see end of #buildIntervals) 581 LabelOp label = SSAUtil.phiIn(getLIR(), block); 582 JumpOp jump = pred == null ? null : SSAUtil.phiOut(getLIR(), pred); 583 for (int i = 0; i < label.getPhiSize(); i++) { 584 Variable var = asVariable(label.getIncomingValue(i)); 585 TraceInterval toInterval = addVariableDef(var, label, RegisterPriority.ShouldHaveRegister); 586 // set hint for phis 587 if (jump != null) { 588 Value out = jump.getOutgoingValue(i); 589 if (isVariable(out)) { 590 TraceInterval fromInterval = allocator.getOrCreateInterval(asVariable(out)); 591 toInterval.setLocationHint(fromInterval); 592 } 593 } 594 } 595 } 596 } 597 598 private void handleBlockEnd(AbstractBlockBase<?> block, int opId) { 599 // always alive until the end of the block 600 int aliveOpId = opId + 1; 601 GlobalLivenessInfo livenessInfo = allocator.getGlobalLivenessInfo(); 602 for (int varNum : livenessInfo.getBlockOut(block)) { 603 if (allocator.intervalFor(varNum) == null) { 604 addVariableUse(livenessInfo.getVariable(varNum), 0, aliveOpId, RegisterPriority.None); 605 } 606 } 607 } 608 609 private void numberInstruction(AbstractBlockBase<?> block, LIRInstruction op, int index) { 610 int opId = index << 1; 611 assert op.id() == -1 || op.id() 
== opId : "must match"; 612 op.setId(opId); 613 allocator.putOpIdMaps(index, op, block); 614 assert allocator.instructionForId(opId) == op : "must match"; 615 } 616 617 @SuppressWarnings("try") 618 private void addInterTraceHints() { 619 try (Scope s = Debug.scope("InterTraceHints", allocator)) { 620 GlobalLivenessInfo livenessInfo = allocator.getGlobalLivenessInfo(); 621 // set hints for phi/incoming intervals 622 for (AbstractBlockBase<?> block : sortedBlocks()) { 623 LabelOp label = (LabelOp) getLIR().getLIRforBlock(block).get(0); 624 for (AbstractBlockBase<?> pred : block.getPredecessors()) { 625 addInterTraceHints(livenessInfo, pred, block, label); 626 } 627 } 628 } catch (Throwable e) { 629 throw Debug.handle(e); 630 } 631 } 632 633 private void addInterTraceHints(GlobalLivenessInfo livenessInfo, AbstractBlockBase<?> from, AbstractBlockBase<?> to, LabelOp label) { 634 if (isAllocated(to, from)) { 635 int[] liveVars = livenessInfo.getBlockIn(to); 636 Value[] outLocation = livenessInfo.getOutLocation(from); 637 638 for (int i = 0; i < liveVars.length; i++) { 639 int varNum = liveVars[i]; 640 TraceInterval toInterval = allocator.intervalFor(varNum); 641 if (toInterval != null && !toInterval.hasHint()) { 642 Value fromValue = outLocation[i]; 643 if (!LIRValueUtil.isConstantValue(fromValue)) { 644 addInterTraceHint(label, varNum, fromValue); 645 } 646 } 647 } 648 } 649 } 650 651 private void addInterTraceHint(LabelOp label, int varNum, Value fromValue) { 652 assert isRegister(fromValue) || isVariable(fromValue) || isStackSlotValue(fromValue) || isShadowedRegisterValue(fromValue) : "Wrong fromValue: " + fromValue; 653 TraceInterval to = allocator.intervalFor(varNum); 654 if (to == null) { 655 // variable not live -> do nothing 656 return; 657 } 658 if (isVariableOrRegister(fromValue)) { 659 IntervalHint from = getIntervalHint((AllocatableValue) fromValue); 660 setHint(label, to, from); 661 } else if (isStackSlotValue(fromValue)) { 662 setSpillSlot(label, to, 
(AllocatableValue) fromValue); 663 } else if (TraceRAshareSpillInformation.getValue(allocator.getLIR().getOptions()) && isShadowedRegisterValue(fromValue)) { 664 ShadowedRegisterValue shadowedRegisterValue = asShadowedRegisterValue(fromValue); 665 IntervalHint from = getIntervalHint(shadowedRegisterValue.getRegister()); 666 setHint(label, to, from); 667 setSpillSlot(label, to, shadowedRegisterValue.getStackSlot()); 668 } else { 669 throw GraalError.shouldNotReachHere(); 670 } 671 } 672 673 private static void setHint(final LIRInstruction op, TraceInterval to, IntervalHint from) { 674 IntervalHint currentHint = to.locationHint(false); 675 if (currentHint == null) { 676 /* 677 * Update hint if there was none or if the hint interval starts after the hinted 678 * interval. 679 */ 680 to.setLocationHint(from); 681 if (Debug.isLogEnabled()) { 682 Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to); 683 } 684 } 685 } 686 687 private static void setSpillSlot(LIRInstruction op, TraceInterval interval, AllocatableValue spillSlot) { 688 if (interval.spillSlot() == null) { 689 interval.setSpillSlot(spillSlot); 690 interval.setSpillState(SpillState.StartInMemory); 691 if (Debug.isLogEnabled()) { 692 Debug.log("operation at opId %d: added spill slot %s to interval %s", op.id(), spillSlot, interval); 693 } 694 } else if (Debug.isLogEnabled()) { 695 Debug.log("operation at opId %d: has already a slot assigned %s", op.id(), interval.spillSlot()); 696 } 697 } 698 699 private IntervalHint getIntervalHint(AllocatableValue from) { 700 if (isRegister(from)) { 701 return allocator.getOrCreateFixedInterval(asRegisterValue(from)); 702 } 703 return allocator.getOrCreateInterval(from); 704 } 705 706 } 707 708 /** 709 * Returns a value for a interval definition, which can be used for re-materialization. 
710 * 711 * @param op An instruction which defines a value 712 * @param operand The destination operand of the instruction 713 * @param interval The interval for this defined value. 714 * @return Returns the value which is moved to the instruction and which can be reused at all 715 * reload-locations in case the interval of this instruction is spilled. Currently this 716 * can only be a {@link JavaConstant}. 717 */ 718 private static JavaConstant getMaterializedValue(LIRInstruction op, Value operand, TraceInterval interval, boolean neverSpillConstants, MoveFactory spillMoveFactory) { 719 if (op instanceof LoadConstantOp) { 720 LoadConstantOp move = (LoadConstantOp) op; 721 if (move.getConstant() instanceof JavaConstant) { 722 if (!neverSpillConstants) { 723 if (!spillMoveFactory.allowConstantToStackMove(move.getConstant())) { 724 return null; 725 } 726 /* 727 * Check if the interval has any uses which would accept an stack location 728 * (priority == ShouldHaveRegister). Rematerialization of such intervals can 729 * result in a degradation, because rematerialization always inserts a constant 730 * load, even if the value is not needed in a register. 731 */ 732 int numUsePos = interval.numUsePos(); 733 for (int useIdx = 0; useIdx < numUsePos; useIdx++) { 734 TraceInterval.RegisterPriority priority = interval.getUsePosRegisterPriority(useIdx); 735 if (priority == TraceInterval.RegisterPriority.ShouldHaveRegister) { 736 return null; 737 } 738 } 739 } 740 return (JavaConstant) move.getConstant(); 741 } 742 } 743 return null; 744 } 745 746 }