/*
 * Copyright (c) 2015, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package org.graalvm.compiler.lir.alloc.trace.lsra;

import static org.graalvm.compiler.lir.LIRValueUtil.asVariable;
import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;
import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAshareSpillInformation;
import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAuseInterTraceHints;
import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.asShadowedRegisterValue;
import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.isShadowedRegisterValue;
import static org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.isVariableOrRegister;
import static jdk.vm.ci.code.ValueUtil.asRegisterValue;
import static jdk.vm.ci.code.ValueUtil.asStackSlot;
import static jdk.vm.ci.code.ValueUtil.isRegister;
import static jdk.vm.ci.code.ValueUtil.isStackSlot;

import java.util.EnumSet;
import java.util.List;
import java.util.ListIterator;

import org.graalvm.compiler.core.common.LIRKind;
import org.graalvm.compiler.core.common.alloc.Trace;
import org.graalvm.compiler.core.common.alloc.TraceBuilderResult;
import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
import org.graalvm.compiler.debug.Debug;
import org.graalvm.compiler.debug.Debug.Scope;
import org.graalvm.compiler.debug.GraalError;
import org.graalvm.compiler.debug.Indent;
import org.graalvm.compiler.lir.InstructionValueConsumer;
import org.graalvm.compiler.lir.LIR;
import org.graalvm.compiler.lir.LIRInstruction;
import org.graalvm.compiler.lir.LIRInstruction.OperandFlag;
import org.graalvm.compiler.lir.LIRInstruction.OperandMode;
import org.graalvm.compiler.lir.LIRValueUtil;
import org.graalvm.compiler.lir.StandardOp.BlockEndOp;
import org.graalvm.compiler.lir.StandardOp.LabelOp;
import org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
import org.graalvm.compiler.lir.ValueProcedure;
import org.graalvm.compiler.lir.Variable;
import org.graalvm.compiler.lir.alloc.trace.ShadowedRegisterValue;
import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.RegisterPriority;
import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.SpillState;
import org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.TraceLinearScan;
import org.graalvm.compiler.lir.gen.LIRGenerationResult;
import org.graalvm.compiler.lir.gen.LIRGeneratorTool.MoveFactory;
import org.graalvm.compiler.lir.ssi.SSIUtil;

import jdk.vm.ci.code.Register;
import jdk.vm.ci.code.RegisterArray;
import jdk.vm.ci.code.RegisterValue;
import jdk.vm.ci.code.TargetDescription;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.Value;

/**
 * Lifetime-analysis phase of the trace-local linear-scan register allocator: numbers all LIR
 * instructions of the trace and builds the fixed and variable live intervals
 * ({@link FixedInterval}/{@link TraceInterval}) that the subsequent allocation phases operate on.
 */
public final class TraceLinearScanLifetimeAnalysisPhase extends TraceLinearScanAllocationPhase {

    @Override
    protected void run(TargetDescription target, LIRGenerationResult lirGenRes, Trace trace, TraceLinearScanAllocationContext context) {
        TraceBuilderResult traceBuilderResult = context.resultTraces;
        TraceLinearScan allocator = context.allocator;
        new Analyser(allocator, traceBuilderResult).analyze();
    }

    /**
     * Stateful worker that performs the analysis for a single trace: first counts and numbers the
     * instructions, then walks the blocks in reverse to build intervals.
     */
    public static final class Analyser {
        // Minimum debug level at which per-block interval dumps are emitted.
        private static final int DUMP_DURING_ANALYSIS_LEVEL = 4;
        private final TraceLinearScan allocator;
        private final TraceBuilderResult traceBuilderResult;
        // Total number of LIR instructions in all blocks of this trace; set by countInstructions().
        private int numInstructions;

        public Analyser(TraceLinearScan allocator, TraceBuilderResult traceBuilderResult) {
            this.allocator = allocator;
            this.traceBuilderResult = traceBuilderResult;
        }

        private AbstractBlockBase<?>[] sortedBlocks() {
            return allocator.sortedBlocks();
        }

        private LIR getLIR() {
            return allocator.getLIR();
        }

        private RegisterArray getCallerSavedRegisters() {
            return allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters();
        }

        /** Entry point: numbers instructions, then constructs the intervals. */
        public void analyze() {
            countInstructions();
            buildIntervals();
        }

        /** Returns {@code true} if both blocks belong to the same trace. */
        private boolean sameTrace(AbstractBlockBase<?> a, AbstractBlockBase<?> b) {
            return traceBuilderResult.getTraceForBlock(b) == traceBuilderResult.getTraceForBlock(a);
        }

        /**
         * Returns {@code true} if {@code other}'s trace has already been allocated (lower trace id)
         * or is the trace currently being allocated.
         */
        private boolean isAllocatedOrCurrent(AbstractBlockBase<?> currentBlock, AbstractBlockBase<?> other) {
            return traceBuilderResult.getTraceForBlock(other).getId() <= traceBuilderResult.getTraceForBlock(currentBlock).getId();
        }

        /**
         * Count instructions in all blocks. The numbering follows the
         * {@linkplain TraceLinearScan#sortedBlocks() register allocation order}.
         */
        private void countInstructions() {

            allocator.initIntervals();

            int numberInstructions = 0;
            for (AbstractBlockBase<?> block : sortedBlocks()) {
                numberInstructions += getLIR().getLIRforBlock(block).size();
            }
            numInstructions = numberInstructions;

            // initialize with correct length
            allocator.initOpIdMaps(numberInstructions);
        }

        /** Visits output (def) operands: starts/updates the interval and records a def-side hint. */
        private final InstructionValueConsumer outputConsumer = new InstructionValueConsumer() {
            @Override
            public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
                if (isVariableOrRegister(operand)) {
                    addDef((AllocatableValue) operand, op, registerPriorityOfOutputOperand(op));
                    addRegisterHint(op, operand, mode, flags, true);
                }
            }
        };

        /** Visits temp operands: live only for the single instruction, must be in a register. */
        private final InstructionValueConsumer tempConsumer = new InstructionValueConsumer() {
            @Override
            public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
                if (isVariableOrRegister(operand)) {
                    addTemp((AllocatableValue) operand, op.id(), RegisterPriority.MustHaveRegister);
                    addRegisterHint(op, operand, mode, flags, false);
                }
            }
        };

        /**
         * Visits alive operands: the use extends one position past the instruction ({@code opId + 1})
         * because the value must survive the instruction itself.
         */
        private final InstructionValueConsumer aliveConsumer = new InstructionValueConsumer() {
            @Override
            public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
                if (isVariableOrRegister(operand)) {
                    RegisterPriority p = registerPriorityOfInputOperand(flags);
                    int opId = op.id();
                    int blockFrom = 0;
                    addUse((AllocatableValue) operand, blockFrom, opId + 1, p);
                    addRegisterHint(op, operand, mode, flags, false);
                }
            }
        };

        /** Visits input operands: the use ends at the instruction itself ({@code opId}). */
        private final InstructionValueConsumer inputConsumer = new InstructionValueConsumer() {
            @Override
            public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
                if (isVariableOrRegister(operand)) {
                    int opId = op.id();
                    RegisterPriority p = registerPriorityOfInputOperand(flags);
                    int blockFrom = 0;
                    addUse((AllocatableValue) operand, blockFrom, opId, p);
                    addRegisterHint(op, operand, mode, flags, false);
                }
            }

        };

        /**
         * Visits state (debug-info) operands: extends liveness past the instruction with priority
         * {@link RegisterPriority#None} (no register required, value only needs to be accessible).
         */
        private final InstructionValueConsumer stateProc = new InstructionValueConsumer() {
            @Override
            public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
                if (isVariableOrRegister(operand)) {
                    int opId = op.id();
                    int blockFrom = 0;
                    addUse((AllocatableValue) operand, blockFrom, opId + 1, RegisterPriority.None);
                }
            }
        };

        /**
         * Records a use of {@code operand} in the range {@code [from, to)}, dispatching to the
         * fixed (register) or variable interval representation.
         */
        private void addUse(AllocatableValue operand, int from, int to, RegisterPriority registerPriority) {
            if (!allocator.isProcessed(operand)) {
                return;
            }
            if (isRegister(operand)) {
                addFixedUse(asRegisterValue(operand), from, to);
            } else {
                assert isVariable(operand) : operand;
                addVariableUse(asVariable(operand), from, to, registerPriority);
            }
        }

        private void addFixedUse(RegisterValue reg, int from, int to) {
            FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
            interval.addRange(from, to);
            if (Debug.isLogEnabled()) {
                Debug.log("add fixed use: %s, at %d", interval, to);
            }
        }

        private void addVariableUse(Variable operand, int from, int to, RegisterPriority registerPriority) {
            TraceInterval interval = allocator.getOrCreateInterval(operand);
            interval.addRange(from, to);

            // Register use position at even instruction id.
            interval.addUsePos(to & ~1, registerPriority);

            if (Debug.isLogEnabled()) {
                Debug.log("add use: %s, at %d (%s)", interval, to, registerPriority.name());
            }
        }

        /**
         * Records a definition of {@code operand} by {@code op}, dispatching to the fixed or
         * variable interval representation.
         */
        private void addDef(AllocatableValue operand, LIRInstruction op, RegisterPriority registerPriority) {
            if (!allocator.isProcessed(operand)) {
                return;
            }
            if (isRegister(operand)) {
                addFixedDef(asRegisterValue(operand), op);
            } else {
                assert isVariable(operand) : operand;
                addVariableDef(asVariable(operand), op, registerPriority);
            }
        }

        private void addFixedDef(RegisterValue reg, LIRInstruction op) {
            FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
            int defPos = op.id();
            if (interval.from() <= defPos) {
                /*
                 * Update the starting point (when a range is first created for a use, its start is
                 * the beginning of the current block until a def is encountered).
                 */
                interval.setFrom(defPos);

            } else {
                /*
                 * Dead value - make vacuous interval also add register priority for dead intervals
                 */
                interval.addRange(defPos, defPos + 1);
                if (Debug.isLogEnabled()) {
                    Debug.log("Warning: def of operand %s at %d occurs without use", reg, defPos);
                }
            }
            if (Debug.isLogEnabled()) {
                Debug.log("add fixed def: %s, at %d", interval, defPos);
            }
        }

        private void addVariableDef(Variable operand, LIRInstruction op, RegisterPriority registerPriority) {
            int defPos = op.id();

            TraceInterval interval = allocator.getOrCreateInterval(operand);

            if (interval.isEmpty()) {
                /*
                 * Dead value - make vacuous interval also add register priority for dead intervals
                 */
                interval.addRange(defPos, defPos + 1);
                if (Debug.isLogEnabled()) {
                    Debug.log("Warning: def of operand %s at %d occurs without use", operand, defPos);
                }
            } else {
                /*
                 * Update the starting point (when a range is first created for a use, its start is
                 * the beginning of the current block until a def is encountered).
                 */
                interval.setFrom(defPos);
            }
            if (!(op instanceof LabelOp)) {
                // no use positions for labels
                interval.addUsePos(defPos, registerPriority);
            }

            changeSpillDefinitionPos(op, operand, interval, defPos);
            // NOTE(review): operand is a Variable here, so isStackSlot(operand) looks like it can
            // never be true and this branch appears dead — confirm intended.
            if (registerPriority == RegisterPriority.None && interval.spillState().ordinal() <= SpillState.StartInMemory.ordinal() && isStackSlot(operand)) {
                // detection of method-parameters and roundfp-results
                interval.setSpillState(SpillState.StartInMemory);
            }
            interval.addMaterializationValue(getMaterializedValue(op, operand, interval, allocator.neverSpillConstants(), allocator.getSpillMoveFactory()));

            if (Debug.isLogEnabled()) {
                Debug.log("add def: %s defPos %d (%s)", interval, defPos, registerPriority.name());
            }
        }

        /**
         * Records a temp occupancy of {@code operand} for the single position {@code tempPos}.
         */
        private void addTemp(AllocatableValue operand, int tempPos, RegisterPriority registerPriority) {
            if (!allocator.isProcessed(operand)) {
                return;
            }
            if (isRegister(operand)) {
                addFixedTemp(asRegisterValue(operand), tempPos);
            } else {
                assert isVariable(operand) : operand;
                addVariableTemp(asVariable(operand), tempPos, registerPriority);
            }
        }

        private void addFixedTemp(RegisterValue reg, int tempPos) {
            FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
            interval.addRange(tempPos, tempPos + 1);
            if (Debug.isLogEnabled()) {
                Debug.log("add fixed temp: %s, at %d", interval, tempPos);
            }
        }

        private void addVariableTemp(Variable operand, int tempPos, RegisterPriority registerPriority) {
            TraceInterval interval = allocator.getOrCreateInterval(operand);

            if (interval.isEmpty()) {
                interval.addRange(tempPos, tempPos + 1);
            } else if (interval.from() > tempPos) {
                interval.setFrom(tempPos);
            }

            interval.addUsePos(tempPos, registerPriority);
            // Temps cannot be rematerialized from a constant.
            interval.addMaterializationValue(null);

            if (Debug.isLogEnabled()) {
                // NOTE(review): this logs MustHaveRegister.name() rather than the actual
                // registerPriority parameter — confirm whether the log message is intended.
                Debug.log("add temp: %s tempPos %d (%s)", interval, tempPos, RegisterPriority.MustHaveRegister.name());
            }
        }

        /**
         * Eliminates moves from register to stack if the stack slot is known to be correct.
         *
         * Tracks the spill-definition position of {@code interval} and advances its
         * {@link SpillState} accordingly (definitions are visited in reverse order).
         *
         * @param op the defining instruction
         * @param operand the defined operand
         */
        private void changeSpillDefinitionPos(LIRInstruction op, AllocatableValue operand, TraceInterval interval, int defPos) {
            assert interval.isSplitParent() : "can only be called for split parents";

            switch (interval.spillState()) {
                case NoDefinitionFound:
                    // assert interval.spillDefinitionPos() == -1 : "must no be set before";
                    interval.setSpillDefinitionPos(defPos);
                    if (!(op instanceof LabelOp)) {
                        // Do not update state for labels. This will be done afterwards.
                        interval.setSpillState(SpillState.NoSpillStore);
                    }
                    break;

                case NoSpillStore:
                    assert defPos <= interval.spillDefinitionPos() : "positions are processed in reverse order when intervals are created";
                    if (defPos < interval.spillDefinitionPos() - 2) {
                        /*
                         * Second definition found, so no spill optimization possible for this
                         * interval.
                         */
                        interval.setSpillState(SpillState.NoOptimization);
                    } else {
                        // two consecutive definitions (because of two-operand LIR form)
                        assert allocator.blockForId(defPos) == allocator.blockForId(interval.spillDefinitionPos()) : "block must be equal";
                    }
                    break;

                case NoOptimization:
                    // nothing to do
                    break;

                default:
                    throw GraalError.shouldNotReachHere("other states not allowed at this time");
            }
        }

        /**
         * If {@code targetValue} carries the {@link OperandFlag#HINT} flag, connects its interval
         * with the interval of the hinted value so the allocator can try to assign both the same
         * location. Hints always point from the def to the use.
         *
         * @param hintAtDef {@code true} if {@code targetValue} is the defined (output) operand
         */
        private void addRegisterHint(final LIRInstruction op, final Value targetValue, OperandMode mode, EnumSet<OperandFlag> flags, final boolean hintAtDef) {
            if (flags.contains(OperandFlag.HINT) && isVariableOrRegister(targetValue)) {

                ValueProcedure registerHintProc = new ValueProcedure() {
                    @Override
                    public Value doValue(Value registerHint, OperandMode valueMode, EnumSet<OperandFlag> valueFlags) {
                        if (isVariableOrRegister(registerHint)) {
                            /*
                             * TODO (je): clean up
                             */
                            final AllocatableValue fromValue;
                            final AllocatableValue toValue;
                            /* hints always point from def to use */
                            if (hintAtDef) {
                                fromValue = (AllocatableValue) registerHint;
                                toValue = (AllocatableValue) targetValue;
                            } else {
                                fromValue = (AllocatableValue) targetValue;
                                toValue = (AllocatableValue) registerHint;
                            }
                            Debug.log("addRegisterHint %s to %s", fromValue, toValue);
                            final TraceInterval to;
                            final IntervalHint from;
                            if (isRegister(toValue)) {
                                if (isRegister(fromValue)) {
                                    // fixed to fixed move
                                    return null;
                                }
                                // Hint target is a fixed register: attach the hint to the variable
                                // side instead (roles of from/to are swapped deliberately).
                                from = getIntervalHint(toValue);
                                to = allocator.getOrCreateInterval(fromValue);
                            } else {
                                to = allocator.getOrCreateInterval(toValue);
                                from = getIntervalHint(fromValue);
                            }

                            to.setLocationHint(from);
                            if (Debug.isLogEnabled()) {
                                Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
                            }

                            return registerHint;
                        }
                        return null;
                    }
                };
                op.forEachRegisterHint(targetValue, mode, registerHintProc);
            }
        }

        private static boolean optimizeMethodArgument(Value value) {
            /*
             * Object method arguments that are passed on the stack are currently not optimized
             * because this requires that the runtime visits method arguments during stack walking.
             */
            return isStackSlot(value) && asStackSlot(value).isInCallerFrame() && LIRKind.isValue(value);
        }

        /**
         * Determines the register priority for an instruction's output/result operand.
         */
        private static RegisterPriority registerPriorityOfOutputOperand(LIRInstruction op) {
            if (op instanceof LabelOp) {
                // skip method header
                return RegisterPriority.None;
            }
            if (op instanceof ValueMoveOp) {
                ValueMoveOp move = (ValueMoveOp) op;
                if (optimizeMethodArgument(move.getInput())) {
                    return RegisterPriority.None;
                }
            }

            // all other operands require a register
            return RegisterPriority.MustHaveRegister;
        }

        /**
         * Determines the priority which with an instruction's input operand will be allocated a
         * register.
         */
        private static RegisterPriority registerPriorityOfInputOperand(EnumSet<OperandFlag> flags) {
            if (flags.contains(OperandFlag.OUTGOING)) {
                return RegisterPriority.None;
            }
            if (flags.contains(OperandFlag.STACK)) {
                return RegisterPriority.ShouldHaveRegister;
            }
            // all other operands require a register
            return RegisterPriority.MustHaveRegister;
        }

        /**
         * Builds all intervals for the trace by walking blocks and instructions in reverse order,
         * so that definitions are seen before (i.e. after, in program order) their uses.
         */
        @SuppressWarnings("try")
        private void buildIntervals() {

            try (Indent indent = Debug.logAndIndent("build intervals")) {

                // create a list with all caller-save registers (cpu, fpu, xmm)
                RegisterArray callerSaveRegs = getCallerSavedRegisters();
                int instructionIndex = numInstructions;

                // iterate all blocks in reverse order
                AbstractBlockBase<?>[] blocks = sortedBlocks();
                for (int i = blocks.length - 1; i >= 0; i--) {
                    final AbstractBlockBase<?> block = blocks[i];

                    try (Indent indent2 = Debug.logAndIndent("handle block %d", block.getId())) {

                        /*
                         * Iterate all instructions of the block in reverse order. definitions of
                         * intervals are processed before uses.
                         */
                        List<LIRInstruction> instructions = getLIR().getLIRforBlock(block);
                        ListIterator<LIRInstruction> instIt = instructions.listIterator(instructions.size());
                        while (instIt.hasPrevious()) {
                            final LIRInstruction op = instIt.previous();
                            // number instruction
                            instructionIndex--;
                            // opIds are even (shifted left by one) so that odd positions can
                            // denote "between instructions".
                            final int opId = instructionIndex << 1;
                            numberInstruction(block, op, instructionIndex);

                            try (Indent indent3 = Debug.logAndIndent("handle inst %d: %s", opId, op)) {

                                /*
                                 * Add a temp range for each register if operation destroys
                                 * caller-save registers.
                                 */
                                if (op.destroysCallerSavedRegisters()) {
                                    for (Register r : callerSaveRegs) {
                                        if (allocator.attributes(r).isAllocatable()) {
                                            addTemp(r.asValue(), opId, RegisterPriority.None);
                                        }
                                    }
                                    if (Debug.isLogEnabled()) {
                                        Debug.log("operation destroys all caller-save registers");
                                    }
                                }

                                op.visitEachOutput(outputConsumer);
                                op.visitEachTemp(tempConsumer);
                                op.visitEachAlive(aliveConsumer);
                                op.visitEachInput(inputConsumer);

                                /*
                                 * Add uses of live locals from interpreter's point of view for
                                 * proper debug information generation. Treat these operands as temp
                                 * values (if the live range is extended to a call site, the value
                                 * would be in a register at the call otherwise).
                                 */
                                op.visitEachState(stateProc);
                            }

                        } // end of instruction iteration
                    }
                    if (Debug.isDumpEnabled(DUMP_DURING_ANALYSIS_LEVEL)) {
                        allocator.printIntervals("After Block " + block);
                    }
                } // end of block iteration
                assert instructionIndex == 0 : "not at start?" + instructionIndex;

                // fix spill state for phi/sigma intervals
                for (TraceInterval interval : allocator.intervals()) {
                    if (interval != null && interval.spillState().equals(SpillState.NoDefinitionFound) && interval.spillDefinitionPos() != -1) {
                        // there was a definition in a phi/sigma
                        interval.setSpillState(SpillState.NoSpillStore);
                    }
                }
                if (TraceRAuseInterTraceHints.getValue()) {
                    addInterTraceHints();
                }
                for (FixedInterval interval1 : allocator.fixedIntervals()) {
                    if (interval1 != null) {
                        /* We use [-1, 0] to avoid intersection with incoming values. */
                        interval1.addRange(-1, 0);
                    }
                }
            }
        }

        /**
         * Assigns {@code op} its opId ({@code index << 1}) and records the instruction/block in the
         * allocator's opId maps.
         */
        private void numberInstruction(AbstractBlockBase<?> block, LIRInstruction op, int index) {
            int opId = index << 1;
            assert op.id() == -1 || op.id() == opId : "must match";
            op.setId(opId);
            allocator.putOpIdMaps(index, op, block);
            assert allocator.instructionForId(opId) == op : "must match";
        }

        /**
         * Propagates location hints across trace boundaries: for each block, connect incoming
         * (label) values with the corresponding outgoing values of already-allocated predecessor
         * traces.
         */
        @SuppressWarnings("try")
        private void addInterTraceHints() {
            try (Scope s = Debug.scope("InterTraceHints", allocator)) {
                // set hints for phi/sigma intervals
                for (AbstractBlockBase<?> block : sortedBlocks()) {
                    LabelOp label = SSIUtil.incoming(getLIR(), block);
                    for (AbstractBlockBase<?> pred : block.getPredecessors()) {
                        if (isAllocatedOrCurrent(block, pred)) {
                            BlockEndOp outgoing = SSIUtil.outgoing(getLIR(), pred);
                            // do not look at phi variables as they are not same value!
                            for (int i = outgoing.getPhiSize(); i < outgoing.getOutgoingSize(); i++) {
                                Value toValue = label.getIncomingValue(i);
                                assert !isShadowedRegisterValue(toValue) : "Shadowed Registers are not allowed here: " + toValue;
                                if (isVariable(toValue)) {
                                    Value fromValue = outgoing.getOutgoingValue(i);
                                    assert sameTrace(block, pred) || !isVariable(fromValue) : "Unallocated variable: " + fromValue;
                                    if (!LIRValueUtil.isConstantValue(fromValue)) {
                                        addInterTraceHint(label, (AllocatableValue) toValue, fromValue);
                                    }
                                }
                            }
                        }
                    }
                }
            } catch (Throwable e) {
                throw Debug.handle(e);
            }
        }

        /**
         * Adds a single inter-trace hint from {@code fromValue} (the predecessor's outgoing value)
         * to the interval of {@code toValue}; stack slots and shadowed registers additionally seed
         * the spill slot.
         */
        private void addInterTraceHint(LabelOp label, AllocatableValue toValue, Value fromValue) {
            assert isVariable(toValue) : "Wrong toValue: " + toValue;
            assert isRegister(fromValue) || isVariable(fromValue) || isStackSlotValue(fromValue) || isShadowedRegisterValue(fromValue) : "Wrong fromValue: " + fromValue;
            TraceInterval to = allocator.getOrCreateInterval(toValue);
            if (isVariableOrRegister(fromValue)) {
                IntervalHint from = getIntervalHint((AllocatableValue) fromValue);
                setHint(label, to, from);
            } else if (isStackSlotValue(fromValue)) {
                setSpillSlot(label, to, (AllocatableValue) fromValue);
            } else if (TraceRAshareSpillInformation.getValue() && isShadowedRegisterValue(fromValue)) {
                ShadowedRegisterValue shadowedRegisterValue = asShadowedRegisterValue(fromValue);
                IntervalHint from = getIntervalHint(shadowedRegisterValue.getRegister());
                setHint(label, to, from);
                setSpillSlot(label, to, shadowedRegisterValue.getStackSlot());
            } else {
                throw GraalError.shouldNotReachHere();
            }
        }

        private static void setHint(final LIRInstruction op, TraceInterval to, IntervalHint from) {
            IntervalHint currentHint = to.locationHint(false);
            if (currentHint == null) {
                /*
                 * Update hint if there was none or if the hint interval starts after the hinted
                 * interval.
                 */
                to.setLocationHint(from);
                if (Debug.isLogEnabled()) {
                    Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
                }
            }
        }

        private static void setSpillSlot(LIRInstruction op, TraceInterval interval, AllocatableValue spillSlot) {
            if (interval.spillSlot() == null) {
                interval.setSpillSlot(spillSlot);
                interval.setSpillState(SpillState.StartInMemory);
                if (Debug.isLogEnabled()) {
                    Debug.log("operation at opId %d: added spill slot %s to interval %s", op.id(), spillSlot, interval);
                }
            } else if (Debug.isLogEnabled()) {
                Debug.log("operation at opId %d: has already a slot assigned %s", op.id(), interval.spillSlot());
            }
        }

        /** Returns the fixed interval for a register, or the variable interval otherwise. */
        private IntervalHint getIntervalHint(AllocatableValue from) {
            if (isRegister(from)) {
                return allocator.getOrCreateFixedInterval(asRegisterValue(from));
            }
            return allocator.getOrCreateInterval(from);
        }

    }

    /**
     * Returns a value for a interval definition, which can be used for re-materialization.
     *
     * @param op An instruction which defines a value
     * @param operand The destination operand of the instruction
     * @param interval The interval for this defined value.
     * @return Returns the value which is moved to the instruction and which can be reused at all
     *         reload-locations in case the interval of this instruction is spilled. Currently this
     *         can only be a {@link JavaConstant}.
     */
    private static JavaConstant getMaterializedValue(LIRInstruction op, Value operand, TraceInterval interval, boolean neverSpillConstants, MoveFactory spillMoveFactory) {
        if (op instanceof LoadConstantOp) {
            LoadConstantOp move = (LoadConstantOp) op;
            if (move.getConstant() instanceof JavaConstant) {
                if (!neverSpillConstants) {
                    if (!spillMoveFactory.allowConstantToStackMove(move.getConstant())) {
                        return null;
                    }
                    /*
                     * Check if the interval has any uses which would accept an stack location
                     * (priority == ShouldHaveRegister). Rematerialization of such intervals can
                     * result in a degradation, because rematerialization always inserts a constant
                     * load, even if the value is not needed in a register.
                     */
                    int numUsePos = interval.numUsePos();
                    for (int useIdx = 0; useIdx < numUsePos; useIdx++) {
                        TraceInterval.RegisterPriority priority = interval.getUsePosRegisterPriority(useIdx);
                        if (priority == TraceInterval.RegisterPriority.ShouldHaveRegister) {
                            return null;
                        }
                    }
                }
                return (JavaConstant) move.getConstant();
            }
        }
        return null;
    }

}