26 import static jdk.vm.ci.code.ValueUtil.asStackSlot;
27 import static jdk.vm.ci.code.ValueUtil.isRegister;
28 import static jdk.vm.ci.code.ValueUtil.isStackSlot;
29 import static org.graalvm.compiler.lir.LIRValueUtil.asVariable;
30 import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
31 import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;
32 import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAshareSpillInformation;
33 import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAuseInterTraceHints;
34 import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.asShadowedRegisterValue;
35 import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.isShadowedRegisterValue;
36 import static org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.isVariableOrRegister;
37
38 import java.util.ArrayList;
39 import java.util.EnumSet;
40
41 import org.graalvm.compiler.core.common.LIRKind;
42 import org.graalvm.compiler.core.common.alloc.RegisterAllocationConfig;
43 import org.graalvm.compiler.core.common.alloc.Trace;
44 import org.graalvm.compiler.core.common.alloc.TraceBuilderResult;
45 import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
46 import org.graalvm.compiler.debug.Debug;
47 import org.graalvm.compiler.debug.Debug.Scope;
48 import org.graalvm.compiler.debug.GraalError;
49 import org.graalvm.compiler.debug.Indent;
50 import org.graalvm.compiler.lir.InstructionValueConsumer;
51 import org.graalvm.compiler.lir.LIR;
52 import org.graalvm.compiler.lir.LIRInstruction;
53 import org.graalvm.compiler.lir.LIRInstruction.OperandFlag;
54 import org.graalvm.compiler.lir.LIRInstruction.OperandMode;
55 import org.graalvm.compiler.lir.LIRValueUtil;
56 import org.graalvm.compiler.lir.StandardOp.JumpOp;
57 import org.graalvm.compiler.lir.StandardOp.LabelOp;
58 import org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
59 import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
60 import org.graalvm.compiler.lir.ValueProcedure;
61 import org.graalvm.compiler.lir.Variable;
62 import org.graalvm.compiler.lir.alloc.trace.GlobalLivenessInfo;
63 import org.graalvm.compiler.lir.alloc.trace.ShadowedRegisterValue;
64 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.RegisterPriority;
65 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.SpillState;
66 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.TraceLinearScan;
67 import org.graalvm.compiler.lir.gen.LIRGenerationResult;
69 import org.graalvm.compiler.lir.ssa.SSAUtil;
70
71 import jdk.vm.ci.code.Register;
72 import jdk.vm.ci.code.RegisterArray;
73 import jdk.vm.ci.code.RegisterValue;
74 import jdk.vm.ci.code.TargetDescription;
75 import jdk.vm.ci.meta.AllocatableValue;
76 import jdk.vm.ci.meta.JavaConstant;
77 import jdk.vm.ci.meta.Value;
78
79 public final class TraceLinearScanLifetimeAnalysisPhase extends TraceLinearScanAllocationPhase {
80
81 @Override
82 protected void run(TargetDescription target, LIRGenerationResult lirGenRes, Trace trace, MoveFactory spillMoveFactory, RegisterAllocationConfig registerAllocationConfig,
83 TraceBuilderResult traceBuilderResult, TraceLinearScan allocator) {
84 new Analyser(allocator, traceBuilderResult).analyze();
85 }
86
87 public static final class Analyser {
        // Allocator whose fixed and variable intervals this analysis populates.
        private final TraceLinearScan allocator;
        // Result of trace building; relates blocks to traces (used e.g. by isAllocated).
        private final TraceBuilderResult traceBuilderResult;
        // Number of LIR instructions in the trace; computed before interval construction
        // (see analyze()) and used as the starting index when numbering backwards.
        private int numInstructions;

        /**
         * @param allocator the linear-scan instance for the trace being analyzed
         * @param traceBuilderResult global trace-building result
         */
        public Analyser(TraceLinearScan allocator, TraceBuilderResult traceBuilderResult) {
            this.allocator = allocator;
            this.traceBuilderResult = traceBuilderResult;
        }
96
97 private AbstractBlockBase<?>[] sortedBlocks() {
98 return allocator.sortedBlocks();
99 }
100
101 private LIR getLIR() {
102 return allocator.getLIR();
103 }
104
105 private RegisterArray getCallerSavedRegisters() {
106 return allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters();
107 }
108
        /**
         * Entry point: first numbers all instructions of the trace, then builds the
         * fixed and variable lifetime intervals.
         */
        public void analyze() {
            countInstructions();
            buildIntervals();
        }
113
188 addUse((AllocatableValue) operand, blockFrom, opId + 1, RegisterPriority.None);
189 }
190 }
191 };
192
193 private void addUse(AllocatableValue operand, int from, int to, RegisterPriority registerPriority) {
194 if (isRegister(operand)) {
195 RegisterValue reg = asRegisterValue(operand);
196 if (allocator.isAllocatable(reg)) {
197 addFixedUse(reg, from, to);
198 }
199 } else {
200 assert isVariable(operand) : operand;
201 addVariableUse(asVariable(operand), from, to, registerPriority);
202 }
203 }
204
205 private void addFixedUse(RegisterValue reg, int from, int to) {
206 FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
207 interval.addRange(from, to);
208 if (Debug.isLogEnabled()) {
209 Debug.log("add fixed use: %s, at %d", interval, to);
210 }
211 }
212
213 private void addVariableUse(Variable operand, int from, int to, RegisterPriority registerPriority) {
214 TraceInterval interval = allocator.getOrCreateInterval(operand);
215 interval.addRange(from, to);
216
217 // Register use position at even instruction id.
218 interval.addUsePos(to & ~1, registerPriority, allocator.getOptions());
219
220 if (Debug.isLogEnabled()) {
221 Debug.log("add use: %s, at %d (%s)", interval, to, registerPriority.name());
222 }
223 }
224
225 private void addDef(AllocatableValue operand, LIRInstruction op, RegisterPriority registerPriority) {
226 if (isRegister(operand)) {
227 RegisterValue reg = asRegisterValue(operand);
228 if (allocator.isAllocatable(reg)) {
229 addFixedDef(reg, op);
230 }
231 } else {
232 assert isVariable(operand) : operand;
233 addVariableDef(asVariable(operand), op, registerPriority);
234 }
235 }
236
237 private void addFixedDef(RegisterValue reg, LIRInstruction op) {
238 FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
239 int defPos = op.id();
240 if (interval.from() <= defPos) {
241 /*
242 * Update the starting point (when a range is first created for a use, its start is
243 * the beginning of the current block until a def is encountered).
244 */
245 interval.setFrom(defPos);
246
247 } else {
248 /*
249 * Dead value - make vacuous interval also add register priority for dead intervals
250 */
251 interval.addRange(defPos, defPos + 1);
252 if (Debug.isLogEnabled()) {
253 Debug.log("Warning: def of operand %s at %d occurs without use", reg, defPos);
254 }
255 }
256 if (Debug.isLogEnabled()) {
257 Debug.log("add fixed def: %s, at %d", interval, defPos);
258 }
259 }
260
        /**
         * Records a definition of variable {@code operand} by {@code op} and returns the
         * affected interval. An empty interval (no use seen yet) gets a vacuous one-slot
         * range; otherwise the range opened at the block begin is trimmed to start at
         * the definition.
         */
        private TraceInterval addVariableDef(Variable operand, LIRInstruction op, RegisterPriority registerPriority) {
            int defPos = op.id();

            TraceInterval interval = allocator.getOrCreateInterval(operand);

            if (interval.isEmpty()) {
                /*
                 * Dead value - make vacuous interval also add register priority for dead intervals
                 */
                interval.addRange(defPos, defPos + 1);
                if (Debug.isLogEnabled()) {
                    Debug.log("Warning: def of operand %s at %d occurs without use", operand, defPos);
                }
            } else {
                /*
                 * Update the starting point (when a range is first created for a use, its start is
                 * the beginning of the current block until a def is encountered).
                 */
                interval.setFrom(defPos);
            }
            if (!(op instanceof LabelOp)) {
                // no use positions for labels
                interval.addUsePos(defPos, registerPriority, allocator.getOptions());
            }

            changeSpillDefinitionPos(op, operand, interval, defPos);
            // NOTE(review): operand is a Variable, so isStackSlot(operand) looks like it
            // is always false here - confirm whether this condition is still needed.
            if (registerPriority == RegisterPriority.None && interval.spillState().ordinal() <= SpillState.StartInMemory.ordinal() && isStackSlot(operand)) {
                // detection of method-parameters and roundfp-results
                interval.setSpillState(SpillState.StartInMemory);
            }
            // Remember a value that can be re-materialized at reload sites instead of spilling.
            interval.addMaterializationValue(getMaterializedValue(op, operand, interval, allocator.neverSpillConstants(), allocator.getSpillMoveFactory()));

            if (Debug.isLogEnabled()) {
                Debug.log("add def: %s defPos %d (%s)", interval, defPos, registerPriority.name());
            }
            return interval;
        }
298
299 private void addTemp(AllocatableValue operand, int tempPos, RegisterPriority registerPriority) {
300 if (isRegister(operand)) {
301 RegisterValue reg = asRegisterValue(operand);
302 if (allocator.isAllocatable(reg)) {
303 addFixedTemp(reg, tempPos);
304 }
305 } else {
306 assert isVariable(operand) : operand;
307 addVariableTemp(asVariable(operand), tempPos, registerPriority);
308 }
309 }
310
311 private void addFixedTemp(RegisterValue reg, int tempPos) {
312 FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
313 interval.addRange(tempPos, tempPos + 1);
314 if (Debug.isLogEnabled()) {
315 Debug.log("add fixed temp: %s, at %d", interval, tempPos);
316 }
317 }
318
319 private void addVariableTemp(Variable operand, int tempPos, RegisterPriority registerPriority) {
320 TraceInterval interval = allocator.getOrCreateInterval(operand);
321
322 if (interval.isEmpty()) {
323 interval.addRange(tempPos, tempPos + 1);
324 } else if (interval.from() > tempPos) {
325 interval.setFrom(tempPos);
326 }
327
328 interval.addUsePos(tempPos, registerPriority, allocator.getOptions());
329 interval.addMaterializationValue(null);
330
331 if (Debug.isLogEnabled()) {
332 Debug.log("add temp: %s tempPos %d (%s)", interval, tempPos, RegisterPriority.MustHaveRegister.name());
333 }
334 }
335
336 /**
337 * Eliminates moves from register to stack if the stack slot is known to be correct.
338 *
339 * @param op
340 * @param operand
341 */
342 private void changeSpillDefinitionPos(LIRInstruction op, AllocatableValue operand, TraceInterval interval, int defPos) {
343 assert interval.isSplitParent() : "can only be called for split parents";
344
345 switch (interval.spillState()) {
346 case NoDefinitionFound:
347 // assert interval.spillDefinitionPos() == -1 : "must no be set before";
348 interval.setSpillDefinitionPos(defPos);
349 if (!(op instanceof LabelOp)) {
350 // Do not update state for labels. This will be done afterwards.
351 interval.setSpillState(SpillState.NoSpillStore);
352 }
        /**
         * Installs location hints between the def and use intervals of a hinted operand
         * so the allocator can try to give both the same location. Only acts when
         * {@code flags} contains {@link OperandFlag#HINT} and the target is a variable
         * or register.
         */
        private void addRegisterHint(final LIRInstruction op, final Value targetValue, OperandMode mode, EnumSet<OperandFlag> flags, final boolean hintAtDef) {
            if (flags.contains(OperandFlag.HINT) && isVariableOrRegister(targetValue)) {

                ValueProcedure registerHintProc = new ValueProcedure() {
                    @Override
                    public Value doValue(Value registerHint, OperandMode valueMode, EnumSet<OperandFlag> valueFlags) {
                        if (isVariableOrRegister(registerHint)) {
                            /*
                             * TODO (je): clean up
                             */
                            final AllocatableValue fromValue;
                            final AllocatableValue toValue;
                            /* hints always point from def to use */
                            if (hintAtDef) {
                                fromValue = (AllocatableValue) registerHint;
                                toValue = (AllocatableValue) targetValue;
                            } else {
                                fromValue = (AllocatableValue) targetValue;
                                toValue = (AllocatableValue) registerHint;
                            }
                            Debug.log("addRegisterHint %s to %s", fromValue, toValue);
                            final TraceInterval to;
                            final IntervalHint from;
                            if (isRegister(toValue)) {
                                if (isRegister(fromValue)) {
                                    // fixed to fixed move
                                    return null;
                                }
                                // Hint target is a fixed register: instead hint the
                                // variable interval of fromValue at that register.
                                from = getIntervalHint(toValue);
                                to = allocator.getOrCreateInterval(asVariable(fromValue));
                            } else {
                                to = allocator.getOrCreateInterval(asVariable(toValue));
                                from = getIntervalHint(fromValue);
                            }

                            to.setLocationHint(from);
                            if (Debug.isLogEnabled()) {
                                Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
                            }

                            return registerHint;
                        }
                        return null;
                    }
                };
                op.forEachRegisterHint(targetValue, mode, registerHintProc);
            }
        }
426
427 private static boolean optimizeMethodArgument(Value value) {
428 /*
429 * Object method arguments that are passed on the stack are currently not optimized
430 * because this requires that the runtime visits method arguments during stack walking.
431 */
432 return isStackSlot(value) && asStackSlot(value).isInCallerFrame() && LIRKind.isValue(value);
433 }
434
435 /**
452 }
453
454 /**
455 * Determines the priority which with an instruction's input operand will be allocated a
456 * register.
457 */
458 private static RegisterPriority registerPriorityOfInputOperand(EnumSet<OperandFlag> flags) {
459 if (flags.contains(OperandFlag.OUTGOING)) {
460 return RegisterPriority.None;
461 }
462 if (flags.contains(OperandFlag.STACK)) {
463 return RegisterPriority.ShouldHaveRegister;
464 }
465 // all other operands require a register
466 return RegisterPriority.MustHaveRegister;
467 }
468
469 @SuppressWarnings("try")
470 private void buildIntervals() {
471
472 try (Indent indent = Debug.logAndIndent("build intervals")) {
473
474 // create a list with all caller-save registers (cpu, fpu, xmm)
475 RegisterArray callerSaveRegs = getCallerSavedRegisters();
476 int instructionIndex = numInstructions;
477
478 // iterate all blocks in reverse order
479 AbstractBlockBase<?>[] blocks = sortedBlocks();
480 for (int blockId = blocks.length - 1; blockId >= 0; blockId--) {
481 final AbstractBlockBase<?> block = blocks[blockId];
482
483 try (Indent indent2 = Debug.logAndIndent("handle block %d", block.getId())) {
484 handleBlockEnd(block, (instructionIndex - 1) << 1);
485
486 /*
487 * Iterate all instructions of the block in reverse order. definitions of
488 * intervals are processed before uses.
489 */
490 ArrayList<LIRInstruction> instructions = getLIR().getLIRforBlock(block);
491 for (int instIdx = instructions.size() - 1; instIdx >= 1; instIdx--) {
492 final LIRInstruction op = instructions.get(instIdx);
493 // number instruction
494 instructionIndex--;
495 numberInstruction(block, op, instructionIndex);
496 final int opId = op.id();
497
498 try (Indent indent3 = Debug.logAndIndent("handle inst %d: %s", opId, op)) {
499
500 /*
501 * Add a temp range for each register if operation destroys
502 * caller-save registers.
503 */
504 if (op.destroysCallerSavedRegisters()) {
505 for (Register r : callerSaveRegs) {
506 if (allocator.attributes(r).isAllocatable()) {
507 addTemp(r.asValue(), opId, RegisterPriority.None);
508 }
509 }
510 if (Debug.isLogEnabled()) {
511 Debug.log("operation destroys all caller-save registers");
512 }
513 }
514
515 op.visitEachOutput(outputConsumer);
516 op.visitEachTemp(tempConsumer);
517 op.visitEachAlive(aliveConsumer);
518 op.visitEachInput(inputConsumer);
519
520 /*
521 * Add uses of live locals from interpreter's point of view for
522 * proper debug information generation. Treat these operands as temp
523 * values (if the live range is extended to a call site, the value
524 * would be in a register at the call otherwise).
525 */
526 op.visitEachState(stateProc);
527 }
528
529 } // end of instruction iteration
530 // number label instruction
531 instructionIndex--;
532 numberInstruction(block, instructions.get(0), instructionIndex);
533 AbstractBlockBase<?> pred = blockId == 0 ? null : blocks[blockId - 1];
534 handleBlockBegin(block, pred);
535 }
536 if (Debug.isDumpEnabled(Debug.VERY_DETAILED_LEVEL)) {
537 allocator.printIntervals("After Block " + block);
538 }
539 } // end of block iteration
540 assert instructionIndex == 0 : "not at start?" + instructionIndex;
541 handleTraceBegin(blocks[0]);
542
543 // fix spill state for phi/incoming intervals
544 for (TraceInterval interval : allocator.intervals()) {
545 if (interval != null && interval.spillState().equals(SpillState.NoDefinitionFound) && interval.spillDefinitionPos() != -1) {
546 // there was a definition in a phi/incoming
547 interval.setSpillState(SpillState.NoSpillStore);
548 }
549 }
550 if (TraceRAuseInterTraceHints.getValue(allocator.getLIR().getOptions())) {
551 addInterTraceHints();
552 }
553 for (FixedInterval interval1 : allocator.fixedIntervals()) {
554 if (interval1 != null) {
555 /* We use [-1, 0] to avoid intersection with incoming values. */
556 interval1.addRange(-1, 0);
598 // always alive until the end of the block
599 int aliveOpId = opId + 1;
600 GlobalLivenessInfo livenessInfo = allocator.getGlobalLivenessInfo();
601 for (int varNum : livenessInfo.getBlockOut(block)) {
602 if (allocator.intervalFor(varNum) == null) {
603 addVariableUse(livenessInfo.getVariable(varNum), 0, aliveOpId, RegisterPriority.None);
604 }
605 }
606 }
607
608 private void numberInstruction(AbstractBlockBase<?> block, LIRInstruction op, int index) {
609 int opId = index << 1;
610 assert op.id() == -1 || op.id() == opId : "must match";
611 op.setId(opId);
612 allocator.putOpIdMaps(index, op, block);
613 assert allocator.instructionForId(opId) == op : "must match";
614 }
615
616 @SuppressWarnings("try")
617 private void addInterTraceHints() {
618 try (Scope s = Debug.scope("InterTraceHints", allocator)) {
619 GlobalLivenessInfo livenessInfo = allocator.getGlobalLivenessInfo();
620 // set hints for phi/incoming intervals
621 for (AbstractBlockBase<?> block : sortedBlocks()) {
622 LabelOp label = (LabelOp) getLIR().getLIRforBlock(block).get(0);
623 for (AbstractBlockBase<?> pred : block.getPredecessors()) {
624 addInterTraceHints(livenessInfo, pred, block, label);
625 }
626 }
627 } catch (Throwable e) {
628 throw Debug.handle(e);
629 }
630 }
631
632 private void addInterTraceHints(GlobalLivenessInfo livenessInfo, AbstractBlockBase<?> from, AbstractBlockBase<?> to, LabelOp label) {
633 if (isAllocated(to, from)) {
634 int[] liveVars = livenessInfo.getBlockIn(to);
635 Value[] outLocation = livenessInfo.getOutLocation(from);
636
637 for (int i = 0; i < liveVars.length; i++) {
638 int varNum = liveVars[i];
639 TraceInterval toInterval = allocator.intervalFor(varNum);
640 if (toInterval != null && !toInterval.hasHint()) {
641 Value fromValue = outLocation[i];
642 if (!LIRValueUtil.isConstantValue(fromValue)) {
643 addInterTraceHint(label, varNum, fromValue);
644 }
645 }
646 }
647 }
648 }
649
        /**
         * Installs a location hint (and possibly a spill slot) on the interval of
         * {@code varNum}, derived from the value's location at the end of the
         * predecessor trace.
         */
        private void addInterTraceHint(LabelOp label, int varNum, Value fromValue) {
            assert isRegister(fromValue) || isVariable(fromValue) || isStackSlotValue(fromValue) || isShadowedRegisterValue(fromValue) : "Wrong fromValue: " + fromValue;
            TraceInterval to = allocator.intervalFor(varNum);
            if (to == null) {
                // variable not live -> do nothing
                return;
            }
            if (isVariableOrRegister(fromValue)) {
                IntervalHint from = getIntervalHint((AllocatableValue) fromValue);
                setHint(label, to, from);
            } else if (isStackSlotValue(fromValue)) {
                setSpillSlot(label, to, (AllocatableValue) fromValue);
            } else if (TraceRAshareSpillInformation.getValue(allocator.getLIR().getOptions()) && isShadowedRegisterValue(fromValue)) {
                // A shadowed register carries both a register and a stack slot: use the
                // register as the location hint and the slot as the spill slot.
                ShadowedRegisterValue shadowedRegisterValue = asShadowedRegisterValue(fromValue);
                IntervalHint from = getIntervalHint(shadowedRegisterValue.getRegister());
                setHint(label, to, from);
                setSpillSlot(label, to, shadowedRegisterValue.getStackSlot());
            } else {
                throw GraalError.shouldNotReachHere();
            }
        }
671
        /**
         * Sets {@code from} as the location hint of {@code to} if {@code to} has no hint
         * yet; an existing hint is never overwritten here (first hint wins).
         */
        private static void setHint(final LIRInstruction op, TraceInterval to, IntervalHint from) {
            IntervalHint currentHint = to.locationHint(false);
            if (currentHint == null) {
                to.setLocationHint(from);
                if (Debug.isLogEnabled()) {
                    Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
                }
            }
        }
685
686 private static void setSpillSlot(LIRInstruction op, TraceInterval interval, AllocatableValue spillSlot) {
687 if (interval.spillSlot() == null) {
688 interval.setSpillSlot(spillSlot);
689 interval.setSpillState(SpillState.StartInMemory);
690 if (Debug.isLogEnabled()) {
691 Debug.log("operation at opId %d: added spill slot %s to interval %s", op.id(), spillSlot, interval);
692 }
693 } else if (Debug.isLogEnabled()) {
694 Debug.log("operation at opId %d: has already a slot assigned %s", op.id(), interval.spillSlot());
695 }
696 }
697
698 private IntervalHint getIntervalHint(AllocatableValue from) {
699 if (isRegister(from)) {
700 return allocator.getOrCreateFixedInterval(asRegisterValue(from));
701 }
702 return allocator.getOrCreateInterval(asVariable(from));
703 }
704
705 }
706
707 /**
708 * Returns a value for a interval definition, which can be used for re-materialization.
709 *
710 * @param op An instruction which defines a value
711 * @param operand The destination operand of the instruction
712 * @param interval The interval for this defined value.
713 * @return Returns the value which is moved to the instruction and which can be reused at all
714 * reload-locations in case the interval of this instruction is spilled. Currently this
|
26 import static jdk.vm.ci.code.ValueUtil.asStackSlot;
27 import static jdk.vm.ci.code.ValueUtil.isRegister;
28 import static jdk.vm.ci.code.ValueUtil.isStackSlot;
29 import static org.graalvm.compiler.lir.LIRValueUtil.asVariable;
30 import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
31 import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;
32 import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAshareSpillInformation;
33 import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAuseInterTraceHints;
34 import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.asShadowedRegisterValue;
35 import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.isShadowedRegisterValue;
36 import static org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.isVariableOrRegister;
37
38 import java.util.ArrayList;
39 import java.util.EnumSet;
40
41 import org.graalvm.compiler.core.common.LIRKind;
42 import org.graalvm.compiler.core.common.alloc.RegisterAllocationConfig;
43 import org.graalvm.compiler.core.common.alloc.Trace;
44 import org.graalvm.compiler.core.common.alloc.TraceBuilderResult;
45 import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
46 import org.graalvm.compiler.debug.DebugContext;
47 import org.graalvm.compiler.debug.GraalError;
48 import org.graalvm.compiler.debug.Indent;
49 import org.graalvm.compiler.lir.InstructionValueConsumer;
50 import org.graalvm.compiler.lir.LIR;
51 import org.graalvm.compiler.lir.LIRInstruction;
52 import org.graalvm.compiler.lir.LIRInstruction.OperandFlag;
53 import org.graalvm.compiler.lir.LIRInstruction.OperandMode;
54 import org.graalvm.compiler.lir.LIRValueUtil;
55 import org.graalvm.compiler.lir.StandardOp.JumpOp;
56 import org.graalvm.compiler.lir.StandardOp.LabelOp;
57 import org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
58 import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
59 import org.graalvm.compiler.lir.ValueProcedure;
60 import org.graalvm.compiler.lir.Variable;
61 import org.graalvm.compiler.lir.alloc.trace.GlobalLivenessInfo;
62 import org.graalvm.compiler.lir.alloc.trace.ShadowedRegisterValue;
63 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.RegisterPriority;
64 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.SpillState;
65 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.TraceLinearScan;
66 import org.graalvm.compiler.lir.gen.LIRGenerationResult;
68 import org.graalvm.compiler.lir.ssa.SSAUtil;
69
70 import jdk.vm.ci.code.Register;
71 import jdk.vm.ci.code.RegisterArray;
72 import jdk.vm.ci.code.RegisterValue;
73 import jdk.vm.ci.code.TargetDescription;
74 import jdk.vm.ci.meta.AllocatableValue;
75 import jdk.vm.ci.meta.JavaConstant;
76 import jdk.vm.ci.meta.Value;
77
78 public final class TraceLinearScanLifetimeAnalysisPhase extends TraceLinearScanAllocationPhase {
79
80 @Override
81 protected void run(TargetDescription target, LIRGenerationResult lirGenRes, Trace trace, MoveFactory spillMoveFactory, RegisterAllocationConfig registerAllocationConfig,
82 TraceBuilderResult traceBuilderResult, TraceLinearScan allocator) {
83 new Analyser(allocator, traceBuilderResult).analyze();
84 }
85
86 public static final class Analyser {
        // Allocator whose fixed and variable intervals this analysis populates.
        private final TraceLinearScan allocator;
        // Debug context of the allocator; used for scoped logging throughout.
        private final DebugContext debug;
        // Result of trace building; relates blocks to traces (used e.g. by isAllocated).
        private final TraceBuilderResult traceBuilderResult;
        // Number of LIR instructions in the trace; computed before interval construction
        // (see analyze()) and used as the starting index when numbering backwards.
        private int numInstructions;

        /**
         * @param allocator the linear-scan instance for the trace being analyzed
         * @param traceBuilderResult global trace-building result
         */
        public Analyser(TraceLinearScan allocator, TraceBuilderResult traceBuilderResult) {
            this.allocator = allocator;
            this.debug = allocator.getDebug();
            this.traceBuilderResult = traceBuilderResult;
        }
97
98 private AbstractBlockBase<?>[] sortedBlocks() {
99 return allocator.sortedBlocks();
100 }
101
102 private LIR getLIR() {
103 return allocator.getLIR();
104 }
105
106 private RegisterArray getCallerSavedRegisters() {
107 return allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters();
108 }
109
        /**
         * Entry point: first numbers all instructions of the trace, then builds the
         * fixed and variable lifetime intervals.
         */
        public void analyze() {
            countInstructions();
            buildIntervals();
        }
114
189 addUse((AllocatableValue) operand, blockFrom, opId + 1, RegisterPriority.None);
190 }
191 }
192 };
193
194 private void addUse(AllocatableValue operand, int from, int to, RegisterPriority registerPriority) {
195 if (isRegister(operand)) {
196 RegisterValue reg = asRegisterValue(operand);
197 if (allocator.isAllocatable(reg)) {
198 addFixedUse(reg, from, to);
199 }
200 } else {
201 assert isVariable(operand) : operand;
202 addVariableUse(asVariable(operand), from, to, registerPriority);
203 }
204 }
205
206 private void addFixedUse(RegisterValue reg, int from, int to) {
207 FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
208 interval.addRange(from, to);
209 if (debug.isLogEnabled()) {
210 debug.log("add fixed use: %s, at %d", interval, to);
211 }
212 }
213
214 private void addVariableUse(Variable operand, int from, int to, RegisterPriority registerPriority) {
215 TraceInterval interval = allocator.getOrCreateInterval(operand);
216 interval.addRange(from, to);
217
218 // Register use position at even instruction id.
219 interval.addUsePos(to & ~1, registerPriority, allocator.getOptions());
220
221 if (debug.isLogEnabled()) {
222 debug.log("add use: %s, at %d (%s)", interval, to, registerPriority.name());
223 }
224 }
225
226 private void addDef(AllocatableValue operand, LIRInstruction op, RegisterPriority registerPriority) {
227 if (isRegister(operand)) {
228 RegisterValue reg = asRegisterValue(operand);
229 if (allocator.isAllocatable(reg)) {
230 addFixedDef(reg, op);
231 }
232 } else {
233 assert isVariable(operand) : operand;
234 addVariableDef(asVariable(operand), op, registerPriority);
235 }
236 }
237
238 private void addFixedDef(RegisterValue reg, LIRInstruction op) {
239 FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
240 int defPos = op.id();
241 if (interval.from() <= defPos) {
242 /*
243 * Update the starting point (when a range is first created for a use, its start is
244 * the beginning of the current block until a def is encountered).
245 */
246 interval.setFrom(defPos);
247
248 } else {
249 /*
250 * Dead value - make vacuous interval also add register priority for dead intervals
251 */
252 interval.addRange(defPos, defPos + 1);
253 if (debug.isLogEnabled()) {
254 debug.log("Warning: def of operand %s at %d occurs without use", reg, defPos);
255 }
256 }
257 if (debug.isLogEnabled()) {
258 debug.log("add fixed def: %s, at %d", interval, defPos);
259 }
260 }
261
        /**
         * Records a definition of variable {@code operand} by {@code op} and returns the
         * affected interval. An empty interval (no use seen yet) gets a vacuous one-slot
         * range; otherwise the range opened at the block begin is trimmed to start at
         * the definition.
         */
        private TraceInterval addVariableDef(Variable operand, LIRInstruction op, RegisterPriority registerPriority) {
            int defPos = op.id();

            TraceInterval interval = allocator.getOrCreateInterval(operand);

            if (interval.isEmpty()) {
                /*
                 * Dead value - make vacuous interval also add register priority for dead intervals
                 */
                interval.addRange(defPos, defPos + 1);
                if (debug.isLogEnabled()) {
                    debug.log("Warning: def of operand %s at %d occurs without use", operand, defPos);
                }
            } else {
                /*
                 * Update the starting point (when a range is first created for a use, its start is
                 * the beginning of the current block until a def is encountered).
                 */
                interval.setFrom(defPos);
            }
            if (!(op instanceof LabelOp)) {
                // no use positions for labels
                interval.addUsePos(defPos, registerPriority, allocator.getOptions());
            }

            changeSpillDefinitionPos(op, operand, interval, defPos);
            // NOTE(review): operand is a Variable, so isStackSlot(operand) looks like it
            // is always false here - confirm whether this condition is still needed.
            if (registerPriority == RegisterPriority.None && interval.spillState().ordinal() <= SpillState.StartInMemory.ordinal() && isStackSlot(operand)) {
                // detection of method-parameters and roundfp-results
                interval.setSpillState(SpillState.StartInMemory);
            }
            // Remember a value that can be re-materialized at reload sites instead of spilling.
            interval.addMaterializationValue(getMaterializedValue(op, operand, interval, allocator.neverSpillConstants(), allocator.getSpillMoveFactory()));

            if (debug.isLogEnabled()) {
                debug.log("add def: %s defPos %d (%s)", interval, defPos, registerPriority.name());
            }
            return interval;
        }
299
300 private void addTemp(AllocatableValue operand, int tempPos, RegisterPriority registerPriority) {
301 if (isRegister(operand)) {
302 RegisterValue reg = asRegisterValue(operand);
303 if (allocator.isAllocatable(reg)) {
304 addFixedTemp(reg, tempPos);
305 }
306 } else {
307 assert isVariable(operand) : operand;
308 addVariableTemp(asVariable(operand), tempPos, registerPriority);
309 }
310 }
311
312 private void addFixedTemp(RegisterValue reg, int tempPos) {
313 FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
314 interval.addRange(tempPos, tempPos + 1);
315 if (debug.isLogEnabled()) {
316 debug.log("add fixed temp: %s, at %d", interval, tempPos);
317 }
318 }
319
320 private void addVariableTemp(Variable operand, int tempPos, RegisterPriority registerPriority) {
321 TraceInterval interval = allocator.getOrCreateInterval(operand);
322
323 if (interval.isEmpty()) {
324 interval.addRange(tempPos, tempPos + 1);
325 } else if (interval.from() > tempPos) {
326 interval.setFrom(tempPos);
327 }
328
329 interval.addUsePos(tempPos, registerPriority, allocator.getOptions());
330 interval.addMaterializationValue(null);
331
332 if (debug.isLogEnabled()) {
333 debug.log("add temp: %s tempPos %d (%s)", interval, tempPos, RegisterPriority.MustHaveRegister.name());
334 }
335 }
336
337 /**
338 * Eliminates moves from register to stack if the stack slot is known to be correct.
339 *
340 * @param op
341 * @param operand
342 */
343 private void changeSpillDefinitionPos(LIRInstruction op, AllocatableValue operand, TraceInterval interval, int defPos) {
344 assert interval.isSplitParent() : "can only be called for split parents";
345
346 switch (interval.spillState()) {
347 case NoDefinitionFound:
348 // assert interval.spillDefinitionPos() == -1 : "must no be set before";
349 interval.setSpillDefinitionPos(defPos);
350 if (!(op instanceof LabelOp)) {
351 // Do not update state for labels. This will be done afterwards.
352 interval.setSpillState(SpillState.NoSpillStore);
353 }
        /**
         * Installs location hints between the def and use intervals of a hinted operand
         * so the allocator can try to give both the same location. Only acts when
         * {@code flags} contains {@link OperandFlag#HINT} and the target is a variable
         * or register.
         */
        private void addRegisterHint(final LIRInstruction op, final Value targetValue, OperandMode mode, EnumSet<OperandFlag> flags, final boolean hintAtDef) {
            if (flags.contains(OperandFlag.HINT) && isVariableOrRegister(targetValue)) {

                ValueProcedure registerHintProc = new ValueProcedure() {
                    @Override
                    public Value doValue(Value registerHint, OperandMode valueMode, EnumSet<OperandFlag> valueFlags) {
                        if (isVariableOrRegister(registerHint)) {
                            /*
                             * TODO (je): clean up
                             */
                            final AllocatableValue fromValue;
                            final AllocatableValue toValue;
                            /* hints always point from def to use */
                            if (hintAtDef) {
                                fromValue = (AllocatableValue) registerHint;
                                toValue = (AllocatableValue) targetValue;
                            } else {
                                fromValue = (AllocatableValue) targetValue;
                                toValue = (AllocatableValue) registerHint;
                            }
                            debug.log("addRegisterHint %s to %s", fromValue, toValue);
                            final TraceInterval to;
                            final IntervalHint from;
                            if (isRegister(toValue)) {
                                if (isRegister(fromValue)) {
                                    // fixed to fixed move
                                    return null;
                                }
                                // Hint target is a fixed register: instead hint the
                                // variable interval of fromValue at that register.
                                from = getIntervalHint(toValue);
                                to = allocator.getOrCreateInterval(asVariable(fromValue));
                            } else {
                                to = allocator.getOrCreateInterval(asVariable(toValue));
                                from = getIntervalHint(fromValue);
                            }

                            to.setLocationHint(from);
                            if (debug.isLogEnabled()) {
                                debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
                            }

                            return registerHint;
                        }
                        return null;
                    }
                };
                op.forEachRegisterHint(targetValue, mode, registerHintProc);
            }
        }
427
428 private static boolean optimizeMethodArgument(Value value) {
429 /*
430 * Object method arguments that are passed on the stack are currently not optimized
431 * because this requires that the runtime visits method arguments during stack walking.
432 */
433 return isStackSlot(value) && asStackSlot(value).isInCallerFrame() && LIRKind.isValue(value);
434 }
435
436 /**
453 }
454
455 /**
456 * Determines the priority which with an instruction's input operand will be allocated a
457 * register.
458 */
459 private static RegisterPriority registerPriorityOfInputOperand(EnumSet<OperandFlag> flags) {
460 if (flags.contains(OperandFlag.OUTGOING)) {
461 return RegisterPriority.None;
462 }
463 if (flags.contains(OperandFlag.STACK)) {
464 return RegisterPriority.ShouldHaveRegister;
465 }
466 // all other operands require a register
467 return RegisterPriority.MustHaveRegister;
468 }
469
470 @SuppressWarnings("try")
471 private void buildIntervals() {
472
473 try (Indent indent = debug.logAndIndent("build intervals")) {
474
475 // create a list with all caller-save registers (cpu, fpu, xmm)
476 RegisterArray callerSaveRegs = getCallerSavedRegisters();
477 int instructionIndex = numInstructions;
478
479 // iterate all blocks in reverse order
480 AbstractBlockBase<?>[] blocks = sortedBlocks();
481 for (int blockId = blocks.length - 1; blockId >= 0; blockId--) {
482 final AbstractBlockBase<?> block = blocks[blockId];
483
484 try (Indent indent2 = debug.logAndIndent("handle block %d", block.getId())) {
485 handleBlockEnd(block, (instructionIndex - 1) << 1);
486
487 /*
488 * Iterate all instructions of the block in reverse order. definitions of
489 * intervals are processed before uses.
490 */
491 ArrayList<LIRInstruction> instructions = getLIR().getLIRforBlock(block);
492 for (int instIdx = instructions.size() - 1; instIdx >= 1; instIdx--) {
493 final LIRInstruction op = instructions.get(instIdx);
494 // number instruction
495 instructionIndex--;
496 numberInstruction(block, op, instructionIndex);
497 final int opId = op.id();
498
499 try (Indent indent3 = debug.logAndIndent("handle inst %d: %s", opId, op)) {
500
501 /*
502 * Add a temp range for each register if operation destroys
503 * caller-save registers.
504 */
505 if (op.destroysCallerSavedRegisters()) {
506 for (Register r : callerSaveRegs) {
507 if (allocator.attributes(r).isAllocatable()) {
508 addTemp(r.asValue(), opId, RegisterPriority.None);
509 }
510 }
511 if (debug.isLogEnabled()) {
512 debug.log("operation destroys all caller-save registers");
513 }
514 }
515
516 op.visitEachOutput(outputConsumer);
517 op.visitEachTemp(tempConsumer);
518 op.visitEachAlive(aliveConsumer);
519 op.visitEachInput(inputConsumer);
520
521 /*
522 * Add uses of live locals from interpreter's point of view for
523 * proper debug information generation. Treat these operands as temp
524 * values (if the live range is extended to a call site, the value
525 * would be in a register at the call otherwise).
526 */
527 op.visitEachState(stateProc);
528 }
529
530 } // end of instruction iteration
531 // number label instruction
532 instructionIndex--;
533 numberInstruction(block, instructions.get(0), instructionIndex);
534 AbstractBlockBase<?> pred = blockId == 0 ? null : blocks[blockId - 1];
535 handleBlockBegin(block, pred);
536 }
537 if (debug.isDumpEnabled(DebugContext.VERY_DETAILED_LEVEL)) {
538 allocator.printIntervals("After Block " + block);
539 }
540 } // end of block iteration
541 assert instructionIndex == 0 : "not at start?" + instructionIndex;
542 handleTraceBegin(blocks[0]);
543
544 // fix spill state for phi/incoming intervals
545 for (TraceInterval interval : allocator.intervals()) {
546 if (interval != null && interval.spillState().equals(SpillState.NoDefinitionFound) && interval.spillDefinitionPos() != -1) {
547 // there was a definition in a phi/incoming
548 interval.setSpillState(SpillState.NoSpillStore);
549 }
550 }
551 if (TraceRAuseInterTraceHints.getValue(allocator.getLIR().getOptions())) {
552 addInterTraceHints();
553 }
554 for (FixedInterval interval1 : allocator.fixedIntervals()) {
555 if (interval1 != null) {
556 /* We use [-1, 0] to avoid intersection with incoming values. */
557 interval1.addRange(-1, 0);
599 // always alive until the end of the block
600 int aliveOpId = opId + 1;
601 GlobalLivenessInfo livenessInfo = allocator.getGlobalLivenessInfo();
602 for (int varNum : livenessInfo.getBlockOut(block)) {
603 if (allocator.intervalFor(varNum) == null) {
604 addVariableUse(livenessInfo.getVariable(varNum), 0, aliveOpId, RegisterPriority.None);
605 }
606 }
607 }
608
609 private void numberInstruction(AbstractBlockBase<?> block, LIRInstruction op, int index) {
610 int opId = index << 1;
611 assert op.id() == -1 || op.id() == opId : "must match";
612 op.setId(opId);
613 allocator.putOpIdMaps(index, op, block);
614 assert allocator.instructionForId(opId) == op : "must match";
615 }
616
617 @SuppressWarnings("try")
618 private void addInterTraceHints() {
619 try (DebugContext.Scope s = debug.scope("InterTraceHints", allocator)) {
620 GlobalLivenessInfo livenessInfo = allocator.getGlobalLivenessInfo();
621 // set hints for phi/incoming intervals
622 for (AbstractBlockBase<?> block : sortedBlocks()) {
623 LabelOp label = (LabelOp) getLIR().getLIRforBlock(block).get(0);
624 for (AbstractBlockBase<?> pred : block.getPredecessors()) {
625 addInterTraceHints(livenessInfo, pred, block, label);
626 }
627 }
628 } catch (Throwable e) {
629 throw debug.handle(e);
630 }
631 }
632
633 private void addInterTraceHints(GlobalLivenessInfo livenessInfo, AbstractBlockBase<?> from, AbstractBlockBase<?> to, LabelOp label) {
634 if (isAllocated(to, from)) {
635 int[] liveVars = livenessInfo.getBlockIn(to);
636 Value[] outLocation = livenessInfo.getOutLocation(from);
637
638 for (int i = 0; i < liveVars.length; i++) {
639 int varNum = liveVars[i];
640 TraceInterval toInterval = allocator.intervalFor(varNum);
641 if (toInterval != null && !toInterval.hasHint()) {
642 Value fromValue = outLocation[i];
643 if (!LIRValueUtil.isConstantValue(fromValue)) {
644 addInterTraceHint(label, varNum, fromValue);
645 }
646 }
647 }
648 }
649 }
650
651 private void addInterTraceHint(LabelOp label, int varNum, Value fromValue) {
652 assert isRegister(fromValue) || isVariable(fromValue) || isStackSlotValue(fromValue) || isShadowedRegisterValue(fromValue) : "Wrong fromValue: " + fromValue;
653 TraceInterval to = allocator.intervalFor(varNum);
654 if (to == null) {
655 // variable not live -> do nothing
656 return;
657 }
658 if (isVariableOrRegister(fromValue)) {
659 IntervalHint from = getIntervalHint((AllocatableValue) fromValue);
660 setHint(label, to, from, debug);
661 } else if (isStackSlotValue(fromValue)) {
662 setSpillSlot(label, to, (AllocatableValue) fromValue, debug);
663 } else if (TraceRAshareSpillInformation.getValue(allocator.getLIR().getOptions()) && isShadowedRegisterValue(fromValue)) {
664 ShadowedRegisterValue shadowedRegisterValue = asShadowedRegisterValue(fromValue);
665 IntervalHint from = getIntervalHint(shadowedRegisterValue.getRegister());
666 setHint(label, to, from, debug);
667 setSpillSlot(label, to, shadowedRegisterValue.getStackSlot(), debug);
668 } else {
669 throw GraalError.shouldNotReachHere();
670 }
671 }
672
673 private static void setHint(final LIRInstruction op, TraceInterval to, IntervalHint from, DebugContext debug) {
674 IntervalHint currentHint = to.locationHint(false);
675 if (currentHint == null) {
676 /*
677 * Update hint if there was none or if the hint interval starts after the hinted
678 * interval.
679 */
680 to.setLocationHint(from);
681 if (debug.isLogEnabled()) {
682 debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
683 }
684 }
685 }
686
687 private static void setSpillSlot(LIRInstruction op, TraceInterval interval, AllocatableValue spillSlot, DebugContext debug) {
688 if (interval.spillSlot() == null) {
689 interval.setSpillSlot(spillSlot);
690 interval.setSpillState(SpillState.StartInMemory);
691 if (debug.isLogEnabled()) {
692 debug.log("operation at opId %d: added spill slot %s to interval %s", op.id(), spillSlot, interval);
693 }
694 } else if (debug.isLogEnabled()) {
695 debug.log("operation at opId %d: has already a slot assigned %s", op.id(), interval.spillSlot());
696 }
697 }
698
699 private IntervalHint getIntervalHint(AllocatableValue from) {
700 if (isRegister(from)) {
701 return allocator.getOrCreateFixedInterval(asRegisterValue(from));
702 }
703 return allocator.getOrCreateInterval(asVariable(from));
704 }
705
706 }
707
708 /**
     * Returns a value for an interval definition, which can be used for re-materialization.
710 *
711 * @param op An instruction which defines a value
712 * @param operand The destination operand of the instruction
713 * @param interval The interval for this defined value.
714 * @return Returns the value which is moved to the instruction and which can be reused at all
715 * reload-locations in case the interval of this instruction is spilled. Currently this
|