5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 */
23 package org.graalvm.compiler.lir.alloc.trace.lsra;
24
25 import static org.graalvm.compiler.lir.LIRValueUtil.asVariable;
26 import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
27 import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;
28 import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAshareSpillInformation;
29 import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAuseInterTraceHints;
30 import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.asShadowedRegisterValue;
31 import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.isShadowedRegisterValue;
32 import static org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.isVariableOrRegister;
33 import static jdk.vm.ci.code.ValueUtil.asRegisterValue;
34 import static jdk.vm.ci.code.ValueUtil.asStackSlot;
35 import static jdk.vm.ci.code.ValueUtil.isRegister;
36 import static jdk.vm.ci.code.ValueUtil.isStackSlot;
37
38 import java.util.EnumSet;
39 import java.util.List;
40 import java.util.ListIterator;
41
42 import org.graalvm.compiler.core.common.LIRKind;
43 import org.graalvm.compiler.core.common.alloc.Trace;
44 import org.graalvm.compiler.core.common.alloc.TraceBuilderResult;
45 import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
46 import org.graalvm.compiler.debug.Debug;
47 import org.graalvm.compiler.debug.Debug.Scope;
48 import org.graalvm.compiler.debug.GraalError;
49 import org.graalvm.compiler.debug.Indent;
50 import org.graalvm.compiler.lir.InstructionValueConsumer;
51 import org.graalvm.compiler.lir.LIR;
52 import org.graalvm.compiler.lir.LIRInstruction;
53 import org.graalvm.compiler.lir.LIRInstruction.OperandFlag;
54 import org.graalvm.compiler.lir.LIRInstruction.OperandMode;
55 import org.graalvm.compiler.lir.LIRValueUtil;
56 import org.graalvm.compiler.lir.StandardOp.BlockEndOp;
57 import org.graalvm.compiler.lir.StandardOp.LabelOp;
58 import org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
59 import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
60 import org.graalvm.compiler.lir.ValueProcedure;
61 import org.graalvm.compiler.lir.Variable;
62 import org.graalvm.compiler.lir.alloc.trace.ShadowedRegisterValue;
63 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.RegisterPriority;
64 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.SpillState;
65 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.TraceLinearScan;
66 import org.graalvm.compiler.lir.gen.LIRGenerationResult;
67 import org.graalvm.compiler.lir.gen.LIRGeneratorTool.MoveFactory;
68 import org.graalvm.compiler.lir.ssi.SSIUtil;
69
70 import jdk.vm.ci.code.Register;
71 import jdk.vm.ci.code.RegisterArray;
72 import jdk.vm.ci.code.RegisterValue;
73 import jdk.vm.ci.code.TargetDescription;
74 import jdk.vm.ci.meta.AllocatableValue;
75 import jdk.vm.ci.meta.JavaConstant;
76 import jdk.vm.ci.meta.Value;
77
78 public final class TraceLinearScanLifetimeAnalysisPhase extends TraceLinearScanAllocationPhase {
79
80 @Override
81 protected void run(TargetDescription target, LIRGenerationResult lirGenRes, Trace trace, TraceLinearScanAllocationContext context) {
82 TraceBuilderResult traceBuilderResult = context.resultTraces;
83 TraceLinearScan allocator = context.allocator;
84 new Analyser(allocator, traceBuilderResult).analyze();
85 }
86
87 public static final class Analyser {
88 private static final int DUMP_DURING_ANALYSIS_LEVEL = 4;
95 this.traceBuilderResult = traceBuilderResult;
96 }
97
98 private AbstractBlockBase<?>[] sortedBlocks() {
99 return allocator.sortedBlocks();
100 }
101
102 private LIR getLIR() {
103 return allocator.getLIR();
104 }
105
106 private RegisterArray getCallerSavedRegisters() {
107 return allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters();
108 }
109
        /**
         * Performs the lifetime analysis: numbers all instructions, then builds intervals.
         */
        public void analyze() {
            // Numbering must precede interval construction: buildIntervals relies on opId maps.
            countInstructions();
            buildIntervals();
        }
114
115 private boolean sameTrace(AbstractBlockBase<?> a, AbstractBlockBase<?> b) {
116 return traceBuilderResult.getTraceForBlock(b) == traceBuilderResult.getTraceForBlock(a);
117 }
118
119 private boolean isAllocatedOrCurrent(AbstractBlockBase<?> currentBlock, AbstractBlockBase<?> other) {
120 return traceBuilderResult.getTraceForBlock(other).getId() <= traceBuilderResult.getTraceForBlock(currentBlock).getId();
121 }
122
123 /**
124 * Count instructions in all blocks. The numbering follows the
125 * {@linkplain TraceLinearScan#sortedBlocks() register allocation order}.
126 */
127 private void countInstructions() {
128
129 allocator.initIntervals();
130
131 int numberInstructions = 0;
132 for (AbstractBlockBase<?> block : sortedBlocks()) {
133 numberInstructions += getLIR().getLIRforBlock(block).size();
134 }
135 numInstructions = numberInstructions;
136
137 // initialize with correct length
138 allocator.initOpIdMaps(numberInstructions);
139 }
140
246 /*
247 * Update the starting point (when a range is first created for a use, its start is
248 * the beginning of the current block until a def is encountered).
249 */
250 interval.setFrom(defPos);
251
252 } else {
253 /*
254 * Dead value - make vacuous interval also add register priority for dead intervals
255 */
256 interval.addRange(defPos, defPos + 1);
257 if (Debug.isLogEnabled()) {
258 Debug.log("Warning: def of operand %s at %d occurs without use", reg, defPos);
259 }
260 }
261 if (Debug.isLogEnabled()) {
262 Debug.log("add fixed def: %s, at %d", interval, defPos);
263 }
264 }
265
        /**
         * Records a definition of {@code operand} by {@code op} into the variable's interval.
         *
         * @param operand the defined variable
         * @param op the defining instruction; its id is the definition position
         * @param registerPriority how urgently this definition needs a register
         */
        private void addVariableDef(Variable operand, LIRInstruction op, RegisterPriority registerPriority) {
            int defPos = op.id();

            TraceInterval interval = allocator.getOrCreateInterval(operand);

            if (interval.isEmpty()) {
                /*
                 * Dead value - make a vacuous one-position interval; also record the register
                 * priority for dead intervals.
                 */
                interval.addRange(defPos, defPos + 1);
                if (Debug.isLogEnabled()) {
                    Debug.log("Warning: def of operand %s at %d occurs without use", operand, defPos);
                }
            } else {
                /*
                 * Update the starting point (when a range is first created for a use, its start is
                 * the beginning of the current block until a def is encountered).
                 */
                interval.setFrom(defPos);
            }
            if (!(op instanceof LabelOp)) {
                // no use positions for labels
                interval.addUsePos(defPos, registerPriority);
            }

            // Track where a spill store could be placed for this definition.
            changeSpillDefinitionPos(op, operand, interval, defPos);
            if (registerPriority == RegisterPriority.None && interval.spillState().ordinal() <= SpillState.StartInMemory.ordinal() && isStackSlot(operand)) {
                // detection of method-parameters and roundfp-results
                interval.setSpillState(SpillState.StartInMemory);
            }
            // Remember a rematerializable constant (if any) instead of forcing a spill.
            interval.addMaterializationValue(getMaterializedValue(op, operand, interval, allocator.neverSpillConstants(), allocator.getSpillMoveFactory()));

            if (Debug.isLogEnabled()) {
                Debug.log("add def: %s defPos %d (%s)", interval, defPos, registerPriority.name());
            }
        }
302
303 private void addTemp(AllocatableValue operand, int tempPos, RegisterPriority registerPriority) {
304 if (!allocator.isProcessed(operand)) {
305 return;
306 }
307 if (isRegister(operand)) {
308 addFixedTemp(asRegisterValue(operand), tempPos);
309 } else {
310 assert isVariable(operand) : operand;
311 addVariableTemp(asVariable(operand), tempPos, registerPriority);
312 }
313 }
314
315 private void addFixedTemp(RegisterValue reg, int tempPos) {
316 FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
317 interval.addRange(tempPos, tempPos + 1);
318 if (Debug.isLogEnabled()) {
319 Debug.log("add fixed temp: %s, at %d", interval, tempPos);
320 }
464 return RegisterPriority.None;
465 }
466 if (flags.contains(OperandFlag.STACK)) {
467 return RegisterPriority.ShouldHaveRegister;
468 }
469 // all other operands require a register
470 return RegisterPriority.MustHaveRegister;
471 }
472
        /**
         * Builds fixed and variable lifetime intervals by walking all blocks and instructions of
         * the trace in reverse, so that every definition is processed before its uses.
         */
        @SuppressWarnings("try")
        private void buildIntervals() {

            try (Indent indent = Debug.logAndIndent("build intervals")) {

                // create a list with all caller-save registers (cpu, fpu, xmm)
                RegisterArray callerSaveRegs = getCallerSavedRegisters();
                // Running counter; decremented before each instruction so ids end at 0.
                int instructionIndex = numInstructions;

                // iterate all blocks in reverse order
                AbstractBlockBase<?>[] blocks = sortedBlocks();
                for (int i = blocks.length - 1; i >= 0; i--) {
                    final AbstractBlockBase<?> block = blocks[i];

                    try (Indent indent2 = Debug.logAndIndent("handle block %d", block.getId())) {

                        /*
                         * Iterate all instructions of the block in reverse order. definitions of
                         * intervals are processed before uses.
                         */
                        List<LIRInstruction> instructions = getLIR().getLIRforBlock(block);
                        ListIterator<LIRInstruction> instIt = instructions.listIterator(instructions.size());
                        while (instIt.hasPrevious()) {
                            final LIRInstruction op = instIt.previous();
                            // number instruction: opIds are even (index << 1)
                            instructionIndex--;
                            final int opId = instructionIndex << 1;
                            numberInstruction(block, op, instructionIndex);

                            try (Indent indent3 = Debug.logAndIndent("handle inst %d: %s", opId, op)) {

                                /*
                                 * Add a temp range for each register if operation destroys
                                 * caller-save registers.
                                 */
                                if (op.destroysCallerSavedRegisters()) {
                                    for (Register r : callerSaveRegs) {
                                        if (allocator.attributes(r).isAllocatable()) {
                                            addTemp(r.asValue(), opId, RegisterPriority.None);
                                        }
                                    }
                                    if (Debug.isLogEnabled()) {
                                        Debug.log("operation destroys all caller-save registers");
                                    }
                                }

                                // Defs before uses: outputs and temps first, then alive/input.
                                op.visitEachOutput(outputConsumer);
                                op.visitEachTemp(tempConsumer);
                                op.visitEachAlive(aliveConsumer);
                                op.visitEachInput(inputConsumer);

                                /*
                                 * Add uses of live locals from interpreter's point of view for
                                 * proper debug information generation. Treat these operands as temp
                                 * values (if the live range is extended to a call site, the value
                                 * would be in a register at the call otherwise).
                                 */
                                op.visitEachState(stateProc);
                            }

                        } // end of instruction iteration
                    }
                    if (Debug.isDumpEnabled(DUMP_DURING_ANALYSIS_LEVEL)) {
                        allocator.printIntervals("After Block " + block);
                    }
                } // end of block iteration
                assert instructionIndex == 0 : "not at start?" + instructionIndex;

                // fix spill state for phi/sigma intervals
                for (TraceInterval interval : allocator.intervals()) {
                    if (interval != null && interval.spillState().equals(SpillState.NoDefinitionFound) && interval.spillDefinitionPos() != -1) {
                        // there was a definition in a phi/sigma
                        interval.setSpillState(SpillState.NoSpillStore);
                    }
                }
                if (TraceRAuseInterTraceHints.getValue()) {
                    addInterTraceHints();
                }
                for (FixedInterval interval1 : allocator.fixedIntervals()) {
                    if (interval1 != null) {
                        /* We use [-1, 0] to avoid intersection with incoming values. */
                        interval1.addRange(-1, 0);
                    }
                }
            }
        }
559
560 private void numberInstruction(AbstractBlockBase<?> block, LIRInstruction op, int index) {
561 int opId = index << 1;
562 assert op.id() == -1 || op.id() == opId : "must match";
563 op.setId(opId);
564 allocator.putOpIdMaps(index, op, block);
565 assert allocator.instructionForId(opId) == op : "must match";
566 }
567
        /**
         * Sets location hints for phi/sigma intervals based on values flowing in from
         * predecessor blocks whose traces are already allocated (or are the current trace).
         */
        @SuppressWarnings("try")
        private void addInterTraceHints() {
            try (Scope s = Debug.scope("InterTraceHints", allocator)) {
                // set hints for phi/sigma intervals
                for (AbstractBlockBase<?> block : sortedBlocks()) {
                    LabelOp label = SSIUtil.incoming(getLIR(), block);
                    for (AbstractBlockBase<?> pred : block.getPredecessors()) {
                        if (isAllocatedOrCurrent(block, pred)) {
                            BlockEndOp outgoing = SSIUtil.outgoing(getLIR(), pred);
                            // do not look at phi variables as they are not same value!
                            for (int i = outgoing.getPhiSize(); i < outgoing.getOutgoingSize(); i++) {
                                Value toValue = label.getIncomingValue(i);
                                assert !isShadowedRegisterValue(toValue) : "Shadowed Registers are not allowed here: " + toValue;
                                if (isVariable(toValue)) {
                                    Value fromValue = outgoing.getOutgoingValue(i);
                                    assert sameTrace(block, pred) || !isVariable(fromValue) : "Unallocated variable: " + fromValue;
                                    if (!LIRValueUtil.isConstantValue(fromValue)) {
                                        // Constants carry no location, hence no hint.
                                        addInterTraceHint(label, (AllocatableValue) toValue, fromValue);
                                    }
                                }
                            }
                        }
                    }
                }
            } catch (Throwable e) {
                throw Debug.handle(e);
            }
        }
596
        /**
         * Adds a location hint (and possibly a spill slot) to the interval of {@code toValue},
         * derived from {@code fromValue}, the corresponding value at the end of a predecessor.
         */
        private void addInterTraceHint(LabelOp label, AllocatableValue toValue, Value fromValue) {
            assert isVariable(toValue) : "Wrong toValue: " + toValue;
            assert isRegister(fromValue) || isVariable(fromValue) || isStackSlotValue(fromValue) || isShadowedRegisterValue(fromValue) : "Wrong fromValue: " + fromValue;
            TraceInterval to = allocator.getOrCreateInterval(toValue);
            if (isVariableOrRegister(fromValue)) {
                IntervalHint from = getIntervalHint((AllocatableValue) fromValue);
                setHint(label, to, from);
            } else if (isStackSlotValue(fromValue)) {
                setSpillSlot(label, to, (AllocatableValue) fromValue);
            } else if (TraceRAshareSpillInformation.getValue() && isShadowedRegisterValue(fromValue)) {
                // A shadowed register carries both a register (used as hint) and a stack slot.
                ShadowedRegisterValue shadowedRegisterValue = asShadowedRegisterValue(fromValue);
                IntervalHint from = getIntervalHint(shadowedRegisterValue.getRegister());
                setHint(label, to, from);
                setSpillSlot(label, to, shadowedRegisterValue.getStackSlot());
            } else {
                throw GraalError.shouldNotReachHere();
            }
        }
615
616 private static void setHint(final LIRInstruction op, TraceInterval to, IntervalHint from) {
617 IntervalHint currentHint = to.locationHint(false);
618 if (currentHint == null) {
619 /*
620 * Update hint if there was none or if the hint interval starts after the hinted
621 * interval.
622 */
623 to.setLocationHint(from);
624 if (Debug.isLogEnabled()) {
625 Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
626 }
|
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 */
23 package org.graalvm.compiler.lir.alloc.trace.lsra;
24
25 import static jdk.vm.ci.code.ValueUtil.asRegisterValue;
26 import static jdk.vm.ci.code.ValueUtil.asStackSlot;
27 import static jdk.vm.ci.code.ValueUtil.isRegister;
28 import static jdk.vm.ci.code.ValueUtil.isStackSlot;
29 import static org.graalvm.compiler.lir.LIRValueUtil.asVariable;
30 import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
31 import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;
32 import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAshareSpillInformation;
33 import static org.graalvm.compiler.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAuseInterTraceHints;
34 import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.asShadowedRegisterValue;
35 import static org.graalvm.compiler.lir.alloc.trace.TraceUtil.isShadowedRegisterValue;
36 import static org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.isVariableOrRegister;
37
38 import java.util.ArrayList;
39 import java.util.EnumSet;
40
41 import org.graalvm.compiler.core.common.LIRKind;
42 import org.graalvm.compiler.core.common.alloc.Trace;
43 import org.graalvm.compiler.core.common.alloc.TraceBuilderResult;
44 import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
45 import org.graalvm.compiler.debug.Debug;
46 import org.graalvm.compiler.debug.Debug.Scope;
47 import org.graalvm.compiler.debug.GraalError;
48 import org.graalvm.compiler.debug.Indent;
49 import org.graalvm.compiler.lir.InstructionValueConsumer;
50 import org.graalvm.compiler.lir.LIR;
51 import org.graalvm.compiler.lir.LIRInstruction;
52 import org.graalvm.compiler.lir.LIRInstruction.OperandFlag;
53 import org.graalvm.compiler.lir.LIRInstruction.OperandMode;
54 import org.graalvm.compiler.lir.LIRValueUtil;
55 import org.graalvm.compiler.lir.StandardOp.JumpOp;
56 import org.graalvm.compiler.lir.StandardOp.LabelOp;
57 import org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
58 import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
59 import org.graalvm.compiler.lir.ValueProcedure;
60 import org.graalvm.compiler.lir.Variable;
61 import org.graalvm.compiler.lir.alloc.trace.GlobalLivenessInfo;
62 import org.graalvm.compiler.lir.alloc.trace.ShadowedRegisterValue;
63 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.RegisterPriority;
64 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceInterval.SpillState;
65 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.TraceLinearScan;
66 import org.graalvm.compiler.lir.gen.LIRGenerationResult;
67 import org.graalvm.compiler.lir.gen.LIRGeneratorTool.MoveFactory;
68 import org.graalvm.compiler.lir.ssa.SSAUtil;
69
70 import jdk.vm.ci.code.Register;
71 import jdk.vm.ci.code.RegisterArray;
72 import jdk.vm.ci.code.RegisterValue;
73 import jdk.vm.ci.code.TargetDescription;
74 import jdk.vm.ci.meta.AllocatableValue;
75 import jdk.vm.ci.meta.JavaConstant;
76 import jdk.vm.ci.meta.Value;
77
78 public final class TraceLinearScanLifetimeAnalysisPhase extends TraceLinearScanAllocationPhase {
79
80 @Override
81 protected void run(TargetDescription target, LIRGenerationResult lirGenRes, Trace trace, TraceLinearScanAllocationContext context) {
82 TraceBuilderResult traceBuilderResult = context.resultTraces;
83 TraceLinearScan allocator = context.allocator;
84 new Analyser(allocator, traceBuilderResult).analyze();
85 }
86
87 public static final class Analyser {
88 private static final int DUMP_DURING_ANALYSIS_LEVEL = 4;
95 this.traceBuilderResult = traceBuilderResult;
96 }
97
98 private AbstractBlockBase<?>[] sortedBlocks() {
99 return allocator.sortedBlocks();
100 }
101
102 private LIR getLIR() {
103 return allocator.getLIR();
104 }
105
106 private RegisterArray getCallerSavedRegisters() {
107 return allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters();
108 }
109
        /**
         * Performs the lifetime analysis: numbers all instructions, then builds intervals.
         */
        public void analyze() {
            // Numbering must precede interval construction: buildIntervals relies on opId maps.
            countInstructions();
            buildIntervals();
        }
114
115 private boolean isAllocated(AbstractBlockBase<?> currentBlock, AbstractBlockBase<?> other) {
116 return traceBuilderResult.getTraceForBlock(other).getId() < traceBuilderResult.getTraceForBlock(currentBlock).getId();
117 }
118
119 /**
120 * Count instructions in all blocks. The numbering follows the
121 * {@linkplain TraceLinearScan#sortedBlocks() register allocation order}.
122 */
123 private void countInstructions() {
124
125 allocator.initIntervals();
126
127 int numberInstructions = 0;
128 for (AbstractBlockBase<?> block : sortedBlocks()) {
129 numberInstructions += getLIR().getLIRforBlock(block).size();
130 }
131 numInstructions = numberInstructions;
132
133 // initialize with correct length
134 allocator.initOpIdMaps(numberInstructions);
135 }
136
242 /*
243 * Update the starting point (when a range is first created for a use, its start is
244 * the beginning of the current block until a def is encountered).
245 */
246 interval.setFrom(defPos);
247
248 } else {
249 /*
250 * Dead value - make vacuous interval also add register priority for dead intervals
251 */
252 interval.addRange(defPos, defPos + 1);
253 if (Debug.isLogEnabled()) {
254 Debug.log("Warning: def of operand %s at %d occurs without use", reg, defPos);
255 }
256 }
257 if (Debug.isLogEnabled()) {
258 Debug.log("add fixed def: %s, at %d", interval, defPos);
259 }
260 }
261
        /**
         * Records a definition of {@code operand} by {@code op} into the variable's interval.
         *
         * @param operand the defined variable
         * @param op the defining instruction; its id is the definition position
         * @param registerPriority how urgently this definition needs a register
         * @return the (possibly newly created) interval of {@code operand}
         */
        private TraceInterval addVariableDef(Variable operand, LIRInstruction op, RegisterPriority registerPriority) {
            int defPos = op.id();

            TraceInterval interval = allocator.getOrCreateInterval(operand);

            if (interval.isEmpty()) {
                /*
                 * Dead value - make a vacuous one-position interval; also record the register
                 * priority for dead intervals.
                 */
                interval.addRange(defPos, defPos + 1);
                if (Debug.isLogEnabled()) {
                    Debug.log("Warning: def of operand %s at %d occurs without use", operand, defPos);
                }
            } else {
                /*
                 * Update the starting point (when a range is first created for a use, its start is
                 * the beginning of the current block until a def is encountered).
                 */
                interval.setFrom(defPos);
            }
            if (!(op instanceof LabelOp)) {
                // no use positions for labels
                interval.addUsePos(defPos, registerPriority);
            }

            // Track where a spill store could be placed for this definition.
            changeSpillDefinitionPos(op, operand, interval, defPos);
            if (registerPriority == RegisterPriority.None && interval.spillState().ordinal() <= SpillState.StartInMemory.ordinal() && isStackSlot(operand)) {
                // detection of method-parameters and roundfp-results
                interval.setSpillState(SpillState.StartInMemory);
            }
            // Remember a rematerializable constant (if any) instead of forcing a spill.
            interval.addMaterializationValue(getMaterializedValue(op, operand, interval, allocator.neverSpillConstants(), allocator.getSpillMoveFactory()));

            if (Debug.isLogEnabled()) {
                Debug.log("add def: %s defPos %d (%s)", interval, defPos, registerPriority.name());
            }
            return interval;
        }
299
300 private void addTemp(AllocatableValue operand, int tempPos, RegisterPriority registerPriority) {
301 if (!allocator.isProcessed(operand)) {
302 return;
303 }
304 if (isRegister(operand)) {
305 addFixedTemp(asRegisterValue(operand), tempPos);
306 } else {
307 assert isVariable(operand) : operand;
308 addVariableTemp(asVariable(operand), tempPos, registerPriority);
309 }
310 }
311
312 private void addFixedTemp(RegisterValue reg, int tempPos) {
313 FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
314 interval.addRange(tempPos, tempPos + 1);
315 if (Debug.isLogEnabled()) {
316 Debug.log("add fixed temp: %s, at %d", interval, tempPos);
317 }
461 return RegisterPriority.None;
462 }
463 if (flags.contains(OperandFlag.STACK)) {
464 return RegisterPriority.ShouldHaveRegister;
465 }
466 // all other operands require a register
467 return RegisterPriority.MustHaveRegister;
468 }
469
        /**
         * Builds fixed and variable lifetime intervals by walking the trace in reverse: for each
         * block, live-out ranges are created first, then the block's instructions are processed
         * back-to-front (definitions before uses), and finally the block label and phis.
         */
        @SuppressWarnings("try")
        private void buildIntervals() {

            try (Indent indent = Debug.logAndIndent("build intervals")) {

                // create a list with all caller-save registers (cpu, fpu, xmm)
                RegisterArray callerSaveRegs = getCallerSavedRegisters();
                // Running counter; decremented before each instruction so ids end at 0.
                int instructionIndex = numInstructions;

                // iterate all blocks in reverse order
                AbstractBlockBase<?>[] blocks = sortedBlocks();
                for (int blockId = blocks.length - 1; blockId >= 0; blockId--) {
                    final AbstractBlockBase<?> block = blocks[blockId];

                    try (Indent indent2 = Debug.logAndIndent("handle block %d", block.getId())) {
                        // Extend live-out variables to the end of this block (id of its last op).
                        handleBlockEnd(block, (instructionIndex - 1) << 1);

                        /*
                         * Iterate all instructions of the block in reverse order. definitions of
                         * intervals are processed before uses.
                         */
                        ArrayList<LIRInstruction> instructions = getLIR().getLIRforBlock(block);
                        for (int instIdx = instructions.size() - 1; instIdx >= 1; instIdx--) {
                            final LIRInstruction op = instructions.get(instIdx);
                            // number instruction
                            instructionIndex--;
                            numberInstruction(block, op, instructionIndex);
                            final int opId = op.id();

                            try (Indent indent3 = Debug.logAndIndent("handle inst %d: %s", opId, op)) {

                                /*
                                 * Add a temp range for each register if operation destroys
                                 * caller-save registers.
                                 */
                                if (op.destroysCallerSavedRegisters()) {
                                    for (Register r : callerSaveRegs) {
                                        if (allocator.attributes(r).isAllocatable()) {
                                            addTemp(r.asValue(), opId, RegisterPriority.None);
                                        }
                                    }
                                    if (Debug.isLogEnabled()) {
                                        Debug.log("operation destroys all caller-save registers");
                                    }
                                }

                                // Defs before uses: outputs and temps first, then alive/input.
                                op.visitEachOutput(outputConsumer);
                                op.visitEachTemp(tempConsumer);
                                op.visitEachAlive(aliveConsumer);
                                op.visitEachInput(inputConsumer);

                                /*
                                 * Add uses of live locals from interpreter's point of view for
                                 * proper debug information generation. Treat these operands as temp
                                 * values (if the live range is extended to a call site, the value
                                 * would be in a register at the call otherwise).
                                 */
                                op.visitEachState(stateProc);
                            }

                        } // end of instruction iteration
                        // number label instruction (index 0, skipped by the loop above)
                        instructionIndex--;
                        numberInstruction(block, instructions.get(0), instructionIndex);
                        AbstractBlockBase<?> pred = blockId == 0 ? null : blocks[blockId - 1];
                        handleBlockBegin(block, pred);
                    }
                    if (Debug.isDumpEnabled(DUMP_DURING_ANALYSIS_LEVEL)) {
                        allocator.printIntervals("After Block " + block);
                    }
                } // end of block iteration
                assert instructionIndex == 0 : "not at start?" + instructionIndex;
                handleTraceBegin(blocks[0]);

                // fix spill state for phi/incoming intervals
                for (TraceInterval interval : allocator.intervals()) {
                    if (interval != null && interval.spillState().equals(SpillState.NoDefinitionFound) && interval.spillDefinitionPos() != -1) {
                        // there was a definition in a phi/incoming
                        interval.setSpillState(SpillState.NoSpillStore);
                    }
                }
                if (TraceRAuseInterTraceHints.getValue(allocator.getLIR().getOptions())) {
                    addInterTraceHints();
                }
                for (FixedInterval interval1 : allocator.fixedIntervals()) {
                    if (interval1 != null) {
                        /* We use [-1, 0] to avoid intersection with incoming values. */
                        interval1.addRange(-1, 0);
                    }
                }
            }
        }
562
563 private void handleTraceBegin(AbstractBlockBase<?> block) {
564 LIRInstruction op = getLIR().getLIRforBlock(block).get(0);
565 GlobalLivenessInfo livenessInfo = allocator.getGlobalLivenessInfo();
566 for (int varNum : livenessInfo.getBlockIn(block)) {
567 if (isAliveAtBlockBegin(varNum)) {
568 addVariableDef(livenessInfo.getVariable(varNum), op, RegisterPriority.None);
569 }
570 }
571 }
572
573 private boolean isAliveAtBlockBegin(int varNum) {
574 return allocator.intervalFor(varNum) != null;
575 }
576
        /**
         * Processes phi definitions at the start of {@code block}: if the block is a merge,
         * each incoming phi value gets a definition at the label and a location hint from the
         * corresponding outgoing value of {@code pred}, if available.
         *
         * @param pred the previous block in allocation order, or null at the trace start
         */
        private void handleBlockBegin(AbstractBlockBase<?> block, AbstractBlockBase<?> pred) {
            if (SSAUtil.isMerge(block)) {
                // handle phis
                // method parameters are fixed later on (see end of #buildIntervals)
                LabelOp label = SSAUtil.phiIn(getLIR(), block);
                JumpOp jump = pred == null ? null : SSAUtil.phiOut(getLIR(), pred);
                for (int i = 0; i < label.getPhiSize(); i++) {
                    Variable var = asVariable(label.getIncomingValue(i));
                    TraceInterval toInterval = addVariableDef(var, label, RegisterPriority.ShouldHaveRegister);
                    // set hint for phis
                    if (jump != null) {
                        Value out = jump.getOutgoingValue(i);
                        if (isVariable(out)) {
                            // Hint the phi result towards the interval of the incoming value.
                            TraceInterval fromInterval = allocator.getOrCreateInterval(asVariable(out));
                            toInterval.setLocationHint(fromInterval);
                        }
                    }
                }
            }
        }
597
598 private void handleBlockEnd(AbstractBlockBase<?> block, int opId) {
599 // always alive until the end of the block
600 int aliveOpId = opId + 1;
601 GlobalLivenessInfo livenessInfo = allocator.getGlobalLivenessInfo();
602 for (int varNum : livenessInfo.getBlockOut(block)) {
603 if (allocator.intervalFor(varNum) == null) {
604 addVariableUse(livenessInfo.getVariable(varNum), 0, aliveOpId, RegisterPriority.None);
605 }
606 }
607 }
608
609 private void numberInstruction(AbstractBlockBase<?> block, LIRInstruction op, int index) {
610 int opId = index << 1;
611 assert op.id() == -1 || op.id() == opId : "must match";
612 op.setId(opId);
613 allocator.putOpIdMaps(index, op, block);
614 assert allocator.instructionForId(opId) == op : "must match";
615 }
616
617 @SuppressWarnings("try")
618 private void addInterTraceHints() {
619 try (Scope s = Debug.scope("InterTraceHints", allocator)) {
620 GlobalLivenessInfo livenessInfo = allocator.getGlobalLivenessInfo();
621 // set hints for phi/incoming intervals
622 for (AbstractBlockBase<?> block : sortedBlocks()) {
623 LabelOp label = (LabelOp) getLIR().getLIRforBlock(block).get(0);
624 for (AbstractBlockBase<?> pred : block.getPredecessors()) {
625 addInterTraceHints(livenessInfo, pred, block, label);
626 }
627 }
628 } catch (Throwable e) {
629 throw Debug.handle(e);
630 }
631 }
632
        /**
         * Adds location hints for all variables live into {@code to} from the already-allocated
         * predecessor {@code from} (only then are the source locations known).
         */
        private void addInterTraceHints(GlobalLivenessInfo livenessInfo, AbstractBlockBase<?> from, AbstractBlockBase<?> to, LabelOp label) {
            if (isAllocated(to, from)) {
                int[] liveVars = livenessInfo.getBlockIn(to);
                // NOTE(review): assumes getBlockIn(to) and getOutLocation(from) are
                // index-aligned — verify against GlobalLivenessInfo.
                Value[] outLocation = livenessInfo.getOutLocation(from);

                for (int i = 0; i < liveVars.length; i++) {
                    int varNum = liveVars[i];
                    TraceInterval toInterval = allocator.intervalFor(varNum);
                    if (toInterval != null && !toInterval.hasHint()) {
                        Value fromValue = outLocation[i];
                        if (!LIRValueUtil.isConstantValue(fromValue)) {
                            // Constants carry no location, hence no hint.
                            addInterTraceHint(label, varNum, fromValue);
                        }
                    }
                }
            }
        }
650
        /**
         * Adds a location hint (and possibly a spill slot) to the interval of variable
         * {@code varNum}, derived from {@code fromValue}, its location at the predecessor's end.
         * Does nothing if the variable has no interval (not live in this trace).
         */
        private void addInterTraceHint(LabelOp label, int varNum, Value fromValue) {
            assert isRegister(fromValue) || isVariable(fromValue) || isStackSlotValue(fromValue) || isShadowedRegisterValue(fromValue) : "Wrong fromValue: " + fromValue;
            TraceInterval to = allocator.intervalFor(varNum);
            if (to == null) {
                // variable not live -> do nothing
                return;
            }
            if (isVariableOrRegister(fromValue)) {
                IntervalHint from = getIntervalHint((AllocatableValue) fromValue);
                setHint(label, to, from);
            } else if (isStackSlotValue(fromValue)) {
                setSpillSlot(label, to, (AllocatableValue) fromValue);
            } else if (TraceRAshareSpillInformation.getValue(allocator.getLIR().getOptions()) && isShadowedRegisterValue(fromValue)) {
                // A shadowed register carries both a register (used as hint) and a stack slot.
                ShadowedRegisterValue shadowedRegisterValue = asShadowedRegisterValue(fromValue);
                IntervalHint from = getIntervalHint(shadowedRegisterValue.getRegister());
                setHint(label, to, from);
                setSpillSlot(label, to, shadowedRegisterValue.getStackSlot());
            } else {
                throw GraalError.shouldNotReachHere();
            }
        }
672
673 private static void setHint(final LIRInstruction op, TraceInterval to, IntervalHint from) {
674 IntervalHint currentHint = to.locationHint(false);
675 if (currentHint == null) {
676 /*
677 * Update hint if there was none or if the hint interval starts after the hinted
678 * interval.
679 */
680 to.setLocationHint(from);
681 if (Debug.isLogEnabled()) {
682 Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
683 }
|