20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 */
23 package org.graalvm.compiler.lir.alloc.trace.lsra;
24
25 import static jdk.vm.ci.code.ValueUtil.asRegister;
26 import static jdk.vm.ci.code.ValueUtil.asStackSlot;
27 import static jdk.vm.ci.code.ValueUtil.isIllegal;
28 import static jdk.vm.ci.code.ValueUtil.isRegister;
29 import static jdk.vm.ci.code.ValueUtil.isStackSlot;
30 import static org.graalvm.compiler.lir.LIRValueUtil.asVirtualStackSlot;
31 import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
32 import static org.graalvm.compiler.lir.LIRValueUtil.isVirtualStackSlot;
33
34 import java.util.ArrayList;
35 import java.util.Arrays;
36 import java.util.HashSet;
37 import java.util.List;
38
39 import org.graalvm.compiler.core.common.LIRKind;
40 import org.graalvm.compiler.debug.Debug;
41 import org.graalvm.compiler.debug.DebugCounter;
42 import org.graalvm.compiler.debug.GraalError;
43 import org.graalvm.compiler.debug.Indent;
44 import org.graalvm.compiler.lir.LIRInsertionBuffer;
45 import org.graalvm.compiler.lir.LIRInstruction;
46 import org.graalvm.compiler.lir.VirtualStackSlot;
47 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.TraceLinearScan;
48 import org.graalvm.compiler.lir.framemap.FrameMap;
49 import org.graalvm.compiler.lir.framemap.FrameMapBuilderTool;
50
51 import jdk.vm.ci.code.StackSlot;
52 import jdk.vm.ci.meta.AllocatableValue;
53 import jdk.vm.ci.meta.Constant;
54 import jdk.vm.ci.meta.JavaConstant;
55 import jdk.vm.ci.meta.Value;
56
57 /**
58 */
59 final class TraceLocalMoveResolver {
60
    // Counts extra spill slots allocated solely to break cycles in the move graph.
    private static final DebugCounter cycleBreakingSlotsAllocated = Debug.counter("TraceRA[cycleBreakingSlotsAllocated(local)]");

    // Sentinel index for stack slots in the caller's frame; such incoming argument
    // slots are never tracked in stackBlocked.
    private static final int STACK_SLOT_IN_CALLER_FRAME_IDX = -1;
    private final TraceLinearScan allocator;

    private int insertIdx;
    private LIRInsertionBuffer insertionBuffer; // buffer where moves are inserted

    // Parallel lists describing pending moves: entry i moves mappingFrom.get(i)
    // (or, when that is null, the constant mappingFromOpr.get(i)) into mappingTo.get(i).
    private final ArrayList<TraceInterval> mappingFrom;
    private final ArrayList<Constant> mappingFromOpr;
    private final ArrayList<TraceInterval> mappingTo;
    // Per-register counters; a non-zero entry blocks moves targeting that register
    // (see blockRegisters/unblockRegisters).
    private final int[] registerBlocked;

    // Per-stack-slot counters, indexed via getStackArrayIndex.
    private int[] stackBlocked;
    // First index reserved for virtual stack slots; real slots map below it.
    private final int firstVirtualStackIndex;
76
77 private int getStackArrayIndex(Value stackSlotValue) {
78 if (isStackSlot(stackSlotValue)) {
79 return getStackArrayIndex(asStackSlot(stackSlotValue));
80 }
81 if (isVirtualStackSlot(stackSlotValue)) {
82 return getStackArrayIndex(asVirtualStackSlot(stackSlotValue));
83 }
84 throw GraalError.shouldNotReachHere("value is not a stack slot: " + stackSlotValue);
85 }
86
87 private int getStackArrayIndex(StackSlot stackSlot) {
88 int stackIdx;
89 if (stackSlot.isInCallerFrame()) {
90 // incoming stack arguments can be ignored
91 stackIdx = STACK_SLOT_IN_CALLER_FRAME_IDX;
92 } else {
93 assert stackSlot.getRawAddFrameSize() : "Unexpected stack slot: " + stackSlot;
94 int offset = -stackSlot.getRawOffset();
95 assert 0 <= offset && offset < firstVirtualStackIndex : String.format("Wrong stack slot offset: %d (first virtual stack slot index: %d", offset, firstVirtualStackIndex);
96 stackIdx = offset;
151 }
152
    /*
     * TODO (je) remove?
     */
    // Several pending moves may read from the same source location, so a location
    // being "blocked" more than once as a source is tolerated (see blockRegisters).
    protected static boolean areMultipleReadsAllowed() {
        return true;
    }
159
160 boolean hasMappings() {
161 return mappingFrom.size() > 0;
162 }
163
    /** Returns the owning linear scan allocator. */
    protected TraceLinearScan getAllocator() {
        return allocator;
    }
167
168 protected TraceLocalMoveResolver(TraceLinearScan allocator) {
169
170 this.allocator = allocator;
171 this.mappingFrom = new ArrayList<>(8);
172 this.mappingFromOpr = new ArrayList<>(8);
173 this.mappingTo = new ArrayList<>(8);
174 this.insertIdx = -1;
175 this.insertionBuffer = new LIRInsertionBuffer();
176 this.registerBlocked = new int[allocator.getRegisters().size()];
177 FrameMapBuilderTool frameMapBuilderTool = (FrameMapBuilderTool) allocator.getFrameMapBuilder();
178 FrameMap frameMap = frameMapBuilderTool.getFrameMap();
179 this.stackBlocked = new int[frameMapBuilderTool.getNumberOfStackSlots()];
180 this.firstVirtualStackIndex = !frameMap.frameNeedsAllocating() ? 0 : frameMap.currentFrameSize() + 1;
181 }
182
183 protected boolean checkEmpty() {
184 assert mappingFrom.size() == 0 && mappingFromOpr.size() == 0 && mappingTo.size() == 0 : "list must be empty before and after processing";
185 for (int i = 0; i < stackBlocked.length; i++) {
186 assert stackBlocked[i] == 0 : "stack map must be empty before and after processing";
187 }
188 for (int i = 0; i < getAllocator().getRegisters().size(); i++) {
189 assert registerBlocked[i] == 0 : "register map must be empty before and after processing";
190 }
239 boolean unique = usedRegs.add(interval.location());
240 assert unique : "cannot write to same register twice";
241 }
242
243 verifyStackSlotMapping();
244
245 return true;
246 }
247
    /**
     * Hook for subclasses to verify stack-slot mappings; intentionally empty here,
     * relaxing the disjoint-stack-maps invariant checked elsewhere.
     */
    protected void verifyStackSlotMapping() {
        // relax disjoint stack maps invariant
    }
251
252 // mark assignedReg and assignedRegHi of the interval as blocked
253 private void blockRegisters(TraceInterval interval) {
254 Value location = interval.location();
255 if (mightBeBlocked(location)) {
256 assert areMultipleReadsAllowed() || valueBlocked(location) == 0 : "location already marked as used: " + location;
257 int direction = 1;
258 setValueBlocked(location, direction);
259 Debug.log("block %s", location);
260 }
261 }
262
263 // mark assignedReg and assignedRegHi of the interval as unblocked
264 private void unblockRegisters(TraceInterval interval) {
265 Value location = interval.location();
266 if (mightBeBlocked(location)) {
267 assert valueBlocked(location) > 0 : "location already marked as unused: " + location;
268 setValueBlocked(location, -1);
269 Debug.log("unblock %s", location);
270 }
271 }
272
273 /**
274 * Checks if the {@linkplain TraceInterval#location() location} of {@code to} is not blocked or
275 * is only blocked by {@code from}.
276 */
277 private boolean safeToProcessMove(TraceInterval from, TraceInterval to) {
278 Value fromReg = from != null ? from.location() : null;
279
280 Value location = to.location();
281 if (mightBeBlocked(location)) {
282 if ((valueBlocked(location) > 1 || (valueBlocked(location) == 1 && !isMoveToSelf(fromReg, location)))) {
283 return false;
284 }
285 }
286
287 return true;
288 }
289
312 assert !insertionBuffer.initialized() : "overwriting existing buffer";
313 insertionBuffer.init(list);
314 }
315
    /** Commits any buffered moves into their LIR list and invalidates the insert position. */
    private void appendInsertionBuffer() {
        if (insertionBuffer.initialized()) {
            insertionBuffer.finish();
        }
        assert !insertionBuffer.initialized() : "must be uninitialized now";

        insertIdx = -1;
    }
324
325 private void insertMove(TraceInterval fromInterval, TraceInterval toInterval) {
326 assert fromInterval.operandNumber != toInterval.operandNumber : "from and to interval equal: " + fromInterval;
327 assert LIRKind.verifyMoveKinds(allocator.getKind(toInterval), allocator.getKind(fromInterval), allocator.getRegisterAllocationConfig()) : "move between different types";
328 assert insertIdx != -1 : "must setup insert position first";
329
330 insertionBuffer.append(insertIdx, createMove(allocator.getOperand(fromInterval), allocator.getOperand(toInterval), fromInterval.location(), toInterval.location()));
331
332 if (Debug.isLogEnabled()) {
333 Debug.log("insert move from %s to %s at %d", fromInterval, toInterval, insertIdx);
334 }
335 }
336
    /**
     * Creates the LIR instruction implementing one resolved move.
     *
     * @param fromOpr {@link TraceInterval operand} of the {@code from} interval
     * @param toOpr {@link TraceInterval operand} of the {@code to} interval
     * @param fromLocation {@link TraceInterval#location() location} of the {@code from} interval
     * @param toLocation {@link TraceInterval#location() location} of the {@code to} interval
     */
    protected LIRInstruction createMove(AllocatableValue fromOpr, AllocatableValue toOpr, AllocatableValue fromLocation, AllocatableValue toLocation) {
        // Stack-to-stack transfers need a dedicated stack move from the factory.
        if (isStackSlotValue(toLocation) && isStackSlotValue(fromLocation)) {
            return getAllocator().getSpillMoveFactory().createStackMove(toOpr, fromOpr);
        }
        return getAllocator().getSpillMoveFactory().createMove(toOpr, fromOpr);
    }
349
350 private void insertMove(Constant fromOpr, TraceInterval toInterval) {
351 assert insertIdx != -1 : "must setup insert position first";
352
353 AllocatableValue toOpr = allocator.getOperand(toInterval);
354 LIRInstruction move = getAllocator().getSpillMoveFactory().createLoad(toOpr, fromOpr);
355 insertionBuffer.append(insertIdx, move);
356
357 if (Debug.isLogEnabled()) {
358 Debug.log("insert move from value %s to %s at %d", fromOpr, toInterval, insertIdx);
359 }
360 }
361
362 @SuppressWarnings("try")
363 private void resolveMappings() {
364 try (Indent indent = Debug.logAndIndent("resolveMapping")) {
365 assert verifyBeforeResolve();
366 if (Debug.isLogEnabled()) {
367 printMapping();
368 }
369
370 // Block all registers that are used as input operands of a move.
371 // When a register is blocked, no move to this register is emitted.
372 // This is necessary for detecting cycles in moves.
373 int i;
374 for (i = mappingFrom.size() - 1; i >= 0; i--) {
375 TraceInterval fromInterval = mappingFrom.get(i);
376 if (fromInterval != null) {
377 blockRegisters(fromInterval);
378 }
379 }
380
381 ArrayList<AllocatableValue> busySpillSlots = null;
382 while (mappingFrom.size() > 0) {
383 boolean processedInterval = false;
384
385 int spillCandidate = -1;
386 for (i = mappingFrom.size() - 1; i >= 0; i--) {
421
422 // check that all intervals have been processed
423 assert checkEmpty();
424 }
425
426 protected void breakCycle(int spillCandidate) {
427 if (spillCandidate != -1) {
428 // no move could be processed because there is a cycle in the move list
429 // (e.g. r1 . r2, r2 . r1), so one interval must be spilled to memory
430 assert spillCandidate != -1 : "no interval in register for spilling found";
431
432 // create a new spill interval and assign a stack slot to it
433 TraceInterval fromInterval1 = mappingFrom.get(spillCandidate);
434 // do not allocate a new spill slot for temporary interval, but
435 // use spill slot assigned to fromInterval. Otherwise moves from
436 // one stack slot to another can happen (not allowed by LIRAssembler
437 AllocatableValue spillSlot1 = fromInterval1.spillSlot();
438 if (spillSlot1 == null) {
439 spillSlot1 = getAllocator().getFrameMapBuilder().allocateSpillSlot(allocator.getKind(fromInterval1));
440 fromInterval1.setSpillSlot(spillSlot1);
441 cycleBreakingSlotsAllocated.increment();
442 }
443 spillInterval(spillCandidate, fromInterval1, spillSlot1);
444 return;
445 }
446 assert mappingFromSize() > 1;
447 // Arbitrarily select the first entry for spilling.
448 int stackSpillCandidate = 0;
449 TraceInterval fromInterval = getMappingFrom(stackSpillCandidate);
450 // allocate new stack slot
451 VirtualStackSlot spillSlot = getAllocator().getFrameMapBuilder().allocateSpillSlot(allocator.getKind(fromInterval));
452 spillInterval(stackSpillCandidate, fromInterval, spillSlot);
453 }
454
    /**
     * Spills {@code fromInterval} into {@code spillSlot} via a freshly derived interval and
     * redirects mapping entry {@code spillCandidate} to read from that slot instead, breaking the
     * dependency on the original register.
     */
    protected void spillInterval(int spillCandidate, TraceInterval fromInterval, AllocatableValue spillSlot) {
        assert mappingFrom.get(spillCandidate).equals(fromInterval);
        TraceInterval spillInterval = getAllocator().createDerivedInterval(fromInterval);

        // add a dummy range because real position is difficult to calculate
        // Note: this range is a special case when the integrity of the allocation is
        // checked
        spillInterval.addRange(1, 2);

        spillInterval.assignLocation(spillSlot);

        if (Debug.isLogEnabled()) {
            Debug.log("created new Interval for spilling: %s", spillInterval);
        }
        // Block the slot first, then retarget the mapping, then release the register:
        // order matters for the blocked-location bookkeeping.
        blockRegisters(spillInterval);

        // insert a move from register to stack and update the mapping
        insertMove(fromInterval, spillInterval);
        mappingFrom.set(spillCandidate, spillInterval);
        unblockRegisters(fromInterval);
    }
476
477 @SuppressWarnings("try")
478 private void printMapping() {
479 try (Indent indent = Debug.logAndIndent("Mapping")) {
480 for (int i = mappingFrom.size() - 1; i >= 0; i--) {
481 TraceInterval fromInterval = mappingFrom.get(i);
482 TraceInterval toInterval = mappingTo.get(i);
483 String from;
484 Value to = toInterval.location();
485 if (fromInterval == null) {
486 from = mappingFromOpr.get(i).toString();
487 } else {
488 from = fromInterval.location().toString();
489 }
490 Debug.log("move %s <- %s", from, to);
491 }
492 }
493 }
494
    /**
     * Establishes the initial insert position; must not already have one (use
     * {@link #moveInsertPosition} to change an existing position).
     */
    void setInsertPosition(List<LIRInstruction> insertList, int insertIdx) {
        assert this.insertIdx == -1 : "use moveInsertPosition instead of setInsertPosition when data already set";

        createInsertionBuffer(insertList);
        this.insertIdx = insertIdx;
    }
501
    /**
     * Moves the insert position, first resolving pending mappings at the old position and, when
     * the target LIR list changes, flushing the buffer and starting a new one.
     */
    void moveInsertPosition(List<LIRInstruction> newInsertList, int newInsertIdx) {
        if (insertionBuffer.lirList() != null && (insertionBuffer.lirList() != newInsertList || this.insertIdx != newInsertIdx)) {
            // insert position changed . resolve current mappings
            resolveMappings();
        }

        assert insertionBuffer.lirList() != newInsertList || newInsertIdx >= insertIdx : String.format("Decreasing insert index: old=%d new=%d", insertIdx, newInsertIdx);

        if (insertionBuffer.lirList() != newInsertList) {
            // block changed . append insertionBuffer because it is
            // bound to a specific block and create a new insertionBuffer
            appendInsertionBuffer();
            createInsertionBuffer(newInsertList);
        }

        this.insertIdx = newInsertIdx;
    }
519
    /**
     * Records a pending move from {@code fromInterval} to {@code toInterval}. Rematerializable
     * intervals are special-cased: a rematerializable target needs no store at all, and a
     * rematerializable source becomes a constant load instead of a reload.
     */
    public void addMapping(TraceInterval fromInterval, TraceInterval toInterval) {

        if (isIllegal(toInterval.location()) && toInterval.canMaterialize()) {
            // target can be rematerialized at its uses; skip the move entirely
            if (Debug.isLogEnabled()) {
                Debug.log("no store to rematerializable interval %s needed", toInterval);
            }
            return;
        }
        if (isIllegal(fromInterval.location()) && fromInterval.canMaterialize()) {
            // Instead of a reload, re-materialize the value
            JavaConstant rematValue = fromInterval.getMaterializedValue();
            addMapping(rematValue, toInterval);
            return;
        }
        if (Debug.isLogEnabled()) {
            Debug.log("add move mapping from %s to %s", fromInterval, toInterval);
        }

        assert fromInterval.operandNumber != toInterval.operandNumber : "from and to interval equal: " + fromInterval;
        assert LIRKind.verifyMoveKinds(allocator.getKind(toInterval), allocator.getKind(fromInterval), allocator.getRegisterAllocationConfig()) : String.format(
                        "Kind mismatch: %s vs. %s, from=%s, to=%s", allocator.getKind(fromInterval), allocator.getKind(toInterval), fromInterval, toInterval);
        mappingFrom.add(fromInterval);
        mappingFromOpr.add(null);
        mappingTo.add(toInterval);
    }
545
    /**
     * Records a pending load of constant {@code fromOpr} into {@code toInterval}; the
     * {@code mappingFrom} entry is null to flag a constant source.
     */
    public void addMapping(Constant fromOpr, TraceInterval toInterval) {
        if (Debug.isLogEnabled()) {
            Debug.log("add move mapping from %s to %s", fromOpr, toInterval);
        }

        mappingFrom.add(null);
        mappingFromOpr.add(fromOpr);
        mappingTo.add(toInterval);
    }
555
    /** Resolves all recorded mappings (if any) and commits the buffered moves to the LIR. */
    void resolveAndAppendMoves() {
        if (hasMappings()) {
            resolveMappings();
        }
        appendInsertionBuffer();
    }
562 }
|
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 */
23 package org.graalvm.compiler.lir.alloc.trace.lsra;
24
25 import static jdk.vm.ci.code.ValueUtil.asRegister;
26 import static jdk.vm.ci.code.ValueUtil.asStackSlot;
27 import static jdk.vm.ci.code.ValueUtil.isIllegal;
28 import static jdk.vm.ci.code.ValueUtil.isRegister;
29 import static jdk.vm.ci.code.ValueUtil.isStackSlot;
30 import static org.graalvm.compiler.lir.LIRValueUtil.asVirtualStackSlot;
31 import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
32 import static org.graalvm.compiler.lir.LIRValueUtil.isVirtualStackSlot;
33
34 import java.util.ArrayList;
35 import java.util.Arrays;
36 import java.util.HashSet;
37 import java.util.List;
38
39 import org.graalvm.compiler.core.common.LIRKind;
40 import org.graalvm.compiler.debug.CounterKey;
41 import org.graalvm.compiler.debug.DebugContext;
42 import org.graalvm.compiler.debug.GraalError;
43 import org.graalvm.compiler.debug.Indent;
44 import org.graalvm.compiler.lir.LIRInsertionBuffer;
45 import org.graalvm.compiler.lir.LIRInstruction;
46 import org.graalvm.compiler.lir.VirtualStackSlot;
47 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.TraceLinearScan;
48 import org.graalvm.compiler.lir.framemap.FrameMap;
49 import org.graalvm.compiler.lir.framemap.FrameMapBuilderTool;
50
51 import jdk.vm.ci.code.StackSlot;
52 import jdk.vm.ci.meta.AllocatableValue;
53 import jdk.vm.ci.meta.Constant;
54 import jdk.vm.ci.meta.JavaConstant;
55 import jdk.vm.ci.meta.Value;
56
57 final class TraceLocalMoveResolver {
58
    // Counts extra spill slots allocated solely to break cycles in the move graph.
    private static final CounterKey cycleBreakingSlotsAllocated = DebugContext.counter("TraceRA[cycleBreakingSlotsAllocated(local)]");

    // Sentinel index for stack slots in the caller's frame; such incoming argument
    // slots are never tracked in stackBlocked.
    private static final int STACK_SLOT_IN_CALLER_FRAME_IDX = -1;
    private final TraceLinearScan allocator;

    private int insertIdx;
    private LIRInsertionBuffer insertionBuffer; // buffer where moves are inserted

    // Parallel lists describing pending moves: entry i moves mappingFrom.get(i)
    // (or, when that is null, the constant mappingFromOpr.get(i)) into mappingTo.get(i).
    private final ArrayList<TraceInterval> mappingFrom;
    private final ArrayList<Constant> mappingFromOpr;
    private final ArrayList<TraceInterval> mappingTo;
    // Per-register counters; a non-zero entry blocks moves targeting that register
    // (see blockRegisters/unblockRegisters).
    private final int[] registerBlocked;

    // Per-stack-slot counters, indexed via getStackArrayIndex.
    private int[] stackBlocked;
    // First index reserved for virtual stack slots; real slots map below it.
    private final int firstVirtualStackIndex;

    // Per-allocation debug scope; all logging/counters go through it.
    private final DebugContext debug;
76
77 private int getStackArrayIndex(Value stackSlotValue) {
78 if (isStackSlot(stackSlotValue)) {
79 return getStackArrayIndex(asStackSlot(stackSlotValue));
80 }
81 if (isVirtualStackSlot(stackSlotValue)) {
82 return getStackArrayIndex(asVirtualStackSlot(stackSlotValue));
83 }
84 throw GraalError.shouldNotReachHere("value is not a stack slot: " + stackSlotValue);
85 }
86
87 private int getStackArrayIndex(StackSlot stackSlot) {
88 int stackIdx;
89 if (stackSlot.isInCallerFrame()) {
90 // incoming stack arguments can be ignored
91 stackIdx = STACK_SLOT_IN_CALLER_FRAME_IDX;
92 } else {
93 assert stackSlot.getRawAddFrameSize() : "Unexpected stack slot: " + stackSlot;
94 int offset = -stackSlot.getRawOffset();
95 assert 0 <= offset && offset < firstVirtualStackIndex : String.format("Wrong stack slot offset: %d (first virtual stack slot index: %d", offset, firstVirtualStackIndex);
96 stackIdx = offset;
151 }
152
    /*
     * TODO (je) remove?
     */
    // Several pending moves may read from the same source location, so a location
    // being "blocked" more than once as a source is tolerated (see blockRegisters).
    protected static boolean areMultipleReadsAllowed() {
        return true;
    }
159
160 boolean hasMappings() {
161 return mappingFrom.size() > 0;
162 }
163
    /** Returns the owning linear scan allocator. */
    protected TraceLinearScan getAllocator() {
        return allocator;
    }
167
168 protected TraceLocalMoveResolver(TraceLinearScan allocator) {
169
170 this.allocator = allocator;
171 this.debug = allocator.getDebug();
172 this.mappingFrom = new ArrayList<>(8);
173 this.mappingFromOpr = new ArrayList<>(8);
174 this.mappingTo = new ArrayList<>(8);
175 this.insertIdx = -1;
176 this.insertionBuffer = new LIRInsertionBuffer();
177 this.registerBlocked = new int[allocator.getRegisters().size()];
178 FrameMapBuilderTool frameMapBuilderTool = (FrameMapBuilderTool) allocator.getFrameMapBuilder();
179 FrameMap frameMap = frameMapBuilderTool.getFrameMap();
180 this.stackBlocked = new int[frameMapBuilderTool.getNumberOfStackSlots()];
181 this.firstVirtualStackIndex = !frameMap.frameNeedsAllocating() ? 0 : frameMap.currentFrameSize() + 1;
182 }
183
184 protected boolean checkEmpty() {
185 assert mappingFrom.size() == 0 && mappingFromOpr.size() == 0 && mappingTo.size() == 0 : "list must be empty before and after processing";
186 for (int i = 0; i < stackBlocked.length; i++) {
187 assert stackBlocked[i] == 0 : "stack map must be empty before and after processing";
188 }
189 for (int i = 0; i < getAllocator().getRegisters().size(); i++) {
190 assert registerBlocked[i] == 0 : "register map must be empty before and after processing";
191 }
240 boolean unique = usedRegs.add(interval.location());
241 assert unique : "cannot write to same register twice";
242 }
243
244 verifyStackSlotMapping();
245
246 return true;
247 }
248
    /**
     * Hook for subclasses to verify stack-slot mappings; intentionally empty here,
     * relaxing the disjoint-stack-maps invariant checked elsewhere.
     */
    protected void verifyStackSlotMapping() {
        // relax disjoint stack maps invariant
    }
252
253 // mark assignedReg and assignedRegHi of the interval as blocked
254 private void blockRegisters(TraceInterval interval) {
255 Value location = interval.location();
256 if (mightBeBlocked(location)) {
257 assert areMultipleReadsAllowed() || valueBlocked(location) == 0 : "location already marked as used: " + location;
258 int direction = 1;
259 setValueBlocked(location, direction);
260 debug.log("block %s", location);
261 }
262 }
263
264 // mark assignedReg and assignedRegHi of the interval as unblocked
265 private void unblockRegisters(TraceInterval interval) {
266 Value location = interval.location();
267 if (mightBeBlocked(location)) {
268 assert valueBlocked(location) > 0 : "location already marked as unused: " + location;
269 setValueBlocked(location, -1);
270 debug.log("unblock %s", location);
271 }
272 }
273
274 /**
275 * Checks if the {@linkplain TraceInterval#location() location} of {@code to} is not blocked or
276 * is only blocked by {@code from}.
277 */
278 private boolean safeToProcessMove(TraceInterval from, TraceInterval to) {
279 Value fromReg = from != null ? from.location() : null;
280
281 Value location = to.location();
282 if (mightBeBlocked(location)) {
283 if ((valueBlocked(location) > 1 || (valueBlocked(location) == 1 && !isMoveToSelf(fromReg, location)))) {
284 return false;
285 }
286 }
287
288 return true;
289 }
290
313 assert !insertionBuffer.initialized() : "overwriting existing buffer";
314 insertionBuffer.init(list);
315 }
316
    /** Commits any buffered moves into their LIR list and invalidates the insert position. */
    private void appendInsertionBuffer() {
        if (insertionBuffer.initialized()) {
            insertionBuffer.finish();
        }
        assert !insertionBuffer.initialized() : "must be uninitialized now";

        insertIdx = -1;
    }
325
326 private void insertMove(TraceInterval fromInterval, TraceInterval toInterval) {
327 assert fromInterval.operandNumber != toInterval.operandNumber : "from and to interval equal: " + fromInterval;
328 assert LIRKind.verifyMoveKinds(allocator.getKind(toInterval), allocator.getKind(fromInterval), allocator.getRegisterAllocationConfig()) : "move between different types";
329 assert insertIdx != -1 : "must setup insert position first";
330
331 insertionBuffer.append(insertIdx, createMove(allocator.getOperand(fromInterval), allocator.getOperand(toInterval), fromInterval.location(), toInterval.location()));
332
333 if (debug.isLogEnabled()) {
334 debug.log("insert move from %s to %s at %d", fromInterval, toInterval, insertIdx);
335 }
336 }
337
    /**
     * Creates the LIR instruction implementing one resolved move.
     *
     * @param fromOpr {@link TraceInterval operand} of the {@code from} interval
     * @param toOpr {@link TraceInterval operand} of the {@code to} interval
     * @param fromLocation {@link TraceInterval#location() location} of the {@code from} interval
     * @param toLocation {@link TraceInterval#location() location} of the {@code to} interval
     */
    protected LIRInstruction createMove(AllocatableValue fromOpr, AllocatableValue toOpr, AllocatableValue fromLocation, AllocatableValue toLocation) {
        // Stack-to-stack transfers need a dedicated stack move from the factory.
        if (isStackSlotValue(toLocation) && isStackSlotValue(fromLocation)) {
            return getAllocator().getSpillMoveFactory().createStackMove(toOpr, fromOpr);
        }
        return getAllocator().getSpillMoveFactory().createMove(toOpr, fromOpr);
    }
350
351 private void insertMove(Constant fromOpr, TraceInterval toInterval) {
352 assert insertIdx != -1 : "must setup insert position first";
353
354 AllocatableValue toOpr = allocator.getOperand(toInterval);
355 LIRInstruction move = getAllocator().getSpillMoveFactory().createLoad(toOpr, fromOpr);
356 insertionBuffer.append(insertIdx, move);
357
358 if (debug.isLogEnabled()) {
359 debug.log("insert move from value %s to %s at %d", fromOpr, toInterval, insertIdx);
360 }
361 }
362
363 @SuppressWarnings("try")
364 private void resolveMappings() {
365 try (Indent indent = debug.logAndIndent("resolveMapping")) {
366 assert verifyBeforeResolve();
367 if (debug.isLogEnabled()) {
368 printMapping();
369 }
370
371 // Block all registers that are used as input operands of a move.
372 // When a register is blocked, no move to this register is emitted.
373 // This is necessary for detecting cycles in moves.
374 int i;
375 for (i = mappingFrom.size() - 1; i >= 0; i--) {
376 TraceInterval fromInterval = mappingFrom.get(i);
377 if (fromInterval != null) {
378 blockRegisters(fromInterval);
379 }
380 }
381
382 ArrayList<AllocatableValue> busySpillSlots = null;
383 while (mappingFrom.size() > 0) {
384 boolean processedInterval = false;
385
386 int spillCandidate = -1;
387 for (i = mappingFrom.size() - 1; i >= 0; i--) {
422
423 // check that all intervals have been processed
424 assert checkEmpty();
425 }
426
427 protected void breakCycle(int spillCandidate) {
428 if (spillCandidate != -1) {
429 // no move could be processed because there is a cycle in the move list
430 // (e.g. r1 . r2, r2 . r1), so one interval must be spilled to memory
431 assert spillCandidate != -1 : "no interval in register for spilling found";
432
433 // create a new spill interval and assign a stack slot to it
434 TraceInterval fromInterval1 = mappingFrom.get(spillCandidate);
435 // do not allocate a new spill slot for temporary interval, but
436 // use spill slot assigned to fromInterval. Otherwise moves from
437 // one stack slot to another can happen (not allowed by LIRAssembler
438 AllocatableValue spillSlot1 = fromInterval1.spillSlot();
439 if (spillSlot1 == null) {
440 spillSlot1 = getAllocator().getFrameMapBuilder().allocateSpillSlot(allocator.getKind(fromInterval1));
441 fromInterval1.setSpillSlot(spillSlot1);
442 cycleBreakingSlotsAllocated.increment(debug);
443 }
444 spillInterval(spillCandidate, fromInterval1, spillSlot1);
445 return;
446 }
447 assert mappingFromSize() > 1;
448 // Arbitrarily select the first entry for spilling.
449 int stackSpillCandidate = 0;
450 TraceInterval fromInterval = getMappingFrom(stackSpillCandidate);
451 // allocate new stack slot
452 VirtualStackSlot spillSlot = getAllocator().getFrameMapBuilder().allocateSpillSlot(allocator.getKind(fromInterval));
453 spillInterval(stackSpillCandidate, fromInterval, spillSlot);
454 }
455
    /**
     * Spills {@code fromInterval} into {@code spillSlot} via a freshly derived interval and
     * redirects mapping entry {@code spillCandidate} to read from that slot instead, breaking the
     * dependency on the original register.
     */
    protected void spillInterval(int spillCandidate, TraceInterval fromInterval, AllocatableValue spillSlot) {
        assert mappingFrom.get(spillCandidate).equals(fromInterval);
        TraceInterval spillInterval = getAllocator().createDerivedInterval(fromInterval);

        // add a dummy range because real position is difficult to calculate
        // Note: this range is a special case when the integrity of the allocation is
        // checked
        spillInterval.addRange(1, 2);

        spillInterval.assignLocation(spillSlot);

        if (debug.isLogEnabled()) {
            debug.log("created new Interval for spilling: %s", spillInterval);
        }
        // Block the slot first, then retarget the mapping, then release the register:
        // order matters for the blocked-location bookkeeping.
        blockRegisters(spillInterval);

        // insert a move from register to stack and update the mapping
        insertMove(fromInterval, spillInterval);
        mappingFrom.set(spillCandidate, spillInterval);
        unblockRegisters(fromInterval);
    }
477
478 @SuppressWarnings("try")
479 private void printMapping() {
480 try (Indent indent = debug.logAndIndent("Mapping")) {
481 for (int i = mappingFrom.size() - 1; i >= 0; i--) {
482 TraceInterval fromInterval = mappingFrom.get(i);
483 TraceInterval toInterval = mappingTo.get(i);
484 String from;
485 Value to = toInterval.location();
486 if (fromInterval == null) {
487 from = mappingFromOpr.get(i).toString();
488 } else {
489 from = fromInterval.location().toString();
490 }
491 debug.log("move %s <- %s", from, to);
492 }
493 }
494 }
495
    /**
     * Establishes the initial insert position; must not already have one (use
     * {@link #moveInsertPosition} to change an existing position).
     */
    void setInsertPosition(List<LIRInstruction> insertList, int insertIdx) {
        assert this.insertIdx == -1 : "use moveInsertPosition instead of setInsertPosition when data already set";

        createInsertionBuffer(insertList);
        this.insertIdx = insertIdx;
    }
502
    /**
     * Moves the insert position, first resolving pending mappings at the old position and, when
     * the target LIR list changes, flushing the buffer and starting a new one.
     */
    void moveInsertPosition(List<LIRInstruction> newInsertList, int newInsertIdx) {
        if (insertionBuffer.lirList() != null && (insertionBuffer.lirList() != newInsertList || this.insertIdx != newInsertIdx)) {
            // insert position changed . resolve current mappings
            resolveMappings();
        }

        assert insertionBuffer.lirList() != newInsertList || newInsertIdx >= insertIdx : String.format("Decreasing insert index: old=%d new=%d", insertIdx, newInsertIdx);

        if (insertionBuffer.lirList() != newInsertList) {
            // block changed . append insertionBuffer because it is
            // bound to a specific block and create a new insertionBuffer
            appendInsertionBuffer();
            createInsertionBuffer(newInsertList);
        }

        this.insertIdx = newInsertIdx;
    }
520
    /**
     * Records a pending move from {@code fromInterval} to {@code toInterval}. Rematerializable
     * intervals are special-cased: a rematerializable target needs no store at all, and a
     * rematerializable source becomes a constant load instead of a reload.
     */
    public void addMapping(TraceInterval fromInterval, TraceInterval toInterval) {

        if (isIllegal(toInterval.location()) && toInterval.canMaterialize()) {
            // target can be rematerialized at its uses; skip the move entirely
            if (debug.isLogEnabled()) {
                debug.log("no store to rematerializable interval %s needed", toInterval);
            }
            return;
        }
        if (isIllegal(fromInterval.location()) && fromInterval.canMaterialize()) {
            // Instead of a reload, re-materialize the value
            JavaConstant rematValue = fromInterval.getMaterializedValue();
            addMapping(rematValue, toInterval);
            return;
        }
        if (debug.isLogEnabled()) {
            debug.log("add move mapping from %s to %s", fromInterval, toInterval);
        }

        assert fromInterval.operandNumber != toInterval.operandNumber : "from and to interval equal: " + fromInterval;
        assert LIRKind.verifyMoveKinds(allocator.getKind(toInterval), allocator.getKind(fromInterval), allocator.getRegisterAllocationConfig()) : String.format(
                        "Kind mismatch: %s vs. %s, from=%s, to=%s", allocator.getKind(fromInterval), allocator.getKind(toInterval), fromInterval, toInterval);
        mappingFrom.add(fromInterval);
        mappingFromOpr.add(null);
        mappingTo.add(toInterval);
    }
546
    /**
     * Records a pending load of constant {@code fromOpr} into {@code toInterval}; the
     * {@code mappingFrom} entry is null to flag a constant source.
     */
    public void addMapping(Constant fromOpr, TraceInterval toInterval) {
        if (debug.isLogEnabled()) {
            debug.log("add move mapping from %s to %s", fromOpr, toInterval);
        }

        mappingFrom.add(null);
        mappingFromOpr.add(fromOpr);
        mappingTo.add(toInterval);
    }
556
    /** Resolves all recorded mappings (if any) and commits the buffered moves to the LIR. */
    void resolveAndAppendMoves() {
        if (hasMappings()) {
            resolveMappings();
        }
        appendInsertionBuffer();
    }
563 }
|