1 /* 2 * Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
*/
package org.graalvm.compiler.lir.alloc.trace.lsra;

import static jdk.vm.ci.code.ValueUtil.asRegister;
import static jdk.vm.ci.code.ValueUtil.asStackSlot;
import static jdk.vm.ci.code.ValueUtil.isIllegal;
import static jdk.vm.ci.code.ValueUtil.isRegister;
import static jdk.vm.ci.code.ValueUtil.isStackSlot;
import static org.graalvm.compiler.lir.LIRValueUtil.asVirtualStackSlot;
import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;
import static org.graalvm.compiler.lir.LIRValueUtil.isVirtualStackSlot;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

import org.graalvm.compiler.core.common.LIRKind;
import org.graalvm.compiler.debug.Debug;
import org.graalvm.compiler.debug.DebugCounter;
import org.graalvm.compiler.debug.GraalError;
import org.graalvm.compiler.debug.Indent;
import org.graalvm.compiler.lir.LIRInsertionBuffer;
import org.graalvm.compiler.lir.LIRInstruction;
import org.graalvm.compiler.lir.VirtualStackSlot;
import org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.TraceLinearScan;
import org.graalvm.compiler.lir.framemap.FrameMap;
import org.graalvm.compiler.lir.framemap.FrameMapBuilderTool;

import jdk.vm.ci.code.StackSlot;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.Constant;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.Value;

/**
 * Inserts the moves needed to resolve data-flow mismatches within a trace during trace-based
 * linear-scan register allocation.
 *
 * <p>
 * Mappings of the form {@code from-interval/constant -> to-interval} are collected via
 * {@link #addMapping} and materialized as LIR move instructions (through an
 * {@link LIRInsertionBuffer}) by {@link #resolveAndAppendMoves}. Source locations of pending moves
 * are "blocked" with reference counts ({@link #registerBlocked}, {@link #stackBlocked}) so that a
 * move is only emitted once its destination is free; a cycle in the move graph (e.g.
 * {@code r1 -> r2, r2 -> r1}) is broken by spilling one participating interval to a stack slot
 * (see {@link #breakCycle}).
 */
final class TraceLocalMoveResolver {

    // Counts stack slots allocated solely to break move cycles.
    private static final DebugCounter cycleBreakingSlotsAllocated = Debug.counter("TraceRA[cycleBreakingSlotsAllocated(local)]");

    // Sentinel index for stack slots in the caller's frame (incoming arguments); these are
    // never tracked in stackBlocked.
    private static final int STACK_SLOT_IN_CALLER_FRAME_IDX = -1;
    private final TraceLinearScan allocator;

    private int insertIdx;
    private LIRInsertionBuffer insertionBuffer; // buffer where moves are inserted

    // Parallel lists describing pending moves: entry i maps
    // mappingFrom[i] (or, if null, the constant mappingFromOpr[i]) to mappingTo[i].
    private final ArrayList<TraceInterval> mappingFrom;
    private final ArrayList<Constant> mappingFromOpr;
    private final ArrayList<TraceInterval> mappingTo;
    // Per-register reference count of pending reads (indexed by register number).
    private final int[] registerBlocked;

    // Per-stack-slot reference count of pending reads; grown on demand in setValueBlocked.
    private int[] stackBlocked;
    // Index in stackBlocked where virtual stack slots start; physical slots are indexed by
    // their (negated) frame offset below this value.
    private final int firstVirtualStackIndex;

    /**
     * Maps a stack-slot value (physical or virtual) to its index in {@link #stackBlocked}.
     *
     * @throws GraalError if {@code stackSlotValue} is not a stack slot
     */
    private int getStackArrayIndex(Value stackSlotValue) {
        if (isStackSlot(stackSlotValue)) {
            return getStackArrayIndex(asStackSlot(stackSlotValue));
        }
        if (isVirtualStackSlot(stackSlotValue)) {
            return getStackArrayIndex(asVirtualStackSlot(stackSlotValue));
        }
        throw GraalError.shouldNotReachHere("value is not a stack slot: " + stackSlotValue);
    }

    /**
     * Physical stack slots are indexed by their negated raw offset; caller-frame slots map to
     * the ignorable sentinel {@link #STACK_SLOT_IN_CALLER_FRAME_IDX}.
     */
    private int getStackArrayIndex(StackSlot stackSlot) {
        int stackIdx;
        if (stackSlot.isInCallerFrame()) {
            // incoming stack arguments can be ignored
            stackIdx = STACK_SLOT_IN_CALLER_FRAME_IDX;
        } else {
            assert stackSlot.getRawAddFrameSize() : "Unexpected stack slot: " + stackSlot;
            int offset = -stackSlot.getRawOffset();
            // NOTE(review): the format string below is missing its closing ')' after "%d".
            assert 0 <= offset && offset < firstVirtualStackIndex : String.format("Wrong stack slot offset: %d (first virtual stack slot index: %d", offset, firstVirtualStackIndex);
            stackIdx = offset;
        }
        return stackIdx;
    }

    /**
     * Virtual stack slots are indexed by id, offset past all physical slot indices.
     */
    private int getStackArrayIndex(VirtualStackSlot virtualStackSlot) {
        return firstVirtualStackIndex + virtualStackSlot.getId();
    }

    /**
     * Adjusts the block (pending-read) count of {@code location} by {@code direction}
     * (+1 to block, -1 to unblock). Caller-frame stack slots are ignored.
     */
    protected void setValueBlocked(Value location, int direction) {
        assert direction == 1 || direction == -1 : "out of bounds";
        if (isStackSlotValue(location)) {
            int stackIdx = getStackArrayIndex(location);
            if (stackIdx == STACK_SLOT_IN_CALLER_FRAME_IDX) {
                // incoming stack arguments can be ignored
                return;
            }
            if (stackIdx >= stackBlocked.length) {
                // grow the block-count array lazily to cover this slot
                stackBlocked = Arrays.copyOf(stackBlocked, stackIdx + 1);
            }
            stackBlocked[stackIdx] += direction;
        } else {
            // NOTE(review): this assert duplicates the one at the top of the method.
            assert direction == 1 || direction == -1 : "out of bounds";
            if (isRegister(location)) {
                registerBlocked[asRegister(location).number] += direction;
            } else {
                throw GraalError.shouldNotReachHere("unhandled value " + location);
            }
        }
    }

    protected TraceInterval getMappingFrom(int i) {
        return mappingFrom.get(i);
    }

    protected int mappingFromSize() {
        return mappingFrom.size();
    }

    /**
     * Returns the current block count of {@code location}. Caller-frame stack slots are always
     * reported as blocked (they must never be written); slots beyond the tracked range are free.
     */
    protected int valueBlocked(Value location) {
        if (isStackSlotValue(location)) {
            int stackIdx = getStackArrayIndex(location);
            if (stackIdx == STACK_SLOT_IN_CALLER_FRAME_IDX) {
                // incoming stack arguments are always blocked (aka they can not be written)
                return 1;
            }
            if (stackIdx >= stackBlocked.length) {
                return 0;
            }
            return stackBlocked[stackIdx];
        }
        if (isRegister(location)) {
            return registerBlocked[asRegister(location).number];
        }
        throw GraalError.shouldNotReachHere("unhandled value " + location);
    }

    /*
     * TODO (je) remove?
     */
    protected static boolean areMultipleReadsAllowed() {
        return true;
    }

    boolean hasMappings() {
        return mappingFrom.size() > 0;
    }

    protected TraceLinearScan getAllocator() {
        return allocator;
    }

    protected TraceLocalMoveResolver(TraceLinearScan allocator) {

        this.allocator = allocator;
        this.mappingFrom = new ArrayList<>(8);
        this.mappingFromOpr = new ArrayList<>(8);
        this.mappingTo = new ArrayList<>(8);
        this.insertIdx = -1;
        this.insertionBuffer = new LIRInsertionBuffer();
        this.registerBlocked = new int[allocator.getRegisters().size()];
        FrameMapBuilderTool frameMapBuilderTool = (FrameMapBuilderTool) allocator.getFrameMapBuilder();
        FrameMap frameMap = frameMapBuilderTool.getFrameMap();
        this.stackBlocked = new int[frameMapBuilderTool.getNumberOfStackSlots()];
        // If the frame needs allocating, reserve index space for all physical slot offsets
        // (0..currentFrameSize) before the virtual-slot range begins.
        this.firstVirtualStackIndex = !frameMap.frameNeedsAllocating() ? 0 : frameMap.currentFrameSize() + 1;
    }

    /**
     * Asserts that all mapping lists and block counts are empty; intended for use inside an
     * {@code assert} so it always returns {@code true}.
     */
    protected boolean checkEmpty() {
        assert mappingFrom.size() == 0 && mappingFromOpr.size() == 0 && mappingTo.size() == 0 : "list must be empty before and after processing";
        for (int i = 0; i < stackBlocked.length; i++) {
            assert stackBlocked[i] == 0 : "stack map must be empty before and after processing";
        }
        for (int i = 0; i < getAllocator().getRegisters().size(); i++) {
            assert registerBlocked[i] == 0 : "register map must be empty before and after processing";
        }
        checkMultipleReads();
        return true;
    }

    protected void checkMultipleReads() {
        // multiple reads are allowed in SSA LSRA
    }

    /**
     * Sanity-checks the pending mappings before resolution: parallel lists are consistent, the
     * insert position is set, no destination is written twice, and (when multiple reads are
     * disallowed) no source is read twice. Always returns {@code true} (assert-style).
     */
    private boolean verifyBeforeResolve() {
        assert mappingFrom.size() == mappingFromOpr.size() : "length must be equal";
        assert mappingFrom.size() == mappingTo.size() : "length must be equal";
        assert insertIdx != -1 : "insert position not set";

        int i;
        int j;
        if (!areMultipleReadsAllowed()) {
            for (i = 0; i < mappingFrom.size(); i++) {
                for (j = i + 1; j < mappingFrom.size(); j++) {
                    assert mappingFrom.get(i) == null || mappingFrom.get(i) != mappingFrom.get(j) : "cannot read from same interval twice";
                }
            }
        }

        for (i = 0; i < mappingTo.size(); i++) {
            for (j = i + 1; j < mappingTo.size(); j++) {
                assert mappingTo.get(i) != mappingTo.get(j) : "cannot write to same interval twice";
            }
        }

        HashSet<Value> usedRegs = new HashSet<>();
        if (!areMultipleReadsAllowed()) {
            for (i = 0; i < mappingFrom.size(); i++) {
                TraceInterval interval = mappingFrom.get(i);
                if (interval != null && !isIllegal(interval.location())) {
                    boolean unique = usedRegs.add(interval.location());
                    assert unique : "cannot read from same register twice";
                }
            }
        }

        usedRegs.clear();
        for (i = 0; i < mappingTo.size(); i++) {
            TraceInterval interval = mappingTo.get(i);
            if (isIllegal(interval.location())) {
                // After insertion the location may become illegal, so don't check it since multiple
                // intervals might be illegal.
                continue;
            }
            boolean unique = usedRegs.add(interval.location());
            assert unique : "cannot write to same register twice";
        }

        verifyStackSlotMapping();

        return true;
    }

    protected void verifyStackSlotMapping() {
        // relax disjoint stack maps invariant
    }

    // mark assignedReg and assignedRegHi of the interval as blocked
    private void blockRegisters(TraceInterval interval) {
        Value location = interval.location();
        if (mightBeBlocked(location)) {
            assert areMultipleReadsAllowed() || valueBlocked(location) == 0 : "location already marked as used: " + location;
            int direction = 1;
            setValueBlocked(location, direction);
            Debug.log("block %s", location);
        }
    }

    // mark assignedReg and assignedRegHi of the interval as unblocked
    private void unblockRegisters(TraceInterval interval) {
        Value location = interval.location();
        if (mightBeBlocked(location)) {
            assert valueBlocked(location) > 0 : "location already marked as unused: " + location;
            setValueBlocked(location, -1);
            Debug.log("unblock %s", location);
        }
    }

    /**
     * Checks if the {@linkplain TraceInterval#location() location} of {@code to} is not blocked or
     * is only blocked by {@code from}.
     */
    private boolean safeToProcessMove(TraceInterval from, TraceInterval to) {
        // from may be null for constant loads (see mappingFromOpr)
        Value fromReg = from != null ? from.location() : null;

        Value location = to.location();
        if (mightBeBlocked(location)) {
            if ((valueBlocked(location) > 1 || (valueBlocked(location) == 1 && !isMoveToSelf(fromReg, location)))) {
                return false;
            }
        }

        return true;
    }

    /**
     * Returns {@code true} if {@code from} and {@code to} denote the same value or the same
     * register (ignoring kind differences). {@code from} may be {@code null}.
     */
    protected static boolean isMoveToSelf(Value from, Value to) {
        assert to != null;
        if (to.equals(from)) {
            return true;
        }
        if (from != null && isRegister(from) && isRegister(to) && asRegister(from).equals(asRegister(to))) {
            return true;
        }
        return false;
    }

    /**
     * Only registers and stack slots participate in block counting.
     */
    protected static boolean mightBeBlocked(Value location) {
        if (isRegister(location)) {
            return true;
        }
        if (isStackSlotValue(location)) {
            return true;
        }
        return false;
    }

    private void createInsertionBuffer(List<LIRInstruction> list) {
        assert !insertionBuffer.initialized() : "overwriting existing buffer";
        insertionBuffer.init(list);
    }

    /**
     * Flushes the insertion buffer into its instruction list and resets the insert position.
     */
    private void appendInsertionBuffer() {
        if (insertionBuffer.initialized()) {
            insertionBuffer.finish();
        }
        assert !insertionBuffer.initialized() : "must be uninitialized now";

        insertIdx = -1;
    }

    /**
     * Buffers a move from {@code fromInterval}'s location to {@code toInterval}'s location at the
     * current insert position.
     */
    private void insertMove(TraceInterval fromInterval, TraceInterval toInterval) {
        assert fromInterval.operandNumber != toInterval.operandNumber : "from and to interval equal: " + fromInterval;
        assert LIRKind.verifyMoveKinds(allocator.getKind(toInterval), allocator.getKind(fromInterval), allocator.getRegisterAllocationConfig()) : "move between different types";
        assert insertIdx != -1 : "must setup insert position first";

        insertionBuffer.append(insertIdx, createMove(allocator.getOperand(fromInterval), allocator.getOperand(toInterval), fromInterval.location(), toInterval.location()));

        if (Debug.isLogEnabled()) {
            Debug.log("insert move from %s to %s at %d", fromInterval, toInterval, insertIdx);
        }
    }

    /**
     * @param fromOpr {@link TraceInterval operand} of the {@code from} interval
     * @param toOpr {@link TraceInterval operand} of the {@code to} interval
     * @param fromLocation {@link TraceInterval#location() location} of the {@code from} interval
     * @param toLocation {@link TraceInterval#location() location} of the {@code to} interval
     */
    protected LIRInstruction createMove(AllocatableValue fromOpr, AllocatableValue toOpr, AllocatableValue fromLocation, AllocatableValue toLocation) {
        // stack-to-stack moves need a dedicated instruction (may require a scratch register)
        if (isStackSlotValue(toLocation) && isStackSlotValue(fromLocation)) {
            return getAllocator().getSpillMoveFactory().createStackMove(toOpr, fromOpr);
        }
        return getAllocator().getSpillMoveFactory().createMove(toOpr, fromOpr);
    }

    /**
     * Buffers a load of constant {@code fromOpr} into {@code toInterval}'s operand at the current
     * insert position.
     */
    private void insertMove(Constant fromOpr, TraceInterval toInterval) {
        assert insertIdx != -1 : "must setup insert position first";

        AllocatableValue toOpr = allocator.getOperand(toInterval);
        LIRInstruction move = getAllocator().getSpillMoveFactory().createLoad(toOpr, fromOpr);
        insertionBuffer.append(insertIdx, move);

        if (Debug.isLogEnabled()) {
            Debug.log("insert move from value %s to %s at %d", fromOpr, toInterval, insertIdx);
        }
    }

    /**
     * Emits moves for all pending mappings, processing a mapping only when its destination is
     * unblocked and breaking move cycles by spilling (see {@link #breakCycle}).
     */
    @SuppressWarnings("try")
    private void resolveMappings() {
        try (Indent indent = Debug.logAndIndent("resolveMapping")) {
            assert verifyBeforeResolve();
            if (Debug.isLogEnabled()) {
                printMapping();
            }

            // Block all registers that are used as input operands of a move.
            // When a register is blocked, no move to this register is emitted.
            // This is necessary for detecting cycles in moves.
            int i;
            for (i = mappingFrom.size() - 1; i >= 0; i--) {
                TraceInterval fromInterval = mappingFrom.get(i);
                if (fromInterval != null) {
                    blockRegisters(fromInterval);
                }
            }

            // Stack slots written by moves emitted in this pass; a spill candidate whose
            // spill slot is already busy here cannot be spilled safely.
            ArrayList<AllocatableValue> busySpillSlots = null;
            while (mappingFrom.size() > 0) {
                boolean processedInterval = false;

                int spillCandidate = -1;
                for (i = mappingFrom.size() - 1; i >= 0; i--) {
                    TraceInterval fromInterval = mappingFrom.get(i);
                    TraceInterval toInterval = mappingTo.get(i);

                    if (safeToProcessMove(fromInterval, toInterval)) {
                        // this interval can be processed because target is free
                        if (fromInterval != null) {
                            insertMove(fromInterval, toInterval);
                            unblockRegisters(fromInterval);
                        } else {
                            // null source means a constant load (see addMapping(Constant, ...))
                            insertMove(mappingFromOpr.get(i), toInterval);
                        }
                        if (isStackSlotValue(toInterval.location())) {
                            if (busySpillSlots == null) {
                                busySpillSlots = new ArrayList<>(2);
                            }
                            busySpillSlots.add(toInterval.location());
                        }
                        mappingFrom.remove(i);
                        mappingFromOpr.remove(i);
                        mappingTo.remove(i);

                        processedInterval = true;
                    } else if (fromInterval != null && isRegister(fromInterval.location()) && (busySpillSlots == null || !busySpillSlots.contains(fromInterval.spillSlot()))) {
                        // this interval cannot be processed now because target is not free
                        // it starts in a register, so it is a possible candidate for spilling
                        spillCandidate = i;
                    }
                }

                if (!processedInterval) {
                    // no mapping could be processed -> there is a cycle; break it by spilling
                    breakCycle(spillCandidate);
                }
            }
        }

        // check that all intervals have been processed
        assert checkEmpty();
    }

    /**
     * Breaks a cycle in the pending move graph. If a register-resident candidate exists, it is
     * spilled to its (possibly newly allocated) spill slot; otherwise the first mapping is spilled
     * to a fresh stack slot.
     *
     * @param spillCandidate index into the mapping lists of a register candidate, or -1
     */
    protected void breakCycle(int spillCandidate) {
        if (spillCandidate != -1) {
            // no move could be processed because there is a cycle in the move list
            // (e.g. r1 -> r2, r2 -> r1), so one interval must be spilled to memory
            assert spillCandidate != -1 : "no interval in register for spilling found";

            // create a new spill interval and assign a stack slot to it
            TraceInterval fromInterval1 = mappingFrom.get(spillCandidate);
            // do not allocate a new spill slot for temporary interval, but
            // use spill slot assigned to fromInterval. Otherwise moves from
            // one stack slot to another can happen (not allowed by LIRAssembler
            AllocatableValue spillSlot1 = fromInterval1.spillSlot();
            if (spillSlot1 == null) {
                spillSlot1 = getAllocator().getFrameMapBuilder().allocateSpillSlot(allocator.getKind(fromInterval1));
                fromInterval1.setSpillSlot(spillSlot1);
                cycleBreakingSlotsAllocated.increment();
            }
            spillInterval(spillCandidate, fromInterval1, spillSlot1);
            return;
        }
        assert mappingFromSize() > 1;
        // Arbitrarily select the first entry for spilling.
        int stackSpillCandidate = 0;
        TraceInterval fromInterval = getMappingFrom(stackSpillCandidate);
        // allocate new stack slot
        VirtualStackSlot spillSlot = getAllocator().getFrameMapBuilder().allocateSpillSlot(allocator.getKind(fromInterval));
        spillInterval(stackSpillCandidate, fromInterval, spillSlot);
    }

    /**
     * Spills {@code fromInterval} to {@code spillSlot} via a derived temporary interval and
     * redirects the pending mapping at {@code spillCandidate} to read from that temporary.
     */
    protected void spillInterval(int spillCandidate, TraceInterval fromInterval, AllocatableValue spillSlot) {
        assert mappingFrom.get(spillCandidate).equals(fromInterval);
        TraceInterval spillInterval = getAllocator().createDerivedInterval(fromInterval);

        // add a dummy range because real position is difficult to calculate
        // Note: this range is a special case when the integrity of the allocation is
        // checked
        spillInterval.addRange(1, 2);

        spillInterval.assignLocation(spillSlot);

        if (Debug.isLogEnabled()) {
            Debug.log("created new Interval for spilling: %s", spillInterval);
        }
        blockRegisters(spillInterval);

        // insert a move from register to stack and update the mapping
        insertMove(fromInterval, spillInterval);
        mappingFrom.set(spillCandidate, spillInterval);
        unblockRegisters(fromInterval);
    }

    /**
     * Logs all pending mappings (debug only).
     */
    @SuppressWarnings("try")
    private void printMapping() {
        try (Indent indent = Debug.logAndIndent("Mapping")) {
            for (int i = mappingFrom.size() - 1; i >= 0; i--) {
                TraceInterval fromInterval = mappingFrom.get(i);
                TraceInterval toInterval = mappingTo.get(i);
                String from;
                Value to = toInterval.location();
                if (fromInterval == null) {
                    from = mappingFromOpr.get(i).toString();
                } else {
                    from = fromInterval.location().toString();
                }
                Debug.log("move %s <- %s", from, to);
            }
        }
    }

    void setInsertPosition(List<LIRInstruction> insertList, int insertIdx) {
        assert this.insertIdx == -1 : "use moveInsertPosition instead of setInsertPosition when data already set";

        createInsertionBuffer(insertList);
        this.insertIdx = insertIdx;
    }

    /**
     * Moves the insert position, resolving any pending mappings at the old position first and
     * re-creating the insertion buffer when the target instruction list changes.
     */
    void moveInsertPosition(List<LIRInstruction> newInsertList, int newInsertIdx) {
        if (insertionBuffer.lirList() != null && (insertionBuffer.lirList() != newInsertList || this.insertIdx != newInsertIdx)) {
            // insert position changed -> resolve current mappings
            resolveMappings();
        }

        assert insertionBuffer.lirList() != newInsertList || newInsertIdx >= insertIdx : String.format("Decreasing insert index: old=%d new=%d", insertIdx, newInsertIdx);

        if (insertionBuffer.lirList() != newInsertList) {
            // block changed -> append insertionBuffer because it is
            // bound to a specific block and create a new insertionBuffer
            appendInsertionBuffer();
            createInsertionBuffer(newInsertList);
        }

        this.insertIdx = newInsertIdx;
    }

    /**
     * Records a pending move from {@code fromInterval} to {@code toInterval}. Rematerializable
     * values are either skipped (destination illegal) or converted to a constant-load mapping.
     */
    public void addMapping(TraceInterval fromInterval, TraceInterval toInterval) {

        if (isIllegal(toInterval.location()) && toInterval.canMaterialize()) {
            if (Debug.isLogEnabled()) {
                Debug.log("no store to rematerializable interval %s needed", toInterval);
            }
            return;
        }
        if (isIllegal(fromInterval.location()) && fromInterval.canMaterialize()) {
            // Instead of a reload, re-materialize the value
            JavaConstant rematValue = fromInterval.getMaterializedValue();
            addMapping(rematValue, toInterval);
            return;
        }
        if (Debug.isLogEnabled()) {
            Debug.log("add move mapping from %s to %s", fromInterval, toInterval);
        }

        assert fromInterval.operandNumber != toInterval.operandNumber : "from and to interval equal: " + fromInterval;
        assert LIRKind.verifyMoveKinds(allocator.getKind(toInterval), allocator.getKind(fromInterval), allocator.getRegisterAllocationConfig()) : String.format(
                        "Kind mismatch: %s vs. %s, from=%s, to=%s", allocator.getKind(fromInterval), allocator.getKind(toInterval), fromInterval, toInterval);
        mappingFrom.add(fromInterval);
        mappingFromOpr.add(null);
        mappingTo.add(toInterval);
    }

    /**
     * Records a pending load of constant {@code fromOpr} into {@code toInterval}; encoded as a
     * {@code null} entry in {@link #mappingFrom}.
     */
    public void addMapping(Constant fromOpr, TraceInterval toInterval) {
        if (Debug.isLogEnabled()) {
            Debug.log("add move mapping from %s to %s", fromOpr, toInterval);
        }

        mappingFrom.add(null);
        mappingFromOpr.add(fromOpr);
        mappingTo.add(toInterval);
    }

    /**
     * Resolves all pending mappings (if any) and flushes the insertion buffer.
     */
    void resolveAndAppendMoves() {
        if (hasMappings()) {
            resolveMappings();
        }
        appendInsertionBuffer();
    }
}