/*
 * Copyright (c) 2012, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package org.graalvm.compiler.hotspot.amd64;

import static jdk.vm.ci.amd64.AMD64.rbp;
import static org.graalvm.compiler.core.common.GraalOptions.GeneratePIC;
import static org.graalvm.compiler.hotspot.HotSpotBackend.INITIALIZE_KLASS_BY_SYMBOL;
import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_KLASS_BY_SYMBOL;
import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_METHOD_BY_SYMBOL_AND_LOAD_COUNTERS;
import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_STRING_BY_SYMBOL;
import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_DYNAMIC_INVOKE;
import static org.graalvm.compiler.hotspot.meta.HotSpotConstantLoadAction.RESOLVE;
import static org.graalvm.compiler.hotspot.meta.HotSpotConstantLoadAction.INITIALIZE;
import static org.graalvm.compiler.hotspot.meta.HotSpotConstantLoadAction.LOAD_COUNTERS;

import java.util.ArrayList;
import java.util.List;

import org.graalvm.compiler.asm.amd64.AMD64Address.Scale;
import org.graalvm.compiler.core.amd64.AMD64ArithmeticLIRGenerator;
import org.graalvm.compiler.core.amd64.AMD64LIRGenerator;
import org.graalvm.compiler.core.amd64.AMD64MoveFactoryBase.BackupSlotProvider;
import org.graalvm.compiler.core.common.CompressEncoding;
import org.graalvm.compiler.core.common.LIRKind;
import org.graalvm.compiler.core.common.spi.ForeignCallDescriptor;
import org.graalvm.compiler.core.common.spi.ForeignCallLinkage;
import org.graalvm.compiler.core.common.spi.LIRKindTool;
import org.graalvm.compiler.debug.DebugContext;
import org.graalvm.compiler.debug.GraalError;
import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig;
import org.graalvm.compiler.hotspot.HotSpotBackend;
import org.graalvm.compiler.hotspot.HotSpotDebugInfoBuilder;
import org.graalvm.compiler.hotspot.HotSpotForeignCallLinkage;
import org.graalvm.compiler.hotspot.HotSpotLIRGenerationResult;
import org.graalvm.compiler.hotspot.HotSpotLIRGenerator;
import org.graalvm.compiler.hotspot.HotSpotLockStack;
import org.graalvm.compiler.hotspot.debug.BenchmarkCounters;
import org.graalvm.compiler.hotspot.meta.HotSpotConstantLoadAction;
import org.graalvm.compiler.hotspot.meta.HotSpotProviders;
import org.graalvm.compiler.hotspot.stubs.Stub;
import org.graalvm.compiler.lir.LIR;
import org.graalvm.compiler.lir.LIRFrameState;
import org.graalvm.compiler.lir.LIRInstruction;
import org.graalvm.compiler.lir.LIRInstructionClass;
import org.graalvm.compiler.lir.LabelRef;
import org.graalvm.compiler.lir.StandardOp.NoOp;
import org.graalvm.compiler.lir.SwitchStrategy;
import org.graalvm.compiler.lir.Variable;
import org.graalvm.compiler.lir.VirtualStackSlot;
import org.graalvm.compiler.lir.amd64.AMD64AddressValue;
import org.graalvm.compiler.lir.amd64.AMD64CCall;
import org.graalvm.compiler.lir.amd64.AMD64ControlFlow.StrategySwitchOp;
import org.graalvm.compiler.lir.amd64.AMD64FrameMapBuilder;
import org.graalvm.compiler.lir.amd64.AMD64Move;
import org.graalvm.compiler.lir.amd64.AMD64Move.MoveFromRegOp;
import org.graalvm.compiler.lir.amd64.AMD64PrefetchOp;
import org.graalvm.compiler.lir.amd64.AMD64ReadTimestampCounter;
import org.graalvm.compiler.lir.amd64.AMD64RestoreRegistersOp;
import org.graalvm.compiler.lir.amd64.AMD64SaveRegistersOp;
import org.graalvm.compiler.lir.amd64.AMD64VZeroUpper;
import org.graalvm.compiler.lir.asm.CompilationResultBuilder;
import org.graalvm.compiler.lir.framemap.FrameMapBuilder;
import org.graalvm.compiler.lir.gen.LIRGenerationResult;
import org.graalvm.compiler.options.OptionValues;

import jdk.vm.ci.amd64.AMD64;
import jdk.vm.ci.amd64.AMD64Kind;
import jdk.vm.ci.code.CallingConvention;
import jdk.vm.ci.code.Register;
import jdk.vm.ci.code.RegisterConfig;
import jdk.vm.ci.code.RegisterValue;
import jdk.vm.ci.code.StackSlot;
import jdk.vm.ci.hotspot.HotSpotMetaspaceConstant;
import jdk.vm.ci.hotspot.HotSpotObjectConstant;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.Constant;
import jdk.vm.ci.meta.DeoptimizationAction;
import jdk.vm.ci.meta.DeoptimizationReason;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.JavaKind;
import jdk.vm.ci.meta.PlatformKind;
import jdk.vm.ci.meta.PrimitiveConstant;
import jdk.vm.ci.meta.Value;

/**
 * LIR generator specialized for AMD64 HotSpot.
 */
public class AMD64HotSpotLIRGenerator extends AMD64LIRGenerator implements HotSpotLIRGenerator {

    /** HotSpot VM configuration (offsets, flags) this generator emits code against. */
    final GraalHotSpotVMConfig config;

    /** Set via {@link #setDebugInfoBuilder}; supplies the lock stack and interpreter frame sizes. */
    private HotSpotDebugInfoBuilder debugInfoBuilder;

    protected AMD64HotSpotLIRGenerator(HotSpotProviders providers, GraalHotSpotVMConfig config, LIRGenerationResult lirGenRes) {
        this(providers, config, lirGenRes, new BackupSlotProvider(lirGenRes.getFrameMapBuilder()));
    }

    private AMD64HotSpotLIRGenerator(HotSpotProviders providers, GraalHotSpotVMConfig config, LIRGenerationResult lirGenRes, BackupSlotProvider backupSlotProvider) {
        this(new AMD64HotSpotLIRKindTool(), new AMD64HotSpotArithmeticLIRGenerator(), new AMD64HotSpotMoveFactory(backupSlotProvider), providers, config, lirGenRes);
    }

    protected AMD64HotSpotLIRGenerator(LIRKindTool lirKindTool, AMD64ArithmeticLIRGenerator arithmeticLIRGen, MoveFactory moveFactory, HotSpotProviders providers, GraalHotSpotVMConfig config,
                    LIRGenerationResult lirGenRes) {
        super(lirKindTool, arithmeticLIRGen, moveFactory, providers, lirGenRes);
        // The lock-slot layout below (SaveRbp, lock stack) assumes 8-byte basic locks.
        assert config.basicLockSize == 8;
        this.config = config;
    }

    @Override
    public HotSpotProviders getProviders() {
        return (HotSpotProviders) super.getProviders();
    }

    /**
     * Utility for emitting the instruction to save RBP.
     */
    class SaveRbp {

        // Placeholder op appended at method entry; replaced by the real move in finalize().
        final NoOp placeholder;

        /**
         * The slot reserved for saving RBP.
         */
        final StackSlot reservedSlot;

        SaveRbp(NoOp placeholder) {
            this.placeholder = placeholder;
            AMD64FrameMapBuilder frameMapBuilder = (AMD64FrameMapBuilder) getResult().getFrameMapBuilder();
            this.reservedSlot = frameMapBuilder.allocateRBPSpillSlot();
        }

        /**
         * Replaces this operation with the appropriate move for saving rbp.
         *
         * @param useStack specifies if rbp must be saved to the stack
         */
        public AllocatableValue finalize(boolean useStack) {
            AllocatableValue dst;
            if (useStack) {
                dst = reservedSlot;
            } else {
                // RBP not needed on the stack: release the reserved slot and use a virtual register.
                ((AMD64FrameMapBuilder) getResult().getFrameMapBuilder()).freeRBPSpillSlot();
                dst = newVariable(LIRKind.value(AMD64Kind.QWORD));
            }

            placeholder.replace(getResult().getLIR(), new MoveFromRegOp(AMD64Kind.QWORD, dst, rbp.asValue(LIRKind.value(AMD64Kind.QWORD))));
            return dst;
        }
    }

    private SaveRbp saveRbp;

    /**
     * Appends a {@link NoOp} placeholder for saving RBP at the current position; the placeholder is
     * later replaced by {@link SaveRbp#finalize} in {@link #beforeRegisterAllocation}.
     */
    protected void emitSaveRbp() {
        NoOp placeholder = new NoOp(getCurrentBlock(), getResult().getLIR().getLIRforBlock(getCurrentBlock()).size());
        append(placeholder);
        saveRbp = new SaveRbp(placeholder);
    }

    protected SaveRbp getSaveRbp() {
        return saveRbp;
    }

    /**
     * Helper instruction to reserve a stack slot for the whole method. Note that the actual users
     * of the stack slot might be inserted after stack slot allocation. This dummy instruction
     * ensures that the stack slot is alive and gets a real stack slot assigned.
     */
    private static final class RescueSlotDummyOp extends LIRInstruction {
        public static final LIRInstructionClass<RescueSlotDummyOp> TYPE = LIRInstructionClass.create(RescueSlotDummyOp.class);

        @Alive({OperandFlag.STACK, OperandFlag.UNINITIALIZED}) private AllocatableValue slot;

        RescueSlotDummyOp(FrameMapBuilder frameMapBuilder, LIRKind kind) {
            super(TYPE);
            slot = frameMapBuilder.allocateSpillSlot(kind);
        }

        public AllocatableValue getSlot() {
            return slot;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb) {
            // Intentionally empty: this op only keeps the slot alive, it emits no machine code.
        }
    }

    private RescueSlotDummyOp rescueSlotOp;

    /** Returns the (lazily created) rescue slot shared by all benchmark counter ops. */
    private AllocatableValue getOrInitRescueSlot() {
        RescueSlotDummyOp op = getOrInitRescueSlotOp();
        return op.getSlot();
    }

    private RescueSlotDummyOp getOrInitRescueSlotOp() {
        if (rescueSlotOp == null) {
            // create dummy instruction to keep the rescue slot alive
            rescueSlotOp = new RescueSlotDummyOp(getResult().getFrameMapBuilder(), getLIRKindTool().getWordKind());
        }
        return rescueSlotOp;
    }

    /**
     * List of epilogue operations that need to restore RBP.
     */
    List<AMD64HotSpotRestoreRbpOp> epilogueOps = new ArrayList<>(2);

    @Override
    public <I extends LIRInstruction> I append(I op) {
        I ret = super.append(op);
        // Track epilogue ops so beforeRegisterAllocation() can wire in the saved RBP value.
        if (op instanceof AMD64HotSpotRestoreRbpOp) {
            epilogueOps.add((AMD64HotSpotRestoreRbpOp) op);
        }
        return ret;
    }

    @Override
    public VirtualStackSlot getLockSlot(int lockDepth) {
        return getLockStack().makeLockSlot(lockDepth);
    }

    private HotSpotLockStack getLockStack() {
        assert debugInfoBuilder != null && debugInfoBuilder.lockStack() != null;
        return debugInfoBuilder.lockStack();
    }

    /**
     * Picks the first allocatable register that is neither the long return register nor RBP, for
     * use as a scratch register by the return op.
     */
    private Register findPollOnReturnScratchRegister() {
        RegisterConfig regConfig = getProviders().getCodeCache().getRegisterConfig();
        for (Register r : regConfig.getAllocatableRegisters()) {
            if (!r.equals(regConfig.getReturnRegister(JavaKind.Long)) && !r.equals(AMD64.rbp)) {
                return r;
            }
        }
        throw GraalError.shouldNotReachHere();
    }

    // Cached scratch register for the return op; computed lazily in emitReturn().
    private Register pollOnReturnScratchRegister;

    @Override
    public void emitReturn(JavaKind kind, Value input) {
        AllocatableValue operand = Value.ILLEGAL;
        if (input != null) {
            // Move the return value into the ABI return register for its kind.
            operand = resultOperandFor(kind, input.getValueKind());
            emitMove(operand, input);
        }
        if (pollOnReturnScratchRegister == null) {
            pollOnReturnScratchRegister = findPollOnReturnScratchRegister();
        }
        append(new AMD64HotSpotReturnOp(operand, getStub() != null, pollOnReturnScratchRegister, config));
    }

    @Override
    public boolean needOnlyOopMaps() {
        // Stubs only need oop maps
        return getResult().getStub() != null;
    }

    // Frame state of the foreign call currently being emitted; used as the callee-save-info key.
    private LIRFrameState currentRuntimeCallInfo;

    @Override
    protected void emitForeignCallOp(ForeignCallLinkage linkage, Value result, Value[] arguments, Value[] temps, LIRFrameState info) {
        currentRuntimeCallInfo = info;
        HotSpotForeignCallLinkage hsLinkage = (HotSpotForeignCallLinkage) linkage;
        AMD64 arch = (AMD64) target().arch;
        if (arch.getFeatures().contains(AMD64.CPUFeature.AVX) && hsLinkage.mayContainFP() && !hsLinkage.isCompiledStub()) {
            /*
             * If the target may contain FP ops, and it is not compiled by us, we may have an
             * AVX-SSE transition.
             *
             * We exclude the argument registers from the zeroing LIR instruction since it violates
             * the LIR semantics of @Temp that values must not be live. Note that the emitted
             * machine instruction actually zeros _all_ XMM registers which is fine since we know
             * that their upper half is not used.
             */
            append(new AMD64VZeroUpper(arguments));
        }
        super.emitForeignCallOp(linkage, result, arguments, temps, info);
    }

    /**
     * @param savedRegisters the registers saved by this operation which may be subject to pruning
     * @param savedRegisterLocations the slots to which the registers are saved
     * @param supportsRemove determines if registers can be pruned
     */
    protected AMD64SaveRegistersOp emitSaveRegisters(Register[] savedRegisters, AllocatableValue[] savedRegisterLocations, boolean supportsRemove) {
        AMD64SaveRegistersOp save = new AMD64SaveRegistersOp(savedRegisters, savedRegisterLocations, supportsRemove);
        append(save);
        return save;
    }

    /**
     * Allocate a stack slot for saving a register.
     */
    protected VirtualStackSlot allocateSaveRegisterLocation(Register register) {
        PlatformKind kind = target().arch.getLargestStorableKind(register.getRegisterCategory());
        if (kind.getVectorLength() > 1) {
            // we don't use vector registers, so there is no need to save them
            kind = AMD64Kind.DOUBLE;
        }
        return getResult().getFrameMapBuilder().allocateSpillSlot(LIRKind.value(kind));
    }

    /**
     * Adds a node to the graph that saves all allocatable registers to the stack.
     *
     * @param supportsRemove determines if registers can be pruned
     * @return the register save node
     */
    private AMD64SaveRegistersOp emitSaveAllRegisters(Register[] savedRegisters, boolean supportsRemove) {
        AllocatableValue[] savedRegisterLocations = new AllocatableValue[savedRegisters.length];
        for (int i = 0; i < savedRegisters.length; i++) {
            savedRegisterLocations[i] = allocateSaveRegisterLocation(savedRegisters[i]);
        }
        return emitSaveRegisters(savedRegisters, savedRegisterLocations, supportsRemove);
    }

    protected void emitRestoreRegisters(AMD64SaveRegistersOp save) {
        append(new AMD64RestoreRegistersOp(save.getSlots().clone(), save));
    }

    /**
     * Gets the {@link Stub} this generator is generating code for or {@code null} if a stub is not
     * being generated.
     */
    public Stub getStub() {
        return getResult().getStub();
    }

    @Override
    public HotSpotLIRGenerationResult getResult() {
        return ((HotSpotLIRGenerationResult) super.getResult());
    }

    public void setDebugInfoBuilder(HotSpotDebugInfoBuilder debugInfoBuilder) {
        this.debugInfoBuilder = debugInfoBuilder;
    }

    /**
     * Emits a foreign call, additionally handling two HotSpot-specific concerns: saving/restoring
     * all allocatable registers around calls that destroy registers (when compiling a
     * register-preserving stub), and bracketing calls that need a Java frame anchor with the
     * C-runtime prologue/epilogue ops.
     */
    @Override
    public Variable emitForeignCall(ForeignCallLinkage linkage, LIRFrameState state, Value... args) {
        HotSpotForeignCallLinkage hotspotLinkage = (HotSpotForeignCallLinkage) linkage;
        boolean destroysRegisters = hotspotLinkage.destroysRegisters();

        AMD64SaveRegistersOp save = null;
        Stub stub = getStub();
        if (destroysRegisters) {
            if (stub != null && stub.preservesRegisters()) {
                Register[] savedRegisters = getRegisterConfig().getAllocatableRegisters().toArray();
                save = emitSaveAllRegisters(savedRegisters, true);
            }
        }

        Variable result;
        LIRFrameState debugInfo = null;
        if (hotspotLinkage.needsDebugInfo()) {
            debugInfo = state;
            assert debugInfo != null || stub != null;
        }

        if (hotspotLinkage.needsJavaFrameAnchor()) {
            // Record the last Java frame (sp/fp/pc) in the thread before transferring to C code.
            Register thread = getProviders().getRegisters().getThreadRegister();
            append(new AMD64HotSpotCRuntimeCallPrologueOp(config.threadLastJavaSpOffset(), thread));
            result = super.emitForeignCall(hotspotLinkage, debugInfo, args);
            append(new AMD64HotSpotCRuntimeCallEpilogueOp(config.threadLastJavaSpOffset(), config.threadLastJavaFpOffset(), config.threadLastJavaPcOffset(), thread));
        } else {
            result = super.emitForeignCall(hotspotLinkage, debugInfo, args);
        }

        if (destroysRegisters) {
            if (stub != null) {
                if (stub.preservesRegisters()) {
                    // Record the save op under the call's frame state so the backend can prune it.
                    HotSpotLIRGenerationResult generationResult = getResult();
                    LIRFrameState key = currentRuntimeCallInfo;
                    if (key == null) {
                        key = LIRFrameState.NO_STATE;
                    }
                    assert !generationResult.getCalleeSaveInfo().containsKey(key);
                    generationResult.getCalleeSaveInfo().put(key, save);
                    emitRestoreRegisters(save);
                }
            }
        }

        return result;
    }

    @Override
    public Value emitLoadObjectAddress(Constant constant) {
        HotSpotObjectConstant objectConstant = (HotSpotObjectConstant) constant;
        // Result kind matches the constant's representation (compressed or uncompressed oop).
        LIRKind kind = objectConstant.isCompressed() ? getLIRKindTool().getNarrowOopKind() : getLIRKindTool().getObjectKind();
        Variable result = newVariable(kind);
        append(new AMD64HotSpotLoadAddressOp(result, constant, HotSpotConstantLoadAction.RESOLVE));
        return result;
    }

    @Override
    public Value emitLoadMetaspaceAddress(Constant constant, HotSpotConstantLoadAction action) {
        HotSpotMetaspaceConstant metaspaceConstant = (HotSpotMetaspaceConstant) constant;
        LIRKind kind = metaspaceConstant.isCompressed() ? getLIRKindTool().getNarrowPointerKind() : getLIRKindTool().getWordKind();
        Variable result = newVariable(kind);
        append(new AMD64HotSpotLoadAddressOp(result, constant, action));
        return result;
    }

    /**
     * Emits a runtime call that resolves/retrieves the given constants and returns the result from
     * the call's return register.
     */
    private Value emitConstantRetrieval(ForeignCallDescriptor foreignCall, Object[] notes, Constant[] constants, AllocatableValue[] constantDescriptions, LIRFrameState frameState) {
        ForeignCallLinkage linkage = getForeignCalls().lookupForeignCall(foreignCall);
        append(new AMD64HotSpotConstantRetrievalOp(constants, constantDescriptions, frameState, linkage, notes));
        AllocatableValue result = linkage.getOutgoingCallingConvention().getReturn();
        return emitMove(result);
    }

    // Convenience overload for a single constant with a single load action.
    private Value emitConstantRetrieval(ForeignCallDescriptor foreignCall, HotSpotConstantLoadAction action, Constant constant, AllocatableValue[] constantDescriptions, LIRFrameState frameState) {
        Constant[] constants = new Constant[]{constant};
        Object[] notes = new Object[]{action};
        return emitConstantRetrieval(foreignCall, notes, constants, constantDescriptions, frameState);
    }

    // Convenience overload for a single constant described by a single value.
    private Value emitConstantRetrieval(ForeignCallDescriptor foreignCall, HotSpotConstantLoadAction action, Constant constant, Value constantDescription, LIRFrameState frameState) {
        AllocatableValue[] constantDescriptions = new AllocatableValue[]{asAllocatable(constantDescription)};
        return emitConstantRetrieval(foreignCall, action, constant, constantDescriptions, frameState);
    }
    /** Resolves a String constant by symbol (AOT/PIC support). */
    @Override
    public Value emitObjectConstantRetrieval(Constant constant, Value constantDescription, LIRFrameState frameState) {
        return emitConstantRetrieval(RESOLVE_STRING_BY_SYMBOL, RESOLVE, constant, constantDescription, frameState);
    }

    /** Resolves a klass constant by symbol (AOT/PIC support). */
    @Override
    public Value emitMetaspaceConstantRetrieval(Constant constant, Value constantDescription, LIRFrameState frameState) {
        return emitConstantRetrieval(RESOLVE_KLASS_BY_SYMBOL, RESOLVE, constant, constantDescription, frameState);
    }

    /** Resolves and initializes a klass constant by symbol (AOT/PIC support). */
    @Override
    public Value emitKlassInitializationAndRetrieval(Constant constant, Value constantDescription, LIRFrameState frameState) {
        return emitConstantRetrieval(INITIALIZE_KLASS_BY_SYMBOL, INITIALIZE, constant, constantDescription, frameState);
    }

    /** Resolves a method by symbol and loads its profiling counters (AOT/PIC support). */
    @Override
    public Value emitResolveMethodAndLoadCounters(Constant method, Value klassHint, Value methodDescription, LIRFrameState frameState) {
        AllocatableValue[] constantDescriptions = new AllocatableValue[]{asAllocatable(klassHint), asAllocatable(methodDescription)};
        return emitConstantRetrieval(RESOLVE_METHOD_BY_SYMBOL_AND_LOAD_COUNTERS, LOAD_COUNTERS, method, constantDescriptions, frameState);
    }

    /** Resolves a dynamic invoke appendix; no extra constant descriptions are needed. */
    @Override
    public Value emitResolveDynamicInvoke(Constant appendix, LIRFrameState frameState) {
        AllocatableValue[] constantDescriptions = new AllocatableValue[0];
        return emitConstantRetrieval(RESOLVE_DYNAMIC_INVOKE, INITIALIZE, appendix, constantDescriptions, frameState);
    }

    /** Loads a VM configuration value identified by {@code markId} into a new variable. */
    @Override
    public Value emitLoadConfigValue(int markId, LIRKind kind) {
        Variable result = newVariable(kind);
        append(new AMD64HotSpotLoadConfigValueOp(markId, result));
        return result;
    }

    /** Uses the low word of the timestamp counter (RDTSC) as a cheap random seed. */
    @Override
    public Value emitRandomSeed() {
        AMD64ReadTimestampCounter timestamp = new AMD64ReadTimestampCounter();
        append(timestamp);
        return emitMove(timestamp.getLowResult());
    }

    @Override
    public void emitTailcall(Value[] args, Value address) {
        append(new AMD64TailcallOp(args, address));
    }

    /**
     * Emits a call to native code at {@code address} using the given native calling convention.
     * Arguments are moved into their ABI locations before the call.
     */
    @Override
    public void emitCCall(long address, CallingConvention nativeCallingConvention, Value[] args, int numberOfFloatingPointArguments) {
        Value[] argLocations = new Value[args.length];
        getResult().getFrameMapBuilder().callsMethod(nativeCallingConvention);
        // TODO(mg): in case a native function uses floating point varargs, the ABI requires that
        // RAX contains the length of the varargs
        PrimitiveConstant intConst = JavaConstant.forInt(numberOfFloatingPointArguments);
        AllocatableValue numberOfFloatingPointArgumentsRegister = AMD64.rax.asValue(LIRKind.value(AMD64Kind.DWORD));
        emitMoveConstant(numberOfFloatingPointArgumentsRegister, intConst);
        for (int i = 0; i < args.length; i++) {
            Value arg = args[i];
            AllocatableValue loc = nativeCallingConvention.getArgument(i);
            emitMove(loc, arg);
            argLocations[i] = loc;
        }
        Value ptr = emitLoadConstant(LIRKind.value(AMD64Kind.QWORD), JavaConstant.forLong(address));
        append(new AMD64CCall(nativeCallingConvention.getReturn(), ptr, numberOfFloatingPointArgumentsRegister, argLocations));
    }

    /**
     * Unwinds the current frame, delivering {@code exception} to the caller via the
     * UNWIND_EXCEPTION_TO_CALLER runtime call's first argument register.
     */
    @Override
    public void emitUnwind(Value exception) {
        ForeignCallLinkage linkage = getForeignCalls().lookupForeignCall(HotSpotBackend.UNWIND_EXCEPTION_TO_CALLER);
        CallingConvention outgoingCc = linkage.getOutgoingCallingConvention();
        assert outgoingCc.getArgumentCount() == 2;
        RegisterValue exceptionParameter = (RegisterValue) outgoingCc.getArgument(0);
        emitMove(exceptionParameter, exception);
        append(new AMD64HotSpotUnwindOp(exceptionParameter));
    }

    /** Stores the pending deopt action/reason and speculation into the current thread. */
    private void moveDeoptValuesToThread(Value actionAndReason, Value speculation) {
        moveValueToThread(actionAndReason, config.pendingDeoptimizationOffset);
        moveValueToThread(speculation, config.pendingFailedSpeculationOffset);
    }

    /** Stores {@code v} at {@code offset} relative to the thread register. */
    private void moveValueToThread(Value v, int offset) {
        LIRKind wordKind = LIRKind.value(target().arch.getWordKind());
        RegisterValue thread = getProviders().getRegisters().getThreadRegister().asValue(wordKind);
        AMD64AddressValue address = new AMD64AddressValue(wordKind, thread, offset);
        arithmeticLIRGen.emitStore(v.getValueKind(), address, v, null);
    }

    @Override
    public void emitDeoptimize(Value actionAndReason, Value speculation, LIRFrameState state) {
        moveDeoptValuesToThread(actionAndReason, speculation);
        append(new AMD64DeoptimizeOp(state));
    }

    @Override
    public void emitDeoptimizeCaller(DeoptimizationAction action, DeoptimizationReason reason) {
        Value actionAndReason = emitJavaConstant(getMetaAccess().encodeDeoptActionAndReason(action, reason, 0));
        Value nullValue = emitConstant(LIRKind.reference(AMD64Kind.QWORD), JavaConstant.NULL_POINTER);
        moveDeoptValuesToThread(actionAndReason, nullValue);
        append(new AMD64HotSpotDeoptimizeCallerOp());
    }

    /**
     * Finalizes HotSpot-specific bookkeeping before register allocation: materializes the RBP save
     * (to the stack iff debug info is present), allocates the deoptimization rescue slot, wires the
     * saved RBP into all epilogue ops, and (if benchmark counters are enabled) keeps the counter
     * rescue slot alive via a dummy op in the start block.
     */
    @Override
    public void beforeRegisterAllocation() {
        super.beforeRegisterAllocation();
        boolean hasDebugInfo = getResult().getLIR().hasDebugInfo();
        AllocatableValue savedRbp = saveRbp.finalize(hasDebugInfo);
        if (hasDebugInfo) {
            getResult().setDeoptimizationRescueSlot(((AMD64FrameMapBuilder) getResult().getFrameMapBuilder()).allocateDeoptimizationRescueSlot());
        }

        getResult().setMaxInterpreterFrameSize(debugInfoBuilder.maxInterpreterFrameSize());

        for (AMD64HotSpotRestoreRbpOp op : epilogueOps) {
            op.setSavedRbp(savedRbp);
        }
        if (BenchmarkCounters.enabled) {
            // ensure that the rescue slot is available
            LIRInstruction op = getOrInitRescueSlotOp();
            // insert dummy instruction into the start block
            LIR lir = getResult().getLIR();
            ArrayList<LIRInstruction> instructions = lir.getLIRforBlock(lir.getControlFlowGraph().getStartBlock());
            instructions.add(1, op);
            lir.getDebug().dump(DebugContext.INFO_LEVEL, lir, "created rescue dummy op");
        }
    }
    /**
     * Compresses a pointer. Oops are compressed against the heap base register; metaspace pointers
     * use an explicit base (loaded as a constant, or via a {@code BaseMove} when generating PIC).
     */
    @Override
    public Value emitCompress(Value pointer, CompressEncoding encoding, boolean nonNull) {
        LIRKind inputKind = pointer.getValueKind(LIRKind.class);
        LIRKindTool lirKindTool = getLIRKindTool();
        assert inputKind.getPlatformKind() == lirKindTool.getObjectKind().getPlatformKind();
        if (inputKind.isReference(0)) {
            // oop
            Variable result = newVariable(lirKindTool.getNarrowOopKind());
            append(new AMD64Move.CompressPointer(result, asAllocatable(pointer), getProviders().getRegisters().getHeapBaseRegister().asValue(), encoding, nonNull, getLIRKindTool()));
            return result;
        } else {
            // metaspace pointer
            Variable result = newVariable(lirKindTool.getNarrowPointerKind());
            AllocatableValue base = Value.ILLEGAL;
            OptionValues options = getResult().getLIR().getOptions();
            if (encoding.hasBase() || GeneratePIC.getValue(options)) {
                if (GeneratePIC.getValue(options)) {
                    // Position-independent code: the base must be materialized at runtime.
                    Variable baseAddress = newVariable(lirKindTool.getWordKind());
                    AMD64HotSpotMove.BaseMove move = new AMD64HotSpotMove.BaseMove(baseAddress, config);
                    append(move);
                    base = baseAddress;
                } else {
                    base = emitLoadConstant(lirKindTool.getWordKind(), JavaConstant.forLong(encoding.getBase()));
                }
            }
            append(new AMD64Move.CompressPointer(result, asAllocatable(pointer), base, encoding, nonNull, getLIRKindTool()));
            return result;
        }
    }

    /**
     * Uncompresses a narrow pointer; the inverse of {@link #emitCompress}, with the same oop vs.
     * metaspace-pointer base handling.
     */
    @Override
    public Value emitUncompress(Value pointer, CompressEncoding encoding, boolean nonNull) {
        LIRKind inputKind = pointer.getValueKind(LIRKind.class);
        LIRKindTool lirKindTool = getLIRKindTool();
        assert inputKind.getPlatformKind() == lirKindTool.getNarrowOopKind().getPlatformKind();
        if (inputKind.isReference(0)) {
            // oop
            Variable result = newVariable(lirKindTool.getObjectKind());
            append(new AMD64Move.UncompressPointer(result, asAllocatable(pointer), getProviders().getRegisters().getHeapBaseRegister().asValue(), encoding, nonNull, lirKindTool));
            return result;
        } else {
            // metaspace pointer
            LIRKind uncompressedKind = lirKindTool.getWordKind();
            Variable result = newVariable(uncompressedKind);
            AllocatableValue base = Value.ILLEGAL;
            OptionValues options = getResult().getLIR().getOptions();
            if (encoding.hasBase() || GeneratePIC.getValue(options)) {
                if (GeneratePIC.getValue(options)) {
                    // Position-independent code: the base must be materialized at runtime.
                    Variable baseAddress = newVariable(uncompressedKind);
                    AMD64HotSpotMove.BaseMove move = new AMD64HotSpotMove.BaseMove(baseAddress, config);
                    append(move);
                    base = baseAddress;
                } else {
                    base = emitLoadConstant(uncompressedKind, JavaConstant.forLong(encoding.getBase()));
                }
            }
            append(new AMD64Move.UncompressPointer(result, asAllocatable(pointer), base, encoding, nonNull, lirKindTool));
            return result;
        }
    }

    /**
     * Emits a null check. For narrow oops with a shift representable as an addressing-mode scale
     * (shift <= 3), the check is folded into a single address (heapBase + compressed << shift);
     * otherwise the pointer is uncompressed first.
     */
    @Override
    public void emitNullCheck(Value address, LIRFrameState state) {
        if (address.getValueKind().getPlatformKind() == getLIRKindTool().getNarrowOopKind().getPlatformKind()) {
            CompressEncoding encoding = config.getOopEncoding();
            Value uncompressed;
            if (encoding.getShift() <= 3) {
                LIRKind wordKind = LIRKind.unknownReference(target().arch.getWordKind());
                uncompressed = new AMD64AddressValue(wordKind, getProviders().getRegisters().getHeapBaseRegister().asValue(wordKind), asAllocatable(address), Scale.fromInt(1 << encoding.getShift()),
                                0);
            } else {
                uncompressed = emitUncompress(address, encoding, false);
            }
            append(new AMD64Move.NullCheckOp(asAddressValue(uncompressed), state));
            return;
        }
        super.emitNullCheck(address, state);
    }

    /** Creates a single benchmark counter op; only legal when counters are enabled. */
    @Override
    public LIRInstruction createBenchmarkCounter(String name, String group, Value increment) {
        if (BenchmarkCounters.enabled) {
            return new AMD64HotSpotCounterOp(name, group, increment, getProviders().getRegisters(), config, getOrInitRescueSlot());
        }
        throw GraalError.shouldNotReachHere("BenchmarkCounters are not enabled!");
    }
    /** Creates a multi-counter benchmark op; only legal when counters are enabled. */
    @Override
    public LIRInstruction createMultiBenchmarkCounter(String[] names, String[] groups, Value[] increments) {
        if (BenchmarkCounters.enabled) {
            return new AMD64HotSpotCounterOp(names, groups, increments, getProviders().getRegisters(), config, getOrInitRescueSlot());
        }
        throw GraalError.shouldNotReachHere("BenchmarkCounters are not enabled!");
    }

    /** Emits a prefetch for an allocation site using the VM-configured prefetch instruction. */
    @Override
    public void emitPrefetchAllocate(Value address) {
        append(new AMD64PrefetchOp(asAddressValue(address), config.allocatePrefetchInstr));
    }

    @Override
    protected StrategySwitchOp createStrategySwitchOp(SwitchStrategy strategy, LabelRef[] keyTargets, LabelRef defaultTarget, Variable key, AllocatableValue temp) {
        // HotSpot-specific switch op (handles metaspace/oop constant keys).
        return new AMD64HotSpotStrategySwitchOp(strategy, keyTargets, defaultTarget, key, temp);
    }
}