/*
 * Copyright (c) 2012, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */


package org.graalvm.compiler.hotspot.amd64;

import static jdk.vm.ci.amd64.AMD64.rbp;
import static org.graalvm.compiler.core.common.GraalOptions.GeneratePIC;
import static org.graalvm.compiler.hotspot.HotSpotBackend.INITIALIZE_KLASS_BY_SYMBOL;
import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_DYNAMIC_INVOKE;
import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_KLASS_BY_SYMBOL;
import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_METHOD_BY_SYMBOL_AND_LOAD_COUNTERS;
import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_STRING_BY_SYMBOL;
import static org.graalvm.compiler.hotspot.meta.HotSpotConstantLoadAction.INITIALIZE;
import static org.graalvm.compiler.hotspot.meta.HotSpotConstantLoadAction.LOAD_COUNTERS;
import static org.graalvm.compiler.hotspot.meta.HotSpotConstantLoadAction.RESOLVE;

import java.util.ArrayList;
import java.util.List;

import org.graalvm.compiler.asm.amd64.AMD64Address.Scale;
import org.graalvm.compiler.core.amd64.AMD64ArithmeticLIRGenerator;
import org.graalvm.compiler.core.amd64.AMD64LIRGenerator;
import org.graalvm.compiler.core.amd64.AMD64MoveFactoryBase.BackupSlotProvider;
import org.graalvm.compiler.core.common.CompressEncoding;
import org.graalvm.compiler.core.common.LIRKind;
import org.graalvm.compiler.core.common.spi.ForeignCallDescriptor;
import org.graalvm.compiler.core.common.spi.ForeignCallLinkage;
import org.graalvm.compiler.core.common.spi.LIRKindTool;
import org.graalvm.compiler.debug.DebugContext;
import org.graalvm.compiler.debug.GraalError;
import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig;
import org.graalvm.compiler.hotspot.HotSpotBackend;
import org.graalvm.compiler.hotspot.HotSpotDebugInfoBuilder;
import org.graalvm.compiler.hotspot.HotSpotForeignCallLinkage;
import org.graalvm.compiler.hotspot.HotSpotLIRGenerationResult;
import org.graalvm.compiler.hotspot.HotSpotLIRGenerator;
import org.graalvm.compiler.hotspot.HotSpotLockStack;
import org.graalvm.compiler.hotspot.debug.BenchmarkCounters;
import org.graalvm.compiler.hotspot.meta.HotSpotConstantLoadAction;
import org.graalvm.compiler.hotspot.meta.HotSpotProviders;
import org.graalvm.compiler.hotspot.stubs.Stub;
import org.graalvm.compiler.lir.LIR;
import org.graalvm.compiler.lir.LIRFrameState;
import org.graalvm.compiler.lir.LIRInstruction;
import org.graalvm.compiler.lir.LIRInstructionClass;
import org.graalvm.compiler.lir.LabelRef;
import org.graalvm.compiler.lir.StandardOp.NoOp;
import org.graalvm.compiler.lir.SwitchStrategy;
import org.graalvm.compiler.lir.Variable;
import org.graalvm.compiler.lir.VirtualStackSlot;
import org.graalvm.compiler.lir.amd64.AMD64AddressValue;
import org.graalvm.compiler.lir.amd64.AMD64CCall;
import org.graalvm.compiler.lir.amd64.AMD64ControlFlow.StrategySwitchOp;
import org.graalvm.compiler.lir.amd64.AMD64FrameMapBuilder;
import org.graalvm.compiler.lir.amd64.AMD64Move;
import org.graalvm.compiler.lir.amd64.AMD64Move.MoveFromRegOp;
import org.graalvm.compiler.lir.amd64.AMD64PrefetchOp;
import org.graalvm.compiler.lir.amd64.AMD64ReadTimestampCounter;
import org.graalvm.compiler.lir.amd64.AMD64RestoreRegistersOp;
import org.graalvm.compiler.lir.amd64.AMD64SaveRegistersOp;
import org.graalvm.compiler.lir.amd64.AMD64VZeroUpper;
import org.graalvm.compiler.lir.asm.CompilationResultBuilder;
import org.graalvm.compiler.lir.framemap.FrameMapBuilder;
import org.graalvm.compiler.lir.gen.LIRGenerationResult;
import org.graalvm.compiler.options.OptionValues;

import jdk.vm.ci.amd64.AMD64;
import jdk.vm.ci.amd64.AMD64Kind;
import jdk.vm.ci.code.CallingConvention;
import jdk.vm.ci.code.Register;
import jdk.vm.ci.code.RegisterConfig;
import jdk.vm.ci.code.RegisterValue;
import jdk.vm.ci.code.StackSlot;
import jdk.vm.ci.hotspot.HotSpotMetaspaceConstant;
import jdk.vm.ci.hotspot.HotSpotObjectConstant;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.Constant;
import jdk.vm.ci.meta.DeoptimizationAction;
import jdk.vm.ci.meta.DeoptimizationReason;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.JavaKind;
import jdk.vm.ci.meta.PlatformKind;
import jdk.vm.ci.meta.PrimitiveConstant;
import jdk.vm.ci.meta.SpeculationLog;
import jdk.vm.ci.meta.Value;

/**
 * LIR generator specialized for AMD64 HotSpot.
 */
public class AMD64HotSpotLIRGenerator extends AMD64LIRGenerator implements HotSpotLIRGenerator {

    /** HotSpot VM configuration (offsets, flags, feature switches) read throughout this class. */
    final GraalHotSpotVMConfig config;

    // Set late via setDebugInfoBuilder(); provides the lock stack used by getLockSlot().
    private HotSpotDebugInfoBuilder debugInfoBuilder;

    protected AMD64HotSpotLIRGenerator(HotSpotProviders providers, GraalHotSpotVMConfig config, LIRGenerationResult lirGenRes) {
        this(providers, config, lirGenRes, new BackupSlotProvider(lirGenRes.getFrameMapBuilder()));
    }

    private AMD64HotSpotLIRGenerator(HotSpotProviders providers, GraalHotSpotVMConfig config, LIRGenerationResult lirGenRes, BackupSlotProvider backupSlotProvider) {
        this(new AMD64HotSpotLIRKindTool(), new AMD64ArithmeticLIRGenerator(null), new AMD64HotSpotMoveFactory(backupSlotProvider), providers, config, lirGenRes);
    }

    protected AMD64HotSpotLIRGenerator(LIRKindTool lirKindTool, AMD64ArithmeticLIRGenerator arithmeticLIRGen, MoveFactory moveFactory, HotSpotProviders providers, GraalHotSpotVMConfig config,
                    LIRGenerationResult lirGenRes) {
        super(lirKindTool, arithmeticLIRGen, moveFactory, providers, lirGenRes);
        // The monitor-slot layout below assumes HotSpot's BasicLock is one machine word.
        assert config.basicLockSize == 8;
        this.config = config;
    }

    @Override
    public HotSpotProviders getProviders() {
        return (HotSpotProviders) super.getProviders();
    }

    @Override
    protected int getMaxVectorSize() {
        return config.maxVectorSize;
    }

    /**
     * Utility for emitting the instruction to save RBP.
     */
    class SaveRbp {

        // Placeholder op appended at method entry; replaced in finalize() once it is known
        // whether rbp must go to the stack or can live in a virtual register.
        final NoOp placeholder;

        /**
         * The slot reserved for saving RBP.
         */
        final StackSlot reservedSlot;

        SaveRbp(NoOp placeholder) {
            this.placeholder = placeholder;
            AMD64FrameMapBuilder frameMapBuilder = (AMD64FrameMapBuilder) getResult().getFrameMapBuilder();
            this.reservedSlot = frameMapBuilder.allocateRBPSpillSlot();
        }

        /**
         * Replaces this operation with the appropriate move for saving rbp.
         *
         * @param useStack specifies if rbp must be saved to the stack
         */
        public AllocatableValue finalize(boolean useStack) {
            AllocatableValue dst;
            if (useStack) {
                dst = reservedSlot;
            } else {
                // The reserved slot is not needed; release it and use a fresh variable instead.
                ((AMD64FrameMapBuilder) getResult().getFrameMapBuilder()).freeRBPSpillSlot();
                dst = newVariable(LIRKind.value(AMD64Kind.QWORD));
            }

            placeholder.replace(getResult().getLIR(), new MoveFromRegOp(AMD64Kind.QWORD, dst, rbp.asValue(LIRKind.value(AMD64Kind.QWORD))));
            return dst;
        }
    }

    private SaveRbp saveRbp;

    /** Appends the RBP-save placeholder to the current block; finalized in beforeRegisterAllocation(). */
    protected void emitSaveRbp() {
        NoOp placeholder = new NoOp(getCurrentBlock(), getResult().getLIR().getLIRforBlock(getCurrentBlock()).size());
        append(placeholder);
        saveRbp = new SaveRbp(placeholder);
    }

    protected SaveRbp getSaveRbp() {
        return saveRbp;
    }

    /**
     * Helper instruction to reserve a stack slot for the whole method. Note that the actual users
     * of the stack slot might be inserted after stack slot allocation. This dummy instruction
     * ensures that the stack slot is alive and gets a real stack slot assigned.
     */
    private static final class RescueSlotDummyOp extends LIRInstruction {
        public static final LIRInstructionClass<RescueSlotDummyOp> TYPE = LIRInstructionClass.create(RescueSlotDummyOp.class);

        @Alive({OperandFlag.STACK, OperandFlag.UNINITIALIZED}) private AllocatableValue slot;

        RescueSlotDummyOp(FrameMapBuilder frameMapBuilder, LIRKind kind) {
            super(TYPE);
            slot = frameMapBuilder.allocateSpillSlot(kind);
        }

        public AllocatableValue getSlot() {
            return slot;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb) {
            // Intentionally empty: this op exists only to keep the slot alive.
        }
    }

    private RescueSlotDummyOp rescueSlotOp;

    /** Returns the shared rescue stack slot, creating its keep-alive dummy op on first use. */
    private AllocatableValue getOrInitRescueSlot() {
        RescueSlotDummyOp op = getOrInitRescueSlotOp();
        return op.getSlot();
    }

    private RescueSlotDummyOp getOrInitRescueSlotOp() {
        if (rescueSlotOp == null) {
            // create dummy instruction to keep the rescue slot alive
            rescueSlotOp = new RescueSlotDummyOp(getResult().getFrameMapBuilder(), getLIRKindTool().getWordKind());
        }
        return rescueSlotOp;
    }

    /**
     * List of epilogue operations that need to restore RBP.
     */
    List<AMD64HotSpotRestoreRbpOp> epilogueOps = new ArrayList<>(2);

    @Override
    public <I extends LIRInstruction> I append(I op) {
        I ret = super.append(op);
        // Collect epilogue ops so beforeRegisterAllocation() can wire in the saved-RBP value.
        if (op instanceof AMD64HotSpotRestoreRbpOp) {
            epilogueOps.add((AMD64HotSpotRestoreRbpOp) op);
        }
        return ret;
    }

    @Override
    public VirtualStackSlot getLockSlot(int lockDepth) {
        return getLockStack().makeLockSlot(lockDepth);
    }

    private HotSpotLockStack getLockStack() {
        assert debugInfoBuilder != null && debugInfoBuilder.lockStack() != null;
        return debugInfoBuilder.lockStack();
    }

    /**
     * Picks an allocatable register, other than rbp and the long return register, to use as a
     * scratch register for the safepoint poll on return.
     */
    private Register findPollOnReturnScratchRegister() {
        RegisterConfig regConfig = getProviders().getCodeCache().getRegisterConfig();
        for (Register r : regConfig.getAllocatableRegisters()) {
            if (!r.equals(regConfig.getReturnRegister(JavaKind.Long)) && !r.equals(AMD64.rbp)) {
                return r;
            }
        }
        throw GraalError.shouldNotReachHere();
    }

    // Lazily chosen by emitReturn(); stable for the whole compilation.
    private Register pollOnReturnScratchRegister;

    @Override
    public void emitReturn(JavaKind kind, Value input) {
        AllocatableValue operand = Value.ILLEGAL;
        if (input != null) {
            operand = resultOperandFor(kind, input.getValueKind());
            emitMove(operand, input);
        }
        if (pollOnReturnScratchRegister == null) {
            pollOnReturnScratchRegister = findPollOnReturnScratchRegister();
        }
        Register thread = getProviders().getRegisters().getThreadRegister();
        append(new AMD64HotSpotReturnOp(operand, getStub() != null, thread, pollOnReturnScratchRegister, config, getResult().requiresReservedStackAccessCheck()));
    }

    @Override
    public boolean needOnlyOopMaps() {
        // Stubs only need oop maps
        return getResult().getStub() != null;
    }

    // Frame state of the foreign call currently being emitted; used as the key for
    // callee-save info recorded in emitForeignCall().
    private LIRFrameState currentRuntimeCallInfo;

    @Override
    protected void emitForeignCallOp(ForeignCallLinkage linkage, Value result, Value[] arguments, Value[] temps, LIRFrameState info) {
        currentRuntimeCallInfo = info;
        HotSpotForeignCallLinkage hsLinkage = (HotSpotForeignCallLinkage) linkage;
        AMD64 arch = (AMD64) target().arch;
        if (arch.getFeatures().contains(AMD64.CPUFeature.AVX) && hsLinkage.mayContainFP() && !hsLinkage.isCompiledStub()) {
            /*
             * If the target may contain FP ops, and it is not compiled by us, we may have an
             * AVX-SSE transition.
             *
             * We exclude the argument registers from the zeroing LIR instruction since it violates
             * the LIR semantics of @Temp that values must not be live. Note that the emitted
             * machine instruction actually zeros _all_ XMM registers which is fine since we know
             * that their upper half is not used.
             */
            append(new AMD64VZeroUpper(arguments, getRegisterConfig()));
        }
        super.emitForeignCallOp(linkage, result, arguments, temps, info);
    }

    /**
     * @param savedRegisters the registers saved by this operation which may be subject to pruning
     * @param savedRegisterLocations the slots to which the registers are saved
     */
    protected AMD64SaveRegistersOp emitSaveRegisters(Register[] savedRegisters, AllocatableValue[] savedRegisterLocations) {
        AMD64SaveRegistersOp save = new AMD64SaveRegistersOp(savedRegisters, savedRegisterLocations);
        append(save);
        return save;
    }

    /**
     * Allocate a stack slot for saving a register.
     */
    protected VirtualStackSlot allocateSaveRegisterLocation(Register register) {
        PlatformKind kind = target().arch.getLargestStorableKind(register.getRegisterCategory());
        if (kind.getVectorLength() > 1) {
            // we don't use vector registers, so there is no need to save them
            kind = AMD64Kind.DOUBLE;
        }
        return getResult().getFrameMapBuilder().allocateSpillSlot(LIRKind.value(kind));
    }

    /**
     * Adds a node to the graph that saves all allocatable registers to the stack.
     *
     * @return the register save node
     */
    private AMD64SaveRegistersOp emitSaveAllRegisters() {
        Register[] savedRegisters = getSaveableRegisters();
        AllocatableValue[] savedRegisterLocations = new AllocatableValue[savedRegisters.length];
        for (int i = 0; i < savedRegisters.length; i++) {
            savedRegisterLocations[i] = allocateSaveRegisterLocation(savedRegisters[i]);
        }
        return emitSaveRegisters(savedRegisters, savedRegisterLocations);
    }

    protected Register[] getSaveableRegisters() {
        return getResult().getRegisterAllocationConfig().getAllocatableRegisters().toArray();
    }

    protected void emitRestoreRegisters(AMD64SaveRegistersOp save) {
        append(new AMD64RestoreRegistersOp(save.getSlots().clone(), save));
    }

    /**
     * Gets the {@link Stub} this generator is generating code for or {@code null} if a stub is not
     * being generated.
     */
    public Stub getStub() {
        return getResult().getStub();
    }

    @Override
    public HotSpotLIRGenerationResult getResult() {
        return ((HotSpotLIRGenerationResult) super.getResult());
    }

    public void setDebugInfoBuilder(HotSpotDebugInfoBuilder debugInfoBuilder) {
        this.debugInfoBuilder = debugInfoBuilder;
    }

    @Override
    public Variable emitForeignCall(ForeignCallLinkage linkage, LIRFrameState state, Value... args) {
        HotSpotForeignCallLinkage hotspotLinkage = (HotSpotForeignCallLinkage) linkage;
        boolean destroysRegisters = hotspotLinkage.destroysRegisters();

        AMD64SaveRegistersOp save = null;
        Stub stub = getStub();
        if (destroysRegisters && stub != null && stub.shouldSaveRegistersAroundCalls()) {
            // Inside a stub: spill every allocatable register around the call.
            save = emitSaveAllRegisters();
        }

        Variable result;
        LIRFrameState debugInfo = null;
        if (hotspotLinkage.needsDebugInfo()) {
            debugInfo = state;
            assert debugInfo != null || stub != null;
        }

        if (hotspotLinkage.needsJavaFrameAnchor()) {
            // Bracket the call with last-Java-frame bookkeeping on the thread.
            Register thread = getProviders().getRegisters().getThreadRegister();
            append(new AMD64HotSpotCRuntimeCallPrologueOp(config.threadLastJavaSpOffset(), thread));
            result = super.emitForeignCall(hotspotLinkage, debugInfo, args);
            append(new AMD64HotSpotCRuntimeCallEpilogueOp(config.threadLastJavaSpOffset(), config.threadLastJavaFpOffset(), config.threadLastJavaPcOffset(), thread));
        } else {
            result = super.emitForeignCall(hotspotLinkage, debugInfo, args);
        }

        if (save != null) {
            // Record which registers were saved at this call site so the register
            // allocator / debug info can prune and locate them later.
            HotSpotLIRGenerationResult generationResult = getResult();
            LIRFrameState key = currentRuntimeCallInfo;
            if (key == null) {
                key = LIRFrameState.NO_STATE;
            }
            assert !generationResult.getCalleeSaveInfo().containsKey(key);
            generationResult.getCalleeSaveInfo().put(key, save);
            emitRestoreRegisters(save);
        }

        return result;
    }

    @Override
    public Value emitLoadObjectAddress(Constant constant) {
        HotSpotObjectConstant objectConstant = (HotSpotObjectConstant) constant;
        LIRKind kind = objectConstant.isCompressed() ? getLIRKindTool().getNarrowOopKind() : getLIRKindTool().getObjectKind();
        Variable result = newVariable(kind);
        append(new AMD64HotSpotLoadAddressOp(result, constant, HotSpotConstantLoadAction.RESOLVE));
        return result;
    }

    @Override
    public Value emitLoadMetaspaceAddress(Constant constant, HotSpotConstantLoadAction action) {
        HotSpotMetaspaceConstant metaspaceConstant = (HotSpotMetaspaceConstant) constant;
        LIRKind kind = metaspaceConstant.isCompressed() ? getLIRKindTool().getNarrowPointerKind() : getLIRKindTool().getWordKind();
        Variable result = newVariable(kind);
        append(new AMD64HotSpotLoadAddressOp(result, constant, action));
        return result;
    }

    /**
     * Emits a runtime call that resolves the given constants and returns the resolved value in the
     * call's return register.
     */
    private Value emitConstantRetrieval(ForeignCallDescriptor foreignCall, Object[] notes, Constant[] constants, AllocatableValue[] constantDescriptions, LIRFrameState frameState) {
        ForeignCallLinkage linkage = getForeignCalls().lookupForeignCall(foreignCall);
        append(new AMD64HotSpotConstantRetrievalOp(constants, constantDescriptions, frameState, linkage, notes));
        AllocatableValue result = linkage.getOutgoingCallingConvention().getReturn();
        return emitMove(result);
    }

    // Convenience overload: single constant with a single load action.
    private Value emitConstantRetrieval(ForeignCallDescriptor foreignCall, HotSpotConstantLoadAction action, Constant constant, AllocatableValue[] constantDescriptions, LIRFrameState frameState) {
        Constant[] constants = new Constant[]{constant};
        Object[] notes = new Object[]{action};
        return emitConstantRetrieval(foreignCall, notes, constants, constantDescriptions, frameState);
    }

    // Convenience overload: single constant with a single description value.
    private Value emitConstantRetrieval(ForeignCallDescriptor foreignCall, HotSpotConstantLoadAction action, Constant constant, Value constantDescription, LIRFrameState frameState) {
        AllocatableValue[] constantDescriptions = new AllocatableValue[]{asAllocatable(constantDescription)};
        return emitConstantRetrieval(foreignCall, action, constant, constantDescriptions, frameState);
    }

    @Override
    public Value emitObjectConstantRetrieval(Constant constant, Value constantDescription, LIRFrameState frameState) {
        return emitConstantRetrieval(RESOLVE_STRING_BY_SYMBOL, RESOLVE, constant, constantDescription, frameState);
    }

    @Override
    public Value emitMetaspaceConstantRetrieval(Constant constant, Value constantDescription, LIRFrameState frameState) {
        return emitConstantRetrieval(RESOLVE_KLASS_BY_SYMBOL, RESOLVE, constant, constantDescription, frameState);
    }

    @Override
    public Value emitKlassInitializationAndRetrieval(Constant constant, Value constantDescription, LIRFrameState frameState) {
        return emitConstantRetrieval(INITIALIZE_KLASS_BY_SYMBOL, INITIALIZE, constant, constantDescription, frameState);
    }

    @Override
    public Value emitResolveMethodAndLoadCounters(Constant method, Value klassHint, Value methodDescription, LIRFrameState frameState) {
        AllocatableValue[] constantDescriptions = new AllocatableValue[]{asAllocatable(klassHint), asAllocatable(methodDescription)};
        return emitConstantRetrieval(RESOLVE_METHOD_BY_SYMBOL_AND_LOAD_COUNTERS, LOAD_COUNTERS, method, constantDescriptions, frameState);
    }

    @Override
    public Value emitResolveDynamicInvoke(Constant appendix, LIRFrameState frameState) {
        AllocatableValue[] constantDescriptions = new AllocatableValue[0];
        return emitConstantRetrieval(RESOLVE_DYNAMIC_INVOKE, INITIALIZE, appendix, constantDescriptions, frameState);
    }

    @Override
    public Value emitLoadConfigValue(int markId, LIRKind kind) {
        Variable result = newVariable(kind);
        append(new AMD64HotSpotLoadConfigValueOp(markId, result));
        return result;
    }

    @Override
    public Value emitRandomSeed() {
        // Uses the low 32 bits of the time stamp counter as a cheap seed source.
        AMD64ReadTimestampCounter timestamp = new AMD64ReadTimestampCounter();
        append(timestamp);
        return emitMove(timestamp.getLowResult());
    }

    @Override
    public void emitTailcall(Value[] args, Value address) {
        append(new AMD64TailcallOp(args, address));
    }

    @Override
    public void emitCCall(long address, CallingConvention nativeCallingConvention, Value[] args, int numberOfFloatingPointArguments) {
        Value[] argLocations = new Value[args.length];
        getResult().getFrameMapBuilder().callsMethod(nativeCallingConvention);
        // TODO(mg): in case a native function uses floating point varargs, the ABI requires that
        // RAX contains the length of the varargs
        PrimitiveConstant intConst = JavaConstant.forInt(numberOfFloatingPointArguments);
        AllocatableValue numberOfFloatingPointArgumentsRegister = AMD64.rax.asValue(LIRKind.value(AMD64Kind.DWORD));
        emitMoveConstant(numberOfFloatingPointArgumentsRegister, intConst);
        for (int i = 0; i < args.length; i++) {
            Value arg = args[i];
            AllocatableValue loc = nativeCallingConvention.getArgument(i);
            emitMove(loc, arg);
            argLocations[i] = loc;
        }
        Value ptr = emitLoadConstant(LIRKind.value(AMD64Kind.QWORD), JavaConstant.forLong(address));
        append(new AMD64CCall(nativeCallingConvention.getReturn(), ptr, numberOfFloatingPointArgumentsRegister, argLocations));
    }

    @Override
    public void emitUnwind(Value exception) {
        ForeignCallLinkage linkage = getForeignCalls().lookupForeignCall(HotSpotBackend.UNWIND_EXCEPTION_TO_CALLER);
        CallingConvention outgoingCc = linkage.getOutgoingCallingConvention();
        assert outgoingCc.getArgumentCount() == 2;
        RegisterValue exceptionParameter = (RegisterValue) outgoingCc.getArgument(0);
        emitMove(exceptionParameter, exception);
        append(new AMD64HotSpotUnwindOp(exceptionParameter));
    }

    /** Stores the deopt action/reason and speculation into the current thread's pending fields. */
    private void moveDeoptValuesToThread(Value actionAndReason, Value speculation) {
        moveValueToThread(actionAndReason, config.pendingDeoptimizationOffset);
        moveValueToThread(speculation, config.pendingFailedSpeculationOffset);
    }

    private void moveValueToThread(Value v, int offset) {
        LIRKind wordKind = LIRKind.value(target().arch.getWordKind());
        RegisterValue thread = getProviders().getRegisters().getThreadRegister().asValue(wordKind);
        AMD64AddressValue address = new AMD64AddressValue(wordKind, thread, offset);
        arithmeticLIRGen.emitStore(v.getValueKind(), address, v, null);
    }

    @Override
    public void emitDeoptimize(Value actionAndReason, Value speculation, LIRFrameState state) {
        moveDeoptValuesToThread(actionAndReason, speculation);
        append(new AMD64DeoptimizeOp(state));
    }

    @Override
    public void emitDeoptimizeCaller(DeoptimizationAction action, DeoptimizationReason reason) {
        Value actionAndReason = emitJavaConstant(getMetaAccess().encodeDeoptActionAndReason(action, reason, 0));
        Value speculation = emitJavaConstant(getMetaAccess().encodeSpeculation(SpeculationLog.NO_SPECULATION));
        moveDeoptValuesToThread(actionAndReason, speculation);
        append(new AMD64HotSpotDeoptimizeCallerOp());
    }

    @Override
    public void beforeRegisterAllocation() {
        super.beforeRegisterAllocation();
        boolean hasDebugInfo = getResult().getLIR().hasDebugInfo();
        // rbp must be saved to the stack when debug info is present (deopt may need it).
        AllocatableValue savedRbp = saveRbp.finalize(hasDebugInfo);
        if (hasDebugInfo) {
            getResult().setDeoptimizationRescueSlot(((AMD64FrameMapBuilder) getResult().getFrameMapBuilder()).allocateDeoptimizationRescueSlot());
        }

        getResult().setMaxInterpreterFrameSize(debugInfoBuilder.maxInterpreterFrameSize());

        for (AMD64HotSpotRestoreRbpOp op : epilogueOps) {
            op.setSavedRbp(savedRbp);
        }
        if (BenchmarkCounters.enabled) {
            // ensure that the rescue slot is available
            LIRInstruction op = getOrInitRescueSlotOp();
            // insert dummy instruction into the start block
            LIR lir = getResult().getLIR();
            ArrayList<LIRInstruction> instructions = lir.getLIRforBlock(lir.getControlFlowGraph().getStartBlock());
            instructions.add(1, op);
            lir.getDebug().dump(DebugContext.INFO_LEVEL, lir, "created rescue dummy op");
        }
    }

    @Override
    public Value emitCompress(Value pointer, CompressEncoding encoding, boolean nonNull) {
        LIRKind inputKind = pointer.getValueKind(LIRKind.class);
        LIRKindTool lirKindTool = getLIRKindTool();
        assert inputKind.getPlatformKind() == lirKindTool.getObjectKind().getPlatformKind();
        if (inputKind.isReference(0)) {
            // oop
            Variable result = newVariable(lirKindTool.getNarrowOopKind());
            append(new AMD64Move.CompressPointerOp(result, asAllocatable(pointer), getProviders().getRegisters().getHeapBaseRegister().asValue(), encoding, nonNull, getLIRKindTool()));
            return result;
        } else {
            // metaspace pointer
            Variable result = newVariable(lirKindTool.getNarrowPointerKind());
            AllocatableValue base = Value.ILLEGAL;
            OptionValues options = getResult().getLIR().getOptions();
            if (encoding.hasBase() || GeneratePIC.getValue(options)) {
                if (GeneratePIC.getValue(options)) {
                    // PIC: the base is not a compile-time constant; load it at runtime.
                    Variable baseAddress = newVariable(lirKindTool.getWordKind());
                    AMD64HotSpotMove.BaseMove move = new AMD64HotSpotMove.BaseMove(baseAddress, config);
                    append(move);
                    base = baseAddress;
                } else {
                    base = emitLoadConstant(lirKindTool.getWordKind(), JavaConstant.forLong(encoding.getBase()));
                }
            }
            append(new AMD64Move.CompressPointerOp(result, asAllocatable(pointer), base, encoding, nonNull, getLIRKindTool()));
            return result;
        }
    }

    @Override
    public Value emitUncompress(Value pointer, CompressEncoding encoding, boolean nonNull) {
        LIRKind inputKind = pointer.getValueKind(LIRKind.class);
        LIRKindTool lirKindTool = getLIRKindTool();
        assert inputKind.getPlatformKind() == lirKindTool.getNarrowOopKind().getPlatformKind();
        if (inputKind.isReference(0)) {
            // oop
            Variable result = newVariable(lirKindTool.getObjectKind());
            append(new AMD64Move.UncompressPointerOp(result, asAllocatable(pointer), getProviders().getRegisters().getHeapBaseRegister().asValue(), encoding, nonNull, lirKindTool));
            return result;
        } else {
            // metaspace pointer
            LIRKind uncompressedKind = lirKindTool.getWordKind();
            Variable result = newVariable(uncompressedKind);
            AllocatableValue base = Value.ILLEGAL;
            OptionValues options = getResult().getLIR().getOptions();
            if (encoding.hasBase() || GeneratePIC.getValue(options)) {
                if (GeneratePIC.getValue(options)) {
                    // PIC: the base is not a compile-time constant; load it at runtime.
                    Variable baseAddress = newVariable(uncompressedKind);
                    AMD64HotSpotMove.BaseMove move = new AMD64HotSpotMove.BaseMove(baseAddress, config);
                    append(move);
                    base = baseAddress;
                } else {
                    base = emitLoadConstant(uncompressedKind, JavaConstant.forLong(encoding.getBase()));
                }
            }
            append(new AMD64Move.UncompressPointerOp(result, asAllocatable(pointer), base, encoding, nonNull, lirKindTool));
            return result;
        }
    }

    @Override
    public void emitNullCheck(Value address, LIRFrameState state) {
        if (address.getValueKind().getPlatformKind() == getLIRKindTool().getNarrowOopKind().getPlatformKind()) {
            CompressEncoding encoding = config.getOopEncoding();
            Value uncompressed;
            if (encoding.getShift() <= 3) {
                // The shift fits an addressing-mode scale, so fold the uncompress
                // into the address instead of emitting a separate uncompress op.
                LIRKind wordKind = LIRKind.unknownReference(target().arch.getWordKind());
                uncompressed = new AMD64AddressValue(wordKind, getProviders().getRegisters().getHeapBaseRegister().asValue(wordKind), asAllocatable(address), Scale.fromInt(1 << encoding.getShift()),
                                0);
            } else {
                uncompressed = emitUncompress(address, encoding, false);
            }
            append(new AMD64Move.NullCheckOp(asAddressValue(uncompressed), state));
            return;
        }
        super.emitNullCheck(address, state);
    }

    @Override
    public LIRInstruction createBenchmarkCounter(String name, String group, Value increment) {
        if (BenchmarkCounters.enabled) {
            return new AMD64HotSpotCounterOp(name, group, increment, getProviders().getRegisters(), config, getOrInitRescueSlot());
        }
        throw GraalError.shouldNotReachHere("BenchmarkCounters are not enabled!");
    }

    @Override
    public LIRInstruction createMultiBenchmarkCounter(String[] names, String[] groups, Value[] increments) {
        if (BenchmarkCounters.enabled) {
            return new AMD64HotSpotCounterOp(names, groups, increments, getProviders().getRegisters(), config, getOrInitRescueSlot());
        }
        throw GraalError.shouldNotReachHere("BenchmarkCounters are not enabled!");
    }

    @Override
    public void emitPrefetchAllocate(Value address) {
        append(new AMD64PrefetchOp(asAddressValue(address), config.allocatePrefetchInstr));
    }

    @Override
    protected StrategySwitchOp createStrategySwitchOp(SwitchStrategy strategy, LabelRef[] keyTargets, LabelRef defaultTarget, Variable key, AllocatableValue temp) {
        return new AMD64HotSpotStrategySwitchOp(strategy, keyTargets, defaultTarget, key, temp);
    }
}