src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.hotspot.amd64/src/org/graalvm/compiler/hotspot/amd64/AMD64HotSpotLIRGenerator.java
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hotspot Sdiff src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.hotspot.amd64/src/org/graalvm/compiler/hotspot/amd64

src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.hotspot.amd64/src/org/graalvm/compiler/hotspot/amd64/AMD64HotSpotLIRGenerator.java

Print this page




   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 package org.graalvm.compiler.hotspot.amd64;
  24 

  25 import static org.graalvm.compiler.core.common.GraalOptions.GeneratePIC;
  26 import static org.graalvm.compiler.hotspot.HotSpotBackend.INITIALIZE_KLASS_BY_SYMBOL;
  27 import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_KLASS_BY_SYMBOL;
  28 import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_METHOD_BY_SYMBOL_AND_LOAD_COUNTERS;
  29 import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_STRING_BY_SYMBOL;
  30 import static org.graalvm.compiler.hotspot.HotSpotBackend.FETCH_UNROLL_INFO;
  31 import static org.graalvm.compiler.hotspot.HotSpotBackend.UNCOMMON_TRAP;
  32 import static jdk.vm.ci.amd64.AMD64.rbp;
  33 
  34 import java.util.ArrayList;
  35 import java.util.List;
  36 import java.util.Map;
  37 
  38 import org.graalvm.compiler.asm.amd64.AMD64Address.Scale;
  39 import org.graalvm.compiler.core.amd64.AMD64ArithmeticLIRGenerator;
  40 import org.graalvm.compiler.core.amd64.AMD64LIRGenerator;
  41 import org.graalvm.compiler.core.amd64.AMD64MoveFactoryBase.BackupSlotProvider;

  42 import org.graalvm.compiler.core.common.LIRKind;
  43 import org.graalvm.compiler.core.common.spi.ForeignCallLinkage;
  44 import org.graalvm.compiler.core.common.spi.LIRKindTool;
  45 import org.graalvm.compiler.debug.Debug;
  46 import org.graalvm.compiler.debug.GraalError;
  47 import org.graalvm.compiler.hotspot.CompressEncoding;
  48 import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig;
  49 import org.graalvm.compiler.hotspot.HotSpotBackend;
  50 import org.graalvm.compiler.hotspot.HotSpotDebugInfoBuilder;
  51 import org.graalvm.compiler.hotspot.HotSpotForeignCallLinkage;
  52 import org.graalvm.compiler.hotspot.HotSpotLIRGenerationResult;
  53 import org.graalvm.compiler.hotspot.HotSpotLIRGenerator;
  54 import org.graalvm.compiler.hotspot.HotSpotLockStack;
  55 import org.graalvm.compiler.hotspot.debug.BenchmarkCounters;
  56 import org.graalvm.compiler.hotspot.meta.HotSpotConstantLoadAction;
  57 import org.graalvm.compiler.hotspot.meta.HotSpotProviders;
  58 import org.graalvm.compiler.hotspot.nodes.type.HotSpotLIRKindTool;
  59 import org.graalvm.compiler.hotspot.stubs.Stub;
  60 import org.graalvm.compiler.lir.LIR;
  61 import org.graalvm.compiler.lir.LIRFrameState;
  62 import org.graalvm.compiler.lir.LIRInstruction;
  63 import org.graalvm.compiler.lir.LIRInstructionClass;
  64 import org.graalvm.compiler.lir.LabelRef;
  65 import org.graalvm.compiler.lir.StandardOp.NoOp;
  66 import org.graalvm.compiler.lir.StandardOp.SaveRegistersOp;
  67 import org.graalvm.compiler.lir.SwitchStrategy;
  68 import org.graalvm.compiler.lir.Variable;
  69 import org.graalvm.compiler.lir.VirtualStackSlot;
  70 import org.graalvm.compiler.lir.amd64.AMD64AddressValue;
  71 import org.graalvm.compiler.lir.amd64.AMD64CCall;
  72 import org.graalvm.compiler.lir.amd64.AMD64ControlFlow.StrategySwitchOp;
  73 import org.graalvm.compiler.lir.amd64.AMD64FrameMapBuilder;
  74 import org.graalvm.compiler.lir.amd64.AMD64Move;
  75 import org.graalvm.compiler.lir.amd64.AMD64Move.MoveFromRegOp;
  76 import org.graalvm.compiler.lir.amd64.AMD64PrefetchOp;
  77 import org.graalvm.compiler.lir.amd64.AMD64ReadTimestampCounter;
  78 import org.graalvm.compiler.lir.amd64.AMD64RestoreRegistersOp;
  79 import org.graalvm.compiler.lir.amd64.AMD64SaveRegistersOp;
  80 import org.graalvm.compiler.lir.amd64.AMD64VZeroUpper;
  81 import org.graalvm.compiler.lir.asm.CompilationResultBuilder;
  82 import org.graalvm.compiler.lir.framemap.FrameMapBuilder;
  83 import org.graalvm.compiler.lir.gen.LIRGenerationResult;

  84 
  85 import jdk.vm.ci.amd64.AMD64;
  86 import jdk.vm.ci.amd64.AMD64Kind;
  87 import jdk.vm.ci.code.CallingConvention;
  88 import jdk.vm.ci.code.Register;
  89 import jdk.vm.ci.code.RegisterConfig;
  90 import jdk.vm.ci.code.RegisterValue;
  91 import jdk.vm.ci.code.StackSlot;
  92 import jdk.vm.ci.hotspot.HotSpotMetaspaceConstant;
  93 import jdk.vm.ci.hotspot.HotSpotObjectConstant;
  94 import jdk.vm.ci.meta.AllocatableValue;
  95 import jdk.vm.ci.meta.Constant;
  96 import jdk.vm.ci.meta.DeoptimizationAction;
  97 import jdk.vm.ci.meta.DeoptimizationReason;
  98 import jdk.vm.ci.meta.JavaConstant;
  99 import jdk.vm.ci.meta.JavaKind;
 100 import jdk.vm.ci.meta.PlatformKind;
 101 import jdk.vm.ci.meta.PrimitiveConstant;
 102 import jdk.vm.ci.meta.Value;
 103 


    /**
     * Emits the actual foreign-call LIR op, first recording {@code info} in
     * {@code currentRuntimeCallInfo} so that callee-save bookkeeping done by callers of this
     * method can associate saved-register info with this call's frame state.
     */
    @Override
    protected void emitForeignCallOp(ForeignCallLinkage linkage, Value result, Value[] arguments, Value[] temps, LIRFrameState info) {
        currentRuntimeCallInfo = info;
        HotSpotForeignCallLinkage hsLinkage = (HotSpotForeignCallLinkage) linkage;
        AMD64 arch = (AMD64) target().arch;
        if (arch.getFeatures().contains(AMD64.CPUFeature.AVX) && hsLinkage.mayContainFP() && !hsLinkage.isCompiledStub()) {
            /*
             * If the target may contain FP ops, and it is not compiled by us, we may have an
             * AVX-SSE transition.
             *
             * We exclude the argument registers from the zeroing LIR instruction since it violates
             * the LIR semantics of @Temp that values must not be live. Note that the emitted
             * machine instruction actually zeros _all_ XMM registers which is fine since we know
             * that their upper half is not used.
             */
            append(new AMD64VZeroUpper(arguments));
        }
        super.emitForeignCallOp(linkage, result, arguments, temps, info);
    }
 296 
 297     @Override
 298     public void emitLeaveCurrentStackFrame(SaveRegistersOp saveRegisterOp) {
 299         append(new AMD64HotSpotLeaveCurrentStackFrameOp(saveRegisterOp));
 300     }
 301 
 302     @Override
 303     public void emitLeaveDeoptimizedStackFrame(Value frameSize, Value initialInfo) {
 304         Variable frameSizeVariable = load(frameSize);
 305         Variable initialInfoVariable = load(initialInfo);
 306         append(new AMD64HotSpotLeaveDeoptimizedStackFrameOp(frameSizeVariable, initialInfoVariable));
 307     }
 308 
 309     @Override
 310     public void emitEnterUnpackFramesStackFrame(Value framePc, Value senderSp, Value senderFp, SaveRegistersOp saveRegisterOp) {
 311         Register threadRegister = getProviders().getRegisters().getThreadRegister();
 312         Variable framePcVariable = load(framePc);
 313         Variable senderSpVariable = load(senderSp);
 314         Variable senderFpVariable = load(senderFp);
 315         append(new AMD64HotSpotEnterUnpackFramesStackFrameOp(threadRegister, config.threadLastJavaSpOffset(), config.threadLastJavaPcOffset(), config.threadLastJavaFpOffset(), framePcVariable,
 316                         senderSpVariable, senderFpVariable, saveRegisterOp));
 317     }
 318 
 319     @Override
 320     public void emitLeaveUnpackFramesStackFrame(SaveRegistersOp saveRegisterOp) {
 321         Register threadRegister = getProviders().getRegisters().getThreadRegister();
 322         append(new AMD64HotSpotLeaveUnpackFramesStackFrameOp(threadRegister, config.threadLastJavaSpOffset(), config.threadLastJavaPcOffset(), config.threadLastJavaFpOffset(), saveRegisterOp));
 323     }
 324 
 325     /**
 326      * @param savedRegisters the registers saved by this operation which may be subject to pruning
 327      * @param savedRegisterLocations the slots to which the registers are saved
 328      * @param supportsRemove determines if registers can be pruned
 329      */
 330     protected AMD64SaveRegistersOp emitSaveRegisters(Register[] savedRegisters, AllocatableValue[] savedRegisterLocations, boolean supportsRemove) {
 331         AMD64SaveRegistersOp save = new AMD64SaveRegistersOp(savedRegisters, savedRegisterLocations, supportsRemove);
 332         append(save);
 333         return save;
 334     }
 335 
 336     /**
 337      * Allocate a stack slot for saving a register.
 338      */
 339     protected VirtualStackSlot allocateSaveRegisterLocation(Register register) {
 340         PlatformKind kind = target().arch.getLargestStorableKind(register.getRegisterCategory());
 341         if (kind.getVectorLength() > 1) {
 342             // we don't use vector registers, so there is no need to save them
 343             kind = AMD64Kind.DOUBLE;
 344         }
 345         return getResult().getFrameMapBuilder().allocateSpillSlot(LIRKind.value(kind));
 346     }
 347 
 348     /**
 349      * Adds a node to the graph that saves all allocatable registers to the stack.
 350      *
 351      * @param supportsRemove determines if registers can be pruned
 352      * @return the register save node
 353      */
 354     private AMD64SaveRegistersOp emitSaveAllRegisters(Register[] savedRegisters, boolean supportsRemove) {
 355         AllocatableValue[] savedRegisterLocations = new AllocatableValue[savedRegisters.length];
 356         for (int i = 0; i < savedRegisters.length; i++) {
 357             savedRegisterLocations[i] = allocateSaveRegisterLocation(savedRegisters[i]);
 358         }
 359         return emitSaveRegisters(savedRegisters, savedRegisterLocations, supportsRemove);
 360     }
 361 
 362     @Override
 363     public SaveRegistersOp emitSaveAllRegisters() {
 364         // We are saving all registers.
 365         // TODO Save upper half of YMM registers.
 366         return emitSaveAllRegisters(target().arch.getAvailableValueRegisters().toArray(), false);
 367     }
 368 
    /**
     * Emits an op that restores the registers captured by {@code save}. The slot array is cloned
     * so the restore op owns its own copy.
     */
    protected void emitRestoreRegisters(AMD64SaveRegistersOp save) {
        append(new AMD64RestoreRegistersOp(save.getSlots().clone(), save));
    }
 372 
 373     /**
 374      * Gets the {@link Stub} this generator is generating code for or {@code null} if a stub is not
 375      * being generated.
 376      */
 377     public Stub getStub() {
 378         return getResult().getStub();
 379     }
 380 
 381     @Override
 382     public HotSpotLIRGenerationResult getResult() {
 383         return ((HotSpotLIRGenerationResult) super.getResult());
 384     }
 385 
    /**
     * Installs the debug-info builder used by this generator (e.g. consulted in
     * {@code beforeRegisterAllocation} for the maximum interpreter frame size).
     */
    public void setDebugInfoBuilder(HotSpotDebugInfoBuilder debugInfoBuilder) {
        this.debugInfoBuilder = debugInfoBuilder;
    }


 404         Variable result;
 405         LIRFrameState debugInfo = null;
 406         if (hotspotLinkage.needsDebugInfo()) {
 407             debugInfo = state;
 408             assert debugInfo != null || stub != null;
 409         }
 410 
 411         if (hotspotLinkage.needsJavaFrameAnchor()) {
 412             Register thread = getProviders().getRegisters().getThreadRegister();
 413             append(new AMD64HotSpotCRuntimeCallPrologueOp(config.threadLastJavaSpOffset(), thread));
 414             result = super.emitForeignCall(hotspotLinkage, debugInfo, args);
 415             append(new AMD64HotSpotCRuntimeCallEpilogueOp(config.threadLastJavaSpOffset(), config.threadLastJavaFpOffset(), config.threadLastJavaPcOffset(), thread));
 416         } else {
 417             result = super.emitForeignCall(hotspotLinkage, debugInfo, args);
 418         }
 419 
 420         if (destroysRegisters) {
 421             if (stub != null) {
 422                 if (stub.preservesRegisters()) {
 423                     HotSpotLIRGenerationResult generationResult = getResult();
 424                     assert !generationResult.getCalleeSaveInfo().containsKey(currentRuntimeCallInfo);
 425                     generationResult.getCalleeSaveInfo().put(currentRuntimeCallInfo, save);




 426                     emitRestoreRegisters(save);
 427                 }
 428             }
 429         }
 430 
 431         return result;
 432     }
 433 
 434     @Override
 435     public Value emitLoadObjectAddress(Constant constant) {
 436         HotSpotObjectConstant objectConstant = (HotSpotObjectConstant) constant;
 437         HotSpotLIRKindTool kindTool = (HotSpotLIRKindTool) getLIRKindTool();
 438         LIRKind kind = objectConstant.isCompressed() ? kindTool.getNarrowOopKind() : kindTool.getObjectKind();
 439         Variable result = newVariable(kind);
 440         append(new AMD64HotSpotLoadAddressOp(result, constant, HotSpotConstantLoadAction.RESOLVE));
 441         return result;
 442     }
 443 
 444     @Override
 445     public Value emitLoadMetaspaceAddress(Constant constant, HotSpotConstantLoadAction action) {


 494         append(new AMD64HotSpotConstantRetrievalOp(constants, constantDescriptions, frameState, linkage, notes));
 495         AllocatableValue result = linkage.getOutgoingCallingConvention().getReturn();
 496         return emitMove(result);
 497     }
 498 
 499     @Override
 500     public Value emitLoadConfigValue(int markId, LIRKind kind) {
 501         Variable result = newVariable(kind);
 502         append(new AMD64HotSpotLoadConfigValueOp(markId, result));
 503         return result;
 504     }
 505 
 506     @Override
 507     public Value emitRandomSeed() {
 508         AMD64ReadTimestampCounter timestamp = new AMD64ReadTimestampCounter();
 509         append(timestamp);
 510         return emitMove(timestamp.getLowResult());
 511     }
 512 
    /**
     * Emits the uncommon-trap runtime call, bracketed by the C-runtime prologue/epilogue ops that
     * maintain the thread's last-Java-frame anchor, and records {@code saveRegisterOp} in the
     * callee-save info for this call's frame state.
     */
    @Override
    public Value emitUncommonTrapCall(Value trapRequest, Value mode, SaveRegistersOp saveRegisterOp) {
        ForeignCallLinkage linkage = getForeignCalls().lookupForeignCall(UNCOMMON_TRAP);

        Register thread = getProviders().getRegisters().getThreadRegister();
        append(new AMD64HotSpotCRuntimeCallPrologueOp(config.threadLastJavaSpOffset(), thread));
        // The thread register is passed explicitly as the first (QWORD) argument.
        Variable result = super.emitForeignCall(linkage, null, thread.asValue(LIRKind.value(AMD64Kind.QWORD)), trapRequest, mode);
        append(new AMD64HotSpotCRuntimeCallEpilogueOp(config.threadLastJavaSpOffset(), config.threadLastJavaFpOffset(), config.threadLastJavaPcOffset(), thread));

        // currentRuntimeCallInfo was set by emitForeignCallOp during the call above.
        Map<LIRFrameState, SaveRegistersOp> calleeSaveInfo = getResult().getCalleeSaveInfo();
        assert !calleeSaveInfo.containsKey(currentRuntimeCallInfo);
        calleeSaveInfo.put(currentRuntimeCallInfo, saveRegisterOp);

        return result;
    }
 528 
    /**
     * Emits the fetch-unroll-info runtime call used during deoptimization, bracketed by the
     * C-runtime prologue/epilogue ops that maintain the thread's last-Java-frame anchor, and
     * records {@code saveRegisterOp} in the callee-save info for this call's frame state.
     */
    @Override
    public Value emitDeoptimizationFetchUnrollInfoCall(Value mode, SaveRegistersOp saveRegisterOp) {
        ForeignCallLinkage linkage = getForeignCalls().lookupForeignCall(FETCH_UNROLL_INFO);

        Register thread = getProviders().getRegisters().getThreadRegister();
        append(new AMD64HotSpotCRuntimeCallPrologueOp(config.threadLastJavaSpOffset(), thread));
        // The thread register is passed explicitly as the first (QWORD) argument.
        Variable result = super.emitForeignCall(linkage, null, thread.asValue(LIRKind.value(AMD64Kind.QWORD)), mode);
        append(new AMD64HotSpotCRuntimeCallEpilogueOp(config.threadLastJavaSpOffset(), config.threadLastJavaFpOffset(), config.threadLastJavaPcOffset(), thread));

        // currentRuntimeCallInfo was set by emitForeignCallOp during the call above.
        Map<LIRFrameState, SaveRegistersOp> calleeSaveInfo = getResult().getCalleeSaveInfo();
        assert !calleeSaveInfo.containsKey(currentRuntimeCallInfo);
        calleeSaveInfo.put(currentRuntimeCallInfo, saveRegisterOp);

        return result;
    }
 544 
 545     @Override
 546     public void emitTailcall(Value[] args, Value address) {
 547         append(new AMD64TailcallOp(args, address));
 548     }
 549 
 550     @Override
 551     public void emitCCall(long address, CallingConvention nativeCallingConvention, Value[] args, int numberOfFloatingPointArguments) {
 552         Value[] argLocations = new Value[args.length];
 553         getResult().getFrameMapBuilder().callsMethod(nativeCallingConvention);
 554         // TODO(mg): in case a native function uses floating point varargs, the ABI requires that
 555         // RAX contains the length of the varargs
 556         PrimitiveConstant intConst = JavaConstant.forInt(numberOfFloatingPointArguments);
 557         AllocatableValue numberOfFloatingPointArgumentsRegister = AMD64.rax.asValue(LIRKind.value(AMD64Kind.DWORD));
 558         emitMoveConstant(numberOfFloatingPointArgumentsRegister, intConst);
 559         for (int i = 0; i < args.length; i++) {
 560             Value arg = args[i];
 561             AllocatableValue loc = nativeCallingConvention.getArgument(i);
 562             emitMove(loc, arg);
 563             argLocations[i] = loc;
 564         }
 565         Value ptr = emitLoadConstant(LIRKind.value(AMD64Kind.QWORD), JavaConstant.forLong(address));


 604 
    /**
     * Finalizes frame-related state before register allocation: decides whether RBP must be
     * preserved, reserves the deoptimization rescue slot when debug info is present, records the
     * maximum interpreter frame size, patches all epilogue ops with the saved-RBP location, and —
     * when benchmark counters are enabled — inserts a dummy op that keeps the rescue slot alive.
     */
    @Override
    public void beforeRegisterAllocation() {
        super.beforeRegisterAllocation();
        boolean hasDebugInfo = getResult().getLIR().hasDebugInfo();
        // Must be finalized before the epilogue ops below are patched.
        AllocatableValue savedRbp = saveRbp.finalize(hasDebugInfo);
        if (hasDebugInfo) {
            getResult().setDeoptimizationRescueSlot(((AMD64FrameMapBuilder) getResult().getFrameMapBuilder()).allocateDeoptimizationRescueSlot());
        }

        getResult().setMaxInterpreterFrameSize(debugInfoBuilder.maxInterpreterFrameSize());

        for (AMD64HotSpotRestoreRbpOp op : epilogueOps) {
            op.setSavedRbp(savedRbp);
        }
        if (BenchmarkCounters.enabled) {
            // ensure that the rescue slot is available
            LIRInstruction op = getOrInitRescueSlotOp();
            // insert dummy instruction into the start block (index 1, after the label op)
            LIR lir = getResult().getLIR();
            List<LIRInstruction> instructions = lir.getLIRforBlock(lir.getControlFlowGraph().getStartBlock());
            instructions.add(1, op);
            Debug.dump(Debug.INFO_LOG_LEVEL, lir, "created rescue dummy op");
        }
    }
 629 
 630     @Override
 631     public void emitPushInterpreterFrame(Value frameSize, Value framePc, Value senderSp, Value initialInfo) {
 632         Variable frameSizeVariable = load(frameSize);
 633         Variable framePcVariable = load(framePc);
 634         Variable senderSpVariable = load(senderSp);
 635         Variable initialInfoVariable = load(initialInfo);
 636         append(new AMD64HotSpotPushInterpreterFrameOp(frameSizeVariable, framePcVariable, senderSpVariable, initialInfoVariable, config));
 637     }
 638 
    /**
     * Compresses a QWORD pointer to a DWORD. Oops (reference kinds) are compressed against the
     * heap base register; metaspace pointers are compressed against the encoding's base, which
     * under {@code GeneratePIC} must be loaded at runtime rather than embedded as a constant.
     */
    @Override
    public Value emitCompress(Value pointer, CompressEncoding encoding, boolean nonNull) {
        LIRKind inputKind = pointer.getValueKind(LIRKind.class);
        assert inputKind.getPlatformKind() == AMD64Kind.QWORD;
        if (inputKind.isReference(0)) {
            // oop
            Variable result = newVariable(LIRKind.reference(AMD64Kind.DWORD));
            append(new AMD64HotSpotMove.CompressPointer(result, asAllocatable(pointer), getProviders().getRegisters().getHeapBaseRegister().asValue(), encoding, nonNull));
            return result;
        } else {
            // metaspace pointer: the narrow value is a plain DWORD, not a tracked reference
            Variable result = newVariable(LIRKind.value(AMD64Kind.DWORD));
            AllocatableValue base = Value.ILLEGAL;
            if (encoding.base != 0 || GeneratePIC.getValue()) {
                if (GeneratePIC.getValue()) {
                    // PIC: the base is not a compile-time constant, load it via a BaseMove op.
                    Variable baseAddress = newVariable(LIRKind.value(AMD64Kind.QWORD));
                    AMD64HotSpotMove.BaseMove move = new AMD64HotSpotMove.BaseMove(baseAddress, config);
                    append(move);
                    base = baseAddress;
                } else {
                    base = emitLoadConstant(LIRKind.value(AMD64Kind.QWORD), JavaConstant.forLong(encoding.base));
                }
            }
            append(new AMD64HotSpotMove.CompressPointer(result, asAllocatable(pointer), base, encoding, nonNull));
            return result;
        }
    }
 666 
    /**
     * Uncompresses a DWORD pointer to a QWORD. Oops (reference kinds) are uncompressed against the
     * heap base register; metaspace pointers use the encoding's base, which under
     * {@code GeneratePIC} must be loaded at runtime rather than embedded as a constant.
     */
    @Override
    public Value emitUncompress(Value pointer, CompressEncoding encoding, boolean nonNull) {
        LIRKind inputKind = pointer.getValueKind(LIRKind.class);
        assert inputKind.getPlatformKind() == AMD64Kind.DWORD;
        if (inputKind.isReference(0)) {
            // oop
            Variable result = newVariable(LIRKind.reference(AMD64Kind.QWORD));
            append(new AMD64HotSpotMove.UncompressPointer(result, asAllocatable(pointer), getProviders().getRegisters().getHeapBaseRegister().asValue(), encoding, nonNull));
            return result;
        } else {
            // metaspace pointer
            Variable result = newVariable(LIRKind.value(AMD64Kind.QWORD));
            AllocatableValue base = Value.ILLEGAL;
            if (encoding.base != 0 || GeneratePIC.getValue()) {
                if (GeneratePIC.getValue()) {
                    // PIC: the base is not a compile-time constant, load it via a BaseMove op.
                    Variable baseAddress = newVariable(LIRKind.value(AMD64Kind.QWORD));
                    AMD64HotSpotMove.BaseMove move = new AMD64HotSpotMove.BaseMove(baseAddress, config);
                    append(move);
                    base = baseAddress;
                } else {
                    base = emitLoadConstant(LIRKind.value(AMD64Kind.QWORD), JavaConstant.forLong(encoding.base));
                }
            }
            append(new AMD64HotSpotMove.UncompressPointer(result, asAllocatable(pointer), base, encoding, nonNull));
            return result;
        }
    }
 694 
    /**
     * Emits a null check. A DWORD address is treated as a compressed oop: when the encoding shift
     * fits an x86 addressing-mode scale (&lt;= 3) the uncompression is folded directly into the
     * address of the null-check access, otherwise the pointer is uncompressed explicitly first.
     */
    @Override
    public void emitNullCheck(Value address, LIRFrameState state) {
        if (address.getValueKind().getPlatformKind() == AMD64Kind.DWORD) {
            CompressEncoding encoding = config.getOopEncoding();
            Value uncompressed;
            if (encoding.shift <= 3) {
                // Fold base + (compressed << shift) into a single addressing mode.
                LIRKind wordKind = LIRKind.unknownReference(target().arch.getWordKind());
                uncompressed = new AMD64AddressValue(wordKind, getProviders().getRegisters().getHeapBaseRegister().asValue(wordKind), asAllocatable(address), Scale.fromInt(1 << encoding.shift), 0);
            } else {
                uncompressed = emitUncompress(address, encoding, false);
            }
            append(new AMD64Move.NullCheckOp(asAddressValue(uncompressed), state));
        } else {
            super.emitNullCheck(address, state);
        }
    }
 711 
 712     @Override
 713     public LIRInstruction createBenchmarkCounter(String name, String group, Value increment) {
 714         if (BenchmarkCounters.enabled) {
 715             return new AMD64HotSpotCounterOp(name, group, increment, getProviders().getRegisters(), config, getOrInitRescueSlot());
 716         }
 717         throw GraalError.shouldNotReachHere("BenchmarkCounters are not enabled!");
 718     }
 719 
 720     @Override
 721     public LIRInstruction createMultiBenchmarkCounter(String[] names, String[] groups, Value[] increments) {
 722         if (BenchmarkCounters.enabled) {


   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 package org.graalvm.compiler.hotspot.amd64;
  24 
  25 import static jdk.vm.ci.amd64.AMD64.rbp;
  26 import static org.graalvm.compiler.core.common.GraalOptions.GeneratePIC;
  27 import static org.graalvm.compiler.hotspot.HotSpotBackend.INITIALIZE_KLASS_BY_SYMBOL;
  28 import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_KLASS_BY_SYMBOL;
  29 import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_METHOD_BY_SYMBOL_AND_LOAD_COUNTERS;
  30 import static org.graalvm.compiler.hotspot.HotSpotBackend.RESOLVE_STRING_BY_SYMBOL;



  31 
  32 import java.util.ArrayList;
  33 import java.util.List;

  34 
  35 import org.graalvm.compiler.asm.amd64.AMD64Address.Scale;
  36 import org.graalvm.compiler.core.amd64.AMD64ArithmeticLIRGenerator;
  37 import org.graalvm.compiler.core.amd64.AMD64LIRGenerator;
  38 import org.graalvm.compiler.core.amd64.AMD64MoveFactoryBase.BackupSlotProvider;
  39 import org.graalvm.compiler.core.common.CompressEncoding;
  40 import org.graalvm.compiler.core.common.LIRKind;
  41 import org.graalvm.compiler.core.common.spi.ForeignCallLinkage;
  42 import org.graalvm.compiler.core.common.spi.LIRKindTool;
  43 import org.graalvm.compiler.debug.Debug;
  44 import org.graalvm.compiler.debug.GraalError;

  45 import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig;
  46 import org.graalvm.compiler.hotspot.HotSpotBackend;
  47 import org.graalvm.compiler.hotspot.HotSpotDebugInfoBuilder;
  48 import org.graalvm.compiler.hotspot.HotSpotForeignCallLinkage;
  49 import org.graalvm.compiler.hotspot.HotSpotLIRGenerationResult;
  50 import org.graalvm.compiler.hotspot.HotSpotLIRGenerator;
  51 import org.graalvm.compiler.hotspot.HotSpotLockStack;
  52 import org.graalvm.compiler.hotspot.debug.BenchmarkCounters;
  53 import org.graalvm.compiler.hotspot.meta.HotSpotConstantLoadAction;
  54 import org.graalvm.compiler.hotspot.meta.HotSpotProviders;
  55 import org.graalvm.compiler.hotspot.nodes.type.HotSpotLIRKindTool;
  56 import org.graalvm.compiler.hotspot.stubs.Stub;
  57 import org.graalvm.compiler.lir.LIR;
  58 import org.graalvm.compiler.lir.LIRFrameState;
  59 import org.graalvm.compiler.lir.LIRInstruction;
  60 import org.graalvm.compiler.lir.LIRInstructionClass;
  61 import org.graalvm.compiler.lir.LabelRef;
  62 import org.graalvm.compiler.lir.StandardOp.NoOp;

  63 import org.graalvm.compiler.lir.SwitchStrategy;
  64 import org.graalvm.compiler.lir.Variable;
  65 import org.graalvm.compiler.lir.VirtualStackSlot;
  66 import org.graalvm.compiler.lir.amd64.AMD64AddressValue;
  67 import org.graalvm.compiler.lir.amd64.AMD64CCall;
  68 import org.graalvm.compiler.lir.amd64.AMD64ControlFlow.StrategySwitchOp;
  69 import org.graalvm.compiler.lir.amd64.AMD64FrameMapBuilder;
  70 import org.graalvm.compiler.lir.amd64.AMD64Move;
  71 import org.graalvm.compiler.lir.amd64.AMD64Move.MoveFromRegOp;
  72 import org.graalvm.compiler.lir.amd64.AMD64PrefetchOp;
  73 import org.graalvm.compiler.lir.amd64.AMD64ReadTimestampCounter;
  74 import org.graalvm.compiler.lir.amd64.AMD64RestoreRegistersOp;
  75 import org.graalvm.compiler.lir.amd64.AMD64SaveRegistersOp;
  76 import org.graalvm.compiler.lir.amd64.AMD64VZeroUpper;
  77 import org.graalvm.compiler.lir.asm.CompilationResultBuilder;
  78 import org.graalvm.compiler.lir.framemap.FrameMapBuilder;
  79 import org.graalvm.compiler.lir.gen.LIRGenerationResult;
  80 import org.graalvm.compiler.options.OptionValues;
  81 
  82 import jdk.vm.ci.amd64.AMD64;
  83 import jdk.vm.ci.amd64.AMD64Kind;
  84 import jdk.vm.ci.code.CallingConvention;
  85 import jdk.vm.ci.code.Register;
  86 import jdk.vm.ci.code.RegisterConfig;
  87 import jdk.vm.ci.code.RegisterValue;
  88 import jdk.vm.ci.code.StackSlot;
  89 import jdk.vm.ci.hotspot.HotSpotMetaspaceConstant;
  90 import jdk.vm.ci.hotspot.HotSpotObjectConstant;
  91 import jdk.vm.ci.meta.AllocatableValue;
  92 import jdk.vm.ci.meta.Constant;
  93 import jdk.vm.ci.meta.DeoptimizationAction;
  94 import jdk.vm.ci.meta.DeoptimizationReason;
  95 import jdk.vm.ci.meta.JavaConstant;
  96 import jdk.vm.ci.meta.JavaKind;
  97 import jdk.vm.ci.meta.PlatformKind;
  98 import jdk.vm.ci.meta.PrimitiveConstant;
  99 import jdk.vm.ci.meta.Value;
 100 


    /**
     * Emits the actual foreign-call LIR op after recording {@code info} in
     * {@code currentRuntimeCallInfo} for later callee-save bookkeeping.
     */
    @Override
    protected void emitForeignCallOp(ForeignCallLinkage linkage, Value result, Value[] arguments, Value[] temps, LIRFrameState info) {
        currentRuntimeCallInfo = info;
        HotSpotForeignCallLinkage hsLinkage = (HotSpotForeignCallLinkage) linkage;
        AMD64 arch = (AMD64) target().arch;
        if (arch.getFeatures().contains(AMD64.CPUFeature.AVX) && hsLinkage.mayContainFP() && !hsLinkage.isCompiledStub()) {
            /*
             * If the target may contain FP ops, and it is not compiled by us, we may have an
             * AVX-SSE transition.
             *
             * We exclude the argument registers from the zeroing LIR instruction since it violates
             * the LIR semantics of @Temp that values must not be live. Note that the emitted
             * machine instruction actually zeros _all_ XMM registers which is fine since we know
             * that their upper half is not used.
             */
            append(new AMD64VZeroUpper(arguments));
        }
        super.emitForeignCallOp(linkage, result, arguments, temps, info);
    }
 293 




























 294     /**
 295      * @param savedRegisters the registers saved by this operation which may be subject to pruning
 296      * @param savedRegisterLocations the slots to which the registers are saved
 297      * @param supportsRemove determines if registers can be pruned
 298      */
 299     protected AMD64SaveRegistersOp emitSaveRegisters(Register[] savedRegisters, AllocatableValue[] savedRegisterLocations, boolean supportsRemove) {
 300         AMD64SaveRegistersOp save = new AMD64SaveRegistersOp(savedRegisters, savedRegisterLocations, supportsRemove);
 301         append(save);
 302         return save;
 303     }
 304 
 305     /**
 306      * Allocate a stack slot for saving a register.
 307      */
 308     protected VirtualStackSlot allocateSaveRegisterLocation(Register register) {
 309         PlatformKind kind = target().arch.getLargestStorableKind(register.getRegisterCategory());
 310         if (kind.getVectorLength() > 1) {
 311             // we don't use vector registers, so there is no need to save them
 312             kind = AMD64Kind.DOUBLE;
 313         }
 314         return getResult().getFrameMapBuilder().allocateSpillSlot(LIRKind.value(kind));
 315     }
 316 
 317     /**
 318      * Adds a node to the graph that saves all allocatable registers to the stack.
 319      *
 320      * @param supportsRemove determines if registers can be pruned
 321      * @return the register save node
 322      */
 323     private AMD64SaveRegistersOp emitSaveAllRegisters(Register[] savedRegisters, boolean supportsRemove) {
 324         AllocatableValue[] savedRegisterLocations = new AllocatableValue[savedRegisters.length];
 325         for (int i = 0; i < savedRegisters.length; i++) {
 326             savedRegisterLocations[i] = allocateSaveRegisterLocation(savedRegisters[i]);
 327         }
 328         return emitSaveRegisters(savedRegisters, savedRegisterLocations, supportsRemove);
 329     }
 330 







    /**
     * Emits the operation that restores the registers captured by {@code save} from their stack
     * slots. The slot array is cloned so the restore op holds its own copy rather than aliasing
     * the save op's internal array.
     */
    protected void emitRestoreRegisters(AMD64SaveRegistersOp save) {
        append(new AMD64RestoreRegistersOp(save.getSlots().clone(), save));
    }
 334 
 335     /**
 336      * Gets the {@link Stub} this generator is generating code for or {@code null} if a stub is not
 337      * being generated.
 338      */
 339     public Stub getStub() {
 340         return getResult().getStub();
 341     }
 342 
 343     @Override
 344     public HotSpotLIRGenerationResult getResult() {
 345         return ((HotSpotLIRGenerationResult) super.getResult());
 346     }
 347 
    /**
     * Sets the debug info builder consulted by {@link #beforeRegisterAllocation()} to compute the
     * maximum interpreter frame size.
     */
    public void setDebugInfoBuilder(HotSpotDebugInfoBuilder debugInfoBuilder) {
        this.debugInfoBuilder = debugInfoBuilder;
    }


 366         Variable result;
 367         LIRFrameState debugInfo = null;
 368         if (hotspotLinkage.needsDebugInfo()) {
 369             debugInfo = state;
 370             assert debugInfo != null || stub != null;
 371         }
 372 
 373         if (hotspotLinkage.needsJavaFrameAnchor()) {
 374             Register thread = getProviders().getRegisters().getThreadRegister();
 375             append(new AMD64HotSpotCRuntimeCallPrologueOp(config.threadLastJavaSpOffset(), thread));
 376             result = super.emitForeignCall(hotspotLinkage, debugInfo, args);
 377             append(new AMD64HotSpotCRuntimeCallEpilogueOp(config.threadLastJavaSpOffset(), config.threadLastJavaFpOffset(), config.threadLastJavaPcOffset(), thread));
 378         } else {
 379             result = super.emitForeignCall(hotspotLinkage, debugInfo, args);
 380         }
 381 
 382         if (destroysRegisters) {
 383             if (stub != null) {
 384                 if (stub.preservesRegisters()) {
 385                     HotSpotLIRGenerationResult generationResult = getResult();
 386                     LIRFrameState key = currentRuntimeCallInfo;
 387                     if (key == null) {
 388                         key = LIRFrameState.NO_STATE;
 389                     }
 390                     assert !generationResult.getCalleeSaveInfo().containsKey(key);
 391                     generationResult.getCalleeSaveInfo().put(key, save);
 392                     emitRestoreRegisters(save);
 393                 }
 394             }
 395         }
 396 
 397         return result;
 398     }
 399 
 400     @Override
 401     public Value emitLoadObjectAddress(Constant constant) {
 402         HotSpotObjectConstant objectConstant = (HotSpotObjectConstant) constant;
 403         HotSpotLIRKindTool kindTool = (HotSpotLIRKindTool) getLIRKindTool();
 404         LIRKind kind = objectConstant.isCompressed() ? kindTool.getNarrowOopKind() : kindTool.getObjectKind();
 405         Variable result = newVariable(kind);
 406         append(new AMD64HotSpotLoadAddressOp(result, constant, HotSpotConstantLoadAction.RESOLVE));
 407         return result;
 408     }
 409 
 410     @Override
 411     public Value emitLoadMetaspaceAddress(Constant constant, HotSpotConstantLoadAction action) {


 460         append(new AMD64HotSpotConstantRetrievalOp(constants, constantDescriptions, frameState, linkage, notes));
 461         AllocatableValue result = linkage.getOutgoingCallingConvention().getReturn();
 462         return emitMove(result);
 463     }
 464 
 465     @Override
 466     public Value emitLoadConfigValue(int markId, LIRKind kind) {
 467         Variable result = newVariable(kind);
 468         append(new AMD64HotSpotLoadConfigValueOp(markId, result));
 469         return result;
 470     }
 471 
 472     @Override
 473     public Value emitRandomSeed() {
 474         AMD64ReadTimestampCounter timestamp = new AMD64ReadTimestampCounter();
 475         append(timestamp);
 476         return emitMove(timestamp.getLowResult());
 477     }
 478 
 479     @Override
































 480     public void emitTailcall(Value[] args, Value address) {
 481         append(new AMD64TailcallOp(args, address));
 482     }
 483 
 484     @Override
 485     public void emitCCall(long address, CallingConvention nativeCallingConvention, Value[] args, int numberOfFloatingPointArguments) {
 486         Value[] argLocations = new Value[args.length];
 487         getResult().getFrameMapBuilder().callsMethod(nativeCallingConvention);
 488         // TODO(mg): in case a native function uses floating point varargs, the ABI requires that
 489         // RAX contains the length of the varargs
 490         PrimitiveConstant intConst = JavaConstant.forInt(numberOfFloatingPointArguments);
 491         AllocatableValue numberOfFloatingPointArgumentsRegister = AMD64.rax.asValue(LIRKind.value(AMD64Kind.DWORD));
 492         emitMoveConstant(numberOfFloatingPointArgumentsRegister, intConst);
 493         for (int i = 0; i < args.length; i++) {
 494             Value arg = args[i];
 495             AllocatableValue loc = nativeCallingConvention.getArgument(i);
 496             emitMove(loc, arg);
 497             argLocations[i] = loc;
 498         }
 499         Value ptr = emitLoadConstant(LIRKind.value(AMD64Kind.QWORD), JavaConstant.forLong(address));


 538 
    @Override
    public void beforeRegisterAllocation() {
        super.beforeRegisterAllocation();
        // RBP is only pinned (and must be rescued on deopt) when debug info is present.
        boolean hasDebugInfo = getResult().getLIR().hasDebugInfo();
        AllocatableValue savedRbp = saveRbp.finalize(hasDebugInfo);
        if (hasDebugInfo) {
            getResult().setDeoptimizationRescueSlot(((AMD64FrameMapBuilder) getResult().getFrameMapBuilder()).allocateDeoptimizationRescueSlot());
        }

        getResult().setMaxInterpreterFrameSize(debugInfoBuilder.maxInterpreterFrameSize());

        // Patch every epilogue with the location RBP was saved to; this is only known now.
        for (AMD64HotSpotRestoreRbpOp op : epilogueOps) {
            op.setSavedRbp(savedRbp);
        }
        if (BenchmarkCounters.enabled) {
            // ensure that the rescue slot is available
            LIRInstruction op = getOrInitRescueSlotOp();
            // insert dummy instruction into the start block
            LIR lir = getResult().getLIR();
            ArrayList<LIRInstruction> instructions = lir.getLIRforBlock(lir.getControlFlowGraph().getStartBlock());
            // Index 1: immediately after the block's label instruction.
            instructions.add(1, op);
            Debug.dump(Debug.INFO_LOG_LEVEL, lir, "created rescue dummy op");
        }
    }
 563 
 564     @Override









 565     public Value emitCompress(Value pointer, CompressEncoding encoding, boolean nonNull) {
 566         LIRKind inputKind = pointer.getValueKind(LIRKind.class);
 567         assert inputKind.getPlatformKind() == AMD64Kind.QWORD;
 568         if (inputKind.isReference(0)) {
 569             // oop
 570             Variable result = newVariable(LIRKind.reference(AMD64Kind.DWORD));
 571             append(new AMD64HotSpotMove.CompressPointer(result, asAllocatable(pointer), getProviders().getRegisters().getHeapBaseRegister().asValue(), encoding, nonNull));
 572             return result;
 573         } else {
 574             // metaspace pointer
 575             Variable result = newVariable(LIRKind.value(AMD64Kind.DWORD));
 576             AllocatableValue base = Value.ILLEGAL;
 577             OptionValues options = getResult().getLIR().getOptions();
 578             if (encoding.hasBase() || GeneratePIC.getValue(options)) {
 579                 if (GeneratePIC.getValue(options)) {
 580                     Variable baseAddress = newVariable(LIRKind.value(AMD64Kind.QWORD));
 581                     AMD64HotSpotMove.BaseMove move = new AMD64HotSpotMove.BaseMove(baseAddress, config);
 582                     append(move);
 583                     base = baseAddress;
 584                 } else {
 585                     base = emitLoadConstant(LIRKind.value(AMD64Kind.QWORD), JavaConstant.forLong(encoding.getBase()));
 586                 }
 587             }
 588             append(new AMD64HotSpotMove.CompressPointer(result, asAllocatable(pointer), base, encoding, nonNull));
 589             return result;
 590         }
 591     }
 592 
 593     @Override
 594     public Value emitUncompress(Value pointer, CompressEncoding encoding, boolean nonNull) {
 595         LIRKind inputKind = pointer.getValueKind(LIRKind.class);
 596         assert inputKind.getPlatformKind() == AMD64Kind.DWORD;
 597         if (inputKind.isReference(0)) {
 598             // oop
 599             Variable result = newVariable(LIRKind.reference(AMD64Kind.QWORD));
 600             append(new AMD64HotSpotMove.UncompressPointer(result, asAllocatable(pointer), getProviders().getRegisters().getHeapBaseRegister().asValue(), encoding, nonNull));
 601             return result;
 602         } else {
 603             // metaspace pointer
 604             Variable result = newVariable(LIRKind.value(AMD64Kind.QWORD));
 605             AllocatableValue base = Value.ILLEGAL;
 606             OptionValues options = getResult().getLIR().getOptions();
 607             if (encoding.hasBase() || GeneratePIC.getValue(options)) {
 608                 if (GeneratePIC.getValue(options)) {
 609                     Variable baseAddress = newVariable(LIRKind.value(AMD64Kind.QWORD));
 610                     AMD64HotSpotMove.BaseMove move = new AMD64HotSpotMove.BaseMove(baseAddress, config);
 611                     append(move);
 612                     base = baseAddress;
 613                 } else {
 614                     base = emitLoadConstant(LIRKind.value(AMD64Kind.QWORD), JavaConstant.forLong(encoding.getBase()));
 615                 }
 616             }
 617             append(new AMD64HotSpotMove.UncompressPointer(result, asAllocatable(pointer), base, encoding, nonNull));
 618             return result;
 619         }
 620     }
 621 
    @Override
    public void emitNullCheck(Value address, LIRFrameState state) {
        if (address.getValueKind().getPlatformKind() == AMD64Kind.DWORD) {
            // A DWORD address here is a compressed oop; the implicit null check must be
            // performed against the uncompressed address.
            CompressEncoding encoding = config.getOopEncoding();
            Value uncompressed;
            if (encoding.getShift() <= 3) {
                // Shifts of at most 3 map onto an x86 addressing-mode scale (1/2/4/8), so the
                // uncompression (base + compressed << shift) folds into the address itself.
                LIRKind wordKind = LIRKind.unknownReference(target().arch.getWordKind());
                uncompressed = new AMD64AddressValue(wordKind, getProviders().getRegisters().getHeapBaseRegister().asValue(wordKind), asAllocatable(address), Scale.fromInt(1 << encoding.getShift()),
                                0);
            } else {
                // Larger shifts need explicit uncompression code before the check.
                uncompressed = emitUncompress(address, encoding, false);
            }
            append(new AMD64Move.NullCheckOp(asAddressValue(uncompressed), state));
        } else {
            super.emitNullCheck(address, state);
        }
    }
 639 
 640     @Override
 641     public LIRInstruction createBenchmarkCounter(String name, String group, Value increment) {
 642         if (BenchmarkCounters.enabled) {
 643             return new AMD64HotSpotCounterOp(name, group, increment, getProviders().getRegisters(), config, getOrInitRescueSlot());
 644         }
 645         throw GraalError.shouldNotReachHere("BenchmarkCounters are not enabled!");
 646     }
 647 
 648     @Override
 649     public LIRInstruction createMultiBenchmarkCounter(String[] names, String[] groups, Value[] increments) {
 650         if (BenchmarkCounters.enabled) {
src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.hotspot.amd64/src/org/graalvm/compiler/hotspot/amd64/AMD64HotSpotLIRGenerator.java
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File