src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.hotspot.amd64/src/org/graalvm/compiler/hotspot/amd64/AMD64HotSpotBackend.java

  26 
  27 import static jdk.vm.ci.amd64.AMD64.r10;
  28 import static jdk.vm.ci.amd64.AMD64.rax;
  29 import static jdk.vm.ci.amd64.AMD64.rsp;
  30 import static jdk.vm.ci.code.ValueUtil.asRegister;
  31 import static org.graalvm.compiler.core.common.GraalOptions.CanOmitFrame;
  32 import static org.graalvm.compiler.core.common.GraalOptions.GeneratePIC;
  33 import static org.graalvm.compiler.core.common.GraalOptions.ZapStackOnMethodEntry;
  34 
  35 import jdk.internal.vm.compiler.collections.EconomicSet;
  36 import org.graalvm.compiler.asm.Assembler;
  37 import org.graalvm.compiler.asm.Label;
  38 import org.graalvm.compiler.asm.amd64.AMD64Address;
  39 import org.graalvm.compiler.asm.amd64.AMD64Assembler.ConditionFlag;
  40 import org.graalvm.compiler.asm.amd64.AMD64MacroAssembler;
  41 import org.graalvm.compiler.code.CompilationResult;
  42 import org.graalvm.compiler.core.amd64.AMD64NodeMatchRules;
  43 import org.graalvm.compiler.core.common.CompilationIdentifier;
  44 import org.graalvm.compiler.core.common.LIRKind;
  45 import org.graalvm.compiler.core.common.alloc.RegisterAllocationConfig;
  46 import org.graalvm.compiler.core.target.Backend;
  47 import org.graalvm.compiler.debug.DebugContext;
  48 import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig;
  49 import org.graalvm.compiler.hotspot.HotSpotDataBuilder;
  50 import org.graalvm.compiler.hotspot.HotSpotGraalRuntimeProvider;
  51 import org.graalvm.compiler.hotspot.HotSpotHostBackend;
  52 import org.graalvm.compiler.hotspot.HotSpotLIRGenerationResult;
  53 import org.graalvm.compiler.hotspot.meta.HotSpotConstantLoadAction;
  54 import org.graalvm.compiler.hotspot.meta.HotSpotForeignCallsProvider;
  55 import org.graalvm.compiler.hotspot.meta.HotSpotProviders;
  56 import org.graalvm.compiler.hotspot.stubs.Stub;
  57 import org.graalvm.compiler.lir.LIR;
  58 import org.graalvm.compiler.lir.amd64.AMD64Call;
  59 import org.graalvm.compiler.lir.amd64.AMD64FrameMap;
  60 import org.graalvm.compiler.lir.amd64.AMD64FrameMapBuilder;
  61 import org.graalvm.compiler.lir.asm.CompilationResultBuilder;
  62 import org.graalvm.compiler.lir.asm.CompilationResultBuilderFactory;
  63 import org.graalvm.compiler.lir.asm.DataBuilder;
  64 import org.graalvm.compiler.lir.asm.FrameContext;
  65 import org.graalvm.compiler.lir.framemap.FrameMap;
  66 import org.graalvm.compiler.lir.framemap.FrameMapBuilder;


  68 import org.graalvm.compiler.lir.gen.LIRGeneratorTool;
  69 import org.graalvm.compiler.nodes.StructuredGraph;
  70 import org.graalvm.compiler.nodes.spi.NodeLIRBuilderTool;
  71 import org.graalvm.compiler.options.OptionValues;
  72 
  73 import jdk.vm.ci.amd64.AMD64;
  74 import jdk.vm.ci.amd64.AMD64Kind;
  75 import jdk.vm.ci.code.CallingConvention;
  76 import jdk.vm.ci.code.Register;
  77 import jdk.vm.ci.code.RegisterConfig;
  78 import jdk.vm.ci.code.StackSlot;
  79 import jdk.vm.ci.hotspot.HotSpotCallingConventionType;
  80 import jdk.vm.ci.hotspot.HotSpotSentinelConstant;
  81 import jdk.vm.ci.meta.JavaKind;
  82 import jdk.vm.ci.meta.JavaType;
  83 import jdk.vm.ci.meta.ResolvedJavaMethod;
  84 
  85 /**
  86  * HotSpot AMD64 specific backend.
  87  */
  88 public class AMD64HotSpotBackend extends HotSpotHostBackend {
  89 
  90     public AMD64HotSpotBackend(GraalHotSpotVMConfig config, HotSpotGraalRuntimeProvider runtime, HotSpotProviders providers) {
  91         super(config, runtime, providers);
  92     }
  93 
  94     @Override
  95     public FrameMapBuilder newFrameMapBuilder(RegisterConfig registerConfig) {
  96         RegisterConfig registerConfigNonNull = registerConfig == null ? getCodeCache().getRegisterConfig() : registerConfig;
  97         return new AMD64FrameMapBuilder(newFrameMap(registerConfigNonNull), getCodeCache(), registerConfigNonNull);
  98     }
  99 
 100     @Override
 101     public FrameMap newFrameMap(RegisterConfig registerConfig) {
 102         return new AMD64FrameMap(getCodeCache(), registerConfig, this);
 103     }
 104 
 105     @Override
 106     public LIRGeneratorTool newLIRGenerator(LIRGenerationResult lirGenRes) {
 107         return new AMD64HotSpotLIRGenerator(getProviders(), config, lirGenRes);
 108     }
 109 
 110     @Override
 111     public LIRGenerationResult newLIRGenerationResult(CompilationIdentifier compilationId, LIR lir, FrameMapBuilder frameMapBuilder, StructuredGraph graph, Object stub) {
 112         return new HotSpotLIRGenerationResult(compilationId, lir, frameMapBuilder, makeCallingConvention(graph, (Stub) stub), stub, config.requiresReservedStackCheck(graph.getMethods()));

 113     }
 114 
 115     @Override
 116     public NodeLIRBuilderTool newNodeLIRBuilder(StructuredGraph graph, LIRGeneratorTool lirGen) {
 117         return new AMD64HotSpotNodeLIRBuilder(graph, lirGen, new AMD64NodeMatchRules(lirGen));
 118     }
 119 
 120     @Override
 121     protected void bangStackWithOffset(CompilationResultBuilder crb, int bangOffset) {
 122         AMD64MacroAssembler asm = (AMD64MacroAssembler) crb.asm;
 123         int pos = asm.position();
 124         asm.movl(new AMD64Address(rsp, -bangOffset), AMD64.rax);
 125         assert asm.position() - pos >= PATCHED_VERIFIED_ENTRY_POINT_INSTRUCTION_SIZE;
 126     }
 127 
 128     /**
 129      * The size of the instruction used to patch the verified entry point of an nmethod when the
 130      * nmethod is made non-entrant or a zombie (e.g. during deopt or class unloading). The first
 131      * instruction emitted at an nmethod's verified entry point must be at least this length to
 132      * ensure mt-safe patching.


 179                         asm.movl(new AMD64Address(rsp, i * intSize), 0xC1C1C1C1);
 180                     }
 181                 }
 182                 assert frameMap.getRegisterConfig().getCalleeSaveRegisters() == null;
 183             }
 184         }
 185 
 186         @Override
 187         public void leave(CompilationResultBuilder crb) {
 188             if (!omitFrame) {
 189                 AMD64MacroAssembler asm = (AMD64MacroAssembler) crb.asm;
 190                 assert crb.frameMap.getRegisterConfig().getCalleeSaveRegisters() == null;
 191 
 192                 int frameSize = crb.frameMap.frameSize();
 193                 asm.incrementq(rsp, frameSize);
 194             }
 195         }
 196     }
 197 
 198     @Override
 199     protected Assembler createAssembler(FrameMap frameMap) {
 200         return new AMD64MacroAssembler(getTarget());
 201     }
 202 
 203     @Override
 204     public CompilationResultBuilder newCompilationResultBuilder(LIRGenerationResult lirGenRen, FrameMap frameMap, CompilationResult compilationResult, CompilationResultBuilderFactory factory) {
 205         // Omit the frame if the method:
 206         // - has no spill slots or other slots allocated during register allocation
 207         // - has no callee-saved registers
 208         // - has no incoming arguments passed on the stack
 209         // - has no deoptimization points
 210         // - makes no foreign calls (which require an aligned stack)
 211         HotSpotLIRGenerationResult gen = (HotSpotLIRGenerationResult) lirGenRen;
 212         LIR lir = gen.getLIR();
 213         assert gen.getDeoptimizationRescueSlot() == null || frameMap.frameNeedsAllocating() : "method that can deoptimize must have a frame";
 214         OptionValues options = lir.getOptions();
 215         DebugContext debug = lir.getDebug();
 216         boolean omitFrame = CanOmitFrame.getValue(options) && !frameMap.frameNeedsAllocating() && !lir.hasArgInCallerFrame() && !gen.hasForeignCall();
 217 
 218         Stub stub = gen.getStub();
 219         Assembler masm = createAssembler(frameMap);
 220         HotSpotFrameContext frameContext = new HotSpotFrameContext(stub != null, omitFrame);
 221         DataBuilder dataBuilder = new HotSpotDataBuilder(getCodeCache().getTarget());
 222         CompilationResultBuilder crb = factory.createBuilder(getCodeCache(), getForeignCalls(), frameMap, masm, dataBuilder, frameContext, options, debug, compilationResult, Register.None);
 223         crb.setTotalFrameSize(frameMap.totalFrameSize());
 224         crb.setMaxInterpreterFrameSize(gen.getMaxInterpreterFrameSize());
 225         StackSlot deoptimizationRescueSlot = gen.getDeoptimizationRescueSlot();
 226         if (deoptimizationRescueSlot != null && stub == null) {
 227             crb.compilationResult.setCustomStackAreaOffset(deoptimizationRescueSlot);
 228         }
 229 
 230         if (stub != null) {
 231             EconomicSet<Register> destroyedCallerRegisters = gatherDestroyedCallerRegisters(lir);
 232             updateStub(stub, destroyedCallerRegisters, gen.getCalleeSaveInfo(), frameMap);
 233         }
 234 
 235         return crb;
 236     }
 237 
 238     @Override
 239     public void emitCode(CompilationResultBuilder crb, LIR lir, ResolvedJavaMethod installedCodeOwner) {


 241         FrameMap frameMap = crb.frameMap;
 242         RegisterConfig regConfig = frameMap.getRegisterConfig();
 243         Label verifiedEntry = new Label();
 244 
 245         // Emit the prefix
 246         emitCodePrefix(installedCodeOwner, crb, asm, regConfig, verifiedEntry);
 247 
 248         // Emit code for the LIR
 249         emitCodeBody(installedCodeOwner, crb, lir);
 250 
 251         // Emit the suffix
 252         emitCodeSuffix(installedCodeOwner, crb, asm, frameMap);
 253 
 254         // Profile assembler instructions
 255         profileInstructions(lir, crb);
 256     }
 257 
 258     /**
 259      * Emits the code prior to the verified entry point.
 260      *
 261      * @param installedCodeOwner see {@link Backend#emitCode}
 262      */
 263     public void emitCodePrefix(ResolvedJavaMethod installedCodeOwner, CompilationResultBuilder crb, AMD64MacroAssembler asm, RegisterConfig regConfig, Label verifiedEntry) {
 264         HotSpotProviders providers = getProviders();
 265         if (installedCodeOwner != null && !installedCodeOwner.isStatic()) {
 266             crb.recordMark(config.MARKID_UNVERIFIED_ENTRY);
 267             CallingConvention cc = regConfig.getCallingConvention(HotSpotCallingConventionType.JavaCallee, null, new JavaType[]{providers.getMetaAccess().lookupJavaType(Object.class)}, this);
 268             Register inlineCacheKlass = rax; // see definition of IC_Klass in
 269                                              // c1_LIRAssembler_x86.cpp
 270             Register receiver = asRegister(cc.getArgument(0));
 271             AMD64Address src = new AMD64Address(receiver, config.hubOffset);
 272 
 273             if (config.useCompressedClassPointers) {
 274                 Register register = r10;
 275                 AMD64HotSpotMove.decodeKlassPointer(crb, asm, register, providers.getRegisters().getHeapBaseRegister(), src, config);
 276                 if (GeneratePIC.getValue(crb.getOptions())) {
 277                     asm.movq(providers.getRegisters().getHeapBaseRegister(), asm.getPlaceholder(-1));
 278                     crb.recordMark(config.MARKID_NARROW_OOP_BASE_ADDRESS);
 279                 } else {
 280                     if (config.narrowKlassBase != 0) {
 281                         // The heap base register was destroyed above, so restore it


 292         asm.align(config.codeEntryAlignment);
 293         crb.recordMark(config.MARKID_OSR_ENTRY);
 294         asm.bind(verifiedEntry);
 295         crb.recordMark(config.MARKID_VERIFIED_ENTRY);
 296 
 297         if (GeneratePIC.getValue(crb.getOptions())) {
 298             // Check for method state
 299             HotSpotFrameContext frameContext = (HotSpotFrameContext) crb.frameContext;
 300             if (!frameContext.isStub) {
 301                 crb.recordInlineDataInCodeWithNote(new HotSpotSentinelConstant(LIRKind.value(AMD64Kind.QWORD), JavaKind.Long), HotSpotConstantLoadAction.MAKE_NOT_ENTRANT);
 302                 asm.movq(AMD64.rax, asm.getPlaceholder(-1));
 303                 asm.testq(AMD64.rax, AMD64.rax);
 304                 AMD64Call.directConditionalJmp(crb, asm, getForeignCalls().lookupForeignCall(WRONG_METHOD_HANDLER), ConditionFlag.NotZero);
 305             }
 306         }
 307     }
 308 
 309     /**
 310      * Emits the code which starts at the verified entry point.
 311      *
 312      * @param installedCodeOwner see {@link Backend#emitCode}
 313      */
 314     public void emitCodeBody(ResolvedJavaMethod installedCodeOwner, CompilationResultBuilder crb, LIR lir) {
 315         crb.emit(lir);
 316     }
 317 
 318     /**
 319      * @param installedCodeOwner see {@link Backend#emitCode}
 320      */
 321     public void emitCodeSuffix(ResolvedJavaMethod installedCodeOwner, CompilationResultBuilder crb, AMD64MacroAssembler asm, FrameMap frameMap) {
 322         HotSpotProviders providers = getProviders();
 323         HotSpotFrameContext frameContext = (HotSpotFrameContext) crb.frameContext;
 324         if (!frameContext.isStub) {
 325             HotSpotForeignCallsProvider foreignCalls = providers.getForeignCalls();
 326             crb.recordMark(config.MARKID_EXCEPTION_HANDLER_ENTRY);
 327             AMD64Call.directCall(crb, asm, foreignCalls.lookupForeignCall(EXCEPTION_HANDLER), null, false, null);
 328             crb.recordMark(config.MARKID_DEOPT_HANDLER_ENTRY);
 329             AMD64Call.directCall(crb, asm, foreignCalls.lookupForeignCall(DEOPTIMIZATION_HANDLER), null, false, null);
 330         } else {
 331             // No need to emit the stubs for entries back into the method since
 332             // it has no calls that can cause such "return" entries
 333 
 334             if (frameContext.omitFrame) {
 335                 // Cannot access slots in caller's frame if my frame is omitted
 336                 assert !frameMap.accessesCallerFrame();
 337             }
 338         }
 339     }

The listing above shows the old version of the file; the listing below shows the new version.

  26 
  27 import static jdk.vm.ci.amd64.AMD64.r10;
  28 import static jdk.vm.ci.amd64.AMD64.rax;
  29 import static jdk.vm.ci.amd64.AMD64.rsp;
  30 import static jdk.vm.ci.code.ValueUtil.asRegister;
  31 import static org.graalvm.compiler.core.common.GraalOptions.CanOmitFrame;
  32 import static org.graalvm.compiler.core.common.GraalOptions.GeneratePIC;
  33 import static org.graalvm.compiler.core.common.GraalOptions.ZapStackOnMethodEntry;
  34 
  35 import jdk.internal.vm.compiler.collections.EconomicSet;
  36 import org.graalvm.compiler.asm.Assembler;
  37 import org.graalvm.compiler.asm.Label;
  38 import org.graalvm.compiler.asm.amd64.AMD64Address;
  39 import org.graalvm.compiler.asm.amd64.AMD64Assembler.ConditionFlag;
  40 import org.graalvm.compiler.asm.amd64.AMD64MacroAssembler;
  41 import org.graalvm.compiler.code.CompilationResult;
  42 import org.graalvm.compiler.core.amd64.AMD64NodeMatchRules;
  43 import org.graalvm.compiler.core.common.CompilationIdentifier;
  44 import org.graalvm.compiler.core.common.LIRKind;
  45 import org.graalvm.compiler.core.common.alloc.RegisterAllocationConfig;
  46 import org.graalvm.compiler.core.gen.LIRGenerationProvider;
  47 import org.graalvm.compiler.debug.DebugContext;
  48 import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig;
  49 import org.graalvm.compiler.hotspot.HotSpotDataBuilder;
  50 import org.graalvm.compiler.hotspot.HotSpotGraalRuntimeProvider;
  51 import org.graalvm.compiler.hotspot.HotSpotHostBackend;
  52 import org.graalvm.compiler.hotspot.HotSpotLIRGenerationResult;
  53 import org.graalvm.compiler.hotspot.meta.HotSpotConstantLoadAction;
  54 import org.graalvm.compiler.hotspot.meta.HotSpotForeignCallsProvider;
  55 import org.graalvm.compiler.hotspot.meta.HotSpotProviders;
  56 import org.graalvm.compiler.hotspot.stubs.Stub;
  57 import org.graalvm.compiler.lir.LIR;
  58 import org.graalvm.compiler.lir.amd64.AMD64Call;
  59 import org.graalvm.compiler.lir.amd64.AMD64FrameMap;
  60 import org.graalvm.compiler.lir.amd64.AMD64FrameMapBuilder;
  61 import org.graalvm.compiler.lir.asm.CompilationResultBuilder;
  62 import org.graalvm.compiler.lir.asm.CompilationResultBuilderFactory;
  63 import org.graalvm.compiler.lir.asm.DataBuilder;
  64 import org.graalvm.compiler.lir.asm.FrameContext;
  65 import org.graalvm.compiler.lir.framemap.FrameMap;
  66 import org.graalvm.compiler.lir.framemap.FrameMapBuilder;


  68 import org.graalvm.compiler.lir.gen.LIRGeneratorTool;
  69 import org.graalvm.compiler.nodes.StructuredGraph;
  70 import org.graalvm.compiler.nodes.spi.NodeLIRBuilderTool;
  71 import org.graalvm.compiler.options.OptionValues;
  72 
  73 import jdk.vm.ci.amd64.AMD64;
  74 import jdk.vm.ci.amd64.AMD64Kind;
  75 import jdk.vm.ci.code.CallingConvention;
  76 import jdk.vm.ci.code.Register;
  77 import jdk.vm.ci.code.RegisterConfig;
  78 import jdk.vm.ci.code.StackSlot;
  79 import jdk.vm.ci.hotspot.HotSpotCallingConventionType;
  80 import jdk.vm.ci.hotspot.HotSpotSentinelConstant;
  81 import jdk.vm.ci.meta.JavaKind;
  82 import jdk.vm.ci.meta.JavaType;
  83 import jdk.vm.ci.meta.ResolvedJavaMethod;
  84 
  85 /**
  86  * HotSpot AMD64 specific backend.
  87  */
  88 public class AMD64HotSpotBackend extends HotSpotHostBackend implements LIRGenerationProvider {
  89 
  90     public AMD64HotSpotBackend(GraalHotSpotVMConfig config, HotSpotGraalRuntimeProvider runtime, HotSpotProviders providers) {
  91         super(config, runtime, providers);
  92     }
  93 
  94     private FrameMapBuilder newFrameMapBuilder(RegisterConfig registerConfig) {

  95         RegisterConfig registerConfigNonNull = registerConfig == null ? getCodeCache().getRegisterConfig() : registerConfig;
  96         FrameMap frameMap = new AMD64FrameMap(getCodeCache(), registerConfigNonNull, this);
  97         return new AMD64FrameMapBuilder(frameMap, getCodeCache(), registerConfigNonNull);




  98     }
  99 
 100     @Override
 101     public LIRGeneratorTool newLIRGenerator(LIRGenerationResult lirGenRes) {
 102         return new AMD64HotSpotLIRGenerator(getProviders(), config, lirGenRes);
 103     }
 104 
 105     @Override
 106     public LIRGenerationResult newLIRGenerationResult(CompilationIdentifier compilationId, LIR lir, RegisterConfig registerConfig, StructuredGraph graph, Object stub) {
 107         return new HotSpotLIRGenerationResult(compilationId, lir, newFrameMapBuilder(registerConfig), makeCallingConvention(graph, (Stub) stub), stub,
 108                         config.requiresReservedStackCheck(graph.getMethods()));
 109     }
 110 
 111     @Override
 112     public NodeLIRBuilderTool newNodeLIRBuilder(StructuredGraph graph, LIRGeneratorTool lirGen) {
 113         return new AMD64HotSpotNodeLIRBuilder(graph, lirGen, new AMD64NodeMatchRules(lirGen));
 114     }
 115 
 116     @Override
 117     protected void bangStackWithOffset(CompilationResultBuilder crb, int bangOffset) {
 118         AMD64MacroAssembler asm = (AMD64MacroAssembler) crb.asm;
 119         int pos = asm.position();
 120         asm.movl(new AMD64Address(rsp, -bangOffset), AMD64.rax);
 121         assert asm.position() - pos >= PATCHED_VERIFIED_ENTRY_POINT_INSTRUCTION_SIZE;
 122     }
 123 
 124     /**
 125      * The size of the instruction used to patch the verified entry point of an nmethod when the
 126      * nmethod is made non-entrant or a zombie (e.g. during deopt or class unloading). The first
 127      * instruction emitted at an nmethod's verified entry point must be at least this length to
 128      * ensure mt-safe patching.


 175                         asm.movl(new AMD64Address(rsp, i * intSize), 0xC1C1C1C1);
 176                     }
 177                 }
 178                 assert frameMap.getRegisterConfig().getCalleeSaveRegisters() == null;
 179             }
 180         }
 181 
 182         @Override
 183         public void leave(CompilationResultBuilder crb) {
 184             if (!omitFrame) {
 185                 AMD64MacroAssembler asm = (AMD64MacroAssembler) crb.asm;
 186                 assert crb.frameMap.getRegisterConfig().getCalleeSaveRegisters() == null;
 187 
 188                 int frameSize = crb.frameMap.frameSize();
 189                 asm.incrementq(rsp, frameSize);
 190             }
 191         }
 192     }
 193 
 194     @Override





 195     public CompilationResultBuilder newCompilationResultBuilder(LIRGenerationResult lirGenRen, FrameMap frameMap, CompilationResult compilationResult, CompilationResultBuilderFactory factory) {
 196         // Omit the frame if the method:
 197         // - has no spill slots or other slots allocated during register allocation
 198         // - has no callee-saved registers
 199         // - has no incoming arguments passed on the stack
 200         // - has no deoptimization points
 201         // - makes no foreign calls (which require an aligned stack)
 202         HotSpotLIRGenerationResult gen = (HotSpotLIRGenerationResult) lirGenRen;
 203         LIR lir = gen.getLIR();
 204         assert gen.getDeoptimizationRescueSlot() == null || frameMap.frameNeedsAllocating() : "method that can deoptimize must have a frame";
 205         OptionValues options = lir.getOptions();
 206         DebugContext debug = lir.getDebug();
 207         boolean omitFrame = CanOmitFrame.getValue(options) && !frameMap.frameNeedsAllocating() && !lir.hasArgInCallerFrame() && !gen.hasForeignCall();
 208 
 209         Stub stub = gen.getStub();
 210         Assembler masm = new AMD64MacroAssembler(getTarget());
 211         HotSpotFrameContext frameContext = new HotSpotFrameContext(stub != null, omitFrame);
 212         DataBuilder dataBuilder = new HotSpotDataBuilder(getCodeCache().getTarget());
 213         CompilationResultBuilder crb = factory.createBuilder(getCodeCache(), getForeignCalls(), frameMap, masm, dataBuilder, frameContext, options, debug, compilationResult, Register.None);
 214         crb.setTotalFrameSize(frameMap.totalFrameSize());
 215         crb.setMaxInterpreterFrameSize(gen.getMaxInterpreterFrameSize());
 216         StackSlot deoptimizationRescueSlot = gen.getDeoptimizationRescueSlot();
 217         if (deoptimizationRescueSlot != null && stub == null) {
 218             crb.compilationResult.setCustomStackAreaOffset(deoptimizationRescueSlot);
 219         }
 220 
 221         if (stub != null) {
 222             EconomicSet<Register> destroyedCallerRegisters = gatherDestroyedCallerRegisters(lir);
 223             updateStub(stub, destroyedCallerRegisters, gen.getCalleeSaveInfo(), frameMap);
 224         }
 225 
 226         return crb;
 227     }
 228 
 229     @Override
 230     public void emitCode(CompilationResultBuilder crb, LIR lir, ResolvedJavaMethod installedCodeOwner) {


 232         FrameMap frameMap = crb.frameMap;
 233         RegisterConfig regConfig = frameMap.getRegisterConfig();
 234         Label verifiedEntry = new Label();
 235 
 236         // Emit the prefix
 237         emitCodePrefix(installedCodeOwner, crb, asm, regConfig, verifiedEntry);
 238 
 239         // Emit code for the LIR
 240         emitCodeBody(installedCodeOwner, crb, lir);
 241 
 242         // Emit the suffix
 243         emitCodeSuffix(installedCodeOwner, crb, asm, frameMap);
 244 
 245         // Profile assembler instructions
 246         profileInstructions(lir, crb);
 247     }
 248 
 249     /**
 250      * Emits the code prior to the verified entry point.
 251      *
 252      * @param installedCodeOwner see {@link LIRGenerationProvider#emitCode}
 253      */
 254     public void emitCodePrefix(ResolvedJavaMethod installedCodeOwner, CompilationResultBuilder crb, AMD64MacroAssembler asm, RegisterConfig regConfig, Label verifiedEntry) {
 255         HotSpotProviders providers = getProviders();
 256         if (installedCodeOwner != null && !installedCodeOwner.isStatic()) {
 257             crb.recordMark(config.MARKID_UNVERIFIED_ENTRY);
 258             CallingConvention cc = regConfig.getCallingConvention(HotSpotCallingConventionType.JavaCallee, null, new JavaType[]{providers.getMetaAccess().lookupJavaType(Object.class)}, this);
 259             Register inlineCacheKlass = rax; // see definition of IC_Klass in
 260                                              // c1_LIRAssembler_x86.cpp
 261             Register receiver = asRegister(cc.getArgument(0));
 262             AMD64Address src = new AMD64Address(receiver, config.hubOffset);
 263 
 264             if (config.useCompressedClassPointers) {
 265                 Register register = r10;
 266                 AMD64HotSpotMove.decodeKlassPointer(crb, asm, register, providers.getRegisters().getHeapBaseRegister(), src, config);
 267                 if (GeneratePIC.getValue(crb.getOptions())) {
 268                     asm.movq(providers.getRegisters().getHeapBaseRegister(), asm.getPlaceholder(-1));
 269                     crb.recordMark(config.MARKID_NARROW_OOP_BASE_ADDRESS);
 270                 } else {
 271                     if (config.narrowKlassBase != 0) {
 272                         // The heap base register was destroyed above, so restore it


 283         asm.align(config.codeEntryAlignment);
 284         crb.recordMark(config.MARKID_OSR_ENTRY);
 285         asm.bind(verifiedEntry);
 286         crb.recordMark(config.MARKID_VERIFIED_ENTRY);
 287 
 288         if (GeneratePIC.getValue(crb.getOptions())) {
 289             // Check for method state
 290             HotSpotFrameContext frameContext = (HotSpotFrameContext) crb.frameContext;
 291             if (!frameContext.isStub) {
 292                 crb.recordInlineDataInCodeWithNote(new HotSpotSentinelConstant(LIRKind.value(AMD64Kind.QWORD), JavaKind.Long), HotSpotConstantLoadAction.MAKE_NOT_ENTRANT);
 293                 asm.movq(AMD64.rax, asm.getPlaceholder(-1));
 294                 asm.testq(AMD64.rax, AMD64.rax);
 295                 AMD64Call.directConditionalJmp(crb, asm, getForeignCalls().lookupForeignCall(WRONG_METHOD_HANDLER), ConditionFlag.NotZero);
 296             }
 297         }
 298     }
 299 
 300     /**
 301      * Emits the code which starts at the verified entry point.
 302      *
 303      * @param installedCodeOwner see {@link LIRGenerationProvider#emitCode}
 304      */
 305     public void emitCodeBody(ResolvedJavaMethod installedCodeOwner, CompilationResultBuilder crb, LIR lir) {
 306         crb.emit(lir);
 307     }
 308 
 309     /**
 310      * @param installedCodeOwner see {@link LIRGenerationProvider#emitCode}
 311      */
 312     public void emitCodeSuffix(ResolvedJavaMethod installedCodeOwner, CompilationResultBuilder crb, AMD64MacroAssembler asm, FrameMap frameMap) {
 313         HotSpotProviders providers = getProviders();
 314         HotSpotFrameContext frameContext = (HotSpotFrameContext) crb.frameContext;
 315         if (!frameContext.isStub) {
 316             HotSpotForeignCallsProvider foreignCalls = providers.getForeignCalls();
 317             crb.recordMark(config.MARKID_EXCEPTION_HANDLER_ENTRY);
 318             AMD64Call.directCall(crb, asm, foreignCalls.lookupForeignCall(EXCEPTION_HANDLER), null, false, null);
 319             crb.recordMark(config.MARKID_DEOPT_HANDLER_ENTRY);
 320             AMD64Call.directCall(crb, asm, foreignCalls.lookupForeignCall(DEOPTIMIZATION_HANDLER), null, false, null);
 321         } else {
 322             // No need to emit the stubs for entries back into the method since
 323             // it has no calls that can cause such "return" entries
 324 
 325             if (frameContext.omitFrame) {
 326                 // Cannot access slots in caller's frame if my frame is omitted
 327                 assert !frameMap.accessesCallerFrame();
 328             }
 329         }
 330     }