/*
 * Copyright (c) 2013, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package org.graalvm.compiler.hotspot.aarch64;

import static jdk.vm.ci.aarch64.AArch64.lr;
import static jdk.vm.ci.aarch64.AArch64.sp;
import static jdk.vm.ci.aarch64.AArch64.zr;
import static jdk.vm.ci.hotspot.aarch64.AArch64HotSpotRegisterConfig.fp;
import static org.graalvm.compiler.hotspot.HotSpotHostBackend.ENABLE_STACK_RESERVED_ZONE;
import static org.graalvm.compiler.hotspot.HotSpotHostBackend.THROW_DELAYED_STACKOVERFLOW_ERROR;
import org.graalvm.compiler.asm.Label;
import org.graalvm.compiler.asm.aarch64.AArch64Address;
import org.graalvm.compiler.asm.aarch64.AArch64Assembler;
import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler;
import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler.ScratchRegister;
import org.graalvm.compiler.core.common.spi.ForeignCallLinkage;
import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig;
import org.graalvm.compiler.hotspot.meta.HotSpotForeignCallsProvider;
import org.graalvm.compiler.lir.LIRInstructionClass;
import org.graalvm.compiler.lir.aarch64.AArch64BlockEndOp;
import org.graalvm.compiler.lir.aarch64.AArch64Call;
import org.graalvm.compiler.lir.asm.CompilationResultBuilder;

import jdk.vm.ci.code.CallingConvention;
import jdk.vm.ci.code.Register;
import jdk.vm.ci.code.RegisterValue;

/**
 * Superclass for operations that leave a method's frame. In addition to tearing down the frame,
 * subclasses may emit a safepoint poll on return and the reserved stack access check.
 */
abstract class AArch64HotSpotEpilogueOp extends AArch64BlockEndOp {

    private final GraalHotSpotVMConfig config;
    private final Register thread;

    protected AArch64HotSpotEpilogueOp(LIRInstructionClass<? extends AArch64HotSpotEpilogueOp> c, GraalHotSpotVMConfig config, Register thread) {
        super(c);
        this.config = config;
        this.thread = thread;
    }

    protected AArch64HotSpotEpilogueOp(LIRInstructionClass<? extends AArch64HotSpotEpilogueOp> c, GraalHotSpotVMConfig config) {
        super(c);
        this.config = config;
        this.thread = null; // no safepoint poll is emitted, so no thread register is needed
    }

    protected void leaveFrame(CompilationResultBuilder crb, AArch64MacroAssembler masm, boolean emitSafepoint, boolean requiresReservedStackAccessCheck) {
        assert crb.frameContext != null : "We never elide frames in aarch64";
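        // Tear down the frame: restore fp and lr and release this method's stack space.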
        crb.frameContext.leave(crb);
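        // Reserved stack access check (JEP 270): if execution entered the thread's
        // reserved stack zone, it must be re-enabled and a delayed StackOverflowError
        // thrown before returning.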
        if (requiresReservedStackAccessCheck) {
            HotSpotForeignCallsProvider foreignCalls = (HotSpotForeignCallsProvider) crb.foreignCalls;
            Label noReserved = new Label();
            try (ScratchRegister sc = masm.getScratchRegister()) {
                Register scratch = sc.getRegister();
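                // Load the thread's reserved stack activation address and compare SP against it.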
                masm.ldr(64, scratch, masm.makeAddress(thread, config.javaThreadReservedStackActivationOffset, 8));
                masm.subs(64, zr, sp, scratch);
            }
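            // SP (unsigned) below the activation address means the reserved zone was not
            // used, so there is nothing to re-enable.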
            masm.branchConditionally(AArch64Assembler.ConditionFlag.LO, noReserved);
            ForeignCallLinkage enableStackReservedZone = foreignCalls.lookupForeignCall(ENABLE_STACK_RESERVED_ZONE);
            CallingConvention cc = enableStackReservedZone.getOutgoingCallingConvention();
            assert cc.getArgumentCount() == 1;
            Register arg0 = ((RegisterValue) cc.getArgument(0)).getRegister();
            masm.mov(64, arg0, thread);
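            // The frame has already been torn down, so preserve the caller's fp and lr
            // across the runtime call.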
            try (ScratchRegister sc = masm.getScratchRegister()) {
                masm.stp(64, fp, lr, AArch64Address.createPreIndexedImmediateAddress(sp, -2));
                AArch64Call.directCall(crb, masm, enableStackReservedZone, sc.getRegister(), null);
                masm.ldp(64, fp, lr, AArch64Address.createPostIndexedImmediateAddress(sp, 2));
            }
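            // The reserved zone is re-enabled; tail-call the stub that throws the
            // delayed StackOverflowError.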
            AArch64Call.directJmp(crb, masm, foreignCalls.lookupForeignCall(THROW_DELAYED_STACKOVERFLOW_ERROR));
            masm.bind(noReserved);
        }
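        // Emit the safepoint poll on method return.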
        if (emitSafepoint) {
            try (ScratchRegister sc = masm.getScratchRegister()) {
                Register scratch = sc.getRegister();
                AArch64HotSpotSafepointOp.emitCode(crb, masm, config, true, thread, scratch, null);
            }
        }
    }
}