/*
 * Copyright (c) 2015, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
22 */ 23 24 25 package org.graalvm.compiler.hotspot.amd64; 26 27 import static jdk.vm.ci.amd64.AMD64.rax; 28 import static jdk.vm.ci.amd64.AMD64.rbx; 29 import static jdk.vm.ci.code.ValueUtil.asRegister; 30 import static jdk.vm.ci.code.ValueUtil.isRegister; 31 import static org.graalvm.compiler.lir.LIRValueUtil.asJavaConstant; 32 import static org.graalvm.compiler.lir.LIRValueUtil.isJavaConstant; 33 34 import org.graalvm.compiler.asm.Label; 35 import org.graalvm.compiler.asm.amd64.AMD64Address; 36 import org.graalvm.compiler.asm.amd64.AMD64Assembler; 37 import org.graalvm.compiler.asm.amd64.AMD64MacroAssembler; 38 import org.graalvm.compiler.debug.GraalError; 39 import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig; 40 import org.graalvm.compiler.hotspot.HotSpotCounterOp; 41 import org.graalvm.compiler.hotspot.debug.BenchmarkCounters; 42 import org.graalvm.compiler.hotspot.meta.HotSpotRegistersProvider; 43 import org.graalvm.compiler.lir.LIRInstructionClass; 44 import org.graalvm.compiler.lir.Opcode; 45 import org.graalvm.compiler.lir.asm.CompilationResultBuilder; 46 47 import jdk.vm.ci.code.Register; 48 import jdk.vm.ci.code.TargetDescription; 49 import jdk.vm.ci.meta.AllocatableValue; 50 import jdk.vm.ci.meta.Value; 51 52 @Opcode("BenchMarkCounter") 53 public class AMD64HotSpotCounterOp extends HotSpotCounterOp { 54 public static final LIRInstructionClass<AMD64HotSpotCounterOp> TYPE = LIRInstructionClass.create(AMD64HotSpotCounterOp.class); 55 56 @Alive({OperandFlag.STACK, OperandFlag.UNINITIALIZED}) private AllocatableValue backupSlot; 57 58 public AMD64HotSpotCounterOp(String name, String group, Value increment, HotSpotRegistersProvider registers, GraalHotSpotVMConfig config, AllocatableValue backupSlot) { 59 super(TYPE, name, group, increment, registers, config); 60 this.backupSlot = backupSlot; 61 } 62 63 public AMD64HotSpotCounterOp(String[] names, String[] groups, Value[] increments, HotSpotRegistersProvider registers, GraalHotSpotVMConfig config, 
AllocatableValue backupSlot) { 64 super(TYPE, names, groups, increments, registers, config); 65 this.backupSlot = backupSlot; 66 } 67 68 @Override 69 public void emitCode(CompilationResultBuilder crb) { 70 AMD64MacroAssembler masm = (AMD64MacroAssembler) crb.asm; 71 TargetDescription target = crb.target; 72 73 Register scratch; 74 // It can happen that the rax register is the increment register, in this case we do not 75 // want to spill it to the stack. 76 if (!contains(increments, rax)) { 77 scratch = rax; 78 } else if (!contains(increments, rbx)) { 79 scratch = rbx; 80 } else { 81 // In this case rax and rbx are used as increment. Either we implement a third register 82 // or we implement a spillover the value from rax to rbx or vice versa during 83 // emitIncrement(). 84 throw GraalError.unimplemented("RAX and RBX are increment registers at the same time, spilling over the scratch register is not supported right now"); 85 } 86 87 // address for counters array 88 AMD64Address countersArrayAddr = new AMD64Address(thread, config.jvmciCountersThreadOffset); 89 Register countersArrayReg = scratch; 90 91 // backup scratch register 92 masm.movq((AMD64Address) crb.asAddress(backupSlot), scratch); 93 94 // load counters array 95 masm.movptr(countersArrayReg, countersArrayAddr); 96 CounterProcedure emitProcedure = (counterIndex, increment, displacement) -> emitIncrement(crb, masm, countersArrayReg, increment, displacement); 97 forEachCounter(emitProcedure, target); 98 99 // restore scratch register 100 masm.movq(scratch, (AMD64Address) crb.asAddress(backupSlot)); 101 } 102 103 /** 104 * Tests if the array contains the register. 
105 */ 106 private static boolean contains(Value[] increments, Register register) { 107 for (Value increment : increments) { 108 if (isRegister(increment) && asRegister(increment).equals(register)) { 109 return true; 110 } 111 } 112 return false; 113 } 114 115 private static void emitIncrement(CompilationResultBuilder crb, AMD64MacroAssembler masm, Register countersArrayReg, Value incrementValue, int displacement) { 116 // address for counter value 117 AMD64Address counterAddr = new AMD64Address(countersArrayReg, displacement); 118 // increment counter (in memory) 119 if (isJavaConstant(incrementValue)) { 120 int increment = asInt(asJavaConstant(incrementValue)); 121 masm.incrementq(counterAddr, increment); 122 } else { 123 masm.addq(counterAddr, asRegister(incrementValue)); 124 } 125 if (BenchmarkCounters.Options.AbortOnBenchmarkCounterOverflow.getValue(crb.getOptions())) { 126 Label target = new Label(); 127 masm.jccb(AMD64Assembler.ConditionFlag.NoOverflow, target); 128 crb.blockComment("[BENCHMARK COUNTER OVERFLOW]"); 129 masm.illegal(); 130 masm.bind(target); 131 } 132 } 133 }