1 /*
   2  * Copyright (c) 2012, 2019, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 
  24 
  25 package org.graalvm.compiler.hotspot.replacements;
  26 
  27 import static jdk.vm.ci.code.MemoryBarriers.LOAD_STORE;
  28 import static jdk.vm.ci.code.MemoryBarriers.STORE_LOAD;
  29 import static jdk.vm.ci.code.MemoryBarriers.STORE_STORE;
  30 import static org.graalvm.compiler.hotspot.GraalHotSpotVMConfig.INJECTED_OPTIONVALUES;
  31 import static org.graalvm.compiler.hotspot.GraalHotSpotVMConfig.INJECTED_VMCONFIG;
  32 import static org.graalvm.compiler.hotspot.nodes.AcquiredCASLockNode.mark;
  33 import static org.graalvm.compiler.hotspot.nodes.BeginLockScopeNode.beginLockScope;
  34 import static org.graalvm.compiler.hotspot.nodes.EndLockScopeNode.endLockScope;
  35 import static org.graalvm.compiler.hotspot.nodes.VMErrorNode.vmError;
  36 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.DISPLACED_MARK_WORD_LOCATION;
  37 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.MARK_WORD_LOCATION;
  38 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_CXQ_LOCATION;
  39 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_ENTRY_LIST_LOCATION;
  40 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_OWNER_LOCATION;
  41 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_RECURSION_LOCATION;
  42 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_SUCC_LOCATION;
  43 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.PROTOTYPE_MARK_WORD_LOCATION;
  44 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.ageMaskInPlace;
  45 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockMaskInPlace;
  46 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockPattern;
  47 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.epochMaskInPlace;
  48 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.loadWordFromObject;
  49 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.lockDisplacedMarkOffset;
  50 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.markOffset;
  51 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.monitorMask;
  52 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorCxqOffset;
  53 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorEntryListOffset;
  54 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorOwnerOffset;
  55 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorRecursionsOffset;
  56 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorSuccOffset;
  57 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.pageSize;
  58 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.prototypeMarkWordOffset;
  59 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.registerAsWord;
  60 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.stackBias;
  61 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.unlockedMask;
  62 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.useBiasedLocking;
  63 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.verifyOop;
  64 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.wordSize;
  65 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.ProfileMonitors;
  66 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.SimpleFastInflatedLocking;
  67 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsMethodFilter;
  68 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsTypeFilter;
  69 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.VerifyBalancedMonitors;
  70 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FAST_PATH_PROBABILITY;
  71 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FREQUENT_PROBABILITY;
  72 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.NOT_FREQUENT_PROBABILITY;
  73 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.NOT_LIKELY_PROBABILITY;
  74 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.SLOW_PATH_PROBABILITY;
  75 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.VERY_FAST_PATH_PROBABILITY;
  76 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.probability;
  77 import static org.graalvm.compiler.nodes.extended.MembarNode.memoryBarrier;
  78 import static org.graalvm.compiler.replacements.SnippetTemplate.DEFAULT_REPLACER;
  79 import static jdk.internal.vm.compiler.word.WordFactory.unsigned;
  80 import static jdk.internal.vm.compiler.word.WordFactory.zero;
  81 
  82 import java.util.List;
  83 
  84 import org.graalvm.compiler.api.replacements.Fold;
  85 import org.graalvm.compiler.api.replacements.Snippet;
  86 import org.graalvm.compiler.api.replacements.Snippet.ConstantParameter;
  87 import org.graalvm.compiler.bytecode.Bytecode;
  88 import org.graalvm.compiler.bytecode.ResolvedJavaMethodBytecode;
  89 import org.graalvm.compiler.core.common.spi.ForeignCallDescriptor;
  90 import org.graalvm.compiler.core.common.type.ObjectStamp;
  91 import org.graalvm.compiler.core.common.type.StampFactory;
  92 import org.graalvm.compiler.core.common.type.StampPair;
  93 import org.graalvm.compiler.debug.DebugHandlersFactory;
  94 import org.graalvm.compiler.graph.Node.ConstantNodeParameter;
  95 import org.graalvm.compiler.graph.Node.NodeIntrinsic;
  96 import org.graalvm.compiler.graph.iterators.NodeIterable;
  97 import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig;
  98 import org.graalvm.compiler.hotspot.meta.HotSpotProviders;
  99 import org.graalvm.compiler.hotspot.meta.HotSpotRegistersProvider;
 100 import org.graalvm.compiler.hotspot.nodes.CurrentLockNode;
 101 import org.graalvm.compiler.hotspot.nodes.FastAcquireBiasedLockNode;
 102 import org.graalvm.compiler.hotspot.nodes.MonitorCounterNode;
 103 import org.graalvm.compiler.hotspot.word.KlassPointer;
 104 import org.graalvm.compiler.nodes.BreakpointNode;
 105 import org.graalvm.compiler.nodes.CallTargetNode.InvokeKind;
 106 import org.graalvm.compiler.nodes.ConstantNode;
 107 import org.graalvm.compiler.nodes.DeoptimizeNode;
 108 import org.graalvm.compiler.nodes.FrameState;
 109 import org.graalvm.compiler.nodes.InvokeNode;
 110 import org.graalvm.compiler.nodes.NamedLocationIdentity;
 111 import org.graalvm.compiler.nodes.NodeView;
 112 import org.graalvm.compiler.nodes.ReturnNode;
 113 import org.graalvm.compiler.nodes.StructuredGraph;
 114 import org.graalvm.compiler.nodes.ValueNode;
 115 import org.graalvm.compiler.nodes.debug.DynamicCounterNode;
 116 import org.graalvm.compiler.nodes.extended.BranchProbabilityNode;
 117 import org.graalvm.compiler.nodes.extended.ForeignCallNode;
 118 import org.graalvm.compiler.nodes.extended.MembarNode;
 119 import org.graalvm.compiler.nodes.java.MethodCallTargetNode;
 120 import org.graalvm.compiler.nodes.java.MonitorExitNode;
 121 import org.graalvm.compiler.nodes.java.RawMonitorEnterNode;
 122 import org.graalvm.compiler.nodes.spi.LoweringTool;
 123 import org.graalvm.compiler.nodes.type.StampTool;
 124 import org.graalvm.compiler.options.OptionValues;
 125 import org.graalvm.compiler.phases.common.inlining.InliningUtil;
 126 import org.graalvm.compiler.replacements.SnippetCounter;
 127 import org.graalvm.compiler.replacements.SnippetTemplate.AbstractTemplates;
 128 import org.graalvm.compiler.replacements.SnippetTemplate.Arguments;
 129 import org.graalvm.compiler.replacements.SnippetTemplate.SnippetInfo;
 130 import org.graalvm.compiler.replacements.Snippets;
 131 import org.graalvm.compiler.word.Word;
 132 import jdk.internal.vm.compiler.word.LocationIdentity;
 133 import jdk.internal.vm.compiler.word.Pointer;
 134 import jdk.internal.vm.compiler.word.WordBase;
 135 import jdk.internal.vm.compiler.word.WordFactory;
 136 
 137 import jdk.vm.ci.code.BytecodeFrame;
 138 import jdk.vm.ci.code.Register;
 139 import jdk.vm.ci.code.TargetDescription;
 140 import jdk.vm.ci.meta.DeoptimizationAction;
 141 import jdk.vm.ci.meta.DeoptimizationReason;
 142 import jdk.vm.ci.meta.JavaType;
 143 import jdk.vm.ci.meta.ResolvedJavaType;
 144 
 145 /**
 146  * Snippets used for implementing the monitorenter and monitorexit instructions.
 147  *
 148  * The locking algorithm used is described in the paper
 149  * <a href="http://dl.acm.org/citation.cfm?id=1167515.1167496"> Eliminating synchronization-related
 150  * atomic operations with biased locking and bulk rebiasing</a> by Kenneth Russell and David
 151  * Detlefs.
 152  *
 153  * Comment below is reproduced from {@code markWord.hpp} for convenience:
 154  *
 155  * <pre>
 156  *  Bit-format of an object header (most significant first, big endian layout below):
 157  *  32 bits:
 158  *  --------
 159  *             hash:25 ------------>| age:4    biased_lock:1 lock:2 (normal object)
 160  *             JavaThread*:23 epoch:2 age:4    biased_lock:1 lock:2 (biased object)
 161  *             size:32 ------------------------------------------>| (CMS free block)
 162  *             PromotedObject*:29 ---------->| promo_bits:3 ----->| (CMS promoted object)
 163  *
 164  *  64 bits:
 165  *  --------
 166  *  unused:25 hash:31 -->| unused:1   age:4    biased_lock:1 lock:2 (normal object)
 167  *  JavaThread*:54 epoch:2 unused:1   age:4    biased_lock:1 lock:2 (biased object)
 168  *  PromotedObject*:61 --------------------->| promo_bits:3 ----->| (CMS promoted object)
 169  *  size:64 ----------------------------------------------------->| (CMS free block)
 170  *
 171  *  unused:25 hash:31 -->| cms_free:1 age:4    biased_lock:1 lock:2 (COOPs && normal object)
 172  *  JavaThread*:54 epoch:2 cms_free:1 age:4    biased_lock:1 lock:2 (COOPs && biased object)
 173  *  narrowOop:32 unused:24 cms_free:1 unused:4 promo_bits:3 ----->| (COOPs && CMS promoted object)
 174  *  unused:21 size:35 -->| cms_free:1 unused:7 ------------------>| (COOPs && CMS free block)
 175  *
 176  *  - hash contains the identity hash value: largest value is
 177  *    31 bits, see os::random().  Also, 64-bit vm's require
 178  *    a hash value no bigger than 32 bits because they will not
 179  *    properly generate a mask larger than that: see library_call.cpp
 180  *    and c1_CodePatterns_sparc.cpp.
 181  *
 182  *  - the biased lock pattern is used to bias a lock toward a given
 183  *    thread. When this pattern is set in the low three bits, the lock
 184  *    is either biased toward a given thread or "anonymously" biased,
 185  *    indicating that it is possible for it to be biased. When the
 186  *    lock is biased toward a given thread, locking and unlocking can
 187  *    be performed by that thread without using atomic operations.
 188  *    When a lock's bias is revoked, it reverts back to the normal
 189  *    locking scheme described below.
 190  *
 191  *    Note that we are overloading the meaning of the "unlocked" state
 192  *    of the header. Because we steal a bit from the age we can
 193  *    guarantee that the bias pattern will never be seen for a truly
 194  *    unlocked object.
 195  *
 196  *    Note also that the biased state contains the age bits normally
 197  *    contained in the object header. Large increases in scavenge
 198  *    times were seen when these bits were absent and an arbitrary age
 199  *    assigned to all biased objects, because they tended to consume a
 200  *    significant fraction of the eden semispaces and were not
 201  *    promoted promptly, causing an increase in the amount of copying
 202  *    performed. The runtime system aligns all JavaThread* pointers to
 203  *    a very large value (currently 128 bytes (32bVM) or 256 bytes (64bVM))
 204  *    to make room for the age bits & the epoch bits (used in support of
 205  *    biased locking), and for the CMS "freeness" bit in the 64bVM (+COOPs).
 206  *
 207  *    [JavaThread* | epoch | age | 1 | 01]       lock is biased toward given thread
 208  *    [0           | epoch | age | 1 | 01]       lock is anonymously biased
 209  *
 210  *  - the two lock bits are used to describe three states: locked/unlocked and monitor.
 211  *
 212  *    [ptr             | 00]  locked             ptr points to real header on stack
 213  *    [header      | 0 | 01]  unlocked           regular object header
 *    [ptr             | 10]  monitor            inflated lock (header is swapped out)
 215  *    [ptr             | 11]  marked             used by markSweep to mark an object
 216  *                                               not valid at any other time
 217  *
 218  *    We assume that stack/thread pointers have the lowest two bits cleared.
 219  * </pre>
 220  *
 221  * Note that {@code Thread::allocate} enforces {@code JavaThread} objects to be aligned
 222  * appropriately to comply with the layouts above.
 223  */
 224 public class MonitorSnippets implements Snippets {
 225 
    // NOTE(review): presumably switches the profiled monitor counters to per-context
    // (call-site) granularity; its use is not visible in this part of the file — confirm
    // against the profiling/traceObject helpers before relying on this description.
    private static final boolean PROFILE_CONTEXT = false;
 227 
 228     @Fold
 229     static boolean doProfile(@Fold.InjectedParameter OptionValues options) {
 230         return ProfileMonitors.getValue(options);
 231     }
 232 
    /**
     * Fast-path snippet for the {@code monitorenter} bytecode. Attempts, in order:
     * <ol>
     * <li>biased locking (when enabled in the VM config), via {@link #tryEnterBiased};</li>
     * <li>the inflated-monitor fast path (when supported), via {@link #tryEnterInflated};</li>
     * <li>stack locking: CAS the address of the on-stack lock slot into the object's mark word,
     * with a recursion check against the current stack pointer on CAS failure.</li>
     * </ol>
     * If none succeeds, control falls through to the {@code monitorenter} runtime stub.
     *
     * @param object the object being locked; implicitly null-checked by the mark word load
     * @param hub the object's klass pointer, used by the biased path to read the prototype mark
     * @param lockDepth depth of this lock in the current frame's lock scope
     * @param threadRegister register holding the current {@code JavaThread*}
     * @param stackPointerRegister register holding the stack pointer (recursive-lock test)
     * @param trace whether to emit tracing output
     * @param counters snippet counters recording which path was taken
     */
    @Snippet
    public static void monitorenter(Object object, KlassPointer hub, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter Register stackPointerRegister,
                    @ConstantParameter boolean trace, @ConstantParameter Counters counters) {
        verifyOop(object);

        // Load the mark word - this includes a null-check on object
        final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG));

        final Word lock = beginLockScope(lockDepth);

        Pointer objectPointer = Word.objectToTrackedPointer(object);
        trace(trace, "           object: 0x%016lx\n", objectPointer);
        trace(trace, "             lock: 0x%016lx\n", lock);
        trace(trace, "             mark: 0x%016lx\n", mark);

        incCounter();

        if (useBiasedLocking(INJECTED_VMCONFIG)) {
            if (tryEnterBiased(object, hub, lock, mark, threadRegister, trace, counters)) {
                return;
            }
            // not biased, fall-through
        }
        if (inlineFastLockSupported() && probability(SLOW_PATH_PROBABILITY, mark.and(monitorMask(INJECTED_VMCONFIG)).notEqual(0))) {
            // Inflated case
            if (tryEnterInflated(object, lock, mark, threadRegister, trace, counters)) {
                return;
            }
        } else {
            // Create the unlocked mark word pattern
            Word unlockedMark = mark.or(unlockedMask(INJECTED_VMCONFIG));
            trace(trace, "     unlockedMark: 0x%016lx\n", unlockedMark);

            // Copy this unlocked mark word into the lock slot on the stack
            lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), unlockedMark, DISPLACED_MARK_WORD_LOCATION);

            // make sure previous store does not float below compareAndSwap
            MembarNode.memoryBarrier(STORE_STORE);

            // Test if the object's mark word is unlocked, and if so, store the
            // (address of) the lock slot into the object's mark word.
            Word currentMark = objectPointer.compareAndSwapWord(markOffset(INJECTED_VMCONFIG), unlockedMark, lock, MARK_WORD_LOCATION);
            if (probability(FAST_PATH_PROBABILITY, currentMark.equal(unlockedMark))) {
                traceObject(trace, "+lock{cas}", object, true);
                counters.lockCas.inc();
                mark(object);
                return;
            } else {
                trace(trace, "      currentMark: 0x%016lx\n", currentMark);
                // The mark word in the object header was not the same.
                // Either the object is locked by another thread or is already locked
                // by the current thread. The latter is true if the mark word
                // is a stack pointer into the current thread's stack, i.e.:
                //
                // 1) (currentMark & aligned_mask) == 0
                // 2) rsp <= currentMark
                // 3) currentMark <= rsp + page_size
                //
                // These 3 tests can be done by evaluating the following expression:
                //
                // (currentMark - rsp) & (aligned_mask - page_size)
                //
                // assuming both the stack pointer and page_size have their least
                // significant 2 bits cleared and page_size is a power of 2
                final Word alignedMask = unsigned(wordSize() - 1);
                final Word stackPointer = registerAsWord(stackPointerRegister).add(stackBias(INJECTED_VMCONFIG));
                if (probability(FAST_PATH_PROBABILITY, currentMark.subtract(stackPointer).and(alignedMask.subtract(pageSize(INJECTED_VMCONFIG))).equal(0))) {
                    // Recursively locked => write 0 to the lock slot
                    lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), zero(), DISPLACED_MARK_WORD_LOCATION);
                    traceObject(trace, "+lock{cas:recursive}", object, true);
                    counters.lockCasRecursive.inc();
                    return;
                }
                traceObject(trace, "+lock{stub:failed-cas/stack}", object, true);
                counters.lockStubFailedCas.inc();
            }
        }
        // slow-path runtime-call
        monitorenterStubC(MONITORENTER, object, lock);
    }
 313 
    /**
     * Attempts to acquire {@code object}'s lock via biased locking (see the class comment and the
     * Russell/Detlefs paper linked there).
     *
     * The fast path succeeds when the mark word already encodes a bias toward the current thread
     * with a current epoch. Otherwise, depending on the bias/epoch state, this either CAS-acquires
     * or CAS-transfers the bias, calls into the runtime to revoke a conflicting bias, or attempts
     * to reset the mark word to the unbiased prototype.
     *
     * @param mark the object's mark word, as loaded by the caller
     * @param lock the on-stack lock slot, passed through to the runtime stub on the slow path
     * @return {@code true} if locking has been fully handled here (fast path or runtime stub);
     *         {@code false} if the caller must fall through to the CAS-based stack-locking scheme
     */
    private static boolean tryEnterBiased(Object object, KlassPointer hub, Word lock, Word mark, Register threadRegister, boolean trace, Counters counters) {
        // See whether the lock is currently biased toward our thread and
        // whether the epoch is still valid.
        // Note that the runtime guarantees sufficient alignment of JavaThread
        // pointers to allow age to be placed into low bits.
        final Word biasableLockBits = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG));

        // Check whether the bias pattern is present in the object's mark word
        // and the bias owner and the epoch are both still current.
        final Word prototypeMarkWord = hub.readWord(prototypeMarkWordOffset(INJECTED_VMCONFIG), PROTOTYPE_MARK_WORD_LOCATION);
        final Word thread = registerAsWord(threadRegister);
        // tmp == 0 iff mark == (prototype | thread) ignoring the age bits, i.e. the object is
        // already biased to this thread in the current epoch.
        final Word tmp = prototypeMarkWord.or(thread).xor(mark).and(~ageMaskInPlace(INJECTED_VMCONFIG));
        trace(trace, "prototypeMarkWord: 0x%016lx\n", prototypeMarkWord);
        trace(trace, "           thread: 0x%016lx\n", thread);
        trace(trace, "              tmp: 0x%016lx\n", tmp);
        if (probability(FAST_PATH_PROBABILITY, tmp.equal(0))) {
            // Object is already biased to current thread -> done
            traceObject(trace, "+lock{bias:existing}", object, true);
            counters.lockBiasExisting.inc();
            FastAcquireBiasedLockNode.mark(object);
            return true;
        }

        // Now check to see whether biasing is enabled for this object
        if (probability(NOT_FREQUENT_PROBABILITY, biasableLockBits.equal(WordFactory.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) {
            Pointer objectPointer = Word.objectToTrackedPointer(object);
            // At this point we know that the mark word has the bias pattern and
            // that we are not the bias owner in the current epoch. We need to
            // figure out more details about the state of the mark word in order to
            // know what operations can be legally performed on the object's
            // mark word.

            // If the low three bits in the xor result aren't clear, that means
            // the prototype header is no longer biasable and we have to revoke
            // the bias on this object.
            if (probability(FREQUENT_PROBABILITY, tmp.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(0))) {
                // Biasing is still enabled for object's type. See whether the
                // epoch of the current bias is still valid, meaning that the epoch
                // bits of the mark word are equal to the epoch bits of the
                // prototype mark word. (Note that the prototype mark word's epoch bits
                // only change at a safepoint.) If not, attempt to rebias the object
                // toward the current thread. Note that we must be absolutely sure
                // that the current epoch is invalid in order to do this because
                // otherwise the manipulations it performs on the mark word are
                // illegal.
                if (probability(FREQUENT_PROBABILITY, tmp.and(epochMaskInPlace(INJECTED_VMCONFIG)).equal(0))) {
                    // The epoch of the current bias is still valid but we know nothing
                    // about the owner; it might be set or it might be clear. Try to
                    // acquire the bias of the object using an atomic operation. If this
                    // fails we will go in to the runtime to revoke the object's bias.
                    // Note that we first construct the presumed unbiased header so we
                    // don't accidentally blow away another thread's valid bias.
                    Word unbiasedMark = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG) | ageMaskInPlace(INJECTED_VMCONFIG) | epochMaskInPlace(INJECTED_VMCONFIG));
                    Word biasedMark = unbiasedMark.or(thread);
                    trace(trace, "     unbiasedMark: 0x%016lx\n", unbiasedMark);
                    trace(trace, "       biasedMark: 0x%016lx\n", biasedMark);
                    if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), unbiasedMark, biasedMark, MARK_WORD_LOCATION))) {
                        // Object is now biased to current thread -> done
                        traceObject(trace, "+lock{bias:acquired}", object, true);
                        counters.lockBiasAcquired.inc();
                        return true;
                    }
                    // If the biasing toward our thread failed, this means that another thread
                    // owns the bias and we need to revoke that bias. The revocation will occur
                    // in the interpreter runtime.
                    traceObject(trace, "+lock{stub:revoke}", object, true);
                    counters.lockStubRevoke.inc();
                } else {
                    // At this point we know the epoch has expired, meaning that the
                    // current bias owner, if any, is actually invalid. Under these
                    // circumstances _only_, are we allowed to use the current mark word
                    // value as the comparison value when doing the CAS to acquire the
                    // bias in the current epoch. In other words, we allow transfer of
                    // the bias from one thread to another directly in this situation.
                    Word biasedMark = prototypeMarkWord.or(thread);
                    trace(trace, "       biasedMark: 0x%016lx\n", biasedMark);
                    if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), mark, biasedMark, MARK_WORD_LOCATION))) {
                        // Object is now biased to current thread -> done
                        traceObject(trace, "+lock{bias:transfer}", object, true);
                        counters.lockBiasTransfer.inc();
                        return true;
                    }
                    // If the biasing toward our thread failed, then another thread
                    // succeeded in biasing it toward itself and we need to revoke that
                    // bias. The revocation will occur in the runtime in the slow case.
                    traceObject(trace, "+lock{stub:epoch-expired}", object, true);
                    counters.lockStubEpochExpired.inc();
                }
                // slow-path runtime-call
                monitorenterStubC(MONITORENTER, object, lock);
                return true;
            } else {
                // The prototype mark word doesn't have the bias bit set any
                // more, indicating that objects of this data type are not supposed
                // to be biased any more. We are going to try to reset the mark of
                // this object to the prototype value and fall through to the
                // CAS-based locking scheme. Note that if our CAS fails, it means
                // that another thread raced us for the privilege of revoking the
                // bias of this particular object, so it's okay to continue in the
                // normal locking code.
                Word result = objectPointer.compareAndSwapWord(markOffset(INJECTED_VMCONFIG), mark, prototypeMarkWord, MARK_WORD_LOCATION);

                // Fall through to the normal CAS-based lock, because no matter what
                // the result of the above CAS, some thread must have succeeded in
                // removing the bias bit from the object's header.

                if (ENABLE_BREAKPOINT) {
                    bkpt(object, mark, tmp, result);
                }
                counters.revokeBias.inc();
                return false;
            }
        } else {
            // Biasing not enabled -> fall through to lightweight locking
            counters.unbiasable.inc();
            return false;
        }
    }
 432 
 433     @Fold
 434     public static boolean useFastInflatedLocking(@Fold.InjectedParameter OptionValues options) {
 435         return SimpleFastInflatedLocking.getValue(options);
 436     }
 437 
 438     private static boolean inlineFastLockSupported() {
 439         return inlineFastLockSupported(INJECTED_VMCONFIG, INJECTED_OPTIONVALUES);
 440     }
 441 
 442     private static boolean inlineFastLockSupported(GraalHotSpotVMConfig config, OptionValues options) {
 443         return useFastInflatedLocking(options) && monitorMask(config) >= 0 && objectMonitorOwnerOffset(config) >= 0;
 444     }
 445 
    /**
     * Fast path for a monitor that is already inflated: if the ObjectMonitor appears unowned,
     * attempt to CAS the current thread into its owner field.
     *
     * @param mark the object's mark word, which here is the ObjectMonitor address tagged with the
     *            monitor mask bits
     * @param lock the on-stack lock slot; a non-zero value is written to its displaced-mark field
     *            so later unlock code does not take the recursive (displaced-mark == 0) path
     * @return {@code true} if the monitor was acquired; {@code false} if the caller must fall back
     *         to the runtime stub (owner CAS failed or the monitor is already owned)
     */
    private static boolean tryEnterInflated(Object object, Word lock, Word mark, Register threadRegister, boolean trace, Counters counters) {
        // write non-zero value to lock slot
        lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), lock, DISPLACED_MARK_WORD_LOCATION);
        // mark is a pointer to the ObjectMonitor + monitorMask
        Word monitor = mark.subtract(monitorMask(INJECTED_VMCONFIG));
        int ownerOffset = objectMonitorOwnerOffset(INJECTED_VMCONFIG);
        Word owner = monitor.readWord(ownerOffset, OBJECT_MONITOR_OWNER_LOCATION);
        if (probability(FREQUENT_PROBABILITY, owner.equal(0))) {
            // it appears unlocked (owner == 0)
            if (probability(FREQUENT_PROBABILITY, monitor.logicCompareAndSwapWord(ownerOffset, owner, registerAsWord(threadRegister), OBJECT_MONITOR_OWNER_LOCATION))) {
                // success
                traceObject(trace, "+lock{inflated:cas}", object, true);
                counters.inflatedCas.inc();
                return true;
            } else {
                traceObject(trace, "+lock{stub:inflated:failed-cas}", object, true);
                counters.inflatedFailedCas.inc();
            }
        } else {
            traceObject(trace, "+lock{stub:inflated:owned}", object, true);
            counters.inflatedOwned.inc();
        }
        return false;
    }
 470 
 471     /**
 472      * Calls straight out to the monitorenter stub.
 473      */
 474     @Snippet
 475     public static void monitorenterStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace) {
 476         verifyOop(object);
 477         incCounter();
 478         if (BranchProbabilityNode.probability(BranchProbabilityNode.DEOPT_PROBABILITY, object == null)) {
 479             DeoptimizeNode.deopt(DeoptimizationAction.InvalidateReprofile, DeoptimizationReason.NullCheckException);
 480         }
 481         // BeginLockScope nodes do not read from object so a use of object
 482         // cannot float about the null check above
 483         final Word lock = beginLockScope(lockDepth);
 484         traceObject(trace, "+lock{stub}", object, true);
 485         monitorenterStubC(MONITORENTER, object, lock);
 486     }
 487 
    /**
     * Snippet for {@code monitorexit}: releases the lock held at {@code lockDepth} on
     * {@code object}. The paths, in order: biased unlock (a no-op on the mark word), recursive
     * stack unlock (displaced mark word is 0), inflated-monitor unlock via
     * {@link #tryExitInflated}, CAS-based stack unlock, and finally the runtime stub.
     *
     * @param object the object being unlocked
     * @param lockDepth depth of this lock in the current frame's lock stack
     * @param threadRegister register holding the current thread pointer
     * @param trace whether to emit trace output for this unlock operation
     * @param counters snippet counters recording which path was taken
     */
    @Snippet
    public static void monitorexit(Object object, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter boolean trace,
                    @ConstantParameter Counters counters) {
        trace(trace, "           object: 0x%016lx\n", Word.objectToTrackedPointer(object));
        final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG));
        if (useBiasedLocking(INJECTED_VMCONFIG)) {
            // Check for biased locking unlock case, which is a no-op
            // Note: we do not have to check the thread ID for two reasons.
            // First, the interpreter checks for IllegalMonitorStateException at
            // a higher level. Second, if the bias was revoked while we held the
            // lock, the object could not be rebiased toward another thread, so
            // the bias bit would be clear.
            trace(trace, "             mark: 0x%016lx\n", mark);
            if (probability(FREQUENT_PROBABILITY, mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(WordFactory.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) {
                endLockScope();
                decCounter();
                traceObject(trace, "-lock{bias}", object, false);
                counters.unlockBias.inc();
                return;
            }
        }

        final Word lock = CurrentLockNode.currentLock(lockDepth);

        // Load displaced mark
        final Word displacedMark = lock.readWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), DISPLACED_MARK_WORD_LOCATION);
        trace(trace, "    displacedMark: 0x%016lx\n", displacedMark);

        if (probability(NOT_LIKELY_PROBABILITY, displacedMark.equal(0))) {
            // Recursive locking => done
            traceObject(trace, "-lock{recursive}", object, false);
            counters.unlockCasRecursive.inc();
        } else {
            if (!tryExitInflated(object, mark, lock, threadRegister, trace, counters)) {
                verifyOop(object);
                // Test if object's mark word is pointing to the displaced mark word, and if so,
                // restore
                // the displaced mark in the object - if the object's mark word is not pointing to
                // the displaced mark word, do unlocking via runtime call.
                Pointer objectPointer = Word.objectToTrackedPointer(object);
                if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), lock, displacedMark, MARK_WORD_LOCATION))) {
                    traceObject(trace, "-lock{cas}", object, false);
                    counters.unlockCas.inc();
                } else {
                    // The object's mark word was not pointing to the displaced header
                    traceObject(trace, "-lock{stub}", object, false);
                    counters.unlockStub.inc();
                    monitorexitStubC(MONITOREXIT, object, lock);
                }
            }
        }
        // All non-biased paths fall through here to close the lock scope.
        endLockScope();
        decCounter();
    }
 542 
    /**
     * Convenience overload used from snippet code: delegates to
     * {@link #inlineFastUnlockSupported(GraalHotSpotVMConfig, OptionValues)} with the injected VM
     * configuration.
     */
    private static boolean inlineFastUnlockSupported(OptionValues options) {
        return inlineFastUnlockSupported(INJECTED_VMCONFIG, options);
    }
 546 
 547     private static boolean inlineFastUnlockSupported(GraalHotSpotVMConfig config, OptionValues options) {
 548         return useFastInflatedLocking(options) && objectMonitorEntryListOffset(config) >= 0 && objectMonitorCxqOffset(config) >= 0 && monitorMask(config) >= 0 &&
 549                         objectMonitorOwnerOffset(config) >= 0 && objectMonitorRecursionsOffset(config) >= 0;
 550     }
 551 
    /**
     * Attempts to release an inflated (heavyweight) monitor without a runtime call.
     *
     * @return {@code true} if the unlock was fully handled (either inline or by calling the
     *         monitorexit stub), {@code false} if the mark word does not denote an inflated
     *         monitor (or inlining is unsupported) and the caller must fall back to stack
     *         unlocking
     */
    private static boolean tryExitInflated(Object object, Word mark, Word lock, Register threadRegister, boolean trace, Counters counters) {
        if (!inlineFastUnlockSupported(INJECTED_OPTIONVALUES)) {
            return false;
        }
        if (probability(SLOW_PATH_PROBABILITY, mark.and(monitorMask(INJECTED_VMCONFIG)).notEqual(0))) {
            // Inflated case
            // mark is a pointer to the ObjectMonitor + monitorMask
            Word monitor = mark.subtract(monitorMask(INJECTED_VMCONFIG));
            int ownerOffset = objectMonitorOwnerOffset(INJECTED_VMCONFIG);
            Word owner = monitor.readWord(ownerOffset, OBJECT_MONITOR_OWNER_LOCATION);
            int recursionsOffset = objectMonitorRecursionsOffset(INJECTED_VMCONFIG);
            Word recursions = monitor.readWord(recursionsOffset, OBJECT_MONITOR_RECURSION_LOCATION);
            Word thread = registerAsWord(threadRegister);
            // xor/or trick checks both conditions with a single comparison against zero.
            if (probability(FAST_PATH_PROBABILITY, owner.xor(thread).or(recursions).equal(0))) {
                // owner == thread && recursions == 0
                int cxqOffset = objectMonitorCxqOffset(INJECTED_VMCONFIG);
                Word cxq = monitor.readWord(cxqOffset, OBJECT_MONITOR_CXQ_LOCATION);
                int entryListOffset = objectMonitorEntryListOffset(INJECTED_VMCONFIG);
                Word entryList = monitor.readWord(entryListOffset, OBJECT_MONITOR_ENTRY_LIST_LOCATION);
                if (probability(FREQUENT_PROBABILITY, cxq.or(entryList).equal(0))) {
                    // cxq == 0 && entryList == 0
                    // Nobody is waiting, success
                    // release_store
                    memoryBarrier(LOAD_STORE | STORE_STORE);
                    monitor.writeWord(ownerOffset, zero());
                    traceObject(trace, "-lock{inflated:simple}", object, false);
                    counters.unlockInflatedSimple.inc();
                    return true;
                } else {
                    int succOffset = objectMonitorSuccOffset(INJECTED_VMCONFIG);
                    Word succ = monitor.readWord(succOffset, OBJECT_MONITOR_SUCC_LOCATION);
                    if (probability(FREQUENT_PROBABILITY, succ.isNonNull())) {
                        // There may be a thread spinning on this monitor. Temporarily setting
                        // the monitor owner to null, and hope that the other thread will grab it.
                        monitor.writeWord(ownerOffset, zero());
                        // Full fence: the owner store must be visible before we re-read succ.
                        memoryBarrier(STORE_STORE | STORE_LOAD);
                        succ = monitor.readWord(succOffset, OBJECT_MONITOR_SUCC_LOCATION);
                        if (probability(NOT_FREQUENT_PROBABILITY, succ.isNonNull())) {
                            // We manage to release the monitor before the other running thread even
                            // notices.
                            traceObject(trace, "-lock{inflated:transfer}", object, false);
                            counters.unlockInflatedTransfer.inc();
                            return true;
                        } else {
                            // Either the monitor is grabbed by a spinning thread, or the spinning
                            // thread parks. Now we attempt to reset the owner of the monitor.
                            if (probability(FREQUENT_PROBABILITY, !monitor.logicCompareAndSwapWord(ownerOffset, zero(), thread, OBJECT_MONITOR_OWNER_LOCATION))) {
                                // The monitor is stolen.
                                traceObject(trace, "-lock{inflated:transfer}", object, false);
                                counters.unlockInflatedTransfer.inc();
                                return true;
                            }
                            // CAS succeeded: we re-own the monitor and must release it via the
                            // stub below so waiters are handled properly.
                        }
                    }
                }
            }
            counters.unlockStubInflated.inc();
            traceObject(trace, "-lock{stub:inflated}", object, false);
            monitorexitStubC(MONITOREXIT, object, lock);
            return true;
        }
        return false;
    }
 615 
 616     /**
 617      * Calls straight out to the monitorexit stub.
 618      */
 619     @Snippet
 620     public static void monitorexitStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace) {
 621         verifyOop(object);
 622         traceObject(trace, "-lock{stub}", object, false);
 623         final Word lock = CurrentLockNode.currentLock(lockDepth);
 624         monitorexitStubC(MONITOREXIT, object, lock);
 625         endLockScope();
 626         decCounter();
 627     }
 628 
    /**
     * Records a dynamic profiling counter for the given action (when profiling is enabled) and,
     * when {@code enabled}, prints the action and object to the log.
     *
     * @param enabled whether textual tracing is enabled
     * @param action label for the lock/unlock path taken, e.g. {@code "+lock{cas}"}
     * @param object the object being locked or unlocked
     * @param enter {@code true} for a monitor enter, {@code false} for a monitor exit
     */
    public static void traceObject(boolean enabled, String action, Object object, boolean enter) {
        if (doProfile(INJECTED_OPTIONVALUES)) {
            DynamicCounterNode.counter(enter ? "number of monitor enters" : "number of monitor exits", action, 1, PROFILE_CONTEXT);
        }
        if (enabled) {
            Log.print(action);
            Log.print(' ');
            Log.printlnObject(object);
        }
    }
 639 
 640     public static void trace(boolean enabled, String format, WordBase value) {
 641         if (enabled) {
 642             Log.printf(format, value.rawValue());
 643         }
 644     }
 645 
 646     /**
 647      * Leaving the breakpoint code in to provide an example of how to use the {@link BreakpointNode}
 648      * intrinsic.
 649      */
 650     private static final boolean ENABLE_BREAKPOINT = false;
 651 
 652     private static final LocationIdentity MONITOR_COUNTER_LOCATION = NamedLocationIdentity.mutable("MonitorCounter");
 653 
 654     @NodeIntrinsic(BreakpointNode.class)
 655     static native void bkpt(Object object, Word mark, Word tmp, Word value);
 656 
    /**
     * Folded at snippet-instantiation time: whether balanced-monitor verification is enabled via
     * the {@code VerifyBalancedMonitors} option.
     */
    @Fold
    static boolean verifyBalancedMonitors(@Fold.InjectedParameter OptionValues options) {
        return VerifyBalancedMonitors.getValue(options);
    }
 661 
    /**
     * Increments the balanced-monitor verification counter on monitor enter. Folds to a no-op
     * when {@code VerifyBalancedMonitors} is disabled.
     */
    static void incCounter() {
        if (verifyBalancedMonitors(INJECTED_OPTIONVALUES)) {
            final Word counter = MonitorCounterNode.counter();
            final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
            counter.writeInt(0, count + 1, MONITOR_COUNTER_LOCATION);
        }
    }
 669 
    /**
     * Decrements the balanced-monitor verification counter on monitor exit. Folds to a no-op
     * when {@code VerifyBalancedMonitors} is disabled.
     */
    public static void decCounter() {
        if (verifyBalancedMonitors(INJECTED_OPTIONVALUES)) {
            final Word counter = MonitorCounterNode.counter();
            final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
            counter.writeInt(0, count - 1, MONITOR_COUNTER_LOCATION);
        }
    }
 677 
    /**
     * Snippet inserted at method entry (see {@code Templates.checkBalancedMonitors}) to zero the
     * balanced-monitor verification counter.
     */
    @Snippet
    private static void initCounter() {
        final Word counter = MonitorCounterNode.counter();
        counter.writeInt(0, 0, MONITOR_COUNTER_LOCATION);
    }
 683 
    /**
     * Snippet inserted before each return (see {@code Templates.checkBalancedMonitors}) that
     * raises a VM error if the monitor counter is non-zero, i.e. if enters and exits were
     * unbalanced.
     *
     * @param errMsg format string for the VM error; receives the counter value
     */
    @Snippet
    private static void checkCounter(@ConstantParameter String errMsg) {
        final Word counter = MonitorCounterNode.counter();
        final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
        if (count != 0) {
            vmError(errMsg, count);
        }
    }
 692 
    /**
     * Snippet counters for the various monitorenter/monitorexit code paths. An instance is passed
     * into the snippets as a {@code @ConstantParameter} so the taken path can be recorded.
     */
    public static class Counters {
        /**
         * Counters for the various paths for acquiring a lock. The counters whose names start with
         * {@code "lock"} are mutually exclusive. The other counters are for paths that may be
         * shared.
         */
        public final SnippetCounter lockBiasExisting;
        public final SnippetCounter lockBiasAcquired;
        public final SnippetCounter lockBiasTransfer;
        public final SnippetCounter lockCas;
        public final SnippetCounter lockCasRecursive;
        public final SnippetCounter lockStubEpochExpired;
        public final SnippetCounter lockStubRevoke;
        public final SnippetCounter lockStubFailedCas;
        public final SnippetCounter inflatedCas;
        public final SnippetCounter inflatedFailedCas;
        public final SnippetCounter inflatedOwned;
        public final SnippetCounter unbiasable;
        public final SnippetCounter revokeBias;

        /**
         * Counters for the various paths for releasing a lock. The counters whose names start with
         * {@code "unlock"} are mutually exclusive. The other counters are for paths that may be
         * shared.
         */
        public final SnippetCounter unlockBias;
        public final SnippetCounter unlockCas;
        public final SnippetCounter unlockCasRecursive;
        public final SnippetCounter unlockStub;
        public final SnippetCounter unlockStubInflated;
        public final SnippetCounter unlockInflatedSimple;
        public final SnippetCounter unlockInflatedTransfer;

        public Counters(SnippetCounter.Group.Factory factory) {
            SnippetCounter.Group enter = factory.createSnippetCounterGroup("MonitorEnters");
            SnippetCounter.Group exit = factory.createSnippetCounterGroup("MonitorExits");
            lockBiasExisting = new SnippetCounter(enter, "lock{bias:existing}", "bias-locked previously biased object");
            lockBiasAcquired = new SnippetCounter(enter, "lock{bias:acquired}", "bias-locked newly biased object");
            lockBiasTransfer = new SnippetCounter(enter, "lock{bias:transfer}", "bias-locked, biased transferred");
            lockCas = new SnippetCounter(enter, "lock{cas}", "cas-locked an object");
            lockCasRecursive = new SnippetCounter(enter, "lock{cas:recursive}", "cas-locked, recursive");
            lockStubEpochExpired = new SnippetCounter(enter, "lock{stub:epoch-expired}", "stub-locked, epoch expired");
            lockStubRevoke = new SnippetCounter(enter, "lock{stub:revoke}", "stub-locked, biased revoked");
            lockStubFailedCas = new SnippetCounter(enter, "lock{stub:failed-cas/stack}", "stub-locked, failed cas and stack locking");
            inflatedCas = new SnippetCounter(enter, "lock{inflated:cas}", "heavyweight-locked, cas-locked");
            inflatedFailedCas = new SnippetCounter(enter, "lock{inflated:failed-cas}", "heavyweight-locked, failed cas");
            inflatedOwned = new SnippetCounter(enter, "lock{inflated:owned}", "heavyweight-locked, already owned");
            unbiasable = new SnippetCounter(enter, "unbiasable", "object with unbiasable type");
            revokeBias = new SnippetCounter(enter, "revokeBias", "object had bias revoked");

            unlockBias = new SnippetCounter(exit, "unlock{bias}", "bias-unlocked an object");
            unlockCas = new SnippetCounter(exit, "unlock{cas}", "cas-unlocked an object");
            unlockCasRecursive = new SnippetCounter(exit, "unlock{cas:recursive}", "cas-unlocked an object, recursive");
            unlockStub = new SnippetCounter(exit, "unlock{stub}", "stub-unlocked an object");
            unlockStubInflated = new SnippetCounter(exit, "unlock{stub:inflated}", "stub-unlocked an object with inflated monitor");
            unlockInflatedSimple = new SnippetCounter(exit, "unlock{inflated}", "unlocked an object monitor");
            unlockInflatedTransfer = new SnippetCounter(exit, "unlock{inflated:transfer}", "unlocked an object monitor in the presence of ObjectMonitor::_succ");
        }
    }
 752 
    /**
     * Lowers {@link RawMonitorEnterNode} and {@link MonitorExitNode} by instantiating the monitor
     * snippets defined in this file, choosing between the fast-locking snippets and the
     * stub-calling snippets based on {@code useFastLocking}.
     */
    public static class Templates extends AbstractTemplates {

        private final SnippetInfo monitorenter = snippet(MonitorSnippets.class, "monitorenter");
        // The monitorexit snippet is registered with the monitor-related locations it accesses.
        private final SnippetInfo monitorexit = snippet(MonitorSnippets.class, "monitorexit", DISPLACED_MARK_WORD_LOCATION, OBJECT_MONITOR_OWNER_LOCATION, OBJECT_MONITOR_CXQ_LOCATION,
                        OBJECT_MONITOR_ENTRY_LIST_LOCATION, OBJECT_MONITOR_RECURSION_LOCATION, OBJECT_MONITOR_SUCC_LOCATION);
        private final SnippetInfo monitorenterStub = snippet(MonitorSnippets.class, "monitorenterStub");
        private final SnippetInfo monitorexitStub = snippet(MonitorSnippets.class, "monitorexitStub");
        private final SnippetInfo initCounter = snippet(MonitorSnippets.class, "initCounter");
        private final SnippetInfo checkCounter = snippet(MonitorSnippets.class, "checkCounter");

        // Selects the inline fast-locking snippets vs. the direct stub-call snippets.
        private final boolean useFastLocking;
        public final Counters counters;

        public Templates(OptionValues options, Iterable<DebugHandlersFactory> factories, SnippetCounter.Group.Factory factory, HotSpotProviders providers, TargetDescription target,
                        boolean useFastLocking) {
            super(options, factories, providers, providers.getSnippetReflection(), target);
            this.useFastLocking = useFastLocking;

            this.counters = new Counters(factory);
        }

        /**
         * Lowers a monitor enter by instantiating either the {@code monitorenter} or the
         * {@code monitorenterStub} snippet in place of {@code monitorenterNode}.
         */
        public void lower(RawMonitorEnterNode monitorenterNode, HotSpotRegistersProvider registers, LoweringTool tool) {
            StructuredGraph graph = monitorenterNode.graph();
            checkBalancedMonitors(graph, tool);

            // The object of a RawMonitorEnterNode is expected to be proven non-null already.
            assert ((ObjectStamp) monitorenterNode.object().stamp(NodeView.DEFAULT)).nonNull();

            Arguments args;
            if (useFastLocking) {
                args = new Arguments(monitorenter, graph.getGuardsStage(), tool.getLoweringStage());
                args.add("object", monitorenterNode.object());
                args.add("hub", monitorenterNode.getHub());
                args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
                args.addConst("threadRegister", registers.getThreadRegister());
                args.addConst("stackPointerRegister", registers.getStackPointerRegister());
                args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph));
                args.addConst("counters", counters);
            } else {
                args = new Arguments(monitorenterStub, graph.getGuardsStage(), tool.getLoweringStage());
                args.add("object", monitorenterNode.object());
                args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
                args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph));
                // NOTE(review): "counters" is bound here although the monitorenterStub snippet
                // does not declare a counters parameter — confirm the template machinery tolerates
                // the extra constant argument.
                args.addConst("counters", counters);
            }

            template(monitorenterNode, args).instantiate(providers.getMetaAccess(), monitorenterNode, DEFAULT_REPLACER, args);
        }

        /**
         * Lowers a monitor exit by instantiating either the {@code monitorexit} or the
         * {@code monitorexitStub} snippet in place of {@code monitorexitNode}.
         */
        public void lower(MonitorExitNode monitorexitNode, HotSpotRegistersProvider registers, LoweringTool tool) {
            StructuredGraph graph = monitorexitNode.graph();

            Arguments args;
            if (useFastLocking) {
                args = new Arguments(monitorexit, graph.getGuardsStage(), tool.getLoweringStage());
            } else {
                args = new Arguments(monitorexitStub, graph.getGuardsStage(), tool.getLoweringStage());
            }
            args.add("object", monitorexitNode.object());
            args.addConst("lockDepth", monitorexitNode.getMonitorId().getLockDepth());
            // NOTE(review): "threadRegister" and "counters" are bound unconditionally although the
            // monitorexitStub snippet declares neither parameter — confirm the template machinery
            // tolerates the extra constant arguments on the stub path.
            args.addConst("threadRegister", registers.getThreadRegister());
            args.addConst("trace", isTracingEnabledForType(monitorexitNode.object()) || isTracingEnabledForMethod(graph));
            args.addConst("counters", counters);

            template(monitorexitNode, args).instantiate(providers.getMetaAccess(), monitorexitNode, DEFAULT_REPLACER, args);
        }

        /**
         * Returns whether tracing is enabled for the (statically known) type of {@code object}
         * according to the {@code TraceMonitorsTypeFilter} option: null filter disables tracing,
         * an empty filter matches every type, otherwise the type name must contain the filter.
         */
        public static boolean isTracingEnabledForType(ValueNode object) {
            ResolvedJavaType type = StampTool.typeOrNull(object.stamp(NodeView.DEFAULT));
            String filter = TraceMonitorsTypeFilter.getValue(object.getOptions());
            if (filter == null) {
                return false;
            } else {
                if (filter.length() == 0) {
                    return true;
                }
                if (type == null) {
                    return false;
                }
                return (type.getName().contains(filter));
            }
        }

        /**
         * Returns whether tracing is enabled for the method being compiled according to the
         * {@code TraceMonitorsMethodFilter} option, with the same null/empty/contains semantics as
         * {@link #isTracingEnabledForType}.
         */
        public static boolean isTracingEnabledForMethod(StructuredGraph graph) {
            String filter = TraceMonitorsMethodFilter.getValue(graph.getOptions());
            if (filter == null) {
                return false;
            } else {
                if (filter.length() == 0) {
                    return true;
                }
                if (graph.method() == null) {
                    return false;
                }
                return (graph.method().format("%H.%n").contains(filter));
            }
        }

        /**
         * If balanced monitor checking is enabled then nodes are inserted at the start and all
         * return points of the graph to initialize and check the monitor counter respectively.
         */
        private void checkBalancedMonitors(StructuredGraph graph, LoweringTool tool) {
            if (VerifyBalancedMonitors.getValue(options)) {
                NodeIterable<MonitorCounterNode> nodes = graph.getNodes().filter(MonitorCounterNode.class);
                if (nodes.isEmpty()) {
                    // Only insert the nodes if this is the first monitorenter being lowered.
                    JavaType returnType = initCounter.getMethod().getSignature().getReturnType(initCounter.getMethod().getDeclaringClass());
                    StampPair returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
                    MethodCallTargetNode callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, initCounter.getMethod(), new ValueNode[0], returnStamp, null));
                    InvokeNode invoke = graph.add(new InvokeNode(callTarget, 0));
                    invoke.setStateAfter(graph.start().stateAfter());
                    graph.addAfterFixed(graph.start(), invoke);

                    // Inline the initCounter snippet at the method entry.
                    StructuredGraph inlineeGraph = providers.getReplacements().getSnippet(initCounter.getMethod(), null, null, invoke.graph().trackNodeSourcePosition(), invoke.getNodeSourcePosition(),
                                    invoke.getOptions());
                    InliningUtil.inline(invoke, inlineeGraph, false, null);

                    // Insert and inline a checkCounter call before every return point.
                    List<ReturnNode> rets = graph.getNodes(ReturnNode.TYPE).snapshot();
                    for (ReturnNode ret : rets) {
                        returnType = checkCounter.getMethod().getSignature().getReturnType(checkCounter.getMethod().getDeclaringClass());
                        String msg = "unbalanced monitors in " + graph.method().format("%H.%n(%p)") + ", count = %d";
                        ConstantNode errMsg = ConstantNode.forConstant(tool.getConstantReflection().forString(msg), providers.getMetaAccess(), graph);
                        returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
                        callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, checkCounter.getMethod(), new ValueNode[]{errMsg}, returnStamp, null));
                        invoke = graph.add(new InvokeNode(callTarget, 0));
                        Bytecode code = new ResolvedJavaMethodBytecode(graph.method());
                        FrameState stateAfter = new FrameState(null, code, BytecodeFrame.AFTER_BCI, new ValueNode[0], new ValueNode[0], 0, new ValueNode[0], null, false, false);
                        invoke.setStateAfter(graph.add(stateAfter));
                        graph.addBeforeFixed(ret, invoke);

                        Arguments args = new Arguments(checkCounter, graph.getGuardsStage(), tool.getLoweringStage());
                        args.addConst("errMsg", msg);
                        inlineeGraph = template(invoke, args).copySpecializedGraph(graph.getDebug());
                        InliningUtil.inline(invoke, inlineeGraph, false, null);
                    }
                }
            }
        }
    }
 892 
    // Descriptors for the HotSpot monitorenter/monitorexit runtime stubs called by the snippets.
    public static final ForeignCallDescriptor MONITORENTER = new ForeignCallDescriptor("monitorenter", void.class, Object.class, Word.class);
    public static final ForeignCallDescriptor MONITOREXIT = new ForeignCallDescriptor("monitorexit", void.class, Object.class, Word.class);
 895 
    // Foreign call into the VM's monitorenter stub (see MONITORENTER descriptor).
    @NodeIntrinsic(ForeignCallNode.class)
    private static native void monitorenterStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);
 898 
    // Foreign call into the VM's monitorexit stub (see MONITOREXIT descriptor).
    @NodeIntrinsic(ForeignCallNode.class)
    public static native void monitorexitStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);
 901 }