src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.hotspot/src/org/graalvm/compiler/hotspot/replacements/MonitorSnippets.java

   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 package org.graalvm.compiler.hotspot.replacements;
  24 
  25 import static org.graalvm.compiler.core.common.GraalOptions.SnippetCounters;

  26 import static org.graalvm.compiler.hotspot.GraalHotSpotVMConfig.INJECTED_VMCONFIG;
  27 import static org.graalvm.compiler.hotspot.nodes.BeginLockScopeNode.beginLockScope;
  28 import static org.graalvm.compiler.hotspot.nodes.DirectCompareAndSwapNode.compareAndSwap;
  29 import static org.graalvm.compiler.hotspot.nodes.EndLockScopeNode.endLockScope;
  30 import static org.graalvm.compiler.hotspot.nodes.VMErrorNode.vmError;
  31 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.DISPLACED_MARK_WORD_LOCATION;
  32 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.MARK_WORD_LOCATION;
  33 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.PROTOTYPE_MARK_WORD_LOCATION;
  34 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.ageMaskInPlace;
  35 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockMaskInPlace;
  36 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockPattern;
  37 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.config;
  38 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.epochMaskInPlace;
  39 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.loadWordFromObject;
  40 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.lockDisplacedMarkOffset;
  41 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.markOffset;
  42 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.pageSize;
  43 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.prototypeMarkWordOffset;
  44 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.registerAsWord;
  45 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.unlockedMask;
  46 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.useBiasedLocking;
  47 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.verifyOop;
  48 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.wordSize;
  49 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.ProfileMonitors;

  50 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsMethodFilter;
  51 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsTypeFilter;
  52 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.VerifyBalancedMonitors;

  53 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FREQUENT_PROBABILITY;
  54 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.VERY_FAST_PATH_PROBABILITY;
  55 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.VERY_SLOW_PATH_PROBABILITY;
  56 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.probability;
  57 import static org.graalvm.compiler.replacements.SnippetTemplate.DEFAULT_REPLACER;
  58 
  59 import java.util.List;
  60 
  61 import org.graalvm.compiler.api.replacements.Fold;
  62 import org.graalvm.compiler.api.replacements.Snippet;
  63 import org.graalvm.compiler.api.replacements.Snippet.ConstantParameter;
  64 import org.graalvm.compiler.bytecode.Bytecode;
  65 import org.graalvm.compiler.bytecode.ResolvedJavaMethodBytecode;
  66 import org.graalvm.compiler.core.common.LocationIdentity;
  67 import org.graalvm.compiler.core.common.spi.ForeignCallDescriptor;
  68 import org.graalvm.compiler.core.common.type.ObjectStamp;
  69 import org.graalvm.compiler.core.common.type.StampFactory;
  70 import org.graalvm.compiler.core.common.type.StampPair;
  71 import org.graalvm.compiler.graph.Node.ConstantNodeParameter;
  72 import org.graalvm.compiler.graph.Node.NodeIntrinsic;
  73 import org.graalvm.compiler.graph.iterators.NodeIterable;

  74 import org.graalvm.compiler.hotspot.meta.HotSpotProviders;
  75 import org.graalvm.compiler.hotspot.meta.HotSpotRegistersProvider;
  76 import org.graalvm.compiler.hotspot.nodes.AcquiredCASLockNode;
  77 import org.graalvm.compiler.hotspot.nodes.CurrentLockNode;
  78 import org.graalvm.compiler.hotspot.nodes.DirectCompareAndSwapNode;
  79 import org.graalvm.compiler.hotspot.nodes.FastAcquireBiasedLockNode;
  80 import org.graalvm.compiler.hotspot.nodes.MonitorCounterNode;
  81 import org.graalvm.compiler.hotspot.word.KlassPointer;
  82 import org.graalvm.compiler.nodes.BreakpointNode;
  83 import org.graalvm.compiler.nodes.CallTargetNode.InvokeKind;
  84 import org.graalvm.compiler.nodes.ConstantNode;
  85 import org.graalvm.compiler.nodes.DeoptimizeNode;
  86 import org.graalvm.compiler.nodes.FrameState;
  87 import org.graalvm.compiler.nodes.InvokeNode;
  88 import org.graalvm.compiler.nodes.NamedLocationIdentity;
  89 import org.graalvm.compiler.nodes.ReturnNode;
  90 import org.graalvm.compiler.nodes.StructuredGraph;
  91 import org.graalvm.compiler.nodes.ValueNode;
  92 import org.graalvm.compiler.nodes.debug.DynamicCounterNode;
  93 import org.graalvm.compiler.nodes.extended.BranchProbabilityNode;
  94 import org.graalvm.compiler.nodes.extended.ForeignCallNode;

  95 import org.graalvm.compiler.nodes.java.MethodCallTargetNode;
  96 import org.graalvm.compiler.nodes.java.MonitorExitNode;
  97 import org.graalvm.compiler.nodes.java.RawMonitorEnterNode;
  98 import org.graalvm.compiler.nodes.memory.address.OffsetAddressNode;
  99 import org.graalvm.compiler.nodes.spi.LoweringTool;
 100 import org.graalvm.compiler.nodes.type.StampTool;

 101 import org.graalvm.compiler.phases.common.inlining.InliningUtil;
 102 import org.graalvm.compiler.replacements.Log;
 103 import org.graalvm.compiler.replacements.SnippetCounter;
 104 import org.graalvm.compiler.replacements.SnippetTemplate.AbstractTemplates;
 105 import org.graalvm.compiler.replacements.SnippetTemplate.Arguments;
 106 import org.graalvm.compiler.replacements.SnippetTemplate.SnippetInfo;
 107 import org.graalvm.compiler.replacements.Snippets;

 108 import org.graalvm.compiler.word.Word;
 109 import org.graalvm.compiler.word.WordBase;
 110 
 111 import jdk.vm.ci.code.BytecodeFrame;
 112 import jdk.vm.ci.code.Register;
 113 import jdk.vm.ci.code.TargetDescription;
 114 import jdk.vm.ci.meta.DeoptimizationAction;
 115 import jdk.vm.ci.meta.DeoptimizationReason;
 116 import jdk.vm.ci.meta.JavaType;
 117 import jdk.vm.ci.meta.ResolvedJavaMethod;
 118 import jdk.vm.ci.meta.ResolvedJavaType;
 119 
 120 /**
 121  * Snippets used for implementing the monitorenter and monitorexit instructions.
 122  *
 123  * The locking algorithm used is described in the paper
 124  * <a href="http://dl.acm.org/citation.cfm?id=1167515.1167496"> Eliminating synchronization-related
 125  * atomic operations with biased locking and bulk rebiasing</a> by Kenneth Russell and David
 126  * Detlefs.
 127  *
 128  * Comment below is reproduced from {@code markOop.hpp} for convenience:
 129  *
 130  * <pre>
 131  *  Bit-format of an object header (most significant first, big endian layout below):
 132  *  32 bits:
 133  *  --------
 134  *             hash:25 ------------>| age:4    biased_lock:1 lock:2 (normal object)
 135  *             JavaThread*:23 epoch:2 age:4    biased_lock:1 lock:2 (biased object)
 136  *             size:32 ------------------------------------------>| (CMS free block)
 137  *             PromotedObject*:29 ---------->| promo_bits:3 ----->| (CMS promoted object)


 184  *
 185  *  - the two lock bits are used to describe three states: locked/unlocked and monitor.
 186  *
 187  *    [ptr             | 00]  locked             ptr points to real header on stack
 188  *    [header      | 0 | 01]  unlocked           regular object header
  189  *    [ptr             | 10]  monitor            inflated lock (header is swapped out)
 190  *    [ptr             | 11]  marked             used by markSweep to mark an object
 191  *                                               not valid at any other time
 192  *
 193  *    We assume that stack/thread pointers have the lowest two bits cleared.
 194  * </pre>
 195  *
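 * For example, under this encoding a header whose low three bits are {@code 001} is a plain
 * unlocked object, {@code 101} marks a biasable or biased object, a value ending in {@code 00}
 * is a pointer to a lock slot on some thread's stack, and one ending in {@code 10} is a
 * pointer to the inflated monitor.
 *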
  196  * Note that {@code Thread::allocate} ensures that {@code JavaThread} objects are aligned
  197  * appropriately to comply with the layouts above.
 198  */
 199 public class MonitorSnippets implements Snippets {
 200 
 201     private static final boolean PROFILE_CONTEXT = false;
 202 
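    // @Fold evaluates doProfile() while the snippet is being prepared, so when ProfileMonitors
    // is disabled the profiling branch in traceObject becomes a constant and folds away,
    // adding no code to the locking fast paths.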
 203     @Fold
 204     static boolean doProfile() {
 205         return ProfileMonitors.getValue();
 206     }
 207 
 208     @Snippet
 209     public static void monitorenter(Object object, KlassPointer hub, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter Register stackPointerRegister,
 210                     @ConstantParameter boolean trace) {
 211         verifyOop(object);
 212 
 213         // Load the mark word - this includes a null-check on object
 214         final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG));
 215 
 216         final Word lock = beginLockScope(lockDepth);
 217 
 218         trace(trace, "           object: 0x%016lx\n", Word.objectToTrackedPointer(object));

 219         trace(trace, "             lock: 0x%016lx\n", lock);
 220         trace(trace, "             mark: 0x%016lx\n", mark);
 221 
 222         incCounter();
 223 
 224         if (useBiasedLocking(INJECTED_VMCONFIG)) {
 225             // See whether the lock is currently biased toward our thread and
 226             // whether the epoch is still valid.
 227             // Note that the runtime guarantees sufficient alignment of JavaThread
 228             // pointers to allow age to be placed into low bits.
 229             final Word biasableLockBits = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG));
 230 
 231             // Check whether the bias pattern is present in the object's mark word
 232             // and the bias owner and the epoch are both still current.
 233             final Word prototypeMarkWord = hub.readWord(prototypeMarkWordOffset(INJECTED_VMCONFIG), PROTOTYPE_MARK_WORD_LOCATION);
 234             final Word thread = registerAsWord(threadRegister);
 235             final Word tmp = prototypeMarkWord.or(thread).xor(mark).and(~ageMaskInPlace(INJECTED_VMCONFIG));
 236             trace(trace, "prototypeMarkWord: 0x%016lx\n", prototypeMarkWord);
 237             trace(trace, "           thread: 0x%016lx\n", thread);
 238             trace(trace, "              tmp: 0x%016lx\n", tmp);
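            // A worked reading of the check below: prototypeMarkWord for a biasable class is
            // just the bias pattern plus the class's current epoch, so (prototype | thread) is
            // the mark word this object would have if it were biased to the current thread in
            // the current epoch. XOR-ing that with the actual mark word (ignoring the age bits)
            // is therefore zero exactly when the object is already biased to us with a valid epoch.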
 239             if (probability(BranchProbabilityNode.NOT_LIKELY_PROBABILITY, tmp.equal(0))) {
 240                 // Object is already biased to current thread -> done
 241                 traceObject(trace, "+lock{bias:existing}", object, true);
 242                 lockBiasExisting.inc();
 243                 FastAcquireBiasedLockNode.mark(object);
 244                 return;
 245             }
 246 
 247             // Now check to see whether biasing is enabled for this object
 248             if (probability(BranchProbabilityNode.FAST_PATH_PROBABILITY, biasableLockBits.notEqual(Word.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) {
 249                 // Biasing not enabled -> fall through to lightweight locking
 250                 unbiasable.inc();
 251             } else {
 252                 // At this point we know that the mark word has the bias pattern and
 253                 // that we are not the bias owner in the current epoch. We need to
 254                 // figure out more details about the state of the mark word in order to
 255                 // know what operations can be legally performed on the object's
 256                 // mark word.
 257 
 258                 // If the low three bits in the xor result aren't clear, that means
 259                 // the prototype header is no longer biasable and we have to revoke
 260                 // the bias on this object.
 261                 if (probability(FREQUENT_PROBABILITY, tmp.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(0))) {
 262                     // Biasing is still enabled for object's type. See whether the
 263                     // epoch of the current bias is still valid, meaning that the epoch
 264                     // bits of the mark word are equal to the epoch bits of the
 265                     // prototype mark word. (Note that the prototype mark word's epoch bits
 266                     // only change at a safepoint.) If not, attempt to rebias the object
 267                     // toward the current thread. Note that we must be absolutely sure
 268                     // that the current epoch is invalid in order to do this because
 269                     // otherwise the manipulations it performs on the mark word are
 270                     // illegal.
 271                     if (probability(FREQUENT_PROBABILITY, tmp.and(epochMaskInPlace(INJECTED_VMCONFIG)).equal(0))) {
 272                         // The epoch of the current bias is still valid but we know nothing
 273                         // about the owner; it might be set or it might be clear. Try to
 274                         // acquire the bias of the object using an atomic operation. If this
 275                         // fails we will go in to the runtime to revoke the object's bias.
 276                         // Note that we first construct the presumed unbiased header so we
 277                         // don't accidentally blow away another thread's valid bias.
 278                         Word unbiasedMark = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG) | ageMaskInPlace(INJECTED_VMCONFIG) | epochMaskInPlace(INJECTED_VMCONFIG));
 279                         Word biasedMark = unbiasedMark.or(thread);
 280                         trace(trace, "     unbiasedMark: 0x%016lx\n", unbiasedMark);
 281                         trace(trace, "       biasedMark: 0x%016lx\n", biasedMark);
 282                         if (probability(VERY_FAST_PATH_PROBABILITY,
 283                                         compareAndSwap(OffsetAddressNode.address(object, markOffset(INJECTED_VMCONFIG)), unbiasedMark, biasedMark, MARK_WORD_LOCATION).equal(unbiasedMark))) {
 284                             // Object is now biased to current thread -> done
 285                             traceObject(trace, "+lock{bias:acquired}", object, true);
 286                             lockBiasAcquired.inc();
 287                             return;
 288                         }
 289                         // If the biasing toward our thread failed, this means that another thread
 290                         // owns the bias and we need to revoke that bias. The revocation will occur
 291                         // in the interpreter runtime.
 292                         traceObject(trace, "+lock{stub:revoke}", object, true);
 293                         lockStubRevoke.inc();
 294                     } else {
 295                         // At this point we know the epoch has expired, meaning that the
 296                         // current bias owner, if any, is actually invalid. Under these
 297                         // circumstances _only_, are we allowed to use the current mark word
 298                         // value as the comparison value when doing the CAS to acquire the
 299                         // bias in the current epoch. In other words, we allow transfer of
 300                         // the bias from one thread to another directly in this situation.
 301                         Word biasedMark = prototypeMarkWord.or(thread);
 302                         trace(trace, "       biasedMark: 0x%016lx\n", biasedMark);
 303                         if (probability(VERY_FAST_PATH_PROBABILITY,
 304                                         compareAndSwap(OffsetAddressNode.address(object, markOffset(INJECTED_VMCONFIG)), mark, biasedMark, MARK_WORD_LOCATION).equal(mark))) {
 305                             // Object is now biased to current thread -> done
 306                             traceObject(trace, "+lock{bias:transfer}", object, true);
 307                             lockBiasTransfer.inc();
 308                             return;
 309                         }
 310                         // If the biasing toward our thread failed, then another thread
 311                         // succeeded in biasing it toward itself and we need to revoke that
 312                         // bias. The revocation will occur in the runtime in the slow case.
 313                         traceObject(trace, "+lock{stub:epoch-expired}", object, true);
 314                         lockStubEpochExpired.inc();
 315                     }

 316                     monitorenterStubC(MONITORENTER, object, lock);
 317                     return;
 318                 } else {
 319                     // The prototype mark word doesn't have the bias bit set any
 320                     // more, indicating that objects of this data type are not supposed
 321                     // to be biased any more. We are going to try to reset the mark of
 322                     // this object to the prototype value and fall through to the
 323                     // CAS-based locking scheme. Note that if our CAS fails, it means
 324                     // that another thread raced us for the privilege of revoking the
 325                     // bias of this particular object, so it's okay to continue in the
 326                     // normal locking code.
 327                     Word result = compareAndSwap(OffsetAddressNode.address(object, markOffset(INJECTED_VMCONFIG)), mark, prototypeMarkWord, MARK_WORD_LOCATION);
 328 
 329                     // Fall through to the normal CAS-based lock, because no matter what
 330                     // the result of the above CAS, some thread must have succeeded in
 331                     // removing the bias bit from the object's header.
 332 
 333                     if (ENABLE_BREAKPOINT) {
 334                         bkpt(object, mark, tmp, result);
 335                     }
 336                     revokeBias.inc();

 337                 }
 338             }
 339         }
 340 
 341         // Create the unlocked mark word pattern
 342         Word unlockedMark = mark.or(unlockedMask(INJECTED_VMCONFIG));
 343         trace(trace, "     unlockedMark: 0x%016lx\n", unlockedMark);

 344 
 345         // Copy this unlocked mark word into the lock slot on the stack
 346         lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), unlockedMark, DISPLACED_MARK_WORD_LOCATION);

 347 
 348         // Test if the object's mark word is unlocked, and if so, store the
 349         // (address of) the lock slot into the object's mark word.
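        // (The lock slot now holds the displaced header; since stack slots are word aligned,
        // publishing the slot's address in the mark word leaves the low two lock bits at 00,
        // which is the encoding for a stack-locked object.)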
 350         Word currentMark = compareAndSwap(OffsetAddressNode.address(object, markOffset(INJECTED_VMCONFIG)), unlockedMark, lock, MARK_WORD_LOCATION);
 351         if (probability(BranchProbabilityNode.SLOW_PATH_PROBABILITY, currentMark.notEqual(unlockedMark))) {
 352             trace(trace, "      currentMark: 0x%016lx\n", currentMark);
 353             // The mark word in the object header was not the same.
 354             // Either the object is locked by another thread or is already locked
 355             // by the current thread. The latter is true if the mark word
 356             // is a stack pointer into the current thread's stack, i.e.:
 357             //
 358             // 1) (currentMark & aligned_mask) == 0
 359             // 2) rsp <= currentMark
 360             // 3) currentMark <= rsp + page_size
 361             //
 362             // These 3 tests can be done by evaluating the following expression:
 363             //
 364             // (currentMark - rsp) & (aligned_mask - page_size)
 365             //
 366             // assuming both the stack pointer and page_size have their least
 367             // significant 2 bits cleared and page_size is a power of 2
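            // Worked example, assuming 8-byte words and a 4096-byte page: aligned_mask - page_size
            // is 7 - 4096 = 0x...fffff007, so the and() below is zero only if currentMark - rsp has
            // its low alignment bits clear (condition 1) and is an unsigned value below page_size
            // (conditions 2 and 3).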
 368             final Word alignedMask = Word.unsigned(wordSize() - 1);
 369             final Word stackPointer = registerAsWord(stackPointerRegister).add(config(INJECTED_VMCONFIG).stackBias);
 370             if (probability(VERY_SLOW_PATH_PROBABILITY, currentMark.subtract(stackPointer).and(alignedMask.subtract(pageSize())).notEqual(0))) {
 371                 // Most likely not a recursive lock, go into a slow runtime call
 372                 traceObject(trace, "+lock{stub:failed-cas}", object, true);
 373                 lockStubFailedCas.inc();
 374                 monitorenterStubC(MONITORENTER, object, lock);
 375                 return;
 376             } else {
 377                 // Recursively locked => write 0 to the lock slot
 378                 lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), Word.zero(), DISPLACED_MARK_WORD_LOCATION);
 379                 traceObject(trace, "+lock{cas:recursive}", object, true);
 380                 lockCasRecursive.inc();
 381             }
 382         } else {
 383             traceObject(trace, "+lock{cas}", object, true);
 384             lockCas.inc();
 385             AcquiredCASLockNode.mark(object);
 386         }

 387     }
 388 
 389     /**
 390      * Calls straight out to the monitorenter stub.
 391      */
 392     @Snippet
 393     public static void monitorenterStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace) {
 394         verifyOop(object);
 395         incCounter();
 396         if (object == null) {
 397             DeoptimizeNode.deopt(DeoptimizationAction.InvalidateReprofile, DeoptimizationReason.NullCheckException);
 398         }
  399         // BeginLockScope nodes do not read from object, so a use of object
  400         // cannot float above the preceding null check
 401         final Word lock = beginLockScope(lockDepth);
 402         traceObject(trace, "+lock{stub}", object, true);
 403         monitorenterStubC(MONITORENTER, object, lock);
 404     }
 405 
 406     @Snippet
 407     public static void monitorexit(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace) {

 408         trace(trace, "           object: 0x%016lx\n", Word.objectToTrackedPointer(object));

 409         if (useBiasedLocking(INJECTED_VMCONFIG)) {
 410             // Check for biased locking unlock case, which is a no-op
 411             // Note: we do not have to check the thread ID for two reasons.
 412             // First, the interpreter checks for IllegalMonitorStateException at
 413             // a higher level. Second, if the bias was revoked while we held the
 414             // lock, the object could not be rebiased toward another thread, so
 415             // the bias bit would be clear.
 416             final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG));
 417             trace(trace, "             mark: 0x%016lx\n", mark);
 418             if (probability(BranchProbabilityNode.NOT_LIKELY_PROBABILITY, mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(Word.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) {
 419                 endLockScope();
 420                 decCounter();
 421                 traceObject(trace, "-lock{bias}", object, false);
 422                 unlockBias.inc();
 423                 return;
 424             }
 425         }
 426 
 427         final Word lock = CurrentLockNode.currentLock(lockDepth);
 428 
 429         // Load displaced mark
 430         final Word displacedMark = lock.readWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), DISPLACED_MARK_WORD_LOCATION);
 431         trace(trace, "    displacedMark: 0x%016lx\n", displacedMark);
 432 
 433         if (probability(BranchProbabilityNode.NOT_LIKELY_PROBABILITY, displacedMark.equal(0))) {
 434             // Recursive locking => done
 435             traceObject(trace, "-lock{recursive}", object, false);
 436             unlockCasRecursive.inc();
 437         } else {

 438             verifyOop(object);
 439             // Test if object's mark word is pointing to the displaced mark word, and if so, restore
 440             // the displaced mark in the object - if the object's mark word is not pointing to
 441             // the displaced mark word, do unlocking via runtime call.
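            // (The CAS below compares the mark word against the lock slot's address itself, so
            // the displaced header is only written back if the object is still stack-locked by
            // this frame's slot; an inflated or otherwise changed mark word takes the stub path.)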
 442             if (probability(VERY_SLOW_PATH_PROBABILITY,
 443                             DirectCompareAndSwapNode.compareAndSwap(OffsetAddressNode.address(object, markOffset(INJECTED_VMCONFIG)), lock, displacedMark, MARK_WORD_LOCATION).notEqual(lock))) {
 444                 // The object's mark word was not pointing to the displaced header,
 445                 // we do unlocking via runtime call.
 446                 traceObject(trace, "-lock{stub}", object, false);
 447                 unlockStub.inc();


 448                 monitorexitStubC(MONITOREXIT, object, lock);
 449             } else {
 450                 traceObject(trace, "-lock{cas}", object, false);
 451                 unlockCas.inc();
 452             }
 453         }
 454         endLockScope();
 455         decCounter();
 456     }
 457 
 458     /**
 459      * Calls straight out to the monitorexit stub.
 460      */
 461     @Snippet
 462     public static void monitorexitStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace) {
 463         verifyOop(object);
 464         traceObject(trace, "-lock{stub}", object, false);
 465         final Word lock = CurrentLockNode.currentLock(lockDepth);
 466         monitorexitStubC(MONITOREXIT, object, lock);
 467         endLockScope();
 468         decCounter();
 469     }
 470 
 471     public static void traceObject(boolean enabled, String action, Object object, boolean enter) {
 472         if (doProfile()) {
 473             DynamicCounterNode.counter(action, enter ? "number of monitor enters" : "number of monitor exits", 1, PROFILE_CONTEXT);
 474         }
 475         if (enabled) {
 476             Log.print(action);
 477             Log.print(' ');
 478             Log.printlnObject(object);
 479         }
 480     }
 481 
 482     public static void trace(boolean enabled, String format, WordBase value) {
 483         if (enabled) {
 484             Log.printf(format, value.rawValue());
 485         }
 486     }
 487 
 488     /**
 489      * Leaving the breakpoint code in to provide an example of how to use the {@link BreakpointNode}
 490      * intrinsic.
 491      */
 492     private static final boolean ENABLE_BREAKPOINT = false;
 493 
 494     private static final LocationIdentity MONITOR_COUNTER_LOCATION = NamedLocationIdentity.mutable("MonitorCounter");
 495 
 496     @NodeIntrinsic(BreakpointNode.class)
 497     static native void bkpt(Object object, Word mark, Word tmp, Word value);
 498 
 499     private static final boolean VERIFY_BALANCED_MONITORS = VerifyBalancedMonitors.getValue();
 500 
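    // When VerifyBalancedMonitors is enabled, a counter word provided by MonitorCounterNode is
    // incremented on every monitor enter and decremented on every exit; checkBalancedMonitors
    // (in Templates below) wires initCounter into the graph start and checkCounter into every
    // return, so an unbalanced count reports a vmError at run time.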
 501     public static void incCounter() {
 502         if (VERIFY_BALANCED_MONITORS) {
 503             final Word counter = MonitorCounterNode.counter();
 504             final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
 505             counter.writeInt(0, count + 1, MONITOR_COUNTER_LOCATION);
 506         }
 507     }
 508 
 509     public static void decCounter() {
 510         if (VERIFY_BALANCED_MONITORS) {
 511             final Word counter = MonitorCounterNode.counter();
 512             final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
 513             counter.writeInt(0, count - 1, MONITOR_COUNTER_LOCATION);
 514         }
 515     }
 516 
 517     @Snippet
 518     private static void initCounter() {
 519         final Word counter = MonitorCounterNode.counter();
 520         counter.writeInt(0, 0, MONITOR_COUNTER_LOCATION);
 521     }
 522 
 523     @Snippet
 524     private static void checkCounter(@ConstantParameter String errMsg) {
 525         final Word counter = MonitorCounterNode.counter();
 526         final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
 527         if (count != 0) {
 528             vmError(errMsg, count);
 529         }
 530     }
 531 
 532     public static class Templates extends AbstractTemplates {
 533 
 534         private final SnippetInfo monitorenter = snippet(MonitorSnippets.class, "monitorenter");
 535         private final SnippetInfo monitorexit = snippet(MonitorSnippets.class, "monitorexit");
 536         private final SnippetInfo monitorenterStub = snippet(MonitorSnippets.class, "monitorenterStub");
 537         private final SnippetInfo monitorexitStub = snippet(MonitorSnippets.class, "monitorexitStub");
 538         private final SnippetInfo initCounter = snippet(MonitorSnippets.class, "initCounter");
 539         private final SnippetInfo checkCounter = snippet(MonitorSnippets.class, "checkCounter");
 540 
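        // Selects between the inline fast-path snippets (monitorenter/monitorexit) and the plain
        // stub-call snippets (monitorenterStub/monitorexitStub); see the lower(...) methods below.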
 541         private final boolean useFastLocking;

 542 
 543         public Templates(HotSpotProviders providers, TargetDescription target, boolean useFastLocking) {
 544             super(providers, providers.getSnippetReflection(), target);
 545             this.useFastLocking = useFastLocking;


 546         }
 547 
 548         public void lower(RawMonitorEnterNode monitorenterNode, HotSpotRegistersProvider registers, LoweringTool tool) {
 549             StructuredGraph graph = monitorenterNode.graph();
 550             checkBalancedMonitors(graph, tool);
 551 
 552             assert ((ObjectStamp) monitorenterNode.object().stamp()).nonNull();
 553 
 554             Arguments args;
 555             if (useFastLocking) {
 556                 args = new Arguments(monitorenter, graph.getGuardsStage(), tool.getLoweringStage());
 557                 args.add("object", monitorenterNode.object());
 558                 args.add("hub", monitorenterNode.getHub());
 559                 args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
 560                 args.addConst("threadRegister", registers.getThreadRegister());
 561                 args.addConst("stackPointerRegister", registers.getStackPointerRegister());
 562                 args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph.method()));


 563             } else {
 564                 args = new Arguments(monitorenterStub, graph.getGuardsStage(), tool.getLoweringStage());
 565                 args.add("object", monitorenterNode.object());
 566                 args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
 567                 args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph.method()));


 568             }
 569 
 570             template(args).instantiate(providers.getMetaAccess(), monitorenterNode, DEFAULT_REPLACER, args);
 571         }
 572 
 573         public void lower(MonitorExitNode monitorexitNode, LoweringTool tool) {
 574             StructuredGraph graph = monitorexitNode.graph();
 575 
 576             Arguments args;
 577             if (useFastLocking) {
 578                 args = new Arguments(monitorexit, graph.getGuardsStage(), tool.getLoweringStage());
 579             } else {
 580                 args = new Arguments(monitorexitStub, graph.getGuardsStage(), tool.getLoweringStage());
 581             }
 582             args.add("object", monitorexitNode.object());
 583             args.addConst("lockDepth", monitorexitNode.getMonitorId().getLockDepth());
 584             args.addConst("trace", isTracingEnabledForType(monitorexitNode.object()) || isTracingEnabledForMethod(graph.method()));
 585 
 586             template(args).instantiate(providers.getMetaAccess(), monitorexitNode, DEFAULT_REPLACER, args);
 587         }
 588 
 589         public static boolean isTracingEnabledForType(ValueNode object) {
 590             ResolvedJavaType type = StampTool.typeOrNull(object.stamp());
 591             String filter = TraceMonitorsTypeFilter.getValue();
 592             if (filter == null) {
 593                 return false;
 594             } else {
 595                 if (filter.length() == 0) {
 596                     return true;
 597                 }
 598                 if (type == null) {
 599                     return false;
 600                 }
 601                 return (type.getName().contains(filter));
 602             }
 603         }
 604 
 605         public static boolean isTracingEnabledForMethod(ResolvedJavaMethod method) {
 606             String filter = TraceMonitorsMethodFilter.getValue();
 607             if (filter == null) {
 608                 return false;
 609             } else {
 610                 if (filter.length() == 0) {
 611                     return true;
 612                 }
 613                 if (method == null) {
 614                     return false;
 615                 }
 616                 return (method.format("%H.%n").contains(filter));
 617             }
 618         }
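        // Usage note: a null (unset) filter disables tracing, an empty filter traces everything,
        // and any other value is treated as a substring match against the type name or the
        // "%H.%n" formatted method name.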
 619 
 620         /**
 621          * If balanced monitor checking is enabled then nodes are inserted at the start and all
 622          * return points of the graph to initialize and check the monitor counter respectively.
 623          */
 624         private void checkBalancedMonitors(StructuredGraph graph, LoweringTool tool) {
 625             if (VERIFY_BALANCED_MONITORS) {
 626                 NodeIterable<MonitorCounterNode> nodes = graph.getNodes().filter(MonitorCounterNode.class);
 627                 if (nodes.isEmpty()) {
 628                     // Only insert the nodes if this is the first monitorenter being lowered.
 629                     JavaType returnType = initCounter.getMethod().getSignature().getReturnType(initCounter.getMethod().getDeclaringClass());
 630                     StampPair returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
 631                     MethodCallTargetNode callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, initCounter.getMethod(), new ValueNode[0], returnStamp, null));
 632                     InvokeNode invoke = graph.add(new InvokeNode(callTarget, 0));
 633                     invoke.setStateAfter(graph.start().stateAfter());
 634                     graph.addAfterFixed(graph.start(), invoke);
 635 
 636                     StructuredGraph inlineeGraph = providers.getReplacements().getSnippet(initCounter.getMethod(), null);
 637                     InliningUtil.inline(invoke, inlineeGraph, false, null, null);
 638 
 639                     List<ReturnNode> rets = graph.getNodes(ReturnNode.TYPE).snapshot();
 640                     for (ReturnNode ret : rets) {
 641                         returnType = checkCounter.getMethod().getSignature().getReturnType(checkCounter.getMethod().getDeclaringClass());
 642                         String msg = "unbalanced monitors in " + graph.method().format("%H.%n(%p)") + ", count = %d";
 643                         ConstantNode errMsg = ConstantNode.forConstant(tool.getConstantReflection().forString(msg), providers.getMetaAccess(), graph);
 644                         returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
 645                         callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, checkCounter.getMethod(), new ValueNode[]{errMsg}, returnStamp, null));
 646                         invoke = graph.add(new InvokeNode(callTarget, 0));
 647                         Bytecode code = new ResolvedJavaMethodBytecode(graph.method());
 648                         FrameState stateAfter = new FrameState(null, code, BytecodeFrame.AFTER_BCI, new ValueNode[0], new ValueNode[0], 0, new ValueNode[0], null, false, false);
 649                         invoke.setStateAfter(graph.add(stateAfter));
 650                         graph.addBeforeFixed(ret, invoke);
 651 
 652                         Arguments args = new Arguments(checkCounter, graph.getGuardsStage(), tool.getLoweringStage());
 653                         args.addConst("errMsg", msg);
 654                         inlineeGraph = template(args).copySpecializedGraph();
 655                         InliningUtil.inline(invoke, inlineeGraph, false, null, null);
 656                     }
 657                 }
 658             }
 659         }
 660     }
 661 
 662     public static final ForeignCallDescriptor MONITORENTER = new ForeignCallDescriptor("monitorenter", void.class, Object.class, Word.class);
 663     public static final ForeignCallDescriptor MONITOREXIT = new ForeignCallDescriptor("monitorexit", void.class, Object.class, Word.class);
 664 
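    // Calls to the two methods below never execute as Java calls: @NodeIntrinsic(ForeignCallNode.class)
    // replaces each call site with a foreign call to the descriptor given as the first argument,
    // i.e. the runtime's monitorenter/monitorexit slow paths registered for MONITORENTER/MONITOREXIT.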
 665     @NodeIntrinsic(ForeignCallNode.class)
 666     private static native void monitorenterStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);
 667 
 668     @NodeIntrinsic(ForeignCallNode.class)
 669     public static native void monitorexitStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);
 670 
 671     /**
 672      * Counters for the various paths for acquiring a lock. The counters whose names start with
 673      * {@code "lock"} are mutually exclusive. The other counters are for paths that may be shared.
 674      */
 675     public static final SnippetCounter.Group lockCounters = SnippetCounters.getValue() ? new SnippetCounter.Group("MonitorEnters") : null;
 676     public static final SnippetCounter lockBiasExisting = new SnippetCounter(lockCounters, "lock{bias:existing}", "bias-locked previously biased object");
 677     public static final SnippetCounter lockBiasAcquired = new SnippetCounter(lockCounters, "lock{bias:acquired}", "bias-locked newly biased object");
 678     public static final SnippetCounter lockBiasTransfer = new SnippetCounter(lockCounters, "lock{bias:transfer}", "bias-locked, biased transferred");
 679     public static final SnippetCounter lockCas = new SnippetCounter(lockCounters, "lock{cas}", "cas-locked an object");
 680     public static final SnippetCounter lockCasRecursive = new SnippetCounter(lockCounters, "lock{cas:recursive}", "cas-locked, recursive");
 681     public static final SnippetCounter lockStubEpochExpired = new SnippetCounter(lockCounters, "lock{stub:epoch-expired}", "stub-locked, epoch expired");
 682     public static final SnippetCounter lockStubRevoke = new SnippetCounter(lockCounters, "lock{stub:revoke}", "stub-locked, biased revoked");
 683     public static final SnippetCounter lockStubFailedCas = new SnippetCounter(lockCounters, "lock{stub:failed-cas}", "stub-locked, failed cas");
 684 
 685     public static final SnippetCounter unbiasable = new SnippetCounter(lockCounters, "unbiasable", "object with unbiasable type");
 686     public static final SnippetCounter revokeBias = new SnippetCounter(lockCounters, "revokeBias", "object had bias revoked");
 687 
 688     /**
 689      * Counters for the various paths for releasing a lock. The counters whose names start with
 690      * {@code "unlock"} are mutually exclusive. The other counters are for paths that may be shared.
 691      */
 692     public static final SnippetCounter.Group unlockCounters = SnippetCounters.getValue() ? new SnippetCounter.Group("MonitorExits") : null;
 693     public static final SnippetCounter unlockBias = new SnippetCounter(unlockCounters, "unlock{bias}", "bias-unlocked an object");
 694     public static final SnippetCounter unlockCas = new SnippetCounter(unlockCounters, "unlock{cas}", "cas-unlocked an object");
 695     public static final SnippetCounter unlockCasRecursive = new SnippetCounter(unlockCounters, "unlock{cas:recursive}", "cas-unlocked an object, recursive");
 696     public static final SnippetCounter unlockStub = new SnippetCounter(unlockCounters, "unlock{stub}", "stub-unlocked an object");
 697 }


   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 package org.graalvm.compiler.hotspot.replacements;
  24 
  25 import static jdk.vm.ci.code.MemoryBarriers.LOAD_STORE;
  26 import static jdk.vm.ci.code.MemoryBarriers.STORE_STORE;
  27 import static org.graalvm.compiler.hotspot.GraalHotSpotVMConfig.INJECTED_VMCONFIG;
  28 import static org.graalvm.compiler.hotspot.nodes.BeginLockScopeNode.beginLockScope;

  29 import static org.graalvm.compiler.hotspot.nodes.EndLockScopeNode.endLockScope;
  30 import static org.graalvm.compiler.hotspot.nodes.VMErrorNode.vmError;
  31 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.DISPLACED_MARK_WORD_LOCATION;
  32 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.MARK_WORD_LOCATION;
  33 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_CXQ_LOCATION;
  34 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_ENTRY_LIST_LOCATION;
  35 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_OWNER_LOCATION;
  36 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_RECURSION_LOCATION;
  37 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.PROTOTYPE_MARK_WORD_LOCATION;
  38 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.ageMaskInPlace;
  39 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockMaskInPlace;
  40 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockPattern;
  41 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.config;
  42 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.epochMaskInPlace;
  43 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.loadWordFromObject;
  44 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.lockDisplacedMarkOffset;
  45 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.markOffset;
  46 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.monitorMask;
  47 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorCxqOffset;
  48 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorEntryListOffset;
  49 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorOwnerOffset;
  50 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorRecursionsOffset;
  51 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.pageSize;
  52 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.prototypeMarkWordOffset;
  53 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.registerAsWord;
  54 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.unlockedMask;
  55 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.useBiasedLocking;
  56 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.verifyOop;
  57 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.wordSize;
  58 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.ProfileMonitors;
  59 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.SimpleFastInflatedLocking;
  60 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsMethodFilter;
  61 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsTypeFilter;
  62 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.VerifyBalancedMonitors;
  63 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FAST_PATH_PROBABILITY;
  64 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FREQUENT_PROBABILITY;
  65 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.NOT_FREQUENT_PROBABILITY;
  66 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.NOT_LIKELY_PROBABILITY;
  67 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.SLOW_PATH_PROBABILITY;
  68 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.VERY_FAST_PATH_PROBABILITY;

  69 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.probability;
  70 import static org.graalvm.compiler.replacements.SnippetTemplate.DEFAULT_REPLACER;
  71 
  72 import java.util.List;
  73 
  74 import org.graalvm.compiler.api.replacements.Fold;
  75 import org.graalvm.compiler.api.replacements.Snippet;
  76 import org.graalvm.compiler.api.replacements.Snippet.ConstantParameter;
  77 import org.graalvm.compiler.bytecode.Bytecode;
  78 import org.graalvm.compiler.bytecode.ResolvedJavaMethodBytecode;
  79 import org.graalvm.compiler.core.common.LocationIdentity;
  80 import org.graalvm.compiler.core.common.spi.ForeignCallDescriptor;
  81 import org.graalvm.compiler.core.common.type.ObjectStamp;
  82 import org.graalvm.compiler.core.common.type.StampFactory;
  83 import org.graalvm.compiler.core.common.type.StampPair;
  84 import org.graalvm.compiler.graph.Node.ConstantNodeParameter;
  85 import org.graalvm.compiler.graph.Node.NodeIntrinsic;
  86 import org.graalvm.compiler.graph.iterators.NodeIterable;
  87 import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig;
  88 import org.graalvm.compiler.hotspot.meta.HotSpotProviders;
  89 import org.graalvm.compiler.hotspot.meta.HotSpotRegistersProvider;
  90 import org.graalvm.compiler.hotspot.nodes.AcquiredCASLockNode;
  91 import org.graalvm.compiler.hotspot.nodes.CurrentLockNode;

  92 import org.graalvm.compiler.hotspot.nodes.FastAcquireBiasedLockNode;
  93 import org.graalvm.compiler.hotspot.nodes.MonitorCounterNode;
  94 import org.graalvm.compiler.hotspot.word.KlassPointer;
  95 import org.graalvm.compiler.nodes.BreakpointNode;
  96 import org.graalvm.compiler.nodes.CallTargetNode.InvokeKind;
  97 import org.graalvm.compiler.nodes.ConstantNode;
  98 import org.graalvm.compiler.nodes.DeoptimizeNode;
  99 import org.graalvm.compiler.nodes.FrameState;
 100 import org.graalvm.compiler.nodes.InvokeNode;
 101 import org.graalvm.compiler.nodes.NamedLocationIdentity;
 102 import org.graalvm.compiler.nodes.ReturnNode;
 103 import org.graalvm.compiler.nodes.StructuredGraph;
 104 import org.graalvm.compiler.nodes.ValueNode;
 105 import org.graalvm.compiler.nodes.debug.DynamicCounterNode;

 106 import org.graalvm.compiler.nodes.extended.ForeignCallNode;
 107 import org.graalvm.compiler.nodes.extended.MembarNode;
 108 import org.graalvm.compiler.nodes.java.MethodCallTargetNode;
 109 import org.graalvm.compiler.nodes.java.MonitorExitNode;
 110 import org.graalvm.compiler.nodes.java.RawMonitorEnterNode;

 111 import org.graalvm.compiler.nodes.spi.LoweringTool;
 112 import org.graalvm.compiler.nodes.type.StampTool;
 113 import org.graalvm.compiler.options.OptionValues;
 114 import org.graalvm.compiler.phases.common.inlining.InliningUtil;
 115 import org.graalvm.compiler.replacements.Log;
 116 import org.graalvm.compiler.replacements.SnippetCounter;
 117 import org.graalvm.compiler.replacements.SnippetTemplate.AbstractTemplates;
 118 import org.graalvm.compiler.replacements.SnippetTemplate.Arguments;
 119 import org.graalvm.compiler.replacements.SnippetTemplate.SnippetInfo;
 120 import org.graalvm.compiler.replacements.Snippets;
 121 import org.graalvm.compiler.word.Pointer;
 122 import org.graalvm.compiler.word.Word;
 123 import org.graalvm.compiler.word.WordBase;
 124 
 125 import jdk.vm.ci.code.BytecodeFrame;
 126 import jdk.vm.ci.code.Register;
 127 import jdk.vm.ci.code.TargetDescription;
 128 import jdk.vm.ci.meta.DeoptimizationAction;
 129 import jdk.vm.ci.meta.DeoptimizationReason;
 130 import jdk.vm.ci.meta.JavaType;

 131 import jdk.vm.ci.meta.ResolvedJavaType;
 132 
 133 /**
 134  * Snippets used for implementing the monitorenter and monitorexit instructions.
 135  *
 136  * The locking algorithm used is described in the paper
 137  * <a href="http://dl.acm.org/citation.cfm?id=1167515.1167496"> Eliminating synchronization-related
 138  * atomic operations with biased locking and bulk rebiasing</a> by Kenneth Russell and David
 139  * Detlefs.
 140  *
 141  * Comment below is reproduced from {@code markOop.hpp} for convenience:
 142  *
 143  * <pre>
 144  *  Bit-format of an object header (most significant first, big endian layout below):
 145  *  32 bits:
 146  *  --------
 147  *             hash:25 ------------>| age:4    biased_lock:1 lock:2 (normal object)
 148  *             JavaThread*:23 epoch:2 age:4    biased_lock:1 lock:2 (biased object)
 149  *             size:32 ------------------------------------------>| (CMS free block)
 150  *             PromotedObject*:29 ---------->| promo_bits:3 ----->| (CMS promoted object)


 197  *
 198  *  - the two lock bits are used to describe three states: locked/unlocked and monitor.
 199  *
 200  *    [ptr             | 00]  locked             ptr points to real header on stack
 201  *    [header      | 0 | 01]  unlocked           regular object header
  202  *    [ptr             | 10]  monitor            inflated lock (header is swapped out)
 203  *    [ptr             | 11]  marked             used by markSweep to mark an object
 204  *                                               not valid at any other time
 205  *
 206  *    We assume that stack/thread pointers have the lowest two bits cleared.
 207  * </pre>
 208  *
  209  * Note that {@code Thread::allocate} ensures that {@code JavaThread} objects are aligned
  210  * appropriately to comply with the layouts above.
 211  */
 212 public class MonitorSnippets implements Snippets {
 213 
 214     private static final boolean PROFILE_CONTEXT = false;
 215 
 216     @Fold
 217     static boolean doProfile(OptionValues options) {
 218         return ProfileMonitors.getValue(options);
 219     }
 220 
 221     @Snippet
 222     public static void monitorenter(Object object, KlassPointer hub, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter Register stackPointerRegister,
 223                     @ConstantParameter boolean trace, @ConstantParameter OptionValues options, @ConstantParameter Counters counters) {
 224         verifyOop(object);
 225 
 226         // Load the mark word - this includes a null-check on object
 227         final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG));
 228 
 229         final Word lock = beginLockScope(lockDepth);
 230 
 231         Pointer objectPointer = Word.objectToTrackedPointer(object);
 232         trace(trace, "           object: 0x%016lx\n", objectPointer);
 233         trace(trace, "             lock: 0x%016lx\n", lock);
 234         trace(trace, "             mark: 0x%016lx\n", mark);
 235 
 236         incCounter(options);
 237 
 238         if (useBiasedLocking(INJECTED_VMCONFIG)) {
 239             if (tryEnterBiased(object, hub, lock, mark, threadRegister, trace, options, counters)) {
 240                 return;
 241             }
 242             // not biased, fall-through
 243         }
 244         if (inlineFastLockSupported(options) && probability(SLOW_PATH_PROBABILITY, mark.and(monitorMask(INJECTED_VMCONFIG)).notEqual(0))) {
 245             // Inflated case
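            // (With SimpleFastInflatedLocking enabled, tryEnterInflated attempts to CAS the
            // ObjectMonitor's owner field from null to the current thread; if it cannot acquire
            // the monitor that way it returns false and the monitorenter stub below is used.)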
 246             if (tryEnterInflated(object, lock, mark, threadRegister, trace, options, counters)) {
 247                 return;
 248             }
 249         } else {
 250             // Create the unlocked mark word pattern
 251             Word unlockedMark = mark.or(unlockedMask(INJECTED_VMCONFIG));
 252             trace(trace, "     unlockedMark: 0x%016lx\n", unlockedMark);
 253 
 254             // Copy this unlocked mark word into the lock slot on the stack
 255             lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), unlockedMark, DISPLACED_MARK_WORD_LOCATION);
 256 
 257             // make sure previous store does not float below compareAndSwap
 258             MembarNode.memoryBarrier(STORE_STORE);
 259 
 260             // Test if the object's mark word is unlocked, and if so, store the
 261             // (address of) the lock slot into the object's mark word.
 262             Word currentMark = objectPointer.compareAndSwapWord(markOffset(INJECTED_VMCONFIG), unlockedMark, lock, MARK_WORD_LOCATION);
 263             if (probability(FAST_PATH_PROBABILITY, currentMark.equal(unlockedMark))) {
 264                 traceObject(trace, "+lock{cas}", object, true, options);
 265                 counters.lockCas.inc();
 266                 AcquiredCASLockNode.mark(object);
 267                 return;
 268             } else {
 269                 trace(trace, "      currentMark: 0x%016lx\n", currentMark);
 270                 // The mark word in the object header was not the same.
 271                 // Either the object is locked by another thread or is already locked
 272                 // by the current thread. The latter is true if the mark word
 273                 // is a stack pointer into the current thread's stack, i.e.:
 274                 //
 275                 // 1) (currentMark & aligned_mask) == 0
 276                 // 2) rsp <= currentMark
 277                 // 3) currentMark <= rsp + page_size
 278                 //
 279                 // These 3 tests can be done by evaluating the following expression:
 280                 //
 281                 // (currentMark - rsp) & (aligned_mask - page_size)
 282                 //
 283                 // assuming both the stack pointer and page_size have their least
 284                 // significant 2 bits cleared and page_size is a power of 2
 285                 final Word alignedMask = Word.unsigned(wordSize() - 1);
 286                 final Word stackPointer = registerAsWord(stackPointerRegister).add(config(INJECTED_VMCONFIG).stackBias);
 287                 if (probability(FAST_PATH_PROBABILITY, currentMark.subtract(stackPointer).and(alignedMask.subtract(pageSize())).equal(0))) {
 288                     // Recursively locked => write 0 to the lock slot
 289                     lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), Word.zero(), DISPLACED_MARK_WORD_LOCATION);
 290                     traceObject(trace, "+lock{cas:recursive}", object, true, options);
 291                     counters.lockCasRecursive.inc();
 292                     return;
 293                 }
 294                 traceObject(trace, "+lock{stub:failed-cas/stack}", object, true, options);
 295                 counters.lockStubFailedCas.inc();
 296             }
 297         }
 298         // slow-path runtime-call
 299         monitorenterStubC(MONITORENTER, object, lock);
 300     }
 301 
 302     private static boolean tryEnterBiased(Object object, KlassPointer hub, Word lock, Word mark, Register threadRegister, boolean trace, OptionValues options, Counters counters) {
 303         // See whether the lock is currently biased toward our thread and
 304         // whether the epoch is still valid.
 305         // Note that the runtime guarantees sufficient alignment of JavaThread
 306         // pointers to allow age to be placed into low bits.
 307         final Word biasableLockBits = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG));
 308 
 309         // Check whether the bias pattern is present in the object's mark word
 310         // and whether the bias owner and the epoch are both still current.
 311         final Word prototypeMarkWord = hub.readWord(prototypeMarkWordOffset(INJECTED_VMCONFIG), PROTOTYPE_MARK_WORD_LOCATION);
 312         final Word thread = registerAsWord(threadRegister);
 313         final Word tmp = prototypeMarkWord.or(thread).xor(mark).and(~ageMaskInPlace(INJECTED_VMCONFIG));
 314         trace(trace, "prototypeMarkWord: 0x%016lx\n", prototypeMarkWord);
 315         trace(trace, "           thread: 0x%016lx\n", thread);
 316         trace(trace, "              tmp: 0x%016lx\n", tmp);
 317         if (probability(FAST_PATH_PROBABILITY, tmp.equal(0))) {
 318             // Object is already biased to current thread -> done
 319             traceObject(trace, "+lock{bias:existing}", object, true, options);
 320             counters.lockBiasExisting.inc();
 321             FastAcquireBiasedLockNode.mark(object);
 322             return true;
 323         }
 324 
 325         // Now check to see whether biasing is enabled for this object
 326         if (probability(NOT_FREQUENT_PROBABILITY, biasableLockBits.equal(Word.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) {
 327             Pointer objectPointer = Word.objectToTrackedPointer(object);
 328             // At this point we know that the mark word has the bias pattern and
 329             // that we are not the bias owner in the current epoch. We need to
 330             // figure out more details about the state of the mark word in order to
 331             // know what operations can be legally performed on the object's
 332             // mark word.
 333 
 334             // If the low three bits in the xor result aren't clear, that means
 335             // the prototype header is no longer biasable and we have to revoke
 336             // the bias on this object.
 337             if (probability(FREQUENT_PROBABILITY, tmp.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(0))) {
 338                 // Biasing is still enabled for object's type. See whether the
 339                 // epoch of the current bias is still valid, meaning that the epoch
 340                 // bits of the mark word are equal to the epoch bits of the
 341                 // prototype mark word. (Note that the prototype mark word's epoch bits
 342                 // only change at a safepoint.) If not, attempt to rebias the object
 343                 // toward the current thread. Note that we must be absolutely sure
 344                 // that the current epoch is invalid in order to do this because
 345                 // otherwise the manipulations it performs on the mark word are
 346                 // illegal.
 347                 if (probability(FREQUENT_PROBABILITY, tmp.and(epochMaskInPlace(INJECTED_VMCONFIG)).equal(0))) {
 348                     // The epoch of the current bias is still valid but we know nothing
 349                     // about the owner; it might be set or it might be clear. Try to
 350                     // acquire the bias of the object using an atomic operation. If this
 351                     // fails, we will go into the runtime to revoke the object's bias.
 352                     // Note that we first construct the presumed unbiased header so we
 353                     // don't accidentally blow away another thread's valid bias.
 354                     Word unbiasedMark = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG) | ageMaskInPlace(INJECTED_VMCONFIG) | epochMaskInPlace(INJECTED_VMCONFIG));
 355                     Word biasedMark = unbiasedMark.or(thread);
 356                     trace(trace, "     unbiasedMark: 0x%016lx\n", unbiasedMark);
 357                     trace(trace, "       biasedMark: 0x%016lx\n", biasedMark);
 358                     if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), unbiasedMark, biasedMark, MARK_WORD_LOCATION))) {
 359                         // Object is now biased to current thread -> done
 360                         traceObject(trace, "+lock{bias:acquired}", object, true, options);
 361                         counters.lockBiasAcquired.inc();
 362                         return true;
 363                     }
 364                     // If the biasing toward our thread failed, this means that another thread
 365                     // owns the bias and we need to revoke that bias. The revocation will occur
 366                     // in the interpreter runtime.
 367                     traceObject(trace, "+lock{stub:revoke}", object, true, options);
 368                     counters.lockStubRevoke.inc();
 369                 } else {
 370                     // At this point we know the epoch has expired, meaning that the
 371                     // current bias owner, if any, is actually invalid. Under these
 372                     // circumstances _only_, are we allowed to use the current mark word
 373                     // value as the comparison value when doing the CAS to acquire the
 374                     // bias in the current epoch. In other words, we allow transfer of
 375                     // the bias from one thread to another directly in this situation.
 376                     Word biasedMark = prototypeMarkWord.or(thread);
 377                     trace(trace, "       biasedMark: 0x%016lx\n", biasedMark);
 378                     if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), mark, biasedMark, MARK_WORD_LOCATION))) {
 379                         // Object is now biased to current thread -> done
 380                         traceObject(trace, "+lock{bias:transfer}", object, true, options);
 381                         counters.lockBiasTransfer.inc();
 382                         return true;
 383                     }
 384                     // If the biasing toward our thread failed, then another thread
 385                     // succeeded in biasing it toward itself and we need to revoke that
 386                     // bias. The revocation will occur in the runtime in the slow case.
 387                     traceObject(trace, "+lock{stub:epoch-expired}", object, true, options);
 388                     counters.lockStubEpochExpired.inc();
 389                 }
 390                 // slow-path runtime-call
 391                 monitorenterStubC(MONITORENTER, object, lock);
 392                 return true;
 393             } else {
 394                 // The prototype mark word doesn't have the bias bit set any
 395                 // more, indicating that objects of this data type are not supposed
 396                 // to be biased any more. We are going to try to reset the mark of
 397                 // this object to the prototype value and fall through to the
 398                 // CAS-based locking scheme. Note that if our CAS fails, it means
 399                 // that another thread raced us for the privilege of revoking the
 400                 // bias of this particular object, so it's okay to continue in the
 401                 // normal locking code.
 402                 Word result = objectPointer.compareAndSwapWord(markOffset(INJECTED_VMCONFIG), mark, prototypeMarkWord, MARK_WORD_LOCATION);
 403 
 404                 // Fall through to the normal CAS-based lock, because no matter what
 405                 // the result of the above CAS, some thread must have succeeded in
 406                 // removing the bias bit from the object's header.
 407 
 408                 if (ENABLE_BREAKPOINT) {
 409                     bkpt(object, mark, tmp, result);
 410                 }
 411                 counters.revokeBias.inc();
 412                 return false;
 413             }
 414         } else {
 415             // Biasing not enabled -> fall through to lightweight locking
 416             counters.unbiasable.inc();
 417             return false;
 418         }
 419     }
 420 
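         // Being @Fold, this evaluates to a constant when the snippet is specialized,
         // so the inflated fast-path code disappears entirely when
         // SimpleFastInflatedLocking is disabled.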
 421     @Fold
 422     public static boolean useFastInflatedLocking(OptionValues options) {
 423         return SimpleFastInflatedLocking.getValue(options);
 424     }
 425 
 426     private static boolean inlineFastLockSupported(OptionValues options) {
 427         return inlineFastLockSupported(INJECTED_VMCONFIG, options);
 428     }
 429 
 430     private static boolean inlineFastLockSupported(GraalHotSpotVMConfig config, OptionValues options) {
 431         return useFastInflatedLocking(options) && monitorMask(config) >= 0 && objectMonitorOwnerOffset(config) >= 0;
 432     }
 433 
 434     private static boolean tryEnterInflated(Object object, Word lock, Word mark, Register threadRegister, boolean trace, OptionValues options, Counters counters) {
 435         // write non-zero value to lock slot
 436         lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), lock, DISPLACED_MARK_WORD_LOCATION);
 437         // mark is a pointer to the ObjectMonitor + monitorMask
 438         Word monitor = mark.subtract(monitorMask(INJECTED_VMCONFIG));
 439         int ownerOffset = objectMonitorOwnerOffset(INJECTED_VMCONFIG);
 440         Word owner = monitor.readWord(ownerOffset, OBJECT_MONITOR_OWNER_LOCATION);
 441         if (probability(FREQUENT_PROBABILITY, owner.equal(0))) {
 442             // it appears unlocked (owner == 0)
 443             if (probability(FREQUENT_PROBABILITY, monitor.logicCompareAndSwapWord(ownerOffset, owner, registerAsWord(threadRegister), OBJECT_MONITOR_OWNER_LOCATION))) {
 444                 // success
 445                 traceObject(trace, "+lock{inflated:cas}", object, true, options);
 446                 counters.inflatedCas.inc();
 447                 return true;
 448             } else {
 449                 traceObject(trace, "+lock{stub:inflated:failed-cas}", object, true, options);
 450                 counters.inflatedFailedCas.inc();
 451             }
 452         } else {
 453             traceObject(trace, "+lock{stub:inflated:owned}", object, true, options);
 454             counters.inflatedOwned.inc();
 455         }
 456         return false;
 457     }
 458 
 459     /**
 460      * Calls straight out to the monitorenter stub.
 461      */
 462     @Snippet
 463     public static void monitorenterStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace, @ConstantParameter OptionValues options) {
 464         verifyOop(object);
 465         incCounter(options);
 466         if (object == null) {
 467             DeoptimizeNode.deopt(DeoptimizationAction.InvalidateReprofile, DeoptimizationReason.NullCheckException);
 468         }
 469         // BeginLockScope nodes do not read from object so a use of object
 470         // cannot float above the null check
 471         final Word lock = beginLockScope(lockDepth);
 472         traceObject(trace, "+lock{stub}", object, true, options);
 473         monitorenterStubC(MONITORENTER, object, lock);
 474     }
 475 
 476     @Snippet
 477     public static void monitorexit(Object object, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter boolean trace,
 478                     @ConstantParameter OptionValues options, @ConstantParameter Counters counters) {
 479         trace(trace, "           object: 0x%016lx\n", Word.objectToTrackedPointer(object));
 480         final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG));
 481         if (useBiasedLocking(INJECTED_VMCONFIG)) {
 482             // Check for biased locking unlock case, which is a no-op
 483             // Note: we do not have to check the thread ID for two reasons.
 484             // First, the interpreter checks for IllegalMonitorStateException at
 485             // a higher level. Second, if the bias was revoked while we held the
 486             // lock, the object could not be rebiased toward another thread, so
 487             // the bias bit would be clear.
 488             trace(trace, "             mark: 0x%016lx\n", mark);
 489             if (probability(FREQUENT_PROBABILITY, mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(Word.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) {
 490                 endLockScope();
 491                 decCounter(options);
 492                 traceObject(trace, "-lock{bias}", object, false, options);
 493                 counters.unlockBias.inc();
 494                 return;
 495             }
 496         }
 497 
 498         final Word lock = CurrentLockNode.currentLock(lockDepth);
 499 
 500         // Load displaced mark
 501         final Word displacedMark = lock.readWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), DISPLACED_MARK_WORD_LOCATION);
 502         trace(trace, "    displacedMark: 0x%016lx\n", displacedMark);
 503 
 504         if (probability(NOT_LIKELY_PROBABILITY, displacedMark.equal(0))) {
 505             // Recursive locking => done
 506             traceObject(trace, "-lock{recursive}", object, false, options);
 507             counters.unlockCasRecursive.inc();
 508         } else {
 509             if (!tryExitInflated(object, mark, lock, threadRegister, trace, options, counters)) {
 510                 verifyOop(object);
 511                 // Test if the object's mark word is pointing to the displaced mark word,
 512                 // and if so, restore the displaced mark in the object. If the object's
 513                 // mark word is not pointing to the displaced mark word, do unlocking
 514                 // via runtime call.
 515                 Pointer objectPointer = Word.objectToTrackedPointer(object);
 516                 if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), lock, displacedMark, MARK_WORD_LOCATION))) {
 517                     traceObject(trace, "-lock{cas}", object, false, options);
 518                     counters.unlockCas.inc();
 519                 } else {
 520                     // The object's mark word was not pointing to the displaced header
 521                     traceObject(trace, "-lock{stub}", object, false, options);
 522                     counters.unlockStub.inc();
 523                     monitorexitStubC(MONITOREXIT, object, lock);
 524                 }
 525             }
 526         }
 527         endLockScope();
 528         decCounter(options);
 529     }
 530 
 531     private static boolean inlineFastUnlockSupported(OptionValues options) {
 532         return inlineFastUnlockSupported(INJECTED_VMCONFIG, options);
 533     }
 534 
 535     private static boolean inlineFastUnlockSupported(GraalHotSpotVMConfig config, OptionValues options) {
 536         return useFastInflatedLocking(options) && objectMonitorEntryListOffset(config) >= 0 && objectMonitorCxqOffset(config) >= 0 && monitorMask(config) >= 0 &&
 537                         objectMonitorOwnerOffset(config) >= 0 && objectMonitorRecursionsOffset(config) >= 0;
 538     }
 539 
 540     private static boolean tryExitInflated(Object object, Word mark, Word lock, Register threadRegister, boolean trace, OptionValues options, Counters counters) {
 541         if (!inlineFastUnlockSupported(options)) {
 542             return false;
 543         }
 544         if (probability(SLOW_PATH_PROBABILITY, mark.and(monitorMask(INJECTED_VMCONFIG)).notEqual(0))) {
 545             // Inflated case
 546             // mark is a pointer to the ObjectMonitor + monitorMask
 547             Word monitor = mark.subtract(monitorMask(INJECTED_VMCONFIG));
 548             int ownerOffset = objectMonitorOwnerOffset(INJECTED_VMCONFIG);
 549             Word owner = monitor.readWord(ownerOffset, OBJECT_MONITOR_OWNER_LOCATION);
 550             int recursionsOffset = objectMonitorRecursionsOffset(INJECTED_VMCONFIG);
 551             Word recursions = monitor.readWord(recursionsOffset, OBJECT_MONITOR_RECURSION_LOCATION);
 552             Word thread = registerAsWord(threadRegister);
 553             if (probability(FAST_PATH_PROBABILITY, owner.xor(thread).or(recursions).equal(0))) {
 554                 // owner == thread && recursions == 0
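                     // ((owner ^ thread) | recursions) == 0 exactly when owner == thread
                     // and recursions == 0, so one test covers both conditions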
 555                 int cxqOffset = objectMonitorCxqOffset(INJECTED_VMCONFIG);
 556                 Word cxq = monitor.readWord(cxqOffset, OBJECT_MONITOR_CXQ_LOCATION);
 557                 int entryListOffset = objectMonitorEntryListOffset(INJECTED_VMCONFIG);
 558                 Word entryList = monitor.readWord(entryListOffset, OBJECT_MONITOR_ENTRY_LIST_LOCATION);
 559                 if (probability(FREQUENT_PROBABILITY, cxq.or(entryList).equal(0))) {
 560                     // cxq == 0 && entryList == 0
 561                     // Nobody is waiting, success
 562                     // release_store
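                         // the barrier gives the owner-clearing write below release
                         // semantics: updates made while holding the monitor become
                         // visible before the monitor can be re-acquired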
 563                     MembarNode.memoryBarrier(LOAD_STORE | STORE_STORE);
 564                     monitor.writeWord(ownerOffset, Word.zero());
 565                     traceObject(trace, "-lock{inflated:simple}", object, false, options);
 566                     counters.unlockInflatedSimple.inc();
 567                     return true;
 568                 }
 569             }
 570             counters.unlockStubInflated.inc();
 571             traceObject(trace, "-lock{stub:inflated}", object, false, options);
 572             monitorexitStubC(MONITOREXIT, object, lock);
 573             return true;
 574         }
 575         return false;
 576     }
 577 
 578     /**
 579      * Calls straight out to the monitorexit stub.
 580      */
 581     @Snippet
 582     public static void monitorexitStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace, @ConstantParameter OptionValues options) {
 583         verifyOop(object);
 584         traceObject(trace, "-lock{stub}", object, false, options);
 585         final Word lock = CurrentLockNode.currentLock(lockDepth);
 586         monitorexitStubC(MONITOREXIT, object, lock);
 587         endLockScope();
 588         decCounter(options);
 589     }
 590 
 591     public static void traceObject(boolean enabled, String action, Object object, boolean enter, OptionValues options) {
 592         if (doProfile(options)) {
 593             DynamicCounterNode.counter(action, enter ? "number of monitor enters" : "number of monitor exits", 1, PROFILE_CONTEXT);
 594         }
 595         if (enabled) {
 596             Log.print(action);
 597             Log.print(' ');
 598             Log.printlnObject(object);
 599         }
 600     }
 601 
 602     public static void trace(boolean enabled, String format, WordBase value) {
 603         if (enabled) {
 604             Log.printf(format, value.rawValue());
 605         }
 606     }
 607 
 608     /**
 609      * Leaving the breakpoint code in to provide an example of how to use the {@link BreakpointNode}
 610      * intrinsic.
 611      */
 612     private static final boolean ENABLE_BREAKPOINT = false;
 613 
 614     private static final LocationIdentity MONITOR_COUNTER_LOCATION = NamedLocationIdentity.mutable("MonitorCounter");
 615 
 616     @NodeIntrinsic(BreakpointNode.class)
 617     static native void bkpt(Object object, Word mark, Word tmp, Word value);
 618 
 619     @Fold
 620     static boolean verifyBalancedMonitors(OptionValues options) {
 621         return VerifyBalancedMonitors.getValue(options);
 622     }
 623 
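         // When VerifyBalancedMonitors is enabled, incCounter/decCounter bracket every
         // monitorenter/monitorexit, and checkCounter (inserted at all return points by
         // checkBalancedMonitors below) reports an error if the count is not zero.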
 624     public static void incCounter(OptionValues options) {
 625         if (verifyBalancedMonitors(options)) {
 626             final Word counter = MonitorCounterNode.counter();
 627             final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
 628             counter.writeInt(0, count + 1, MONITOR_COUNTER_LOCATION);
 629         }
 630     }
 631 
 632     public static void decCounter(OptionValues options) {
 633         if (verifyBalancedMonitors(options)) {
 634             final Word counter = MonitorCounterNode.counter();
 635             final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
 636             counter.writeInt(0, count - 1, MONITOR_COUNTER_LOCATION);
 637         }
 638     }
 639 
 640     @Snippet
 641     private static void initCounter() {
 642         final Word counter = MonitorCounterNode.counter();
 643         counter.writeInt(0, 0, MONITOR_COUNTER_LOCATION);
 644     }
 645 
 646     @Snippet
 647     private static void checkCounter(@ConstantParameter String errMsg) {
 648         final Word counter = MonitorCounterNode.counter();
 649         final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
 650         if (count != 0) {
 651             vmError(errMsg, count);
 652         }
 653     }
 654 
 655     public static class Counters {
 656         /**
 657          * Counters for the various paths for acquiring a lock. The counters whose names start with
 658          * {@code "lock"} are mutually exclusive. The other counters are for paths that may be
 659          * shared.
 660          */
 661         public final SnippetCounter lockBiasExisting;
 662         public final SnippetCounter lockBiasAcquired;
 663         public final SnippetCounter lockBiasTransfer;
 664         public final SnippetCounter lockCas;
 665         public final SnippetCounter lockCasRecursive;
 666         public final SnippetCounter lockStubEpochExpired;
 667         public final SnippetCounter lockStubRevoke;
 668         public final SnippetCounter lockStubFailedCas;
 669         public final SnippetCounter inflatedCas;
 670         public final SnippetCounter inflatedFailedCas;
 671         public final SnippetCounter inflatedOwned;
 672         public final SnippetCounter unbiasable;
 673         public final SnippetCounter revokeBias;
 674 
 675         /**
 676          * Counters for the various paths for releasing a lock. The counters whose names start with
 677          * {@code "unlock"} are mutually exclusive. The other counters are for paths that may be
 678          * shared.
 679          */
 680         public final SnippetCounter unlockBias;
 681         public final SnippetCounter unlockCas;
 682         public final SnippetCounter unlockCasRecursive;
 683         public final SnippetCounter unlockStub;
 684         public final SnippetCounter unlockStubInflated;
 685         public final SnippetCounter unlockInflatedSimple;
 686 
 687         public Counters(SnippetCounter.Group.Factory factory) {
 688             SnippetCounter.Group enter = factory.createSnippetCounterGroup("MonitorEnters");
 689             SnippetCounter.Group exit = factory.createSnippetCounterGroup("MonitorExits");
 690             lockBiasExisting = new SnippetCounter(enter, "lock{bias:existing}", "bias-locked previously biased object");
 691             lockBiasAcquired = new SnippetCounter(enter, "lock{bias:acquired}", "bias-locked newly biased object");
 692             lockBiasTransfer = new SnippetCounter(enter, "lock{bias:transfer}", "bias-locked, biased transferred");
 693             lockCas = new SnippetCounter(enter, "lock{cas}", "cas-locked an object");
 694             lockCasRecursive = new SnippetCounter(enter, "lock{cas:recursive}", "cas-locked, recursive");
 695             lockStubEpochExpired = new SnippetCounter(enter, "lock{stub:epoch-expired}", "stub-locked, epoch expired");
 696             lockStubRevoke = new SnippetCounter(enter, "lock{stub:revoke}", "stub-locked, biased revoked");
 697             lockStubFailedCas = new SnippetCounter(enter, "lock{stub:failed-cas/stack}", "stub-locked, failed cas and stack locking");
 698             inflatedCas = new SnippetCounter(enter, "lock{inflated:cas}", "heavyweight-locked, cas-locked");
 699             inflatedFailedCas = new SnippetCounter(enter, "lock{inflated:failed-cas}", "heavyweight-locked, failed cas");
 700             inflatedOwned = new SnippetCounter(enter, "lock{inflated:owned}", "heavyweight-locked, already owned");
 701             unbiasable = new SnippetCounter(enter, "unbiasable", "object with unbiasable type");
 702             revokeBias = new SnippetCounter(enter, "revokeBias", "object had bias revoked");
 703 
 704             unlockBias = new SnippetCounter(exit, "unlock{bias}", "bias-unlocked an object");
 705             unlockCas = new SnippetCounter(exit, "unlock{cas}", "cas-unlocked an object");
 706             unlockCasRecursive = new SnippetCounter(exit, "unlock{cas:recursive}", "cas-unlocked an object, recursive");
 707             unlockStub = new SnippetCounter(exit, "unlock{stub}", "stub-unlocked an object");
 708             unlockStubInflated = new SnippetCounter(exit, "unlock{stub:inflated}", "stub-unlocked an object with inflated monitor");
 709             unlockInflatedSimple = new SnippetCounter(exit, "unlock{inflated}", "unlocked an object monitor");
 710         }
 711     }
 712 
 713     public static class Templates extends AbstractTemplates {
 714 
 715         private final SnippetInfo monitorenter = snippet(MonitorSnippets.class, "monitorenter");
 716         private final SnippetInfo monitorexit = snippet(MonitorSnippets.class, "monitorexit");
 717         private final SnippetInfo monitorenterStub = snippet(MonitorSnippets.class, "monitorenterStub");
 718         private final SnippetInfo monitorexitStub = snippet(MonitorSnippets.class, "monitorexitStub");
 719         private final SnippetInfo initCounter = snippet(MonitorSnippets.class, "initCounter");
 720         private final SnippetInfo checkCounter = snippet(MonitorSnippets.class, "checkCounter");
 721 
 722         private final boolean useFastLocking;
 723         public final Counters counters;
 724 
 725         public Templates(OptionValues options, SnippetCounter.Group.Factory factory, HotSpotProviders providers, TargetDescription target, boolean useFastLocking) {
 726             super(options, providers, providers.getSnippetReflection(), target);
 727             this.useFastLocking = useFastLocking;
 728 
 729             this.counters = new Counters(factory);
 730         }
 731 
 732         public void lower(RawMonitorEnterNode monitorenterNode, HotSpotRegistersProvider registers, LoweringTool tool) {
 733             StructuredGraph graph = monitorenterNode.graph();
 734             checkBalancedMonitors(graph, tool);
 735 
 736             assert ((ObjectStamp) monitorenterNode.object().stamp()).nonNull();
 737 
 738             Arguments args;
 739             if (useFastLocking) {
 740                 args = new Arguments(monitorenter, graph.getGuardsStage(), tool.getLoweringStage());
 741                 args.add("object", monitorenterNode.object());
 742                 args.add("hub", monitorenterNode.getHub());
 743                 args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
 744                 args.addConst("threadRegister", registers.getThreadRegister());
 745                 args.addConst("stackPointerRegister", registers.getStackPointerRegister());
 746                 args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph));
 747                 args.addConst("options", graph.getOptions());
 748                 args.addConst("counters", counters);
 749             } else {
 750                 args = new Arguments(monitorenterStub, graph.getGuardsStage(), tool.getLoweringStage());
 751                 args.add("object", monitorenterNode.object());
 752                 args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
 753                 args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph));
 754                 args.addConst("options", graph.getOptions());
 755                 args.addConst("counters", counters);
 756             }
 757 
 758             template(args).instantiate(providers.getMetaAccess(), monitorenterNode, DEFAULT_REPLACER, args);
 759         }
 760 
 761         public void lower(MonitorExitNode monitorexitNode, HotSpotRegistersProvider registers, LoweringTool tool) {
 762             StructuredGraph graph = monitorexitNode.graph();
 763 
 764             Arguments args;
 765             if (useFastLocking) {
 766                 args = new Arguments(monitorexit, graph.getGuardsStage(), tool.getLoweringStage());
 767             } else {
 768                 args = new Arguments(monitorexitStub, graph.getGuardsStage(), tool.getLoweringStage());
 769             }
 770             args.add("object", monitorexitNode.object());
 771             args.addConst("lockDepth", monitorexitNode.getMonitorId().getLockDepth());
 772             args.addConst("threadRegister", registers.getThreadRegister());
 773             args.addConst("trace", isTracingEnabledForType(monitorexitNode.object()) || isTracingEnabledForMethod(graph));
 774             args.addConst("options", graph.getOptions());
 775             args.addConst("counters", counters);
 776 
 777             template(args).instantiate(providers.getMetaAccess(), monitorexitNode, DEFAULT_REPLACER, args);
 778         }
 779 
 780         public static boolean isTracingEnabledForType(ValueNode object) {
 781             ResolvedJavaType type = StampTool.typeOrNull(object.stamp());
 782             String filter = TraceMonitorsTypeFilter.getValue(object.getOptions());
 783             if (filter == null) {
 784                 return false;
 785             } else {
 786                 if (filter.length() == 0) {
 787                     return true;
 788                 }
 789                 if (type == null) {
 790                     return false;
 791                 }
 792                 return (type.getName().contains(filter));
 793             }
 794         }
 795 
 796         public static boolean isTracingEnabledForMethod(StructuredGraph graph) {
 797             String filter = TraceMonitorsMethodFilter.getValue(graph.getOptions());
 798             if (filter == null) {
 799                 return false;
 800             } else {
 801                 if (filter.length() == 0) {
 802                     return true;
 803                 }
 804                 if (graph.method() == null) {
 805                     return false;
 806                 }
 807                 return (graph.method().format("%H.%n").contains(filter));
 808             }
 809         }
 810 
 811         /**
 812          * If balanced monitor checking is enabled then nodes are inserted at the start and all
 813          * return points of the graph to initialize and check the monitor counter respectively.
 814          */
 815         private void checkBalancedMonitors(StructuredGraph graph, LoweringTool tool) {
 816             if (VerifyBalancedMonitors.getValue(options)) {
 817                 NodeIterable<MonitorCounterNode> nodes = graph.getNodes().filter(MonitorCounterNode.class);
 818                 if (nodes.isEmpty()) {
 819                     // Only insert the nodes if this is the first monitorenter being lowered.
 820                     JavaType returnType = initCounter.getMethod().getSignature().getReturnType(initCounter.getMethod().getDeclaringClass());
 821                     StampPair returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
 822                     MethodCallTargetNode callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, initCounter.getMethod(), new ValueNode[0], returnStamp, null));
 823                     InvokeNode invoke = graph.add(new InvokeNode(callTarget, 0));
 824                     invoke.setStateAfter(graph.start().stateAfter());
 825                     graph.addAfterFixed(graph.start(), invoke);
 826 
 827                     StructuredGraph inlineeGraph = providers.getReplacements().getSnippet(initCounter.getMethod(), null);
 828                     InliningUtil.inline(invoke, inlineeGraph, false, null);
 829 
 830                     List<ReturnNode> rets = graph.getNodes(ReturnNode.TYPE).snapshot();
 831                     for (ReturnNode ret : rets) {
 832                         returnType = checkCounter.getMethod().getSignature().getReturnType(checkCounter.getMethod().getDeclaringClass());
 833                         String msg = "unbalanced monitors in " + graph.method().format("%H.%n(%p)") + ", count = %d";
 834                         ConstantNode errMsg = ConstantNode.forConstant(tool.getConstantReflection().forString(msg), providers.getMetaAccess(), graph);
 835                         returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
 836                         callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, checkCounter.getMethod(), new ValueNode[]{errMsg}, returnStamp, null));
 837                         invoke = graph.add(new InvokeNode(callTarget, 0));
 838                         Bytecode code = new ResolvedJavaMethodBytecode(graph.method());
 839                         FrameState stateAfter = new FrameState(null, code, BytecodeFrame.AFTER_BCI, new ValueNode[0], new ValueNode[0], 0, new ValueNode[0], null, false, false);
 840                         invoke.setStateAfter(graph.add(stateAfter));
 841                         graph.addBeforeFixed(ret, invoke);
 842 
 843                         Arguments args = new Arguments(checkCounter, graph.getGuardsStage(), tool.getLoweringStage());
 844                         args.addConst("errMsg", msg);
 845                         inlineeGraph = template(args).copySpecializedGraph();
 846                         InliningUtil.inline(invoke, inlineeGraph, false, null);
 847                     }
 848                 }
 849             }
 850         }
 851     }
 852 
 853     public static final ForeignCallDescriptor MONITORENTER = new ForeignCallDescriptor("monitorenter", void.class, Object.class, Word.class);
 854     public static final ForeignCallDescriptor MONITOREXIT = new ForeignCallDescriptor("monitorexit", void.class, Object.class, Word.class);
 855 
 856     @NodeIntrinsic(ForeignCallNode.class)
 857     private static native void monitorenterStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);
 858 
 859     @NodeIntrinsic(ForeignCallNode.class)
 860     public static native void monitorexitStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);
 861 }