1 /* 2 * Copyright (c) 2012, 2016, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 */ 23 package org.graalvm.compiler.hotspot.replacements; 24 25 import static org.graalvm.compiler.core.common.GraalOptions.SnippetCounters; 26 import static org.graalvm.compiler.hotspot.GraalHotSpotVMConfig.INJECTED_VMCONFIG; 27 import static org.graalvm.compiler.hotspot.nodes.BeginLockScopeNode.beginLockScope; 28 import static org.graalvm.compiler.hotspot.nodes.DirectCompareAndSwapNode.compareAndSwap; 29 import static org.graalvm.compiler.hotspot.nodes.EndLockScopeNode.endLockScope; 30 import static org.graalvm.compiler.hotspot.nodes.VMErrorNode.vmError; 31 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.DISPLACED_MARK_WORD_LOCATION; 32 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.MARK_WORD_LOCATION; 33 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.PROTOTYPE_MARK_WORD_LOCATION; 34 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.ageMaskInPlace; 35 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockMaskInPlace; 36 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockPattern; 37 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.config; 38 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.epochMaskInPlace; 39 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.loadWordFromObject; 40 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.lockDisplacedMarkOffset; 41 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.markOffset; 42 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.pageSize; 43 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.prototypeMarkWordOffset; 44 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.registerAsWord; 
45 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.unlockedMask; 46 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.useBiasedLocking; 47 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.verifyOop; 48 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.wordSize; 49 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.ProfileMonitors; 50 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsMethodFilter; 51 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsTypeFilter; 52 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.VerifyBalancedMonitors; 53 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FREQUENT_PROBABILITY; 54 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.VERY_FAST_PATH_PROBABILITY; 55 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.VERY_SLOW_PATH_PROBABILITY; 56 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.probability; 57 import static org.graalvm.compiler.replacements.SnippetTemplate.DEFAULT_REPLACER; 58 59 import java.util.List; 60 61 import org.graalvm.compiler.api.replacements.Fold; 62 import org.graalvm.compiler.api.replacements.Snippet; 63 import org.graalvm.compiler.api.replacements.Snippet.ConstantParameter; 64 import org.graalvm.compiler.bytecode.Bytecode; 65 import org.graalvm.compiler.bytecode.ResolvedJavaMethodBytecode; 66 import org.graalvm.compiler.core.common.LocationIdentity; 67 import org.graalvm.compiler.core.common.spi.ForeignCallDescriptor; 68 import org.graalvm.compiler.core.common.type.ObjectStamp; 69 import org.graalvm.compiler.core.common.type.StampFactory; 70 import org.graalvm.compiler.core.common.type.StampPair; 71 import org.graalvm.compiler.graph.Node.ConstantNodeParameter; 72 
import org.graalvm.compiler.graph.Node.NodeIntrinsic; 73 import org.graalvm.compiler.graph.iterators.NodeIterable; 74 import org.graalvm.compiler.hotspot.meta.HotSpotProviders; 75 import org.graalvm.compiler.hotspot.meta.HotSpotRegistersProvider; 76 import org.graalvm.compiler.hotspot.nodes.AcquiredCASLockNode; 77 import org.graalvm.compiler.hotspot.nodes.CurrentLockNode; 78 import org.graalvm.compiler.hotspot.nodes.DirectCompareAndSwapNode; 79 import org.graalvm.compiler.hotspot.nodes.FastAcquireBiasedLockNode; 80 import org.graalvm.compiler.hotspot.nodes.MonitorCounterNode; 81 import org.graalvm.compiler.hotspot.word.KlassPointer; 82 import org.graalvm.compiler.nodes.BreakpointNode; 83 import org.graalvm.compiler.nodes.CallTargetNode.InvokeKind; 84 import org.graalvm.compiler.nodes.ConstantNode; 85 import org.graalvm.compiler.nodes.DeoptimizeNode; 86 import org.graalvm.compiler.nodes.FrameState; 87 import org.graalvm.compiler.nodes.InvokeNode; 88 import org.graalvm.compiler.nodes.NamedLocationIdentity; 89 import org.graalvm.compiler.nodes.ReturnNode; 90 import org.graalvm.compiler.nodes.StructuredGraph; 91 import org.graalvm.compiler.nodes.ValueNode; 92 import org.graalvm.compiler.nodes.debug.DynamicCounterNode; 93 import org.graalvm.compiler.nodes.extended.BranchProbabilityNode; 94 import org.graalvm.compiler.nodes.extended.ForeignCallNode; 95 import org.graalvm.compiler.nodes.java.MethodCallTargetNode; 96 import org.graalvm.compiler.nodes.java.MonitorExitNode; 97 import org.graalvm.compiler.nodes.java.RawMonitorEnterNode; 98 import org.graalvm.compiler.nodes.memory.address.OffsetAddressNode; 99 import org.graalvm.compiler.nodes.spi.LoweringTool; 100 import org.graalvm.compiler.nodes.type.StampTool; 101 import org.graalvm.compiler.phases.common.inlining.InliningUtil; 102 import org.graalvm.compiler.replacements.Log; 103 import org.graalvm.compiler.replacements.SnippetCounter; 104 import org.graalvm.compiler.replacements.SnippetTemplate.AbstractTemplates; 105 
import org.graalvm.compiler.replacements.SnippetTemplate.Arguments; 106 import org.graalvm.compiler.replacements.SnippetTemplate.SnippetInfo; 107 import org.graalvm.compiler.replacements.Snippets; 108 import org.graalvm.compiler.word.Word; 109 import org.graalvm.compiler.word.WordBase; 110 111 import jdk.vm.ci.code.BytecodeFrame; 112 import jdk.vm.ci.code.Register; 113 import jdk.vm.ci.code.TargetDescription; 114 import jdk.vm.ci.meta.DeoptimizationAction; 115 import jdk.vm.ci.meta.DeoptimizationReason; 116 import jdk.vm.ci.meta.JavaType; 117 import jdk.vm.ci.meta.ResolvedJavaMethod; 118 import jdk.vm.ci.meta.ResolvedJavaType; 119 120 /** 121 * Snippets used for implementing the monitorenter and monitorexit instructions. 122 * 123 * The locking algorithm used is described in the paper 124 * <a href="http://dl.acm.org/citation.cfm?id=1167515.1167496"> Eliminating synchronization-related 125 * atomic operations with biased locking and bulk rebiasing</a> by Kenneth Russell and David 126 * Detlefs. 
127 * 128 * Comment below is reproduced from {@code markOop.hpp} for convenience: 129 * 130 * <pre> 131 * Bit-format of an object header (most significant first, big endian layout below): 132 * 32 bits: 133 * -------- 134 * hash:25 ------------>| age:4 biased_lock:1 lock:2 (normal object) 135 * JavaThread*:23 epoch:2 age:4 biased_lock:1 lock:2 (biased object) 136 * size:32 ------------------------------------------>| (CMS free block) 137 * PromotedObject*:29 ---------->| promo_bits:3 ----->| (CMS promoted object) 138 * 139 * 64 bits: 140 * -------- 141 * unused:25 hash:31 -->| unused:1 age:4 biased_lock:1 lock:2 (normal object) 142 * JavaThread*:54 epoch:2 unused:1 age:4 biased_lock:1 lock:2 (biased object) 143 * PromotedObject*:61 --------------------->| promo_bits:3 ----->| (CMS promoted object) 144 * size:64 ----------------------------------------------------->| (CMS free block) 145 * 146 * unused:25 hash:31 -->| cms_free:1 age:4 biased_lock:1 lock:2 (COOPs && normal object) 147 * JavaThread*:54 epoch:2 cms_free:1 age:4 biased_lock:1 lock:2 (COOPs && biased object) 148 * narrowOop:32 unused:24 cms_free:1 unused:4 promo_bits:3 ----->| (COOPs && CMS promoted object) 149 * unused:21 size:35 -->| cms_free:1 unused:7 ------------------>| (COOPs && CMS free block) 150 * 151 * - hash contains the identity hash value: largest value is 152 * 31 bits, see os::random(). Also, 64-bit vm's require 153 * a hash value no bigger than 32 bits because they will not 154 * properly generate a mask larger than that: see library_call.cpp 155 * and c1_CodePatterns_sparc.cpp. 156 * 157 * - the biased lock pattern is used to bias a lock toward a given 158 * thread. When this pattern is set in the low three bits, the lock 159 * is either biased toward a given thread or "anonymously" biased, 160 * indicating that it is possible for it to be biased. 
When the
 * lock is biased toward a given thread, locking and unlocking can
 * be performed by that thread without using atomic operations.
 * When a lock's bias is revoked, it reverts back to the normal
 * locking scheme described below.
 *
 * Note that we are overloading the meaning of the "unlocked" state
 * of the header. Because we steal a bit from the age we can
 * guarantee that the bias pattern will never be seen for a truly
 * unlocked object.
 *
 * Note also that the biased state contains the age bits normally
 * contained in the object header. Large increases in scavenge
 * times were seen when these bits were absent and an arbitrary age
 * assigned to all biased objects, because they tended to consume a
 * significant fraction of the eden semispaces and were not
 * promoted promptly, causing an increase in the amount of copying
 * performed. The runtime system aligns all JavaThread* pointers to
 * a very large value (currently 128 bytes (32bVM) or 256 bytes (64bVM))
 * to make room for the age bits & the epoch bits (used in support of
 * biased locking), and for the CMS "freeness" bit in the 64bVM (+COOPs).
 *
 * [JavaThread* | epoch | age | 1 | 01] lock is biased toward given thread
 * [0 | epoch | age | 1 | 01] lock is anonymously biased
 *
 * - the two lock bits are used to describe three states: locked/unlocked and monitor.
 *
 * [ptr | 00] locked ptr points to real header on stack
 * [header | 0 | 01] unlocked regular object header
 * [ptr | 10] monitor inflated lock (header is swapped out)
 * [ptr | 11] marked used by markSweep to mark an object
 * not valid at any other time
 *
 * We assume that stack/thread pointers have the lowest two bits cleared.
 * </pre>
 *
 * Note that {@code Thread::allocate} enforces {@code JavaThread} objects to be aligned
 * appropriately to comply with the layouts above.
 */
public class MonitorSnippets implements Snippets {

    // Context argument passed to DynamicCounterNode.counter in traceObject below.
    private static final boolean PROFILE_CONTEXT = false;

    /**
     * Folded to a compile-time constant in the snippets below: whether the
     * {@code ProfileMonitors} option is enabled.
     */
    @Fold
    static boolean doProfile() {
        return ProfileMonitors.getValue();
    }

    /**
     * Snippet implementing the fast path of {@code monitorenter}: tries biased locking first
     * (when enabled in the VM configuration) and then CAS-based stack locking, calling out to
     * the {@link #MONITORENTER} runtime stub only on the slow paths.
     *
     * @param object the object being locked; the mark word load below implies a null check
     * @param hub the hub of {@code object}, used to read the prototype mark word
     * @param lockDepth lock depth passed to {@code beginLockScope} to address the on-stack lock
     *            slot
     * @param threadRegister register holding the current thread
     * @param stackPointerRegister register holding the stack pointer, used by the recursive
     *            locking check
     * @param trace whether tracing output is enabled for this lock site
     */
    @Snippet
    public static void monitorenter(Object object, KlassPointer hub, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter Register stackPointerRegister,
                    @ConstantParameter boolean trace) {
        verifyOop(object);

        // Load the mark word - this includes a null-check on object
        final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG));

        final Word lock = beginLockScope(lockDepth);

        trace(trace, " object: 0x%016lx\n", Word.objectToTrackedPointer(object));
        trace(trace, " lock: 0x%016lx\n", lock);
        trace(trace, " mark: 0x%016lx\n", mark);

        incCounter();

        if (useBiasedLocking(INJECTED_VMCONFIG)) {
            // See whether the lock is currently biased toward our thread and
            // whether the epoch is still valid.
            // Note that the runtime guarantees sufficient alignment of JavaThread
            // pointers to allow age to be placed into low bits.
            final Word biasableLockBits = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG));

            // Check whether the bias pattern is present in the object's mark word
            // and the bias owner and the epoch are both still current.
            final Word prototypeMarkWord = hub.readWord(prototypeMarkWordOffset(INJECTED_VMCONFIG), PROTOTYPE_MARK_WORD_LOCATION);
            final Word thread = registerAsWord(threadRegister);
            // tmp is zero (ignoring age bits) iff mark == prototype-with-bias-pattern | thread,
            // i.e. the object is already biased to the current thread in the current epoch.
            final Word tmp = prototypeMarkWord.or(thread).xor(mark).and(~ageMaskInPlace(INJECTED_VMCONFIG));
            trace(trace, "prototypeMarkWord: 0x%016lx\n", prototypeMarkWord);
            trace(trace, " thread: 0x%016lx\n", thread);
            trace(trace, " tmp: 0x%016lx\n", tmp);
            if (probability(BranchProbabilityNode.NOT_LIKELY_PROBABILITY, tmp.equal(0))) {
                // Object is already biased to current thread -> done
                traceObject(trace, "+lock{bias:existing}", object, true);
                lockBiasExisting.inc();
                FastAcquireBiasedLockNode.mark(object);
                return;
            }

            // Now check to see whether biasing is enabled for this object
            if (probability(BranchProbabilityNode.FAST_PATH_PROBABILITY, biasableLockBits.notEqual(Word.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) {
                // Biasing not enabled -> fall through to lightweight locking
                unbiasable.inc();
            } else {
                // At this point we know that the mark word has the bias pattern and
                // that we are not the bias owner in the current epoch. We need to
                // figure out more details about the state of the mark word in order to
                // know what operations can be legally performed on the object's
                // mark word.

                // If the low three bits in the xor result aren't clear, that means
                // the prototype header is no longer biasable and we have to revoke
                // the bias on this object.
                if (probability(FREQUENT_PROBABILITY, tmp.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(0))) {
                    // Biasing is still enabled for object's type. See whether the
                    // epoch of the current bias is still valid, meaning that the epoch
                    // bits of the mark word are equal to the epoch bits of the
                    // prototype mark word. (Note that the prototype mark word's epoch bits
                    // only change at a safepoint.) If not, attempt to rebias the object
                    // toward the current thread. Note that we must be absolutely sure
                    // that the current epoch is invalid in order to do this because
                    // otherwise the manipulations it performs on the mark word are
                    // illegal.
                    if (probability(FREQUENT_PROBABILITY, tmp.and(epochMaskInPlace(INJECTED_VMCONFIG)).equal(0))) {
                        // The epoch of the current bias is still valid but we know nothing
                        // about the owner; it might be set or it might be clear. Try to
                        // acquire the bias of the object using an atomic operation. If this
                        // fails we will go in to the runtime to revoke the object's bias.
                        // Note that we first construct the presumed unbiased header so we
                        // don't accidentally blow away another thread's valid bias.
                        Word unbiasedMark = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG) | ageMaskInPlace(INJECTED_VMCONFIG) | epochMaskInPlace(INJECTED_VMCONFIG));
                        Word biasedMark = unbiasedMark.or(thread);
                        trace(trace, " unbiasedMark: 0x%016lx\n", unbiasedMark);
                        trace(trace, " biasedMark: 0x%016lx\n", biasedMark);
                        if (probability(VERY_FAST_PATH_PROBABILITY,
                                        compareAndSwap(OffsetAddressNode.address(object, markOffset(INJECTED_VMCONFIG)), unbiasedMark, biasedMark, MARK_WORD_LOCATION).equal(unbiasedMark))) {
                            // Object is now biased to current thread -> done
                            traceObject(trace, "+lock{bias:acquired}", object, true);
                            lockBiasAcquired.inc();
                            return;
                        }
                        // If the biasing toward our thread failed, this means that another thread
                        // owns the bias and we need to revoke that bias. The revocation will occur
                        // in the interpreter runtime.
                        traceObject(trace, "+lock{stub:revoke}", object, true);
                        lockStubRevoke.inc();
                    } else {
                        // At this point we know the epoch has expired, meaning that the
                        // current bias owner, if any, is actually invalid. Under these
                        // circumstances _only_, are we allowed to use the current mark word
                        // value as the comparison value when doing the CAS to acquire the
                        // bias in the current epoch. In other words, we allow transfer of
                        // the bias from one thread to another directly in this situation.
                        Word biasedMark = prototypeMarkWord.or(thread);
                        trace(trace, " biasedMark: 0x%016lx\n", biasedMark);
                        if (probability(VERY_FAST_PATH_PROBABILITY,
                                        compareAndSwap(OffsetAddressNode.address(object, markOffset(INJECTED_VMCONFIG)), mark, biasedMark, MARK_WORD_LOCATION).equal(mark))) {
                            // Object is now biased to current thread -> done
                            traceObject(trace, "+lock{bias:transfer}", object, true);
                            lockBiasTransfer.inc();
                            return;
                        }
                        // If the biasing toward our thread failed, then another thread
                        // succeeded in biasing it toward itself and we need to revoke that
                        // bias. The revocation will occur in the runtime in the slow case.
                        traceObject(trace, "+lock{stub:epoch-expired}", object, true);
                        lockStubEpochExpired.inc();
                    }
                    // Bias revocation required -> slow path via runtime stub.
                    monitorenterStubC(MONITORENTER, object, lock);
                    return;
                } else {
                    // The prototype mark word doesn't have the bias bit set any
                    // more, indicating that objects of this data type are not supposed
                    // to be biased any more. We are going to try to reset the mark of
                    // this object to the prototype value and fall through to the
                    // CAS-based locking scheme. Note that if our CAS fails, it means
                    // that another thread raced us for the privilege of revoking the
                    // bias of this particular object, so it's okay to continue in the
                    // normal locking code.
                    Word result = compareAndSwap(OffsetAddressNode.address(object, markOffset(INJECTED_VMCONFIG)), mark, prototypeMarkWord, MARK_WORD_LOCATION);

                    // Fall through to the normal CAS-based lock, because no matter what
                    // the result of the above CAS, some thread must have succeeded in
                    // removing the bias bit from the object's header.

                    if (ENABLE_BREAKPOINT) {
                        bkpt(object, mark, tmp, result);
                    }
                    revokeBias.inc();
                }
            }
        }

        // Create the unlocked mark word pattern
        Word unlockedMark = mark.or(unlockedMask(INJECTED_VMCONFIG));
        trace(trace, " unlockedMark: 0x%016lx\n", unlockedMark);

        // Copy this unlocked mark word into the lock slot on the stack
        lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), unlockedMark, DISPLACED_MARK_WORD_LOCATION);

        // Test if the object's mark word is unlocked, and if so, store the
        // (address of) the lock slot into the object's mark word.
        Word currentMark = compareAndSwap(OffsetAddressNode.address(object, markOffset(INJECTED_VMCONFIG)), unlockedMark, lock, MARK_WORD_LOCATION);
        if (probability(BranchProbabilityNode.SLOW_PATH_PROBABILITY, currentMark.notEqual(unlockedMark))) {
            trace(trace, " currentMark: 0x%016lx\n", currentMark);
            // The mark word in the object header was not the same.
            // Either the object is locked by another thread or is already locked
            // by the current thread. The latter is true if the mark word
            // is a stack pointer into the current thread's stack, i.e.:
            //
            // 1) (currentMark & aligned_mask) == 0
            // 2) rsp <= currentMark
            // 3) currentMark <= rsp + page_size
            //
            // These 3 tests can be done by evaluating the following expression:
            //
            // (currentMark - rsp) & (aligned_mask - page_size)
            //
            // assuming both the stack pointer and page_size have their least
            // significant 2 bits cleared and page_size is a power of 2
            final Word alignedMask = Word.unsigned(wordSize() - 1);
            final Word stackPointer = registerAsWord(stackPointerRegister).add(config(INJECTED_VMCONFIG).stackBias);
            if (probability(VERY_SLOW_PATH_PROBABILITY, currentMark.subtract(stackPointer).and(alignedMask.subtract(pageSize())).notEqual(0))) {
                // Most likely not a recursive lock, go into a slow runtime call
                traceObject(trace, "+lock{stub:failed-cas}", object, true);
                lockStubFailedCas.inc();
                monitorenterStubC(MONITORENTER, object, lock);
                return;
            } else {
                // Recursively locked => write 0 to the lock slot
                lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), Word.zero(), DISPLACED_MARK_WORD_LOCATION);
                traceObject(trace, "+lock{cas:recursive}", object, true);
                lockCasRecursive.inc();
            }
        } else {
            traceObject(trace, "+lock{cas}", object, true);
            lockCas.inc();
            AcquiredCASLockNode.mark(object);
        }
    }

    /**
     * Calls straight out to the monitorenter stub.
     */
    @Snippet
    public static void monitorenterStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace) {
        verifyOop(object);
        incCounter();
        // Explicit null check: deoptimize rather than passing a null object to the stub.
        if (object == null) {
            DeoptimizeNode.deopt(DeoptimizationAction.InvalidateReprofile, DeoptimizationReason.NullCheckException);
        }
        // BeginLockScope nodes do not read from object so a use of object
        // cannot float about the null check above
        final Word lock = beginLockScope(lockDepth);
        traceObject(trace, "+lock{stub}", object, true);
        monitorenterStubC(MONITORENTER, object, lock);
    }

    /**
     * Snippet implementing the fast path of {@code monitorexit}: handles the biased-locking
     * no-op case and recursive stack locks, restores the displaced mark word via CAS, and falls
     * back to the {@link #MONITOREXIT} runtime stub if the CAS fails.
     *
     * @param object the object being unlocked
     * @param lockDepth lock depth used to locate the on-stack lock slot via
     *            {@code CurrentLockNode.currentLock}
     * @param trace whether tracing output is enabled for this lock site
     */
    @Snippet
    public static void monitorexit(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace) {
        trace(trace, " object: 0x%016lx\n", Word.objectToTrackedPointer(object));
        if (useBiasedLocking(INJECTED_VMCONFIG)) {
            // Check for biased locking unlock case, which is a no-op
            // Note: we do not have to check the thread ID for two reasons.
            // First, the interpreter checks for IllegalMonitorStateException at
            // a higher level. Second, if the bias was revoked while we held the
            // lock, the object could not be rebiased toward another thread, so
            // the bias bit would be clear.
            final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG));
            trace(trace, " mark: 0x%016lx\n", mark);
            if (probability(BranchProbabilityNode.NOT_LIKELY_PROBABILITY, mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(Word.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) {
                endLockScope();
                decCounter();
                traceObject(trace, "-lock{bias}", object, false);
                unlockBias.inc();
                return;
            }
        }

        final Word lock = CurrentLockNode.currentLock(lockDepth);

        // Load displaced mark
        final Word displacedMark = lock.readWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), DISPLACED_MARK_WORD_LOCATION);
        trace(trace, " displacedMark: 0x%016lx\n", displacedMark);

        if (probability(BranchProbabilityNode.NOT_LIKELY_PROBABILITY, displacedMark.equal(0))) {
            // Recursive locking => done
            traceObject(trace, "-lock{recursive}", object, false);
            unlockCasRecursive.inc();
        } else {
            verifyOop(object);
            // Test if object's mark word is pointing to the displaced mark word, and if so, restore
            // the displaced mark in the object - if the object's mark word is not pointing to
            // the displaced mark word, do unlocking via runtime call.
            if (probability(VERY_SLOW_PATH_PROBABILITY,
                            DirectCompareAndSwapNode.compareAndSwap(OffsetAddressNode.address(object, markOffset(INJECTED_VMCONFIG)), lock, displacedMark, MARK_WORD_LOCATION).notEqual(lock))) {
                // The object's mark word was not pointing to the displaced header,
                // we do unlocking via runtime call.
                traceObject(trace, "-lock{stub}", object, false);
                unlockStub.inc();
                monitorexitStubC(MONITOREXIT, object, lock);
            } else {
                traceObject(trace, "-lock{cas}", object, false);
                unlockCas.inc();
            }
        }
        endLockScope();
        decCounter();
    }

    /**
     * Calls straight out to the monitorexit stub.
     */
    @Snippet
    public static void monitorexitStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace) {
        verifyOop(object);
        traceObject(trace, "-lock{stub}", object, false);
        final Word lock = CurrentLockNode.currentLock(lockDepth);
        monitorexitStubC(MONITOREXIT, object, lock);
        endLockScope();
        decCounter();
    }

    /**
     * Emits profiling and/or tracing output for a lock or unlock event.
     *
     * @param enabled whether textual tracing via {@link Log} is enabled for this site
     * @param action label describing the event, e.g. {@code "+lock{cas}"}
     * @param object the object being locked or unlocked
     * @param enter true for a monitor enter, false for a monitor exit
     */
    public static void traceObject(boolean enabled, String action, Object object, boolean enter) {
        if (doProfile()) {
            DynamicCounterNode.counter(action, enter ? "number of monitor enters" : "number of monitor exits", 1, PROFILE_CONTEXT);
        }
        if (enabled) {
            Log.print(action);
            Log.print(' ');
            Log.printlnObject(object);
        }
    }

    /**
     * Prints a formatted word value via {@link Log} if tracing is enabled.
     *
     * @param enabled whether tracing is enabled
     * @param format a {@code Log.printf} format string with one long conversion
     * @param value the word whose raw value is formatted
     */
    public static void trace(boolean enabled, String format, WordBase value) {
        if (enabled) {
            Log.printf(format, value.rawValue());
        }
    }

    /**
     * Leaving the breakpoint code in to provide an example of how to use the {@link BreakpointNode}
     * intrinsic.
     */
    private static final boolean ENABLE_BREAKPOINT = false;

    // Location identity for the monitor balance counter read/written by
    // incCounter/decCounter/initCounter/checkCounter below.
    private static final LocationIdentity MONITOR_COUNTER_LOCATION = NamedLocationIdentity.mutable("MonitorCounter");

    @NodeIntrinsic(BreakpointNode.class)
    static native void bkpt(Object object, Word mark, Word tmp, Word value);

    // Captured once at class initialization time; guards all balanced-monitor checking.
    private static final boolean VERIFY_BALANCED_MONITORS = VerifyBalancedMonitors.getValue();

    /** Increments the monitor balance counter if balanced-monitor verification is enabled. */
    public static void incCounter() {
        if (VERIFY_BALANCED_MONITORS) {
            final Word counter = MonitorCounterNode.counter();
            final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
            counter.writeInt(0, count + 1, MONITOR_COUNTER_LOCATION);
        }
    }

    /** Decrements the monitor balance counter if balanced-monitor verification is enabled. */
    public static void decCounter() {
        if (VERIFY_BALANCED_MONITORS) {
            final Word counter = MonitorCounterNode.counter();
            final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
            counter.writeInt(0, count - 1, MONITOR_COUNTER_LOCATION);
        }
    }

    /** Snippet that resets the monitor balance counter to zero (inlined at graph start). */
    @Snippet
    private static void initCounter() {
        final Word counter = MonitorCounterNode.counter();
        counter.writeInt(0, 0, MONITOR_COUNTER_LOCATION);
    }

    /**
     * Snippet that raises a VM error if the monitor balance counter is non-zero (inlined before
     * each return when balanced-monitor checking is enabled).
     *
     * @param errMsg the error message; the residual count is passed to {@code vmError}
     */
    @Snippet
    private static void checkCounter(@ConstantParameter String errMsg) {
        final Word counter = MonitorCounterNode.counter();
        final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
        if (count != 0) {
            vmError(errMsg, count);
        }
    }

    /**
     * Templates for lowering {@link RawMonitorEnterNode} and {@link MonitorExitNode} to the
     * snippets defined above.
     */
    public static class Templates extends AbstractTemplates {

        private final SnippetInfo monitorenter = snippet(MonitorSnippets.class, "monitorenter");
        private final SnippetInfo monitorexit = snippet(MonitorSnippets.class, "monitorexit");
        private final SnippetInfo monitorenterStub = snippet(MonitorSnippets.class, "monitorenterStub");
        private final SnippetInfo monitorexitStub = snippet(MonitorSnippets.class, "monitorexitStub");
        private final SnippetInfo initCounter = snippet(MonitorSnippets.class, "initCounter");
        private final SnippetInfo checkCounter = snippet(MonitorSnippets.class, "checkCounter");

        // When false, the *Stub snippets (straight runtime calls) are used instead of the
        // fast-path locking snippets.
        private final boolean useFastLocking;

        public Templates(HotSpotProviders providers, TargetDescription target, boolean useFastLocking) {
            super(providers, providers.getSnippetReflection(), target);
            this.useFastLocking = useFastLocking;
        }

        /**
         * Lowers a monitor enter to either the fast-locking snippet or the stub-call snippet,
         * depending on {@code useFastLocking}.
         */
        public void lower(RawMonitorEnterNode monitorenterNode, HotSpotRegistersProvider registers, LoweringTool tool) {
            StructuredGraph graph = monitorenterNode.graph();
            checkBalancedMonitors(graph, tool);

            assert ((ObjectStamp) monitorenterNode.object().stamp()).nonNull();

            Arguments args;
            if (useFastLocking) {
                args = new Arguments(monitorenter, graph.getGuardsStage(), tool.getLoweringStage());
                args.add("object", monitorenterNode.object());
                args.add("hub", monitorenterNode.getHub());
                args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
                args.addConst("threadRegister", registers.getThreadRegister());
                args.addConst("stackPointerRegister", registers.getStackPointerRegister());
                args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph.method()));
            } else {
                args = new Arguments(monitorenterStub, graph.getGuardsStage(), tool.getLoweringStage());
                args.add("object", monitorenterNode.object());
                args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
                args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph.method()));
            }

            template(args).instantiate(providers.getMetaAccess(), monitorenterNode, DEFAULT_REPLACER, args);
        }

        /**
         * Lowers a monitor exit to either the fast-locking snippet or the stub-call snippet,
         * depending on {@code useFastLocking}.
         */
        public void lower(MonitorExitNode monitorexitNode, LoweringTool tool) {
            StructuredGraph graph = monitorexitNode.graph();

            Arguments args;
            if (useFastLocking) {
                args = new Arguments(monitorexit, graph.getGuardsStage(), tool.getLoweringStage());
            } else {
                args = new Arguments(monitorexitStub, graph.getGuardsStage(), tool.getLoweringStage());
            }
            args.add("object", monitorexitNode.object());
            args.addConst("lockDepth", monitorexitNode.getMonitorId().getLockDepth());
            args.addConst("trace", isTracingEnabledForType(monitorexitNode.object()) || isTracingEnabledForMethod(graph.method()));

            template(args).instantiate(providers.getMetaAccess(), monitorexitNode, DEFAULT_REPLACER, args);
        }

        /**
         * Returns true if monitor tracing is enabled for the stamp type of {@code object}: an
         * empty filter matches all types, otherwise the type name must contain the filter.
         */
        public static boolean isTracingEnabledForType(ValueNode object) {
            ResolvedJavaType type = StampTool.typeOrNull(object.stamp());
            String filter = TraceMonitorsTypeFilter.getValue();
            if (filter == null) {
                return false;
            } else {
                if (filter.length() == 0) {
                    return true;
                }
                if (type == null) {
                    return false;
                }
                return (type.getName().contains(filter));
            }
        }

        /**
         * Returns true if monitor tracing is enabled for {@code method}: an empty filter matches
         * all methods, otherwise the method's {@code %H.%n} name must contain the filter.
         */
        public static boolean isTracingEnabledForMethod(ResolvedJavaMethod method) {
            String filter = TraceMonitorsMethodFilter.getValue();
            if (filter == null) {
                return false;
            } else {
                if (filter.length() == 0) {
                    return true;
                }
                if (method == null) {
                    return false;
                }
                return (method.format("%H.%n").contains(filter));
            }
        }

        /**
         * If balanced monitor checking is enabled then nodes are inserted at the start and all
         * return points of the graph to initialize and check the monitor counter respectively.
         */
        private void checkBalancedMonitors(StructuredGraph graph, LoweringTool tool) {
            if (VERIFY_BALANCED_MONITORS) {
                NodeIterable<MonitorCounterNode> nodes = graph.getNodes().filter(MonitorCounterNode.class);
                if (nodes.isEmpty()) {
                    // Only insert the nodes if this is the first monitorenter being lowered.
                    JavaType returnType = initCounter.getMethod().getSignature().getReturnType(initCounter.getMethod().getDeclaringClass());
                    StampPair returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
                    MethodCallTargetNode callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, initCounter.getMethod(), new ValueNode[0], returnStamp, null));
                    InvokeNode invoke = graph.add(new InvokeNode(callTarget, 0));
                    invoke.setStateAfter(graph.start().stateAfter());
                    graph.addAfterFixed(graph.start(), invoke);

                    StructuredGraph inlineeGraph = providers.getReplacements().getSnippet(initCounter.getMethod(), null);
                    InliningUtil.inline(invoke, inlineeGraph, false, null, null);

                    List<ReturnNode> rets = graph.getNodes(ReturnNode.TYPE).snapshot();
                    for (ReturnNode ret : rets) {
                        returnType = checkCounter.getMethod().getSignature().getReturnType(checkCounter.getMethod().getDeclaringClass());
                        String msg = "unbalanced monitors in " + graph.method().format("%H.%n(%p)") + ", count = %d";
                        ConstantNode errMsg = ConstantNode.forConstant(tool.getConstantReflection().forString(msg), providers.getMetaAccess(), graph);
                        returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
                        callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, checkCounter.getMethod(), new ValueNode[]{errMsg}, returnStamp, null));
                        invoke = graph.add(new InvokeNode(callTarget, 0));
                        Bytecode code = new ResolvedJavaMethodBytecode(graph.method());
                        FrameState stateAfter = new FrameState(null, code, BytecodeFrame.AFTER_BCI, new ValueNode[0], new ValueNode[0], 0, new ValueNode[0], null, false, false);
                        invoke.setStateAfter(graph.add(stateAfter));
                        graph.addBeforeFixed(ret, invoke);

                        Arguments args = new Arguments(checkCounter, graph.getGuardsStage(), tool.getLoweringStage());
                        args.addConst("errMsg", msg);
                        inlineeGraph = template(args).copySpecializedGraph();
                        InliningUtil.inline(invoke, inlineeGraph, false, null, null);
                    }
                }
            }
        }
    }

    public static final ForeignCallDescriptor MONITORENTER = new ForeignCallDescriptor("monitorenter", void.class, Object.class, Word.class);
    public static final ForeignCallDescriptor MONITOREXIT = new ForeignCallDescriptor("monitorexit", void.class, Object.class, Word.class);

    @NodeIntrinsic(ForeignCallNode.class)
    private static native void monitorenterStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);

    @NodeIntrinsic(ForeignCallNode.class)
    public static native void monitorexitStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);

    /**
     * Counters for the various paths for acquiring a lock. The counters whose names start with
     * {@code "lock"} are mutually exclusive. The other counters are for paths that may be shared.
     */
    public static final SnippetCounter.Group lockCounters = SnippetCounters.getValue() ? new SnippetCounter.Group("MonitorEnters") : null;
    public static final SnippetCounter lockBiasExisting = new SnippetCounter(lockCounters, "lock{bias:existing}", "bias-locked previously biased object");
    public static final SnippetCounter lockBiasAcquired = new SnippetCounter(lockCounters, "lock{bias:acquired}", "bias-locked newly biased object");
    public static final SnippetCounter lockBiasTransfer = new SnippetCounter(lockCounters, "lock{bias:transfer}", "bias-locked, biased transferred");
    public static final SnippetCounter lockCas = new SnippetCounter(lockCounters, "lock{cas}", "cas-locked an object");
    public static final SnippetCounter lockCasRecursive = new SnippetCounter(lockCounters, "lock{cas:recursive}", "cas-locked, recursive");
    public static final SnippetCounter lockStubEpochExpired = new SnippetCounter(lockCounters, "lock{stub:epoch-expired}", "stub-locked, epoch expired");
    public static final SnippetCounter lockStubRevoke = new SnippetCounter(lockCounters, "lock{stub:revoke}", "stub-locked, biased revoked");
    public static final SnippetCounter lockStubFailedCas = new SnippetCounter(lockCounters, "lock{stub:failed-cas}", "stub-locked, failed cas");

    public static final SnippetCounter unbiasable = new SnippetCounter(lockCounters, "unbiasable", "object with unbiasable type");
    public static final SnippetCounter revokeBias = new SnippetCounter(lockCounters, "revokeBias", "object had bias revoked");

    /**
     * Counters for the various paths for releasing a lock. The counters whose names start with
     * {@code "unlock"} are mutually exclusive. The other counters are for paths that may be shared.
     */
    public static final SnippetCounter.Group unlockCounters = SnippetCounters.getValue() ?
new SnippetCounter.Group("MonitorExits") : null; 693 public static final SnippetCounter unlockBias = new SnippetCounter(unlockCounters, "unlock{bias}", "bias-unlocked an object"); 694 public static final SnippetCounter unlockCas = new SnippetCounter(unlockCounters, "unlock{cas}", "cas-unlocked an object"); 695 public static final SnippetCounter unlockCasRecursive = new SnippetCounter(unlockCounters, "unlock{cas:recursive}", "cas-unlocked an object, recursive"); 696 public static final SnippetCounter unlockStub = new SnippetCounter(unlockCounters, "unlock{stub}", "stub-unlocked an object"); 697 }