1 /* 2 * Copyright (c) 2012, 2016, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 */ 23 package org.graalvm.compiler.hotspot.replacements; 24 25 import static jdk.vm.ci.code.MemoryBarriers.LOAD_STORE; 26 import static jdk.vm.ci.code.MemoryBarriers.STORE_STORE; 27 import static org.graalvm.compiler.hotspot.GraalHotSpotVMConfig.INJECTED_VMCONFIG; 28 import static org.graalvm.compiler.hotspot.nodes.BeginLockScopeNode.beginLockScope; 29 import static org.graalvm.compiler.hotspot.nodes.EndLockScopeNode.endLockScope; 30 import static org.graalvm.compiler.hotspot.nodes.VMErrorNode.vmError; 31 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.DISPLACED_MARK_WORD_LOCATION; 32 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.MARK_WORD_LOCATION; 33 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_CXQ_LOCATION; 34 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_ENTRY_LIST_LOCATION; 35 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_OWNER_LOCATION; 36 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_RECURSION_LOCATION; 37 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.PROTOTYPE_MARK_WORD_LOCATION; 38 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.ageMaskInPlace; 39 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockMaskInPlace; 40 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockPattern; 41 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.config; 42 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.epochMaskInPlace; 43 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.loadWordFromObject; 44 import static 
org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.lockDisplacedMarkOffset; 45 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.markOffset; 46 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.monitorMask; 47 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorCxqOffset; 48 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorEntryListOffset; 49 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorOwnerOffset; 50 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorRecursionsOffset; 51 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.pageSize; 52 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.prototypeMarkWordOffset; 53 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.registerAsWord; 54 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.unlockedMask; 55 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.useBiasedLocking; 56 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.verifyOop; 57 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.wordSize; 58 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.ProfileMonitors; 59 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.SimpleFastInflatedLocking; 60 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsMethodFilter; 61 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsTypeFilter; 62 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.VerifyBalancedMonitors; 63 import static 
org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FAST_PATH_PROBABILITY; 64 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FREQUENT_PROBABILITY; 65 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.NOT_FREQUENT_PROBABILITY; 66 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.NOT_LIKELY_PROBABILITY; 67 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.SLOW_PATH_PROBABILITY; 68 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.VERY_FAST_PATH_PROBABILITY; 69 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.probability; 70 import static org.graalvm.compiler.replacements.SnippetTemplate.DEFAULT_REPLACER; 71 72 import java.util.List; 73 74 import org.graalvm.compiler.api.replacements.Fold; 75 import org.graalvm.compiler.api.replacements.Snippet; 76 import org.graalvm.compiler.api.replacements.Snippet.ConstantParameter; 77 import org.graalvm.compiler.bytecode.Bytecode; 78 import org.graalvm.compiler.bytecode.ResolvedJavaMethodBytecode; 79 import org.graalvm.compiler.core.common.spi.ForeignCallDescriptor; 80 import org.graalvm.compiler.core.common.type.ObjectStamp; 81 import org.graalvm.compiler.core.common.type.StampFactory; 82 import org.graalvm.compiler.core.common.type.StampPair; 83 import org.graalvm.compiler.graph.Node.ConstantNodeParameter; 84 import org.graalvm.compiler.graph.Node.NodeIntrinsic; 85 import org.graalvm.compiler.graph.iterators.NodeIterable; 86 import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig; 87 import org.graalvm.compiler.hotspot.meta.HotSpotProviders; 88 import org.graalvm.compiler.hotspot.meta.HotSpotRegistersProvider; 89 import org.graalvm.compiler.hotspot.nodes.AcquiredCASLockNode; 90 import org.graalvm.compiler.hotspot.nodes.CurrentLockNode; 91 import org.graalvm.compiler.hotspot.nodes.FastAcquireBiasedLockNode; 92 import org.graalvm.compiler.hotspot.nodes.MonitorCounterNode; 93 import 
org.graalvm.compiler.hotspot.word.KlassPointer; 94 import org.graalvm.compiler.nodes.BreakpointNode; 95 import org.graalvm.compiler.nodes.CallTargetNode.InvokeKind; 96 import org.graalvm.compiler.nodes.ConstantNode; 97 import org.graalvm.compiler.nodes.DeoptimizeNode; 98 import org.graalvm.compiler.nodes.FrameState; 99 import org.graalvm.compiler.nodes.InvokeNode; 100 import org.graalvm.compiler.nodes.NamedLocationIdentity; 101 import org.graalvm.compiler.nodes.ReturnNode; 102 import org.graalvm.compiler.nodes.StructuredGraph; 103 import org.graalvm.compiler.nodes.ValueNode; 104 import org.graalvm.compiler.nodes.debug.DynamicCounterNode; 105 import org.graalvm.compiler.nodes.extended.ForeignCallNode; 106 import org.graalvm.compiler.nodes.extended.MembarNode; 107 import org.graalvm.compiler.nodes.java.MethodCallTargetNode; 108 import org.graalvm.compiler.nodes.java.MonitorExitNode; 109 import org.graalvm.compiler.nodes.java.RawMonitorEnterNode; 110 import org.graalvm.compiler.nodes.spi.LoweringTool; 111 import org.graalvm.compiler.nodes.type.StampTool; 112 import org.graalvm.compiler.options.OptionValues; 113 import org.graalvm.compiler.phases.common.inlining.InliningUtil; 114 import org.graalvm.compiler.replacements.Log; 115 import org.graalvm.compiler.replacements.SnippetCounter; 116 import org.graalvm.compiler.replacements.SnippetTemplate.AbstractTemplates; 117 import org.graalvm.compiler.replacements.SnippetTemplate.Arguments; 118 import org.graalvm.compiler.replacements.SnippetTemplate.SnippetInfo; 119 import org.graalvm.compiler.replacements.Snippets; 120 import org.graalvm.compiler.word.Word; 121 import org.graalvm.word.LocationIdentity; 122 import org.graalvm.word.Pointer; 123 import org.graalvm.word.WordBase; 124 import org.graalvm.word.WordFactory; 125 126 import jdk.vm.ci.code.BytecodeFrame; 127 import jdk.vm.ci.code.Register; 128 import jdk.vm.ci.code.TargetDescription; 129 import jdk.vm.ci.meta.DeoptimizationAction; 130 import 
jdk.vm.ci.meta.DeoptimizationReason; 131 import jdk.vm.ci.meta.JavaType; 132 import jdk.vm.ci.meta.ResolvedJavaType; 133 134 /** 135 * Snippets used for implementing the monitorenter and monitorexit instructions. 136 * 137 * The locking algorithm used is described in the paper 138 * <a href="http://dl.acm.org/citation.cfm?id=1167515.1167496"> Eliminating synchronization-related 139 * atomic operations with biased locking and bulk rebiasing</a> by Kenneth Russell and David 140 * Detlefs. 141 * 142 * Comment below is reproduced from {@code markOop.hpp} for convenience: 143 * 144 * <pre> 145 * Bit-format of an object header (most significant first, big endian layout below): 146 * 32 bits: 147 * -------- 148 * hash:25 ------------>| age:4 biased_lock:1 lock:2 (normal object) 149 * JavaThread*:23 epoch:2 age:4 biased_lock:1 lock:2 (biased object) 150 * size:32 ------------------------------------------>| (CMS free block) 151 * PromotedObject*:29 ---------->| promo_bits:3 ----->| (CMS promoted object) 152 * 153 * 64 bits: 154 * -------- 155 * unused:25 hash:31 -->| unused:1 age:4 biased_lock:1 lock:2 (normal object) 156 * JavaThread*:54 epoch:2 unused:1 age:4 biased_lock:1 lock:2 (biased object) 157 * PromotedObject*:61 --------------------->| promo_bits:3 ----->| (CMS promoted object) 158 * size:64 ----------------------------------------------------->| (CMS free block) 159 * 160 * unused:25 hash:31 -->| cms_free:1 age:4 biased_lock:1 lock:2 (COOPs && normal object) 161 * JavaThread*:54 epoch:2 cms_free:1 age:4 biased_lock:1 lock:2 (COOPs && biased object) 162 * narrowOop:32 unused:24 cms_free:1 unused:4 promo_bits:3 ----->| (COOPs && CMS promoted object) 163 * unused:21 size:35 -->| cms_free:1 unused:7 ------------------>| (COOPs && CMS free block) 164 * 165 * - hash contains the identity hash value: largest value is 166 * 31 bits, see os::random(). 
Also, 64-bit vm's require 167 * a hash value no bigger than 32 bits because they will not 168 * properly generate a mask larger than that: see library_call.cpp 169 * and c1_CodePatterns_sparc.cpp. 170 * 171 * - the biased lock pattern is used to bias a lock toward a given 172 * thread. When this pattern is set in the low three bits, the lock 173 * is either biased toward a given thread or "anonymously" biased, 174 * indicating that it is possible for it to be biased. When the 175 * lock is biased toward a given thread, locking and unlocking can 176 * be performed by that thread without using atomic operations. 177 * When a lock's bias is revoked, it reverts back to the normal 178 * locking scheme described below. 179 * 180 * Note that we are overloading the meaning of the "unlocked" state 181 * of the header. Because we steal a bit from the age we can 182 * guarantee that the bias pattern will never be seen for a truly 183 * unlocked object. 184 * 185 * Note also that the biased state contains the age bits normally 186 * contained in the object header. Large increases in scavenge 187 * times were seen when these bits were absent and an arbitrary age 188 * assigned to all biased objects, because they tended to consume a 189 * significant fraction of the eden semispaces and were not 190 * promoted promptly, causing an increase in the amount of copying 191 * performed. The runtime system aligns all JavaThread* pointers to 192 * a very large value (currently 128 bytes (32bVM) or 256 bytes (64bVM)) 193 * to make room for the age bits & the epoch bits (used in support of 194 * biased locking), and for the CMS "freeness" bit in the 64bVM (+COOPs). 195 * 196 * [JavaThread* | epoch | age | 1 | 01] lock is biased toward given thread 197 * [0 | epoch | age | 1 | 01] lock is anonymously biased 198 * 199 * - the two lock bits are used to describe three states: locked/unlocked and monitor. 
200 * 201 * [ptr | 00] locked ptr points to real header on stack 202 * [header | 0 | 01] unlocked regular object header 203 * [ptr | 10] monitor inflated lock (header is wapped out) 204 * [ptr | 11] marked used by markSweep to mark an object 205 * not valid at any other time 206 * 207 * We assume that stack/thread pointers have the lowest two bits cleared. 208 * </pre> 209 * 210 * Note that {@code Thread::allocate} enforces {@code JavaThread} objects to be aligned 211 * appropriately to comply with the layouts above. 212 */ 213 public class MonitorSnippets implements Snippets { 214 215 private static final boolean PROFILE_CONTEXT = false; 216 217 @Fold 218 static boolean doProfile(OptionValues options) { 219 return ProfileMonitors.getValue(options); 220 } 221 222 @Snippet 223 public static void monitorenter(Object object, KlassPointer hub, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter Register stackPointerRegister, 224 @ConstantParameter boolean trace, @ConstantParameter OptionValues options, @ConstantParameter Counters counters) { 225 verifyOop(object); 226 227 // Load the mark word - this includes a null-check on object 228 final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG)); 229 230 final Word lock = beginLockScope(lockDepth); 231 232 Pointer objectPointer = Word.objectToTrackedPointer(object); 233 trace(trace, " object: 0x%016lx\n", objectPointer); 234 trace(trace, " lock: 0x%016lx\n", lock); 235 trace(trace, " mark: 0x%016lx\n", mark); 236 237 incCounter(options); 238 239 if (useBiasedLocking(INJECTED_VMCONFIG)) { 240 if (tryEnterBiased(object, hub, lock, mark, threadRegister, trace, options, counters)) { 241 return; 242 } 243 // not biased, fall-through 244 } 245 if (inlineFastLockSupported(options) && probability(SLOW_PATH_PROBABILITY, mark.and(monitorMask(INJECTED_VMCONFIG)).notEqual(0))) { 246 // Inflated case 247 if (tryEnterInflated(object, lock, mark, threadRegister, trace, 
options, counters)) { 248 return; 249 } 250 } else { 251 // Create the unlocked mark word pattern 252 Word unlockedMark = mark.or(unlockedMask(INJECTED_VMCONFIG)); 253 trace(trace, " unlockedMark: 0x%016lx\n", unlockedMark); 254 255 // Copy this unlocked mark word into the lock slot on the stack 256 lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), unlockedMark, DISPLACED_MARK_WORD_LOCATION); 257 258 // make sure previous store does not float below compareAndSwap 259 MembarNode.memoryBarrier(STORE_STORE); 260 261 // Test if the object's mark word is unlocked, and if so, store the 262 // (address of) the lock slot into the object's mark word. 263 Word currentMark = objectPointer.compareAndSwapWord(markOffset(INJECTED_VMCONFIG), unlockedMark, lock, MARK_WORD_LOCATION); 264 if (probability(FAST_PATH_PROBABILITY, currentMark.equal(unlockedMark))) { 265 traceObject(trace, "+lock{cas}", object, true, options); 266 counters.lockCas.inc(); 267 AcquiredCASLockNode.mark(object); 268 return; 269 } else { 270 trace(trace, " currentMark: 0x%016lx\n", currentMark); 271 // The mark word in the object header was not the same. 272 // Either the object is locked by another thread or is already locked 273 // by the current thread. 
The latter is true if the mark word 274 // is a stack pointer into the current thread's stack, i.e.: 275 // 276 // 1) (currentMark & aligned_mask) == 0 277 // 2) rsp <= currentMark 278 // 3) currentMark <= rsp + page_size 279 // 280 // These 3 tests can be done by evaluating the following expression: 281 // 282 // (currentMark - rsp) & (aligned_mask - page_size) 283 // 284 // assuming both the stack pointer and page_size have their least 285 // significant 2 bits cleared and page_size is a power of 2 286 final Word alignedMask = WordFactory.unsigned(wordSize() - 1); 287 final Word stackPointer = registerAsWord(stackPointerRegister).add(config(INJECTED_VMCONFIG).stackBias); 288 if (probability(FAST_PATH_PROBABILITY, currentMark.subtract(stackPointer).and(alignedMask.subtract(pageSize())).equal(0))) { 289 // Recursively locked => write 0 to the lock slot 290 lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), WordFactory.zero(), DISPLACED_MARK_WORD_LOCATION); 291 traceObject(trace, "+lock{cas:recursive}", object, true, options); 292 counters.lockCasRecursive.inc(); 293 return; 294 } 295 traceObject(trace, "+lock{stub:failed-cas/stack}", object, true, options); 296 counters.lockStubFailedCas.inc(); 297 } 298 } 299 // slow-path runtime-call 300 monitorenterStubC(MONITORENTER, object, lock); 301 } 302 303 private static boolean tryEnterBiased(Object object, KlassPointer hub, Word lock, Word mark, Register threadRegister, boolean trace, OptionValues options, Counters counters) { 304 // See whether the lock is currently biased toward our thread and 305 // whether the epoch is still valid. 306 // Note that the runtime guarantees sufficient alignment of JavaThread 307 // pointers to allow age to be placed into low bits. 308 final Word biasableLockBits = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)); 309 310 // Check whether the bias pattern is present in the object's mark word 311 // and the bias owner and the epoch are both still current. 
312 final Word prototypeMarkWord = hub.readWord(prototypeMarkWordOffset(INJECTED_VMCONFIG), PROTOTYPE_MARK_WORD_LOCATION); 313 final Word thread = registerAsWord(threadRegister); 314 final Word tmp = prototypeMarkWord.or(thread).xor(mark).and(~ageMaskInPlace(INJECTED_VMCONFIG)); 315 trace(trace, "prototypeMarkWord: 0x%016lx\n", prototypeMarkWord); 316 trace(trace, " thread: 0x%016lx\n", thread); 317 trace(trace, " tmp: 0x%016lx\n", tmp); 318 if (probability(FAST_PATH_PROBABILITY, tmp.equal(0))) { 319 // Object is already biased to current thread -> done 320 traceObject(trace, "+lock{bias:existing}", object, true, options); 321 counters.lockBiasExisting.inc(); 322 FastAcquireBiasedLockNode.mark(object); 323 return true; 324 } 325 326 // Now check to see whether biasing is enabled for this object 327 if (probability(NOT_FREQUENT_PROBABILITY, biasableLockBits.equal(WordFactory.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) { 328 Pointer objectPointer = Word.objectToTrackedPointer(object); 329 // At this point we know that the mark word has the bias pattern and 330 // that we are not the bias owner in the current epoch. We need to 331 // figure out more details about the state of the mark word in order to 332 // know what operations can be legally performed on the object's 333 // mark word. 334 335 // If the low three bits in the xor result aren't clear, that means 336 // the prototype header is no longer biasable and we have to revoke 337 // the bias on this object. 338 if (probability(FREQUENT_PROBABILITY, tmp.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(0))) { 339 // Biasing is still enabled for object's type. See whether the 340 // epoch of the current bias is still valid, meaning that the epoch 341 // bits of the mark word are equal to the epoch bits of the 342 // prototype mark word. (Note that the prototype mark word's epoch bits 343 // only change at a safepoint.) If not, attempt to rebias the object 344 // toward the current thread. 
Note that we must be absolutely sure 345 // that the current epoch is invalid in order to do this because 346 // otherwise the manipulations it performs on the mark word are 347 // illegal. 348 if (probability(FREQUENT_PROBABILITY, tmp.and(epochMaskInPlace(INJECTED_VMCONFIG)).equal(0))) { 349 // The epoch of the current bias is still valid but we know nothing 350 // about the owner; it might be set or it might be clear. Try to 351 // acquire the bias of the object using an atomic operation. If this 352 // fails we will go in to the runtime to revoke the object's bias. 353 // Note that we first construct the presumed unbiased header so we 354 // don't accidentally blow away another thread's valid bias. 355 Word unbiasedMark = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG) | ageMaskInPlace(INJECTED_VMCONFIG) | epochMaskInPlace(INJECTED_VMCONFIG)); 356 Word biasedMark = unbiasedMark.or(thread); 357 trace(trace, " unbiasedMark: 0x%016lx\n", unbiasedMark); 358 trace(trace, " biasedMark: 0x%016lx\n", biasedMark); 359 if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), unbiasedMark, biasedMark, MARK_WORD_LOCATION))) { 360 // Object is now biased to current thread -> done 361 traceObject(trace, "+lock{bias:acquired}", object, true, options); 362 counters.lockBiasAcquired.inc(); 363 return true; 364 } 365 // If the biasing toward our thread failed, this means that another thread 366 // owns the bias and we need to revoke that bias. The revocation will occur 367 // in the interpreter runtime. 368 traceObject(trace, "+lock{stub:revoke}", object, true, options); 369 counters.lockStubRevoke.inc(); 370 } else { 371 // At this point we know the epoch has expired, meaning that the 372 // current bias owner, if any, is actually invalid. 
Under these 373 // circumstances _only_, are we allowed to use the current mark word 374 // value as the comparison value when doing the CAS to acquire the 375 // bias in the current epoch. In other words, we allow transfer of 376 // the bias from one thread to another directly in this situation. 377 Word biasedMark = prototypeMarkWord.or(thread); 378 trace(trace, " biasedMark: 0x%016lx\n", biasedMark); 379 if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), mark, biasedMark, MARK_WORD_LOCATION))) { 380 // Object is now biased to current thread -> done 381 traceObject(trace, "+lock{bias:transfer}", object, true, options); 382 counters.lockBiasTransfer.inc(); 383 return true; 384 } 385 // If the biasing toward our thread failed, then another thread 386 // succeeded in biasing it toward itself and we need to revoke that 387 // bias. The revocation will occur in the runtime in the slow case. 388 traceObject(trace, "+lock{stub:epoch-expired}", object, true, options); 389 counters.lockStubEpochExpired.inc(); 390 } 391 // slow-path runtime-call 392 monitorenterStubC(MONITORENTER, object, lock); 393 return true; 394 } else { 395 // The prototype mark word doesn't have the bias bit set any 396 // more, indicating that objects of this data type are not supposed 397 // to be biased any more. We are going to try to reset the mark of 398 // this object to the prototype value and fall through to the 399 // CAS-based locking scheme. Note that if our CAS fails, it means 400 // that another thread raced us for the privilege of revoking the 401 // bias of this particular object, so it's okay to continue in the 402 // normal locking code. 
403 Word result = objectPointer.compareAndSwapWord(markOffset(INJECTED_VMCONFIG), mark, prototypeMarkWord, MARK_WORD_LOCATION); 404 405 // Fall through to the normal CAS-based lock, because no matter what 406 // the result of the above CAS, some thread must have succeeded in 407 // removing the bias bit from the object's header. 408 409 if (ENABLE_BREAKPOINT) { 410 bkpt(object, mark, tmp, result); 411 } 412 counters.revokeBias.inc(); 413 return false; 414 } 415 } else { 416 // Biasing not enabled -> fall through to lightweight locking 417 counters.unbiasable.inc(); 418 return false; 419 } 420 } 421 422 @Fold 423 public static boolean useFastInflatedLocking(OptionValues options) { 424 return SimpleFastInflatedLocking.getValue(options); 425 } 426 427 private static boolean inlineFastLockSupported(OptionValues options) { 428 return inlineFastLockSupported(INJECTED_VMCONFIG, options); 429 } 430 431 private static boolean inlineFastLockSupported(GraalHotSpotVMConfig config, OptionValues options) { 432 return useFastInflatedLocking(options) && monitorMask(config) >= 0 && objectMonitorOwnerOffset(config) >= 0; 433 } 434 435 private static boolean tryEnterInflated(Object object, Word lock, Word mark, Register threadRegister, boolean trace, OptionValues options, Counters counters) { 436 // write non-zero value to lock slot 437 lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), lock, DISPLACED_MARK_WORD_LOCATION); 438 // mark is a pointer to the ObjectMonitor + monitorMask 439 Word monitor = mark.subtract(monitorMask(INJECTED_VMCONFIG)); 440 int ownerOffset = objectMonitorOwnerOffset(INJECTED_VMCONFIG); 441 Word owner = monitor.readWord(ownerOffset, OBJECT_MONITOR_OWNER_LOCATION); 442 if (probability(FREQUENT_PROBABILITY, owner.equal(0))) { 443 // it appears unlocked (owner == 0) 444 if (probability(FREQUENT_PROBABILITY, monitor.logicCompareAndSwapWord(ownerOffset, owner, registerAsWord(threadRegister), OBJECT_MONITOR_OWNER_LOCATION))) { 445 // success 446 
traceObject(trace, "+lock{inflated:cas}", object, true, options); 447 counters.inflatedCas.inc(); 448 return true; 449 } else { 450 traceObject(trace, "+lock{stub:inflated:failed-cas}", object, true, options); 451 counters.inflatedFailedCas.inc(); 452 } 453 } else { 454 traceObject(trace, "+lock{stub:inflated:owned}", object, true, options); 455 counters.inflatedOwned.inc(); 456 } 457 return false; 458 } 459 460 /** 461 * Calls straight out to the monitorenter stub. 462 */ 463 @Snippet 464 public static void monitorenterStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace, @ConstantParameter OptionValues options) { 465 verifyOop(object); 466 incCounter(options); 467 if (object == null) { 468 DeoptimizeNode.deopt(DeoptimizationAction.InvalidateReprofile, DeoptimizationReason.NullCheckException); 469 } 470 // BeginLockScope nodes do not read from object so a use of object 471 // cannot float about the null check above 472 final Word lock = beginLockScope(lockDepth); 473 traceObject(trace, "+lock{stub}", object, true, options); 474 monitorenterStubC(MONITORENTER, object, lock); 475 } 476 477 @Snippet 478 public static void monitorexit(Object object, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter boolean trace, 479 @ConstantParameter OptionValues options, @ConstantParameter Counters counters) { 480 trace(trace, " object: 0x%016lx\n", Word.objectToTrackedPointer(object)); 481 final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG)); 482 if (useBiasedLocking(INJECTED_VMCONFIG)) { 483 // Check for biased locking unlock case, which is a no-op 484 // Note: we do not have to check the thread ID for two reasons. 485 // First, the interpreter checks for IllegalMonitorStateException at 486 // a higher level. Second, if the bias was revoked while we held the 487 // lock, the object could not be rebiased toward another thread, so 488 // the bias bit would be clear. 
489 trace(trace, " mark: 0x%016lx\n", mark); 490 if (probability(FREQUENT_PROBABILITY, mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(WordFactory.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) { 491 endLockScope(); 492 decCounter(options); 493 traceObject(trace, "-lock{bias}", object, false, options); 494 counters.unlockBias.inc(); 495 return; 496 } 497 } 498 499 final Word lock = CurrentLockNode.currentLock(lockDepth); 500 501 // Load displaced mark 502 final Word displacedMark = lock.readWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), DISPLACED_MARK_WORD_LOCATION); 503 trace(trace, " displacedMark: 0x%016lx\n", displacedMark); 504 505 if (probability(NOT_LIKELY_PROBABILITY, displacedMark.equal(0))) { 506 // Recursive locking => done 507 traceObject(trace, "-lock{recursive}", object, false, options); 508 counters.unlockCasRecursive.inc(); 509 } else { 510 if (!tryExitInflated(object, mark, lock, threadRegister, trace, options, counters)) { 511 verifyOop(object); 512 // Test if object's mark word is pointing to the displaced mark word, and if so, 513 // restore 514 // the displaced mark in the object - if the object's mark word is not pointing to 515 // the displaced mark word, do unlocking via runtime call. 
516 Pointer objectPointer = Word.objectToTrackedPointer(object); 517 if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), lock, displacedMark, MARK_WORD_LOCATION))) { 518 traceObject(trace, "-lock{cas}", object, false, options); 519 counters.unlockCas.inc(); 520 } else { 521 // The object's mark word was not pointing to the displaced header 522 traceObject(trace, "-lock{stub}", object, false, options); 523 counters.unlockStub.inc(); 524 monitorexitStubC(MONITOREXIT, object, lock); 525 } 526 } 527 } 528 endLockScope(); 529 decCounter(options); 530 } 531 532 private static boolean inlineFastUnlockSupported(OptionValues options) { 533 return inlineFastUnlockSupported(INJECTED_VMCONFIG, options); 534 } 535 536 private static boolean inlineFastUnlockSupported(GraalHotSpotVMConfig config, OptionValues options) { 537 return useFastInflatedLocking(options) && objectMonitorEntryListOffset(config) >= 0 && objectMonitorCxqOffset(config) >= 0 && monitorMask(config) >= 0 && 538 objectMonitorOwnerOffset(config) >= 0 && objectMonitorRecursionsOffset(config) >= 0; 539 } 540 541 private static boolean tryExitInflated(Object object, Word mark, Word lock, Register threadRegister, boolean trace, OptionValues options, Counters counters) { 542 if (!inlineFastUnlockSupported(options)) { 543 return false; 544 } 545 if (probability(SLOW_PATH_PROBABILITY, mark.and(monitorMask(INJECTED_VMCONFIG)).notEqual(0))) { 546 // Inflated case 547 // mark is a pointer to the ObjectMonitor + monitorMask 548 Word monitor = mark.subtract(monitorMask(INJECTED_VMCONFIG)); 549 int ownerOffset = objectMonitorOwnerOffset(INJECTED_VMCONFIG); 550 Word owner = monitor.readWord(ownerOffset, OBJECT_MONITOR_OWNER_LOCATION); 551 int recursionsOffset = objectMonitorRecursionsOffset(INJECTED_VMCONFIG); 552 Word recursions = monitor.readWord(recursionsOffset, OBJECT_MONITOR_RECURSION_LOCATION); 553 Word thread = registerAsWord(threadRegister); 554 if 
(probability(FAST_PATH_PROBABILITY, owner.xor(thread).or(recursions).equal(0))) { 555 // owner == thread && recursions == 0 556 int cxqOffset = objectMonitorCxqOffset(INJECTED_VMCONFIG); 557 Word cxq = monitor.readWord(cxqOffset, OBJECT_MONITOR_CXQ_LOCATION); 558 int entryListOffset = objectMonitorEntryListOffset(INJECTED_VMCONFIG); 559 Word entryList = monitor.readWord(entryListOffset, OBJECT_MONITOR_ENTRY_LIST_LOCATION); 560 if (probability(FREQUENT_PROBABILITY, cxq.or(entryList).equal(0))) { 561 // cxq == 0 && entryList == 0 562 // Nobody is waiting, success 563 // release_store 564 MembarNode.memoryBarrier(LOAD_STORE | STORE_STORE); 565 monitor.writeWord(ownerOffset, WordFactory.zero()); 566 traceObject(trace, "-lock{inflated:simple}", object, false, options); 567 counters.unlockInflatedSimple.inc(); 568 return true; 569 } 570 } 571 counters.unlockStubInflated.inc(); 572 traceObject(trace, "-lock{stub:inflated}", object, false, options); 573 monitorexitStubC(MONITOREXIT, object, lock); 574 return true; 575 } 576 return false; 577 } 578 579 /** 580 * Calls straight out to the monitorexit stub. 581 */ 582 @Snippet 583 public static void monitorexitStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace, @ConstantParameter OptionValues options) { 584 verifyOop(object); 585 traceObject(trace, "-lock{stub}", object, false, options); 586 final Word lock = CurrentLockNode.currentLock(lockDepth); 587 monitorexitStubC(MONITOREXIT, object, lock); 588 endLockScope(); 589 decCounter(options); 590 } 591 592 public static void traceObject(boolean enabled, String action, Object object, boolean enter, OptionValues options) { 593 if (doProfile(options)) { 594 DynamicCounterNode.counter(action, enter ? 
"number of monitor enters" : "number of monitor exits", 1, PROFILE_CONTEXT); 595 } 596 if (enabled) { 597 Log.print(action); 598 Log.print(' '); 599 Log.printlnObject(object); 600 } 601 } 602 603 public static void trace(boolean enabled, String format, WordBase value) { 604 if (enabled) { 605 Log.printf(format, value.rawValue()); 606 } 607 } 608 609 /** 610 * Leaving the breakpoint code in to provide an example of how to use the {@link BreakpointNode} 611 * intrinsic. 612 */ 613 private static final boolean ENABLE_BREAKPOINT = false; 614 615 private static final LocationIdentity MONITOR_COUNTER_LOCATION = NamedLocationIdentity.mutable("MonitorCounter"); 616 617 @NodeIntrinsic(BreakpointNode.class) 618 static native void bkpt(Object object, Word mark, Word tmp, Word value); 619 620 @Fold 621 static boolean verifyBalancedMonitors(OptionValues options) { 622 return VerifyBalancedMonitors.getValue(options); 623 } 624 625 public static void incCounter(OptionValues options) { 626 if (verifyBalancedMonitors(options)) { 627 final Word counter = MonitorCounterNode.counter(); 628 final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION); 629 counter.writeInt(0, count + 1, MONITOR_COUNTER_LOCATION); 630 } 631 } 632 633 public static void decCounter(OptionValues options) { 634 if (verifyBalancedMonitors(options)) { 635 final Word counter = MonitorCounterNode.counter(); 636 final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION); 637 counter.writeInt(0, count - 1, MONITOR_COUNTER_LOCATION); 638 } 639 } 640 641 @Snippet 642 private static void initCounter() { 643 final Word counter = MonitorCounterNode.counter(); 644 counter.writeInt(0, 0, MONITOR_COUNTER_LOCATION); 645 } 646 647 @Snippet 648 private static void checkCounter(@ConstantParameter String errMsg) { 649 final Word counter = MonitorCounterNode.counter(); 650 final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION); 651 if (count != 0) { 652 vmError(errMsg, count); 653 } 654 } 655 656 public static 
class Counters {
        /**
         * Counters for the various paths for acquiring a lock. The counters whose names start with
         * {@code "lock"} are mutually exclusive. The other counters are for paths that may be
         * shared.
         */
        public final SnippetCounter lockBiasExisting;
        public final SnippetCounter lockBiasAcquired;
        public final SnippetCounter lockBiasTransfer;
        public final SnippetCounter lockCas;
        public final SnippetCounter lockCasRecursive;
        public final SnippetCounter lockStubEpochExpired;
        public final SnippetCounter lockStubRevoke;
        public final SnippetCounter lockStubFailedCas;
        public final SnippetCounter inflatedCas;
        public final SnippetCounter inflatedFailedCas;
        public final SnippetCounter inflatedOwned;
        public final SnippetCounter unbiasable;
        public final SnippetCounter revokeBias;

        /**
         * Counters for the various paths for releasing a lock. The counters whose names start with
         * {@code "unlock"} are mutually exclusive. The other counters are for paths that may be
         * shared.
         */
        public final SnippetCounter unlockBias;
        public final SnippetCounter unlockCas;
        public final SnippetCounter unlockCasRecursive;
        public final SnippetCounter unlockStub;
        public final SnippetCounter unlockStubInflated;
        public final SnippetCounter unlockInflatedSimple;

        public Counters(SnippetCounter.Group.Factory factory) {
            // Acquire and release counters live in separate groups so they can be reported
            // independently.
            SnippetCounter.Group enter = factory.createSnippetCounterGroup("MonitorEnters");
            SnippetCounter.Group exit = factory.createSnippetCounterGroup("MonitorExits");
            lockBiasExisting = new SnippetCounter(enter, "lock{bias:existing}", "bias-locked previously biased object");
            lockBiasAcquired = new SnippetCounter(enter, "lock{bias:acquired}", "bias-locked newly biased object");
            lockBiasTransfer = new SnippetCounter(enter, "lock{bias:transfer}", "bias-locked, biased transferred");
            lockCas = new SnippetCounter(enter, "lock{cas}", "cas-locked an object");
            lockCasRecursive = new SnippetCounter(enter, "lock{cas:recursive}", "cas-locked, recursive");
            lockStubEpochExpired = new SnippetCounter(enter, "lock{stub:epoch-expired}", "stub-locked, epoch expired");
            lockStubRevoke = new SnippetCounter(enter, "lock{stub:revoke}", "stub-locked, biased revoked");
            lockStubFailedCas = new SnippetCounter(enter, "lock{stub:failed-cas/stack}", "stub-locked, failed cas and stack locking");
            inflatedCas = new SnippetCounter(enter, "lock{inflated:cas}", "heavyweight-locked, cas-locked");
            inflatedFailedCas = new SnippetCounter(enter, "lock{inflated:failed-cas}", "heavyweight-locked, failed cas");
            inflatedOwned = new SnippetCounter(enter, "lock{inflated:owned}", "heavyweight-locked, already owned");
            unbiasable = new SnippetCounter(enter, "unbiasable", "object with unbiasable type");
            revokeBias = new SnippetCounter(enter, "revokeBias", "object had bias revoked");

            unlockBias = new SnippetCounter(exit, "unlock{bias}", "bias-unlocked an object");
            unlockCas = new SnippetCounter(exit,
"unlock{cas}", "cas-unlocked an object");
            unlockCasRecursive = new SnippetCounter(exit, "unlock{cas:recursive}", "cas-unlocked an object, recursive");
            unlockStub = new SnippetCounter(exit, "unlock{stub}", "stub-unlocked an object");
            unlockStubInflated = new SnippetCounter(exit, "unlock{stub:inflated}", "stub-unlocked an object with inflated monitor");
            unlockInflatedSimple = new SnippetCounter(exit, "unlock{inflated}", "unlocked an object monitor");
        }
    }

    /**
     * Snippet templates that lower {@code RawMonitorEnterNode} and {@code MonitorExitNode} to
     * either the fast-locking snippets or the stub-only snippets, depending on
     * {@code useFastLocking}.
     */
    public static class Templates extends AbstractTemplates {

        private final SnippetInfo monitorenter = snippet(MonitorSnippets.class, "monitorenter");
        private final SnippetInfo monitorexit = snippet(MonitorSnippets.class, "monitorexit");
        private final SnippetInfo monitorenterStub = snippet(MonitorSnippets.class, "monitorenterStub");
        private final SnippetInfo monitorexitStub = snippet(MonitorSnippets.class, "monitorexitStub");
        private final SnippetInfo initCounter = snippet(MonitorSnippets.class, "initCounter");
        private final SnippetInfo checkCounter = snippet(MonitorSnippets.class, "checkCounter");

        // Selects fast-locking snippets vs. direct stub calls at lowering time.
        private final boolean useFastLocking;
        public final Counters counters;

        public Templates(OptionValues options, SnippetCounter.Group.Factory factory, HotSpotProviders providers, TargetDescription target, boolean useFastLocking) {
            super(options, providers, providers.getSnippetReflection(), target);
            this.useFastLocking = useFastLocking;

            this.counters = new Counters(factory);
        }

        /**
         * Lowers a monitor-enter by instantiating either the {@code monitorenter} (fast-locking)
         * or {@code monitorenterStub} snippet in place of the node.
         */
        public void lower(RawMonitorEnterNode monitorenterNode, HotSpotRegistersProvider registers, LoweringTool tool) {
            StructuredGraph graph = monitorenterNode.graph();
            // Inserts the balanced-monitor init/check calls on the first lowering if enabled.
            checkBalancedMonitors(graph, tool);

            assert ((ObjectStamp) monitorenterNode.object().stamp()).nonNull();

            Arguments args;
            if (useFastLocking) {
                args = new Arguments(monitorenter, graph.getGuardsStage(), tool.getLoweringStage());
                args.add("object", monitorenterNode.object());
                args.add("hub", monitorenterNode.getHub());
                args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
                args.addConst("threadRegister", registers.getThreadRegister());
                args.addConst("stackPointerRegister", registers.getStackPointerRegister());
                args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph));
                args.addConst("options", graph.getOptions());
                args.addConst("counters", counters);
            } else {
                args = new Arguments(monitorenterStub, graph.getGuardsStage(), tool.getLoweringStage());
                args.add("object", monitorenterNode.object());
                args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
                args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph));
                args.addConst("options", graph.getOptions());
                args.addConst("counters", counters);
            }

            template(args).instantiate(providers.getMetaAccess(), monitorenterNode, DEFAULT_REPLACER, args);
        }

        /**
         * Lowers a monitor-exit by instantiating either the {@code monitorexit} (fast-locking)
         * or {@code monitorexitStub} snippet in place of the node.
         */
        public void lower(MonitorExitNode monitorexitNode, HotSpotRegistersProvider registers, LoweringTool tool) {
            StructuredGraph graph = monitorexitNode.graph();

            Arguments args;
            if (useFastLocking) {
                args = new Arguments(monitorexit, graph.getGuardsStage(), tool.getLoweringStage());
            } else {
                args = new Arguments(monitorexitStub, graph.getGuardsStage(), tool.getLoweringStage());
            }
            args.add("object", monitorexitNode.object());
            args.addConst("lockDepth", monitorexitNode.getMonitorId().getLockDepth());
            // NOTE(review): "threadRegister" is bound unconditionally, but the monitorexitStub
            // snippet visible in this file takes only (object, lockDepth, trace, options) —
            // confirm the non-fast-locking path tolerates (or needs) this extra binding.
            args.addConst("threadRegister", registers.getThreadRegister());
            args.addConst("trace", isTracingEnabledForType(monitorexitNode.object()) || isTracingEnabledForMethod(graph));
            args.addConst("options", graph.getOptions());
            args.addConst("counters", counters);

            template(args).instantiate(providers.getMetaAccess(), monitorexitNode, DEFAULT_REPLACER, args);
        }

        /**
         * Returns true if monitor tracing is enabled for {@code object}'s stamp type: the
         * TraceMonitorsTypeFilter option must be set, and either be empty (match everything) or
         * be a substring of the type's name.
         */
        public static boolean isTracingEnabledForType(ValueNode object) {
            ResolvedJavaType type = StampTool.typeOrNull(object.stamp());
            String filter = TraceMonitorsTypeFilter.getValue(object.getOptions());
            if (filter == null) {
                return false;
            } else {
                if (filter.length() == 0) {
                    return true;
                }
                if (type == null) {
                    return false;
                }
                return (type.getName().contains(filter));
            }
        }

        /**
         * Returns true if monitor tracing is enabled for the graph's method: the
         * TraceMonitorsMethodFilter option must be set, and either be empty (match everything) or
         * be a substring of the method's {@code %H.%n} format.
         */
        public static boolean isTracingEnabledForMethod(StructuredGraph graph) {
            String filter = TraceMonitorsMethodFilter.getValue(graph.getOptions());
            if (filter == null) {
                return false;
            } else {
                if (filter.length() == 0) {
                    return true;
                }
                if (graph.method() == null) {
                    return false;
                }
                return (graph.method().format("%H.%n").contains(filter));
            }
        }

        /**
         * If balanced monitor checking is enabled then nodes are inserted at the start and all
         * return points of the graph to initialize and check the monitor counter respectively.
         */
        private void checkBalancedMonitors(StructuredGraph graph, LoweringTool tool) {
            if (VerifyBalancedMonitors.getValue(options)) {
                NodeIterable<MonitorCounterNode> nodes = graph.getNodes().filter(MonitorCounterNode.class);
                if (nodes.isEmpty()) {
                    // Only insert the nodes if this is the first monitorenter being lowered.
                    // Insert an inlined call to initCounter right after the graph start.
                    JavaType returnType = initCounter.getMethod().getSignature().getReturnType(initCounter.getMethod().getDeclaringClass());
                    StampPair returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
                    MethodCallTargetNode callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, initCounter.getMethod(), new ValueNode[0], returnStamp, null));
                    InvokeNode invoke = graph.add(new InvokeNode(callTarget, 0));
                    invoke.setStateAfter(graph.start().stateAfter());
                    graph.addAfterFixed(graph.start(), invoke);

                    StructuredGraph inlineeGraph = providers.getReplacements().getSnippet(initCounter.getMethod(), null);
                    InliningUtil.inline(invoke, inlineeGraph, false, null);

                    // Insert an inlined call to checkCounter before every return so an unbalanced
                    // count is reported with the method's name.
                    List<ReturnNode> rets = graph.getNodes(ReturnNode.TYPE).snapshot();
                    for (ReturnNode ret : rets) {
                        returnType = checkCounter.getMethod().getSignature().getReturnType(checkCounter.getMethod().getDeclaringClass());
                        String msg = "unbalanced monitors in " + graph.method().format("%H.%n(%p)") + ", count = %d";
                        ConstantNode errMsg = ConstantNode.forConstant(tool.getConstantReflection().forString(msg), providers.getMetaAccess(), graph);
                        returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
                        callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, checkCounter.getMethod(), new ValueNode[]{errMsg}, returnStamp, null));
                        invoke = graph.add(new InvokeNode(callTarget, 0));
                        Bytecode code = new ResolvedJavaMethodBytecode(graph.method());
                        FrameState stateAfter = new FrameState(null, code, BytecodeFrame.AFTER_BCI, new ValueNode[0], new ValueNode[0], 0, new ValueNode[0], null, false, false);
                        invoke.setStateAfter(graph.add(stateAfter));
                        graph.addBeforeFixed(ret, invoke);

                        Arguments args = new Arguments(checkCounter, graph.getGuardsStage(), tool.getLoweringStage());
                        args.addConst("errMsg", msg);
                        inlineeGraph = template(args).copySpecializedGraph();
                        InliningUtil.inline(invoke, inlineeGraph, false, null);
                    }
                }
            }
        }
    }

    // Foreign-call descriptors for the runtime monitorenter/monitorexit stubs.
    public static final ForeignCallDescriptor MONITORENTER = new ForeignCallDescriptor("monitorenter", void.class, Object.class, Word.class);
    public static final ForeignCallDescriptor MONITOREXIT = new ForeignCallDescriptor("monitorexit", void.class, Object.class, Word.class);

    @NodeIntrinsic(ForeignCallNode.class)
    private static native void monitorenterStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);

    @NodeIntrinsic(ForeignCallNode.class)
    public static native void monitorexitStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);
}