1 /* 2 * Copyright (c) 2012, 2018, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 */ 23 24 25 package org.graalvm.compiler.hotspot.replacements; 26 27 import static jdk.vm.ci.code.MemoryBarriers.LOAD_STORE; 28 import static jdk.vm.ci.code.MemoryBarriers.STORE_STORE; 29 import static org.graalvm.compiler.hotspot.GraalHotSpotVMConfig.INJECTED_VMCONFIG; 30 import static org.graalvm.compiler.hotspot.nodes.BeginLockScopeNode.beginLockScope; 31 import static org.graalvm.compiler.hotspot.nodes.EndLockScopeNode.endLockScope; 32 import static org.graalvm.compiler.hotspot.nodes.VMErrorNode.vmError; 33 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.DISPLACED_MARK_WORD_LOCATION; 34 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.MARK_WORD_LOCATION; 35 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_CXQ_LOCATION; 36 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_ENTRY_LIST_LOCATION; 37 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_OWNER_LOCATION; 38 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_RECURSION_LOCATION; 39 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.PROTOTYPE_MARK_WORD_LOCATION; 40 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.ageMaskInPlace; 41 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockMaskInPlace; 42 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockPattern; 43 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.config; 44 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.epochMaskInPlace; 45 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.loadWordFromObject; 46 import static 
org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.lockDisplacedMarkOffset; 47 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.markOffset; 48 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.monitorMask; 49 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorCxqOffset; 50 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorEntryListOffset; 51 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorOwnerOffset; 52 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorRecursionsOffset; 53 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.pageSize; 54 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.prototypeMarkWordOffset; 55 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.registerAsWord; 56 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.unlockedMask; 57 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.useBiasedLocking; 58 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.verifyOop; 59 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.wordSize; 60 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.ProfileMonitors; 61 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.SimpleFastInflatedLocking; 62 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsMethodFilter; 63 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsTypeFilter; 64 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.VerifyBalancedMonitors; 65 import static 
org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FAST_PATH_PROBABILITY; 66 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FREQUENT_PROBABILITY; 67 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.NOT_FREQUENT_PROBABILITY; 68 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.NOT_LIKELY_PROBABILITY; 69 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.SLOW_PATH_PROBABILITY; 70 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.VERY_FAST_PATH_PROBABILITY; 71 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.probability; 72 import static org.graalvm.compiler.replacements.SnippetTemplate.DEFAULT_REPLACER; 73 74 import java.util.List; 75 76 import org.graalvm.compiler.api.replacements.Fold; 77 import org.graalvm.compiler.api.replacements.Snippet; 78 import org.graalvm.compiler.api.replacements.Snippet.ConstantParameter; 79 import org.graalvm.compiler.bytecode.Bytecode; 80 import org.graalvm.compiler.bytecode.ResolvedJavaMethodBytecode; 81 import org.graalvm.compiler.core.common.spi.ForeignCallDescriptor; 82 import org.graalvm.compiler.core.common.type.ObjectStamp; 83 import org.graalvm.compiler.core.common.type.StampFactory; 84 import org.graalvm.compiler.core.common.type.StampPair; 85 import org.graalvm.compiler.debug.DebugHandlersFactory; 86 import org.graalvm.compiler.graph.Node.ConstantNodeParameter; 87 import org.graalvm.compiler.graph.Node.NodeIntrinsic; 88 import org.graalvm.compiler.graph.iterators.NodeIterable; 89 import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig; 90 import org.graalvm.compiler.hotspot.meta.HotSpotProviders; 91 import org.graalvm.compiler.hotspot.meta.HotSpotRegistersProvider; 92 import org.graalvm.compiler.hotspot.nodes.AcquiredCASLockNode; 93 import org.graalvm.compiler.hotspot.nodes.CurrentLockNode; 94 import org.graalvm.compiler.hotspot.nodes.FastAcquireBiasedLockNode; 95 import 
org.graalvm.compiler.hotspot.nodes.MonitorCounterNode; 96 import org.graalvm.compiler.hotspot.word.KlassPointer; 97 import org.graalvm.compiler.nodes.BreakpointNode; 98 import org.graalvm.compiler.nodes.CallTargetNode.InvokeKind; 99 import org.graalvm.compiler.nodes.ConstantNode; 100 import org.graalvm.compiler.nodes.DeoptimizeNode; 101 import org.graalvm.compiler.nodes.FrameState; 102 import org.graalvm.compiler.nodes.InvokeNode; 103 import org.graalvm.compiler.nodes.NamedLocationIdentity; 104 import org.graalvm.compiler.nodes.NodeView; 105 import org.graalvm.compiler.nodes.ReturnNode; 106 import org.graalvm.compiler.nodes.StructuredGraph; 107 import org.graalvm.compiler.nodes.ValueNode; 108 import org.graalvm.compiler.nodes.debug.DynamicCounterNode; 109 import org.graalvm.compiler.nodes.extended.ForeignCallNode; 110 import org.graalvm.compiler.nodes.extended.MembarNode; 111 import org.graalvm.compiler.nodes.java.MethodCallTargetNode; 112 import org.graalvm.compiler.nodes.java.MonitorExitNode; 113 import org.graalvm.compiler.nodes.java.RawMonitorEnterNode; 114 import org.graalvm.compiler.nodes.spi.LoweringTool; 115 import org.graalvm.compiler.nodes.type.StampTool; 116 import org.graalvm.compiler.options.OptionValues; 117 import org.graalvm.compiler.phases.common.inlining.InliningUtil; 118 import org.graalvm.compiler.replacements.Log; 119 import org.graalvm.compiler.replacements.SnippetCounter; 120 import org.graalvm.compiler.replacements.SnippetTemplate.AbstractTemplates; 121 import org.graalvm.compiler.replacements.SnippetTemplate.Arguments; 122 import org.graalvm.compiler.replacements.SnippetTemplate.SnippetInfo; 123 import org.graalvm.compiler.replacements.Snippets; 124 import org.graalvm.compiler.word.Word; 125 import jdk.internal.vm.compiler.word.LocationIdentity; 126 import jdk.internal.vm.compiler.word.Pointer; 127 import jdk.internal.vm.compiler.word.WordBase; 128 import jdk.internal.vm.compiler.word.WordFactory; 129 130 import 
jdk.vm.ci.code.BytecodeFrame; 131 import jdk.vm.ci.code.Register; 132 import jdk.vm.ci.code.TargetDescription; 133 import jdk.vm.ci.meta.DeoptimizationAction; 134 import jdk.vm.ci.meta.DeoptimizationReason; 135 import jdk.vm.ci.meta.JavaType; 136 import jdk.vm.ci.meta.ResolvedJavaType; 137 138 /** 139 * Snippets used for implementing the monitorenter and monitorexit instructions. 140 * 141 * The locking algorithm used is described in the paper 142 * <a href="http://dl.acm.org/citation.cfm?id=1167515.1167496"> Eliminating synchronization-related 143 * atomic operations with biased locking and bulk rebiasing</a> by Kenneth Russell and David 144 * Detlefs. 145 * 146 * Comment below is reproduced from {@code markOop.hpp} for convenience: 147 * 148 * <pre> 149 * Bit-format of an object header (most significant first, big endian layout below): 150 * 32 bits: 151 * -------- 152 * hash:25 ------------>| age:4 biased_lock:1 lock:2 (normal object) 153 * JavaThread*:23 epoch:2 age:4 biased_lock:1 lock:2 (biased object) 154 * size:32 ------------------------------------------>| (CMS free block) 155 * PromotedObject*:29 ---------->| promo_bits:3 ----->| (CMS promoted object) 156 * 157 * 64 bits: 158 * -------- 159 * unused:25 hash:31 -->| unused:1 age:4 biased_lock:1 lock:2 (normal object) 160 * JavaThread*:54 epoch:2 unused:1 age:4 biased_lock:1 lock:2 (biased object) 161 * PromotedObject*:61 --------------------->| promo_bits:3 ----->| (CMS promoted object) 162 * size:64 ----------------------------------------------------->| (CMS free block) 163 * 164 * unused:25 hash:31 -->| cms_free:1 age:4 biased_lock:1 lock:2 (COOPs && normal object) 165 * JavaThread*:54 epoch:2 cms_free:1 age:4 biased_lock:1 lock:2 (COOPs && biased object) 166 * narrowOop:32 unused:24 cms_free:1 unused:4 promo_bits:3 ----->| (COOPs && CMS promoted object) 167 * unused:21 size:35 -->| cms_free:1 unused:7 ------------------>| (COOPs && CMS free block) 168 * 169 * - hash contains the identity hash value: 
largest value is 170 * 31 bits, see os::random(). Also, 64-bit vm's require 171 * a hash value no bigger than 32 bits because they will not 172 * properly generate a mask larger than that: see library_call.cpp 173 * and c1_CodePatterns_sparc.cpp. 174 * 175 * - the biased lock pattern is used to bias a lock toward a given 176 * thread. When this pattern is set in the low three bits, the lock 177 * is either biased toward a given thread or "anonymously" biased, 178 * indicating that it is possible for it to be biased. When the 179 * lock is biased toward a given thread, locking and unlocking can 180 * be performed by that thread without using atomic operations. 181 * When a lock's bias is revoked, it reverts back to the normal 182 * locking scheme described below. 183 * 184 * Note that we are overloading the meaning of the "unlocked" state 185 * of the header. Because we steal a bit from the age we can 186 * guarantee that the bias pattern will never be seen for a truly 187 * unlocked object. 188 * 189 * Note also that the biased state contains the age bits normally 190 * contained in the object header. Large increases in scavenge 191 * times were seen when these bits were absent and an arbitrary age 192 * assigned to all biased objects, because they tended to consume a 193 * significant fraction of the eden semispaces and were not 194 * promoted promptly, causing an increase in the amount of copying 195 * performed. The runtime system aligns all JavaThread* pointers to 196 * a very large value (currently 128 bytes (32bVM) or 256 bytes (64bVM)) 197 * to make room for the age bits & the epoch bits (used in support of 198 * biased locking), and for the CMS "freeness" bit in the 64bVM (+COOPs). 199 * 200 * [JavaThread* | epoch | age | 1 | 01] lock is biased toward given thread 201 * [0 | epoch | age | 1 | 01] lock is anonymously biased 202 * 203 * - the two lock bits are used to describe three states: locked/unlocked and monitor. 
204 * 205 * [ptr | 00] locked ptr points to real header on stack 206 * [header | 0 | 01] unlocked regular object header 207 * [ptr | 10] monitor inflated lock (header is wapped out) 208 * [ptr | 11] marked used by markSweep to mark an object 209 * not valid at any other time 210 * 211 * We assume that stack/thread pointers have the lowest two bits cleared. 212 * </pre> 213 * 214 * Note that {@code Thread::allocate} enforces {@code JavaThread} objects to be aligned 215 * appropriately to comply with the layouts above. 216 */ 217 public class MonitorSnippets implements Snippets { 218 219 private static final boolean PROFILE_CONTEXT = false; 220 221 @Fold 222 static boolean doProfile(OptionValues options) { 223 return ProfileMonitors.getValue(options); 224 } 225 226 @Snippet 227 public static void monitorenter(Object object, KlassPointer hub, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter Register stackPointerRegister, 228 @ConstantParameter boolean trace, @ConstantParameter OptionValues options, @ConstantParameter Counters counters) { 229 verifyOop(object); 230 231 // Load the mark word - this includes a null-check on object 232 final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG)); 233 234 final Word lock = beginLockScope(lockDepth); 235 236 Pointer objectPointer = Word.objectToTrackedPointer(object); 237 trace(trace, " object: 0x%016lx\n", objectPointer); 238 trace(trace, " lock: 0x%016lx\n", lock); 239 trace(trace, " mark: 0x%016lx\n", mark); 240 241 incCounter(options); 242 243 if (useBiasedLocking(INJECTED_VMCONFIG)) { 244 if (tryEnterBiased(object, hub, lock, mark, threadRegister, trace, options, counters)) { 245 return; 246 } 247 // not biased, fall-through 248 } 249 if (inlineFastLockSupported(options) && probability(SLOW_PATH_PROBABILITY, mark.and(monitorMask(INJECTED_VMCONFIG)).notEqual(0))) { 250 // Inflated case 251 if (tryEnterInflated(object, lock, mark, threadRegister, trace, 
options, counters)) { 252 return; 253 } 254 } else { 255 // Create the unlocked mark word pattern 256 Word unlockedMark = mark.or(unlockedMask(INJECTED_VMCONFIG)); 257 trace(trace, " unlockedMark: 0x%016lx\n", unlockedMark); 258 259 // Copy this unlocked mark word into the lock slot on the stack 260 lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), unlockedMark, DISPLACED_MARK_WORD_LOCATION); 261 262 // make sure previous store does not float below compareAndSwap 263 MembarNode.memoryBarrier(STORE_STORE); 264 265 // Test if the object's mark word is unlocked, and if so, store the 266 // (address of) the lock slot into the object's mark word. 267 Word currentMark = objectPointer.compareAndSwapWord(markOffset(INJECTED_VMCONFIG), unlockedMark, lock, MARK_WORD_LOCATION); 268 if (probability(FAST_PATH_PROBABILITY, currentMark.equal(unlockedMark))) { 269 traceObject(trace, "+lock{cas}", object, true, options); 270 counters.lockCas.inc(); 271 AcquiredCASLockNode.mark(object); 272 return; 273 } else { 274 trace(trace, " currentMark: 0x%016lx\n", currentMark); 275 // The mark word in the object header was not the same. 276 // Either the object is locked by another thread or is already locked 277 // by the current thread. 
The latter is true if the mark word 278 // is a stack pointer into the current thread's stack, i.e.: 279 // 280 // 1) (currentMark & aligned_mask) == 0 281 // 2) rsp <= currentMark 282 // 3) currentMark <= rsp + page_size 283 // 284 // These 3 tests can be done by evaluating the following expression: 285 // 286 // (currentMark - rsp) & (aligned_mask - page_size) 287 // 288 // assuming both the stack pointer and page_size have their least 289 // significant 2 bits cleared and page_size is a power of 2 290 final Word alignedMask = WordFactory.unsigned(wordSize() - 1); 291 final Word stackPointer = registerAsWord(stackPointerRegister).add(config(INJECTED_VMCONFIG).stackBias); 292 if (probability(FAST_PATH_PROBABILITY, currentMark.subtract(stackPointer).and(alignedMask.subtract(pageSize())).equal(0))) { 293 // Recursively locked => write 0 to the lock slot 294 lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), WordFactory.zero(), DISPLACED_MARK_WORD_LOCATION); 295 traceObject(trace, "+lock{cas:recursive}", object, true, options); 296 counters.lockCasRecursive.inc(); 297 return; 298 } 299 traceObject(trace, "+lock{stub:failed-cas/stack}", object, true, options); 300 counters.lockStubFailedCas.inc(); 301 } 302 } 303 // slow-path runtime-call 304 monitorenterStubC(MONITORENTER, object, lock); 305 } 306 307 private static boolean tryEnterBiased(Object object, KlassPointer hub, Word lock, Word mark, Register threadRegister, boolean trace, OptionValues options, Counters counters) { 308 // See whether the lock is currently biased toward our thread and 309 // whether the epoch is still valid. 310 // Note that the runtime guarantees sufficient alignment of JavaThread 311 // pointers to allow age to be placed into low bits. 312 final Word biasableLockBits = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)); 313 314 // Check whether the bias pattern is present in the object's mark word 315 // and the bias owner and the epoch are both still current. 
316 final Word prototypeMarkWord = hub.readWord(prototypeMarkWordOffset(INJECTED_VMCONFIG), PROTOTYPE_MARK_WORD_LOCATION); 317 final Word thread = registerAsWord(threadRegister); 318 final Word tmp = prototypeMarkWord.or(thread).xor(mark).and(~ageMaskInPlace(INJECTED_VMCONFIG)); 319 trace(trace, "prototypeMarkWord: 0x%016lx\n", prototypeMarkWord); 320 trace(trace, " thread: 0x%016lx\n", thread); 321 trace(trace, " tmp: 0x%016lx\n", tmp); 322 if (probability(FAST_PATH_PROBABILITY, tmp.equal(0))) { 323 // Object is already biased to current thread -> done 324 traceObject(trace, "+lock{bias:existing}", object, true, options); 325 counters.lockBiasExisting.inc(); 326 FastAcquireBiasedLockNode.mark(object); 327 return true; 328 } 329 330 // Now check to see whether biasing is enabled for this object 331 if (probability(NOT_FREQUENT_PROBABILITY, biasableLockBits.equal(WordFactory.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) { 332 Pointer objectPointer = Word.objectToTrackedPointer(object); 333 // At this point we know that the mark word has the bias pattern and 334 // that we are not the bias owner in the current epoch. We need to 335 // figure out more details about the state of the mark word in order to 336 // know what operations can be legally performed on the object's 337 // mark word. 338 339 // If the low three bits in the xor result aren't clear, that means 340 // the prototype header is no longer biasable and we have to revoke 341 // the bias on this object. 342 if (probability(FREQUENT_PROBABILITY, tmp.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(0))) { 343 // Biasing is still enabled for object's type. See whether the 344 // epoch of the current bias is still valid, meaning that the epoch 345 // bits of the mark word are equal to the epoch bits of the 346 // prototype mark word. (Note that the prototype mark word's epoch bits 347 // only change at a safepoint.) If not, attempt to rebias the object 348 // toward the current thread. 
Note that we must be absolutely sure 349 // that the current epoch is invalid in order to do this because 350 // otherwise the manipulations it performs on the mark word are 351 // illegal. 352 if (probability(FREQUENT_PROBABILITY, tmp.and(epochMaskInPlace(INJECTED_VMCONFIG)).equal(0))) { 353 // The epoch of the current bias is still valid but we know nothing 354 // about the owner; it might be set or it might be clear. Try to 355 // acquire the bias of the object using an atomic operation. If this 356 // fails we will go in to the runtime to revoke the object's bias. 357 // Note that we first construct the presumed unbiased header so we 358 // don't accidentally blow away another thread's valid bias. 359 Word unbiasedMark = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG) | ageMaskInPlace(INJECTED_VMCONFIG) | epochMaskInPlace(INJECTED_VMCONFIG)); 360 Word biasedMark = unbiasedMark.or(thread); 361 trace(trace, " unbiasedMark: 0x%016lx\n", unbiasedMark); 362 trace(trace, " biasedMark: 0x%016lx\n", biasedMark); 363 if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), unbiasedMark, biasedMark, MARK_WORD_LOCATION))) { 364 // Object is now biased to current thread -> done 365 traceObject(trace, "+lock{bias:acquired}", object, true, options); 366 counters.lockBiasAcquired.inc(); 367 return true; 368 } 369 // If the biasing toward our thread failed, this means that another thread 370 // owns the bias and we need to revoke that bias. The revocation will occur 371 // in the interpreter runtime. 372 traceObject(trace, "+lock{stub:revoke}", object, true, options); 373 counters.lockStubRevoke.inc(); 374 } else { 375 // At this point we know the epoch has expired, meaning that the 376 // current bias owner, if any, is actually invalid. 
Under these 377 // circumstances _only_, are we allowed to use the current mark word 378 // value as the comparison value when doing the CAS to acquire the 379 // bias in the current epoch. In other words, we allow transfer of 380 // the bias from one thread to another directly in this situation. 381 Word biasedMark = prototypeMarkWord.or(thread); 382 trace(trace, " biasedMark: 0x%016lx\n", biasedMark); 383 if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), mark, biasedMark, MARK_WORD_LOCATION))) { 384 // Object is now biased to current thread -> done 385 traceObject(trace, "+lock{bias:transfer}", object, true, options); 386 counters.lockBiasTransfer.inc(); 387 return true; 388 } 389 // If the biasing toward our thread failed, then another thread 390 // succeeded in biasing it toward itself and we need to revoke that 391 // bias. The revocation will occur in the runtime in the slow case. 392 traceObject(trace, "+lock{stub:epoch-expired}", object, true, options); 393 counters.lockStubEpochExpired.inc(); 394 } 395 // slow-path runtime-call 396 monitorenterStubC(MONITORENTER, object, lock); 397 return true; 398 } else { 399 // The prototype mark word doesn't have the bias bit set any 400 // more, indicating that objects of this data type are not supposed 401 // to be biased any more. We are going to try to reset the mark of 402 // this object to the prototype value and fall through to the 403 // CAS-based locking scheme. Note that if our CAS fails, it means 404 // that another thread raced us for the privilege of revoking the 405 // bias of this particular object, so it's okay to continue in the 406 // normal locking code. 
407 Word result = objectPointer.compareAndSwapWord(markOffset(INJECTED_VMCONFIG), mark, prototypeMarkWord, MARK_WORD_LOCATION); 408 409 // Fall through to the normal CAS-based lock, because no matter what 410 // the result of the above CAS, some thread must have succeeded in 411 // removing the bias bit from the object's header. 412 413 if (ENABLE_BREAKPOINT) { 414 bkpt(object, mark, tmp, result); 415 } 416 counters.revokeBias.inc(); 417 return false; 418 } 419 } else { 420 // Biasing not enabled -> fall through to lightweight locking 421 counters.unbiasable.inc(); 422 return false; 423 } 424 } 425 426 @Fold 427 public static boolean useFastInflatedLocking(OptionValues options) { 428 return SimpleFastInflatedLocking.getValue(options); 429 } 430 431 private static boolean inlineFastLockSupported(OptionValues options) { 432 return inlineFastLockSupported(INJECTED_VMCONFIG, options); 433 } 434 435 private static boolean inlineFastLockSupported(GraalHotSpotVMConfig config, OptionValues options) { 436 return useFastInflatedLocking(options) && monitorMask(config) >= 0 && objectMonitorOwnerOffset(config) >= 0; 437 } 438 439 private static boolean tryEnterInflated(Object object, Word lock, Word mark, Register threadRegister, boolean trace, OptionValues options, Counters counters) { 440 // write non-zero value to lock slot 441 lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), lock, DISPLACED_MARK_WORD_LOCATION); 442 // mark is a pointer to the ObjectMonitor + monitorMask 443 Word monitor = mark.subtract(monitorMask(INJECTED_VMCONFIG)); 444 int ownerOffset = objectMonitorOwnerOffset(INJECTED_VMCONFIG); 445 Word owner = monitor.readWord(ownerOffset, OBJECT_MONITOR_OWNER_LOCATION); 446 if (probability(FREQUENT_PROBABILITY, owner.equal(0))) { 447 // it appears unlocked (owner == 0) 448 if (probability(FREQUENT_PROBABILITY, monitor.logicCompareAndSwapWord(ownerOffset, owner, registerAsWord(threadRegister), OBJECT_MONITOR_OWNER_LOCATION))) { 449 // success 450 
traceObject(trace, "+lock{inflated:cas}", object, true, options); 451 counters.inflatedCas.inc(); 452 return true; 453 } else { 454 traceObject(trace, "+lock{stub:inflated:failed-cas}", object, true, options); 455 counters.inflatedFailedCas.inc(); 456 } 457 } else { 458 traceObject(trace, "+lock{stub:inflated:owned}", object, true, options); 459 counters.inflatedOwned.inc(); 460 } 461 return false; 462 } 463 464 /** 465 * Calls straight out to the monitorenter stub. 466 */ 467 @Snippet 468 public static void monitorenterStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace, @ConstantParameter OptionValues options) { 469 verifyOop(object); 470 incCounter(options); 471 if (object == null) { 472 DeoptimizeNode.deopt(DeoptimizationAction.InvalidateReprofile, DeoptimizationReason.NullCheckException); 473 } 474 // BeginLockScope nodes do not read from object so a use of object 475 // cannot float about the null check above 476 final Word lock = beginLockScope(lockDepth); 477 traceObject(trace, "+lock{stub}", object, true, options); 478 monitorenterStubC(MONITORENTER, object, lock); 479 } 480 481 @Snippet 482 public static void monitorexit(Object object, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter boolean trace, 483 @ConstantParameter OptionValues options, @ConstantParameter Counters counters) { 484 trace(trace, " object: 0x%016lx\n", Word.objectToTrackedPointer(object)); 485 final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG)); 486 if (useBiasedLocking(INJECTED_VMCONFIG)) { 487 // Check for biased locking unlock case, which is a no-op 488 // Note: we do not have to check the thread ID for two reasons. 489 // First, the interpreter checks for IllegalMonitorStateException at 490 // a higher level. Second, if the bias was revoked while we held the 491 // lock, the object could not be rebiased toward another thread, so 492 // the bias bit would be clear. 
493 trace(trace, " mark: 0x%016lx\n", mark); 494 if (probability(FREQUENT_PROBABILITY, mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(WordFactory.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) { 495 endLockScope(); 496 decCounter(options); 497 traceObject(trace, "-lock{bias}", object, false, options); 498 counters.unlockBias.inc(); 499 return; 500 } 501 } 502 503 final Word lock = CurrentLockNode.currentLock(lockDepth); 504 505 // Load displaced mark 506 final Word displacedMark = lock.readWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), DISPLACED_MARK_WORD_LOCATION); 507 trace(trace, " displacedMark: 0x%016lx\n", displacedMark); 508 509 if (probability(NOT_LIKELY_PROBABILITY, displacedMark.equal(0))) { 510 // Recursive locking => done 511 traceObject(trace, "-lock{recursive}", object, false, options); 512 counters.unlockCasRecursive.inc(); 513 } else { 514 if (!tryExitInflated(object, mark, lock, threadRegister, trace, options, counters)) { 515 verifyOop(object); 516 // Test if object's mark word is pointing to the displaced mark word, and if so, 517 // restore 518 // the displaced mark in the object - if the object's mark word is not pointing to 519 // the displaced mark word, do unlocking via runtime call. 
520 Pointer objectPointer = Word.objectToTrackedPointer(object); 521 if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), lock, displacedMark, MARK_WORD_LOCATION))) { 522 traceObject(trace, "-lock{cas}", object, false, options); 523 counters.unlockCas.inc(); 524 } else { 525 // The object's mark word was not pointing to the displaced header 526 traceObject(trace, "-lock{stub}", object, false, options); 527 counters.unlockStub.inc(); 528 monitorexitStubC(MONITOREXIT, object, lock); 529 } 530 } 531 } 532 endLockScope(); 533 decCounter(options); 534 } 535 536 private static boolean inlineFastUnlockSupported(OptionValues options) { 537 return inlineFastUnlockSupported(INJECTED_VMCONFIG, options); 538 } 539 540 private static boolean inlineFastUnlockSupported(GraalHotSpotVMConfig config, OptionValues options) { 541 return useFastInflatedLocking(options) && objectMonitorEntryListOffset(config) >= 0 && objectMonitorCxqOffset(config) >= 0 && monitorMask(config) >= 0 && 542 objectMonitorOwnerOffset(config) >= 0 && objectMonitorRecursionsOffset(config) >= 0; 543 } 544 545 private static boolean tryExitInflated(Object object, Word mark, Word lock, Register threadRegister, boolean trace, OptionValues options, Counters counters) { 546 if (!inlineFastUnlockSupported(options)) { 547 return false; 548 } 549 if (probability(SLOW_PATH_PROBABILITY, mark.and(monitorMask(INJECTED_VMCONFIG)).notEqual(0))) { 550 // Inflated case 551 // mark is a pointer to the ObjectMonitor + monitorMask 552 Word monitor = mark.subtract(monitorMask(INJECTED_VMCONFIG)); 553 int ownerOffset = objectMonitorOwnerOffset(INJECTED_VMCONFIG); 554 Word owner = monitor.readWord(ownerOffset, OBJECT_MONITOR_OWNER_LOCATION); 555 int recursionsOffset = objectMonitorRecursionsOffset(INJECTED_VMCONFIG); 556 Word recursions = monitor.readWord(recursionsOffset, OBJECT_MONITOR_RECURSION_LOCATION); 557 Word thread = registerAsWord(threadRegister); 558 if 
(probability(FAST_PATH_PROBABILITY, owner.xor(thread).or(recursions).equal(0))) {
    // NOTE(review): this excerpt begins mid-method; the enclosing "if" keyword and the
    // declarations of owner/thread/recursions/monitor/ownerOffset/lock/object/trace/
    // options/counters are above this chunk. Presumably this is the inflated-monitor
    // fast path of the monitorexit snippet — confirm against the full file.
    // owner == thread && recursions == 0
    int cxqOffset = objectMonitorCxqOffset(INJECTED_VMCONFIG);
    Word cxq = monitor.readWord(cxqOffset, OBJECT_MONITOR_CXQ_LOCATION);
    int entryListOffset = objectMonitorEntryListOffset(INJECTED_VMCONFIG);
    Word entryList = monitor.readWord(entryListOffset, OBJECT_MONITOR_ENTRY_LIST_LOCATION);
    if (probability(FREQUENT_PROBABILITY, cxq.or(entryList).equal(0))) {
        // cxq == 0 && entryList == 0
        // Nobody is waiting, success
        // release_store: the LOAD_STORE | STORE_STORE barrier orders all preceding
        // accesses before the owner field is cleared below, so the unlock is
        // published with release semantics.
        MembarNode.memoryBarrier(LOAD_STORE | STORE_STORE);
        monitor.writeWord(ownerOffset, WordFactory.zero());
        traceObject(trace, "-lock{inflated:simple}", object, false, options);
        counters.unlockInflatedSimple.inc();
        return true;
    }
}
// Threads are queued on the monitor (cxq or entryList non-empty): take the slow
// path through the runtime's monitorexit stub.
counters.unlockStubInflated.inc();
traceObject(trace, "-lock{stub:inflated}", object, false, options);
monitorexitStubC(MONITOREXIT, object, lock);
return true;
}
return false;
}

    /**
     * Calls straight out to the monitorexit stub. Used when fast locking is disabled; the
     * lock slot for {@code lockDepth} is looked up and handed to the runtime stub, then the
     * lock scope is closed and the balanced-monitor counter (if enabled) is decremented.
     */
    @Snippet
    public static void monitorexitStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace, @ConstantParameter OptionValues options) {
        verifyOop(object);
        traceObject(trace, "-lock{stub}", object, false, options);
        final Word lock = CurrentLockNode.currentLock(lockDepth);
        monitorexitStubC(MONITOREXIT, object, lock);
        endLockScope();
        decCounter(options);
    }

    /**
     * Records a dynamic profile counter for the monitor operation (when profiling is enabled
     * via the options) and, when {@code enabled}, logs the action and the object.
     *
     * @param action tag describing which lock/unlock path was taken
     * @param enter true for a monitor enter, false for a monitor exit
     */
    public static void traceObject(boolean enabled, String action, Object object, boolean enter, OptionValues options) {
        if (doProfile(options)) {
            DynamicCounterNode.counter(enter ? "number of monitor enters" : "number of monitor exits", action, 1, PROFILE_CONTEXT);
        }
        if (enabled) {
            Log.print(action);
            Log.print(' ');
            Log.printlnObject(object);
        }
    }

    /**
     * Logs {@code format} with the raw value of {@code value} when tracing is enabled.
     */
    public static void trace(boolean enabled, String format, WordBase value) {
        if (enabled) {
            Log.printf(format, value.rawValue());
        }
    }

    /**
     * Leaving the breakpoint code in to provide an example of how to use the {@link BreakpointNode}
     * intrinsic.
     */
    private static final boolean ENABLE_BREAKPOINT = false;

    // Location identity for the word used by incCounter/decCounter/initCounter/checkCounter
    // to verify that monitor enters and exits are balanced.
    private static final LocationIdentity MONITOR_COUNTER_LOCATION = NamedLocationIdentity.mutable("MonitorCounter");

    @NodeIntrinsic(BreakpointNode.class)
    static native void bkpt(Object object, Word mark, Word tmp, Word value);

    // @Fold makes this a compilation-time constant in snippet code, so the counter
    // bookkeeping below folds away entirely when verification is disabled.
    @Fold
    static boolean verifyBalancedMonitors(OptionValues options) {
        return VerifyBalancedMonitors.getValue(options);
    }

    /**
     * Increments the balanced-monitor counter (no-op unless VerifyBalancedMonitors is set).
     */
    public static void incCounter(OptionValues options) {
        if (verifyBalancedMonitors(options)) {
            final Word counter = MonitorCounterNode.counter();
            final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
            counter.writeInt(0, count + 1, MONITOR_COUNTER_LOCATION);
        }
    }

    /**
     * Decrements the balanced-monitor counter (no-op unless VerifyBalancedMonitors is set).
     */
    public static void decCounter(OptionValues options) {
        if (verifyBalancedMonitors(options)) {
            final Word counter = MonitorCounterNode.counter();
            final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
            counter.writeInt(0, count - 1, MONITOR_COUNTER_LOCATION);
        }
    }

    /**
     * Snippet inlined at the start of a method (see {@code checkBalancedMonitors}) to zero
     * the balanced-monitor counter.
     */
    @Snippet
    private static void initCounter() {
        final Word counter = MonitorCounterNode.counter();
        counter.writeInt(0, 0, MONITOR_COUNTER_LOCATION);
    }

    /**
     * Snippet inlined before every return (see {@code checkBalancedMonitors}) that raises a
     * VM error with {@code errMsg} if the balanced-monitor counter is not back to zero.
     */
    @Snippet
    private static void checkCounter(@ConstantParameter String errMsg) {
        final Word counter = MonitorCounterNode.counter();
        final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
        if (count != 0) {
            vmError(errMsg, count);
        }
    }

    public static class Counters {
        /**
         * Counters for the various paths for acquiring a lock. The counters whose names start with
         * {@code "lock"} are mutually exclusive. The other counters are for paths that may be
         * shared.
         */
        public final SnippetCounter lockBiasExisting;
        public final SnippetCounter lockBiasAcquired;
        public final SnippetCounter lockBiasTransfer;
        public final SnippetCounter lockCas;
        public final SnippetCounter lockCasRecursive;
        public final SnippetCounter lockStubEpochExpired;
        public final SnippetCounter lockStubRevoke;
        public final SnippetCounter lockStubFailedCas;
        public final SnippetCounter inflatedCas;
        public final SnippetCounter inflatedFailedCas;
        public final SnippetCounter inflatedOwned;
        public final SnippetCounter unbiasable;
        public final SnippetCounter revokeBias;

        /**
         * Counters for the various paths for releasing a lock. The counters whose names start with
         * {@code "unlock"} are mutually exclusive. The other counters are for paths that may be
         * shared.
         */
        public final SnippetCounter unlockBias;
        public final SnippetCounter unlockCas;
        public final SnippetCounter unlockCasRecursive;
        public final SnippetCounter unlockStub;
        public final SnippetCounter unlockStubInflated;
        public final SnippetCounter unlockInflatedSimple;

        public Counters(SnippetCounter.Group.Factory factory) {
            // Enter and exit counters live in separate groups so they can be reported
            // independently.
            SnippetCounter.Group enter = factory.createSnippetCounterGroup("MonitorEnters");
            SnippetCounter.Group exit = factory.createSnippetCounterGroup("MonitorExits");
            lockBiasExisting = new SnippetCounter(enter, "lock{bias:existing}", "bias-locked previously biased object");
            lockBiasAcquired = new SnippetCounter(enter, "lock{bias:acquired}", "bias-locked newly biased object");
            lockBiasTransfer = new SnippetCounter(enter, "lock{bias:transfer}", "bias-locked, biased transferred");
            lockCas = new SnippetCounter(enter, "lock{cas}", "cas-locked an object");
            lockCasRecursive = new SnippetCounter(enter, "lock{cas:recursive}", "cas-locked, recursive");
            lockStubEpochExpired = new SnippetCounter(enter, "lock{stub:epoch-expired}", "stub-locked, epoch expired");
            lockStubRevoke = new SnippetCounter(enter, "lock{stub:revoke}", "stub-locked, biased revoked");
            lockStubFailedCas = new SnippetCounter(enter, "lock{stub:failed-cas/stack}", "stub-locked, failed cas and stack locking");
            inflatedCas = new SnippetCounter(enter, "lock{inflated:cas}", "heavyweight-locked, cas-locked");
            inflatedFailedCas = new SnippetCounter(enter, "lock{inflated:failed-cas}", "heavyweight-locked, failed cas");
            inflatedOwned = new SnippetCounter(enter, "lock{inflated:owned}", "heavyweight-locked, already owned");
            unbiasable = new SnippetCounter(enter, "unbiasable", "object with unbiasable type");
            revokeBias = new SnippetCounter(enter, "revokeBias", "object had bias revoked");

            unlockBias = new SnippetCounter(exit, "unlock{bias}", "bias-unlocked an object");
            unlockCas = new SnippetCounter(exit, "unlock{cas}", "cas-unlocked an object");
            unlockCasRecursive = new SnippetCounter(exit, "unlock{cas:recursive}", "cas-unlocked an object, recursive");
            unlockStub = new SnippetCounter(exit, "unlock{stub}", "stub-unlocked an object");
            unlockStubInflated = new SnippetCounter(exit, "unlock{stub:inflated}", "stub-unlocked an object with inflated monitor");
            unlockInflatedSimple = new SnippetCounter(exit, "unlock{inflated}", "unlocked an object monitor");
        }
    }

    /**
     * Templates that lower {@code RawMonitorEnterNode} and {@code MonitorExitNode} to either
     * the fast-locking snippets ({@code monitorenter}/{@code monitorexit}) or the stub-only
     * snippets ({@code monitorenterStub}/{@code monitorexitStub}), depending on
     * {@code useFastLocking}.
     */
    public static class Templates extends AbstractTemplates {

        private final SnippetInfo monitorenter = snippet(MonitorSnippets.class, "monitorenter");
        private final SnippetInfo monitorexit = snippet(MonitorSnippets.class, "monitorexit");
        private final SnippetInfo monitorenterStub = snippet(MonitorSnippets.class, "monitorenterStub");
        private final SnippetInfo monitorexitStub = snippet(MonitorSnippets.class, "monitorexitStub");
        private final SnippetInfo initCounter = snippet(MonitorSnippets.class, "initCounter");
        private final SnippetInfo checkCounter = snippet(MonitorSnippets.class, "checkCounter");

        // Selects the fast-locking snippets over the stub-only snippets in the lower methods.
        private final boolean useFastLocking;
        public final Counters counters;

        public Templates(OptionValues options, Iterable<DebugHandlersFactory> factories, SnippetCounter.Group.Factory factory, HotSpotProviders providers, TargetDescription target,
                        boolean useFastLocking) {
            super(options, factories, providers, providers.getSnippetReflection(), target);
            this.useFastLocking = useFastLocking;

            this.counters = new Counters(factory);
        }

        /**
         * Lowers a monitor enter by instantiating either the {@code monitorenter} (fast
         * locking) or {@code monitorenterStub} snippet in place of {@code monitorenterNode}.
         * Also inserts the balanced-monitor verification nodes on the first lowering in the
         * graph (see {@link #checkBalancedMonitors}).
         */
        public void lower(RawMonitorEnterNode monitorenterNode, HotSpotRegistersProvider registers, LoweringTool tool) {
            StructuredGraph graph = monitorenterNode.graph();
            checkBalancedMonitors(graph, tool);

            // The node's stamp must guarantee a non-null object; null checking is done
            // before this point.
            assert ((ObjectStamp) monitorenterNode.object().stamp(NodeView.DEFAULT)).nonNull();

            Arguments args;
            if (useFastLocking) {
                // Argument names must match the parameter names of the @Snippet methods.
                args = new Arguments(monitorenter, graph.getGuardsStage(), tool.getLoweringStage());
                args.add("object", monitorenterNode.object());
                args.add("hub", monitorenterNode.getHub());
                args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
                args.addConst("threadRegister", registers.getThreadRegister());
                args.addConst("stackPointerRegister", registers.getStackPointerRegister());
                args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph));
                args.addConst("options", graph.getOptions());
                args.addConst("counters", counters);
            } else {
                args = new Arguments(monitorenterStub, graph.getGuardsStage(), tool.getLoweringStage());
                args.add("object", monitorenterNode.object());
                args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
                args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph));
                args.addConst("options", graph.getOptions());
                args.addConst("counters", counters);
            }

            template(monitorenterNode, args).instantiate(providers.getMetaAccess(), monitorenterNode, DEFAULT_REPLACER, args);
        }

        /**
         * Lowers a monitor exit by instantiating either the {@code monitorexit} (fast
         * locking) or {@code monitorexitStub} snippet in place of {@code monitorexitNode}.
         */
        public void lower(MonitorExitNode monitorexitNode, HotSpotRegistersProvider registers, LoweringTool tool) {
            StructuredGraph graph = monitorexitNode.graph();

            Arguments args;
            if (useFastLocking) {
                args = new Arguments(monitorexit, graph.getGuardsStage(), tool.getLoweringStage());
            } else {
                args = new Arguments(monitorexitStub, graph.getGuardsStage(), tool.getLoweringStage());
            }
            args.add("object", monitorexitNode.object());
            args.addConst("lockDepth", monitorexitNode.getMonitorId().getLockDepth());
            args.addConst("threadRegister", registers.getThreadRegister());
            args.addConst("trace", isTracingEnabledForType(monitorexitNode.object()) || isTracingEnabledForMethod(graph));
            args.addConst("options", graph.getOptions());
            args.addConst("counters", counters);

            template(monitorexitNode, args).instantiate(providers.getMetaAccess(), monitorexitNode, DEFAULT_REPLACER, args);
        }

        /**
         * Returns true if monitor tracing should be enabled for {@code object} based on the
         * TraceMonitorsTypeFilter option: null filter disables tracing, an empty filter
         * enables it for all types, otherwise the type name must contain the filter string.
         */
        public static boolean isTracingEnabledForType(ValueNode object) {
            ResolvedJavaType type = StampTool.typeOrNull(object.stamp(NodeView.DEFAULT));
            String filter = TraceMonitorsTypeFilter.getValue(object.getOptions());
            if (filter == null) {
                return false;
            } else {
                if (filter.length() == 0) {
                    return true;
                }
                if (type == null) {
                    return false;
                }
                return (type.getName().contains(filter));
            }
        }

        /**
         * Returns true if monitor tracing should be enabled for the method compiled in
         * {@code graph} based on the TraceMonitorsMethodFilter option; same null/empty/
         * substring semantics as {@link #isTracingEnabledForType}.
         */
        public static boolean isTracingEnabledForMethod(StructuredGraph graph) {
            String filter = TraceMonitorsMethodFilter.getValue(graph.getOptions());
            if (filter == null) {
                return false;
            } else {
                if (filter.length() == 0) {
                    return true;
                }
                if (graph.method() == null) {
                    return false;
                }
                return (graph.method().format("%H.%n").contains(filter));
            }
        }

        /**
         * If balanced monitor checking is enabled then nodes are inserted at the start and all
         * return points of the graph to initialize and check the monitor counter respectively.
         */
        private void checkBalancedMonitors(StructuredGraph graph, LoweringTool tool) {
            if (VerifyBalancedMonitors.getValue(options)) {
                NodeIterable<MonitorCounterNode> nodes = graph.getNodes().filter(MonitorCounterNode.class);
                if (nodes.isEmpty()) {
                    // Only insert the nodes if this is the first monitorenter being lowered.
                    JavaType returnType = initCounter.getMethod().getSignature().getReturnType(initCounter.getMethod().getDeclaringClass());
                    StampPair returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
                    MethodCallTargetNode callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, initCounter.getMethod(), new ValueNode[0], returnStamp, null));
                    InvokeNode invoke = graph.add(new InvokeNode(callTarget, 0));
                    invoke.setStateAfter(graph.start().stateAfter());
                    graph.addAfterFixed(graph.start(), invoke);

                    // Inline the initCounter snippet at the start of the graph.
                    StructuredGraph inlineeGraph = providers.getReplacements().getSnippet(initCounter.getMethod(), null, invoke.graph().trackNodeSourcePosition(), invoke.getNodeSourcePosition());
                    InliningUtil.inline(invoke, inlineeGraph, false, null);

                    // Insert and inline a checkCounter call before every return.
                    List<ReturnNode> rets = graph.getNodes(ReturnNode.TYPE).snapshot();
                    for (ReturnNode ret : rets) {
                        returnType = checkCounter.getMethod().getSignature().getReturnType(checkCounter.getMethod().getDeclaringClass());
                        String msg = "unbalanced monitors in " + graph.method().format("%H.%n(%p)") + ", count = %d";
                        ConstantNode errMsg = ConstantNode.forConstant(tool.getConstantReflection().forString(msg), providers.getMetaAccess(), graph);
                        returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
                        callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, checkCounter.getMethod(), new ValueNode[]{errMsg}, returnStamp, null));
                        invoke = graph.add(new InvokeNode(callTarget, 0));
                        // AFTER_BCI state: the check happens logically after the method body.
                        Bytecode code = new ResolvedJavaMethodBytecode(graph.method());
                        FrameState stateAfter = new FrameState(null, code, BytecodeFrame.AFTER_BCI, new ValueNode[0], new ValueNode[0], 0, new ValueNode[0], null, false, false);
                        invoke.setStateAfter(graph.add(stateAfter));
                        graph.addBeforeFixed(ret, invoke);

                        Arguments args = new Arguments(checkCounter, graph.getGuardsStage(), tool.getLoweringStage());
                        args.addConst("errMsg", msg);
                        inlineeGraph = template(invoke, args).copySpecializedGraph(graph.getDebug());
                        InliningUtil.inline(invoke, inlineeGraph, false, null);
                    }
                }
            }
        }
    }

    // Runtime entry points backing the @NodeIntrinsic foreign calls below.
    public static final ForeignCallDescriptor MONITORENTER = new ForeignCallDescriptor("monitorenter", void.class, Object.class, Word.class);
    public static final ForeignCallDescriptor MONITOREXIT = new ForeignCallDescriptor("monitorexit", void.class, Object.class, Word.class);

    @NodeIntrinsic(ForeignCallNode.class)
    private static native void monitorenterStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);

    @NodeIntrinsic(ForeignCallNode.class)
    public static native void monitorexitStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);
}