1 /* 2 * Copyright (c) 2012, 2018, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 */ 23 24 25 package org.graalvm.compiler.hotspot.replacements; 26 27 import static jdk.vm.ci.code.MemoryBarriers.LOAD_STORE; 28 import static jdk.vm.ci.code.MemoryBarriers.STORE_STORE; 29 import static org.graalvm.compiler.hotspot.GraalHotSpotVMConfig.INJECTED_OPTIONVALUES; 30 import static org.graalvm.compiler.hotspot.GraalHotSpotVMConfig.INJECTED_VMCONFIG; 31 import static org.graalvm.compiler.hotspot.nodes.AcquiredCASLockNode.mark; 32 import static org.graalvm.compiler.hotspot.nodes.BeginLockScopeNode.beginLockScope; 33 import static org.graalvm.compiler.hotspot.nodes.EndLockScopeNode.endLockScope; 34 import static org.graalvm.compiler.hotspot.nodes.VMErrorNode.vmError; 35 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.DISPLACED_MARK_WORD_LOCATION; 36 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.MARK_WORD_LOCATION; 37 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_CXQ_LOCATION; 38 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_ENTRY_LIST_LOCATION; 39 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_OWNER_LOCATION; 40 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.OBJECT_MONITOR_RECURSION_LOCATION; 41 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.PROTOTYPE_MARK_WORD_LOCATION; 42 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.ageMaskInPlace; 43 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockMaskInPlace; 44 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.biasedLockPattern; 45 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.epochMaskInPlace; 46 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.loadWordFromObject; 47 import 
static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.lockDisplacedMarkOffset; 48 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.markOffset; 49 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.monitorMask; 50 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorCxqOffset; 51 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorEntryListOffset; 52 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorOwnerOffset; 53 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.objectMonitorRecursionsOffset; 54 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.pageSize; 55 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.prototypeMarkWordOffset; 56 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.registerAsWord; 57 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.stackBias; 58 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.unlockedMask; 59 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.useBiasedLocking; 60 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.verifyOop; 61 import static org.graalvm.compiler.hotspot.replacements.HotSpotReplacementsUtil.wordSize; 62 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.ProfileMonitors; 63 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.SimpleFastInflatedLocking; 64 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsMethodFilter; 65 import static org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.TraceMonitorsTypeFilter; 66 import static 
org.graalvm.compiler.hotspot.replacements.HotspotSnippetsOptions.VerifyBalancedMonitors; 67 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FAST_PATH_PROBABILITY; 68 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.FREQUENT_PROBABILITY; 69 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.NOT_FREQUENT_PROBABILITY; 70 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.NOT_LIKELY_PROBABILITY; 71 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.SLOW_PATH_PROBABILITY; 72 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.VERY_FAST_PATH_PROBABILITY; 73 import static org.graalvm.compiler.nodes.extended.BranchProbabilityNode.probability; 74 import static org.graalvm.compiler.nodes.extended.MembarNode.memoryBarrier; 75 import static org.graalvm.compiler.replacements.SnippetTemplate.DEFAULT_REPLACER; 76 import static jdk.internal.vm.compiler.word.WordFactory.unsigned; 77 import static jdk.internal.vm.compiler.word.WordFactory.zero; 78 79 import java.util.List; 80 81 import org.graalvm.compiler.api.replacements.Fold; 82 import org.graalvm.compiler.api.replacements.Snippet; 83 import org.graalvm.compiler.api.replacements.Snippet.ConstantParameter; 84 import org.graalvm.compiler.bytecode.Bytecode; 85 import org.graalvm.compiler.bytecode.ResolvedJavaMethodBytecode; 86 import org.graalvm.compiler.core.common.spi.ForeignCallDescriptor; 87 import org.graalvm.compiler.core.common.type.ObjectStamp; 88 import org.graalvm.compiler.core.common.type.StampFactory; 89 import org.graalvm.compiler.core.common.type.StampPair; 90 import org.graalvm.compiler.debug.DebugHandlersFactory; 91 import org.graalvm.compiler.graph.Node.ConstantNodeParameter; 92 import org.graalvm.compiler.graph.Node.NodeIntrinsic; 93 import org.graalvm.compiler.graph.iterators.NodeIterable; 94 import org.graalvm.compiler.hotspot.GraalHotSpotVMConfig; 95 import 
org.graalvm.compiler.hotspot.meta.HotSpotProviders; 96 import org.graalvm.compiler.hotspot.meta.HotSpotRegistersProvider; 97 import org.graalvm.compiler.hotspot.nodes.CurrentLockNode; 98 import org.graalvm.compiler.hotspot.nodes.FastAcquireBiasedLockNode; 99 import org.graalvm.compiler.hotspot.nodes.MonitorCounterNode; 100 import org.graalvm.compiler.hotspot.word.KlassPointer; 101 import org.graalvm.compiler.nodes.BreakpointNode; 102 import org.graalvm.compiler.nodes.CallTargetNode.InvokeKind; 103 import org.graalvm.compiler.nodes.ConstantNode; 104 import org.graalvm.compiler.nodes.DeoptimizeNode; 105 import org.graalvm.compiler.nodes.FrameState; 106 import org.graalvm.compiler.nodes.InvokeNode; 107 import org.graalvm.compiler.nodes.NamedLocationIdentity; 108 import org.graalvm.compiler.nodes.NodeView; 109 import org.graalvm.compiler.nodes.ReturnNode; 110 import org.graalvm.compiler.nodes.StructuredGraph; 111 import org.graalvm.compiler.nodes.ValueNode; 112 import org.graalvm.compiler.nodes.debug.DynamicCounterNode; 113 import org.graalvm.compiler.nodes.extended.ForeignCallNode; 114 import org.graalvm.compiler.nodes.extended.MembarNode; 115 import org.graalvm.compiler.nodes.java.MethodCallTargetNode; 116 import org.graalvm.compiler.nodes.java.MonitorExitNode; 117 import org.graalvm.compiler.nodes.java.RawMonitorEnterNode; 118 import org.graalvm.compiler.nodes.spi.LoweringTool; 119 import org.graalvm.compiler.nodes.type.StampTool; 120 import org.graalvm.compiler.options.OptionValues; 121 import org.graalvm.compiler.phases.common.inlining.InliningUtil; 122 import org.graalvm.compiler.replacements.Log; 123 import org.graalvm.compiler.replacements.SnippetCounter; 124 import org.graalvm.compiler.replacements.SnippetTemplate.AbstractTemplates; 125 import org.graalvm.compiler.replacements.SnippetTemplate.Arguments; 126 import org.graalvm.compiler.replacements.SnippetTemplate.SnippetInfo; 127 import org.graalvm.compiler.replacements.Snippets; 128 import 
org.graalvm.compiler.word.Word; 129 import jdk.internal.vm.compiler.word.LocationIdentity; 130 import jdk.internal.vm.compiler.word.Pointer; 131 import jdk.internal.vm.compiler.word.WordBase; 132 import jdk.internal.vm.compiler.word.WordFactory; 133 134 import jdk.vm.ci.code.BytecodeFrame; 135 import jdk.vm.ci.code.Register; 136 import jdk.vm.ci.code.TargetDescription; 137 import jdk.vm.ci.meta.DeoptimizationAction; 138 import jdk.vm.ci.meta.DeoptimizationReason; 139 import jdk.vm.ci.meta.JavaType; 140 import jdk.vm.ci.meta.ResolvedJavaType; 141 142 /** 143 * Snippets used for implementing the monitorenter and monitorexit instructions. 144 * 145 * The locking algorithm used is described in the paper 146 * <a href="http://dl.acm.org/citation.cfm?id=1167515.1167496"> Eliminating synchronization-related 147 * atomic operations with biased locking and bulk rebiasing</a> by Kenneth Russell and David 148 * Detlefs. 149 * 150 * Comment below is reproduced from {@code markOop.hpp} for convenience: 151 * 152 * <pre> 153 * Bit-format of an object header (most significant first, big endian layout below): 154 * 32 bits: 155 * -------- 156 * hash:25 ------------>| age:4 biased_lock:1 lock:2 (normal object) 157 * JavaThread*:23 epoch:2 age:4 biased_lock:1 lock:2 (biased object) 158 * size:32 ------------------------------------------>| (CMS free block) 159 * PromotedObject*:29 ---------->| promo_bits:3 ----->| (CMS promoted object) 160 * 161 * 64 bits: 162 * -------- 163 * unused:25 hash:31 -->| unused:1 age:4 biased_lock:1 lock:2 (normal object) 164 * JavaThread*:54 epoch:2 unused:1 age:4 biased_lock:1 lock:2 (biased object) 165 * PromotedObject*:61 --------------------->| promo_bits:3 ----->| (CMS promoted object) 166 * size:64 ----------------------------------------------------->| (CMS free block) 167 * 168 * unused:25 hash:31 -->| cms_free:1 age:4 biased_lock:1 lock:2 (COOPs && normal object) 169 * JavaThread*:54 epoch:2 cms_free:1 age:4 biased_lock:1 lock:2 (COOPs && 
biased object) 170 * narrowOop:32 unused:24 cms_free:1 unused:4 promo_bits:3 ----->| (COOPs && CMS promoted object) 171 * unused:21 size:35 -->| cms_free:1 unused:7 ------------------>| (COOPs && CMS free block) 172 * 173 * - hash contains the identity hash value: largest value is 174 * 31 bits, see os::random(). Also, 64-bit vm's require 175 * a hash value no bigger than 32 bits because they will not 176 * properly generate a mask larger than that: see library_call.cpp 177 * and c1_CodePatterns_sparc.cpp. 178 * 179 * - the biased lock pattern is used to bias a lock toward a given 180 * thread. When this pattern is set in the low three bits, the lock 181 * is either biased toward a given thread or "anonymously" biased, 182 * indicating that it is possible for it to be biased. When the 183 * lock is biased toward a given thread, locking and unlocking can 184 * be performed by that thread without using atomic operations. 185 * When a lock's bias is revoked, it reverts back to the normal 186 * locking scheme described below. 187 * 188 * Note that we are overloading the meaning of the "unlocked" state 189 * of the header. Because we steal a bit from the age we can 190 * guarantee that the bias pattern will never be seen for a truly 191 * unlocked object. 192 * 193 * Note also that the biased state contains the age bits normally 194 * contained in the object header. Large increases in scavenge 195 * times were seen when these bits were absent and an arbitrary age 196 * assigned to all biased objects, because they tended to consume a 197 * significant fraction of the eden semispaces and were not 198 * promoted promptly, causing an increase in the amount of copying 199 * performed. The runtime system aligns all JavaThread* pointers to 200 * a very large value (currently 128 bytes (32bVM) or 256 bytes (64bVM)) 201 * to make room for the age bits & the epoch bits (used in support of 202 * biased locking), and for the CMS "freeness" bit in the 64bVM (+COOPs). 
 * [JavaThread* | epoch | age | 1 | 01]       lock is biased toward given thread
 * [0           | epoch | age | 1 | 01]       lock is anonymously biased
 *
 * - the two lock bits are used to describe three states: locked/unlocked and monitor.
 *
 * [ptr             | 00]  locked             ptr points to real header on stack
 * [header      | 0 | 01]  unlocked           regular object header
 * [ptr             | 10]  monitor            inflated lock (header is swapped out)
 * [ptr             | 11]  marked             used by markSweep to mark an object
 *                                            not valid at any other time
 *
 * We assume that stack/thread pointers have the lowest two bits cleared.
 * </pre>
 *
 * Note that {@code Thread::allocate} enforces {@code JavaThread} objects to be aligned
 * appropriately to comply with the layouts above.
 */
public class MonitorSnippets implements Snippets {

    // Context flag passed to DynamicCounterNode.counter when profiling monitors.
    private static final boolean PROFILE_CONTEXT = false;

    /**
     * Folded at snippet-instantiation time: true iff the {@code ProfileMonitors} option is set.
     */
    @Fold
    static boolean doProfile(@Fold.InjectedParameter OptionValues options) {
        return ProfileMonitors.getValue(options);
    }

    /**
     * Lowers a {@code monitorenter}: tries biased locking (if enabled), then the inflated-monitor
     * fast path or stack (lightweight) locking via CAS, and finally falls back to the
     * {@code MONITORENTER} runtime stub.
     */
    @Snippet
    public static void monitorenter(Object object, KlassPointer hub, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter Register stackPointerRegister,
                    @ConstantParameter boolean trace, @ConstantParameter Counters counters) {
        verifyOop(object);

        // Load the mark word - this includes a null-check on object
        final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG));

        final Word lock = beginLockScope(lockDepth);

        Pointer objectPointer = Word.objectToTrackedPointer(object);
        trace(trace, " object: 0x%016lx\n", objectPointer);
        trace(trace, " lock: 0x%016lx\n", lock);
        trace(trace, " mark: 0x%016lx\n", mark);

        incCounter();

        if (useBiasedLocking(INJECTED_VMCONFIG)) {
            if (tryEnterBiased(object, hub, lock, mark, threadRegister, trace, counters)) {
                return;
            }
            // not biased, fall-through
        }
        if (inlineFastLockSupported() && probability(SLOW_PATH_PROBABILITY, mark.and(monitorMask(INJECTED_VMCONFIG)).notEqual(0))) {
            // Inflated case
            if (tryEnterInflated(object, lock, mark, threadRegister, trace, counters)) {
                return;
            }
        } else {
            // Create the unlocked mark word pattern
            Word unlockedMark = mark.or(unlockedMask(INJECTED_VMCONFIG));
            trace(trace, " unlockedMark: 0x%016lx\n", unlockedMark);

            // Copy this unlocked mark word into the lock slot on the stack
            lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), unlockedMark, DISPLACED_MARK_WORD_LOCATION);

            // make sure previous store does not float below compareAndSwap
            MembarNode.memoryBarrier(STORE_STORE);

            // Test if the object's mark word is unlocked, and if so, store the
            // (address of) the lock slot into the object's mark word.
            Word currentMark = objectPointer.compareAndSwapWord(markOffset(INJECTED_VMCONFIG), unlockedMark, lock, MARK_WORD_LOCATION);
            if (probability(FAST_PATH_PROBABILITY, currentMark.equal(unlockedMark))) {
                traceObject(trace, "+lock{cas}", object, true);
                counters.lockCas.inc();
                mark(object);
                return;
            } else {
                trace(trace, " currentMark: 0x%016lx\n", currentMark);
                // The mark word in the object header was not the same.
                // Either the object is locked by another thread or is already locked
                // by the current thread. The latter is true if the mark word
                // is a stack pointer into the current thread's stack, i.e.:
                //
                // 1) (currentMark & aligned_mask) == 0
                // 2) rsp <= currentMark
                // 3) currentMark <= rsp + page_size
                //
                // These 3 tests can be done by evaluating the following expression:
                //
                // (currentMark - rsp) & (aligned_mask - page_size)
                //
                // assuming both the stack pointer and page_size have their least
                // significant 2 bits cleared and page_size is a power of 2
                final Word alignedMask = unsigned(wordSize() - 1);
                final Word stackPointer = registerAsWord(stackPointerRegister).add(stackBias(INJECTED_VMCONFIG));
                if (probability(FAST_PATH_PROBABILITY, currentMark.subtract(stackPointer).and(alignedMask.subtract(pageSize(INJECTED_VMCONFIG))).equal(0))) {
                    // Recursively locked => write 0 to the lock slot
                    lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), zero(), DISPLACED_MARK_WORD_LOCATION);
                    traceObject(trace, "+lock{cas:recursive}", object, true);
                    counters.lockCasRecursive.inc();
                    return;
                }
                traceObject(trace, "+lock{stub:failed-cas/stack}", object, true);
                counters.lockStubFailedCas.inc();
            }
        }
        // slow-path runtime-call
        monitorenterStubC(MONITORENTER, object, lock);
    }

    /**
     * Attempts the biased-locking fast paths for {@code monitorenter}.
     *
     * @return {@code true} if locking was fully handled here (bias held, acquired, transferred,
     *         or delegated to the runtime stub); {@code false} if the caller must fall through to
     *         lightweight locking (bias revoked or type not biasable)
     */
    private static boolean tryEnterBiased(Object object, KlassPointer hub, Word lock, Word mark, Register threadRegister, boolean trace, Counters counters) {
        // See whether the lock is currently biased toward our thread and
        // whether the epoch is still valid.
        // Note that the runtime guarantees sufficient alignment of JavaThread
        // pointers to allow age to be placed into low bits.
        final Word biasableLockBits = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG));

        // Check whether the bias pattern is present in the object's mark word
        // and the bias owner and the epoch are both still current.
        final Word prototypeMarkWord = hub.readWord(prototypeMarkWordOffset(INJECTED_VMCONFIG), PROTOTYPE_MARK_WORD_LOCATION);
        final Word thread = registerAsWord(threadRegister);
        // tmp == 0 iff the mark word equals (prototype | thread) in all bits except the age bits,
        // i.e. the object is biased to the current thread in the current epoch.
        final Word tmp = prototypeMarkWord.or(thread).xor(mark).and(~ageMaskInPlace(INJECTED_VMCONFIG));
        trace(trace, "prototypeMarkWord: 0x%016lx\n", prototypeMarkWord);
        trace(trace, " thread: 0x%016lx\n", thread);
        trace(trace, " tmp: 0x%016lx\n", tmp);
        if (probability(FAST_PATH_PROBABILITY, tmp.equal(0))) {
            // Object is already biased to current thread -> done
            traceObject(trace, "+lock{bias:existing}", object, true);
            counters.lockBiasExisting.inc();
            FastAcquireBiasedLockNode.mark(object);
            return true;
        }

        // Now check to see whether biasing is enabled for this object
        if (probability(NOT_FREQUENT_PROBABILITY, biasableLockBits.equal(WordFactory.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) {
            Pointer objectPointer = Word.objectToTrackedPointer(object);
            // At this point we know that the mark word has the bias pattern and
            // that we are not the bias owner in the current epoch. We need to
            // figure out more details about the state of the mark word in order to
            // know what operations can be legally performed on the object's
            // mark word.

            // If the low three bits in the xor result aren't clear, that means
            // the prototype header is no longer biasable and we have to revoke
            // the bias on this object.
            if (probability(FREQUENT_PROBABILITY, tmp.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(0))) {
                // Biasing is still enabled for object's type. See whether the
                // epoch of the current bias is still valid, meaning that the epoch
                // bits of the mark word are equal to the epoch bits of the
                // prototype mark word. (Note that the prototype mark word's epoch bits
                // only change at a safepoint.) If not, attempt to rebias the object
                // toward the current thread. Note that we must be absolutely sure
                // that the current epoch is invalid in order to do this because
                // otherwise the manipulations it performs on the mark word are
                // illegal.
                if (probability(FREQUENT_PROBABILITY, tmp.and(epochMaskInPlace(INJECTED_VMCONFIG)).equal(0))) {
                    // The epoch of the current bias is still valid but we know nothing
                    // about the owner; it might be set or it might be clear. Try to
                    // acquire the bias of the object using an atomic operation. If this
                    // fails we will go in to the runtime to revoke the object's bias.
                    // Note that we first construct the presumed unbiased header so we
                    // don't accidentally blow away another thread's valid bias.
                    Word unbiasedMark = mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG) | ageMaskInPlace(INJECTED_VMCONFIG) | epochMaskInPlace(INJECTED_VMCONFIG));
                    Word biasedMark = unbiasedMark.or(thread);
                    trace(trace, " unbiasedMark: 0x%016lx\n", unbiasedMark);
                    trace(trace, " biasedMark: 0x%016lx\n", biasedMark);
                    if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), unbiasedMark, biasedMark, MARK_WORD_LOCATION))) {
                        // Object is now biased to current thread -> done
                        traceObject(trace, "+lock{bias:acquired}", object, true);
                        counters.lockBiasAcquired.inc();
                        return true;
                    }
                    // If the biasing toward our thread failed, this means that another thread
                    // owns the bias and we need to revoke that bias. The revocation will occur
                    // in the interpreter runtime.
                    traceObject(trace, "+lock{stub:revoke}", object, true);
                    counters.lockStubRevoke.inc();
                } else {
                    // At this point we know the epoch has expired, meaning that the
                    // current bias owner, if any, is actually invalid. Under these
                    // circumstances _only_, are we allowed to use the current mark word
                    // value as the comparison value when doing the CAS to acquire the
                    // bias in the current epoch. In other words, we allow transfer of
                    // the bias from one thread to another directly in this situation.
                    Word biasedMark = prototypeMarkWord.or(thread);
                    trace(trace, " biasedMark: 0x%016lx\n", biasedMark);
                    if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), mark, biasedMark, MARK_WORD_LOCATION))) {
                        // Object is now biased to current thread -> done
                        traceObject(trace, "+lock{bias:transfer}", object, true);
                        counters.lockBiasTransfer.inc();
                        return true;
                    }
                    // If the biasing toward our thread failed, then another thread
                    // succeeded in biasing it toward itself and we need to revoke that
                    // bias. The revocation will occur in the runtime in the slow case.
                    traceObject(trace, "+lock{stub:epoch-expired}", object, true);
                    counters.lockStubEpochExpired.inc();
                }
                // slow-path runtime-call
                monitorenterStubC(MONITORENTER, object, lock);
                return true;
            } else {
                // The prototype mark word doesn't have the bias bit set any
                // more, indicating that objects of this data type are not supposed
                // to be biased any more. We are going to try to reset the mark of
                // this object to the prototype value and fall through to the
                // CAS-based locking scheme. Note that if our CAS fails, it means
                // that another thread raced us for the privilege of revoking the
                // bias of this particular object, so it's okay to continue in the
                // normal locking code.
                Word result = objectPointer.compareAndSwapWord(markOffset(INJECTED_VMCONFIG), mark, prototypeMarkWord, MARK_WORD_LOCATION);

                // Fall through to the normal CAS-based lock, because no matter what
                // the result of the above CAS, some thread must have succeeded in
                // removing the bias bit from the object's header.

                if (ENABLE_BREAKPOINT) {
                    bkpt(object, mark, tmp, result);
                }
                counters.revokeBias.inc();
                return false;
            }
        } else {
            // Biasing not enabled -> fall through to lightweight locking
            counters.unbiasable.inc();
            return false;
        }
    }

    /**
     * Folded at snippet-instantiation time: true iff the {@code SimpleFastInflatedLocking} option
     * is set.
     */
    @Fold
    public static boolean useFastInflatedLocking(@Fold.InjectedParameter OptionValues options) {
        return SimpleFastInflatedLocking.getValue(options);
    }

    private static boolean inlineFastLockSupported() {
        return inlineFastLockSupported(INJECTED_VMCONFIG, INJECTED_OPTIONVALUES);
    }

    /**
     * The inflated-lock fast path for monitorenter can only be inlined when the option is enabled
     * and the VM config exposes the required monitor mask and owner-field offset.
     */
    private static boolean inlineFastLockSupported(GraalHotSpotVMConfig config, OptionValues options) {
        return useFastInflatedLocking(options) && monitorMask(config) >= 0 && objectMonitorOwnerOffset(config) >= 0;
    }

    /**
     * Attempts to acquire an already-inflated monitor by CASing its owner field from 0 to the
     * current thread.
     *
     * @return {@code true} on successful acquisition; {@code false} if the caller must take the
     *         slow path (CAS failed or the monitor is owned)
     */
    private static boolean tryEnterInflated(Object object, Word lock, Word mark, Register threadRegister, boolean trace, Counters counters) {
        // write non-zero value to lock slot
        lock.writeWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), lock, DISPLACED_MARK_WORD_LOCATION);
        // mark is a pointer to the ObjectMonitor + monitorMask
        Word monitor = mark.subtract(monitorMask(INJECTED_VMCONFIG));
        int ownerOffset = objectMonitorOwnerOffset(INJECTED_VMCONFIG);
        Word owner = monitor.readWord(ownerOffset, OBJECT_MONITOR_OWNER_LOCATION);
        if (probability(FREQUENT_PROBABILITY, owner.equal(0))) {
            // it appears unlocked (owner == 0)
            if (probability(FREQUENT_PROBABILITY, monitor.logicCompareAndSwapWord(ownerOffset, owner, registerAsWord(threadRegister), OBJECT_MONITOR_OWNER_LOCATION))) {
                // success
                traceObject(trace, "+lock{inflated:cas}", object, true);
                counters.inflatedCas.inc();
                return true;
            } else {
                traceObject(trace, "+lock{stub:inflated:failed-cas}", object, true);
                counters.inflatedFailedCas.inc();
            }
        } else {
            traceObject(trace, "+lock{stub:inflated:owned}", object, true);
            counters.inflatedOwned.inc();
        }
        return false;
    }

    /**
     * Calls straight out to the monitorenter stub.
     */
    @Snippet
    public static void monitorenterStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace) {
        verifyOop(object);
        incCounter();
        if (object == null) {
            DeoptimizeNode.deopt(DeoptimizationAction.InvalidateReprofile, DeoptimizationReason.NullCheckException);
        }
        // BeginLockScope nodes do not read from object so a use of object
        // cannot float above the null check above
        final Word lock = beginLockScope(lockDepth);
        traceObject(trace, "+lock{stub}", object, true);
        monitorenterStubC(MONITORENTER, object, lock);
    }

    /**
     * Lowers a {@code monitorexit}: handles the biased-lock no-op, recursive unlock, the
     * inflated-monitor fast path, and the displaced-mark CAS, falling back to the
     * {@code MONITOREXIT} runtime stub.
     */
    @Snippet
    public static void monitorexit(Object object, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter boolean trace,
                    @ConstantParameter Counters counters) {
        trace(trace, " object: 0x%016lx\n", Word.objectToTrackedPointer(object));
        final Word mark = loadWordFromObject(object, markOffset(INJECTED_VMCONFIG));
        if (useBiasedLocking(INJECTED_VMCONFIG)) {
            // Check for biased locking unlock case, which is a no-op
            // Note: we do not have to check the thread ID for two reasons.
            // First, the interpreter checks for IllegalMonitorStateException at
            // a higher level. Second, if the bias was revoked while we held the
            // lock, the object could not be rebiased toward another thread, so
            // the bias bit would be clear.
            trace(trace, " mark: 0x%016lx\n", mark);
            if (probability(FREQUENT_PROBABILITY, mark.and(biasedLockMaskInPlace(INJECTED_VMCONFIG)).equal(WordFactory.unsigned(biasedLockPattern(INJECTED_VMCONFIG))))) {
                endLockScope();
                decCounter();
                traceObject(trace, "-lock{bias}", object, false);
                counters.unlockBias.inc();
                return;
            }
        }

        final Word lock = CurrentLockNode.currentLock(lockDepth);

        // Load displaced mark
        final Word displacedMark = lock.readWord(lockDisplacedMarkOffset(INJECTED_VMCONFIG), DISPLACED_MARK_WORD_LOCATION);
        trace(trace, " displacedMark: 0x%016lx\n", displacedMark);

        if (probability(NOT_LIKELY_PROBABILITY, displacedMark.equal(0))) {
            // Recursive locking => done
            traceObject(trace, "-lock{recursive}", object, false);
            counters.unlockCasRecursive.inc();
        } else {
            if (!tryExitInflated(object, mark, lock, threadRegister, trace, counters)) {
                verifyOop(object);
                // Test if object's mark word is pointing to the displaced mark word, and if so,
                // restore
                // the displaced mark in the object - if the object's mark word is not pointing to
                // the displaced mark word, do unlocking via runtime call.
                Pointer objectPointer = Word.objectToTrackedPointer(object);
                if (probability(VERY_FAST_PATH_PROBABILITY, objectPointer.logicCompareAndSwapWord(markOffset(INJECTED_VMCONFIG), lock, displacedMark, MARK_WORD_LOCATION))) {
                    traceObject(trace, "-lock{cas}", object, false);
                    counters.unlockCas.inc();
                } else {
                    // The object's mark word was not pointing to the displaced header
                    traceObject(trace, "-lock{stub}", object, false);
                    counters.unlockStub.inc();
                    monitorexitStubC(MONITOREXIT, object, lock);
                }
            }
        }
        endLockScope();
        decCounter();
    }

    private static boolean inlineFastUnlockSupported(OptionValues options) {
        return inlineFastUnlockSupported(INJECTED_VMCONFIG, options);
    }

    /**
     * The inflated-lock fast path for monitorexit additionally needs the entry-list, cxq and
     * recursions field offsets from the VM config.
     */
    private static boolean inlineFastUnlockSupported(GraalHotSpotVMConfig config, OptionValues options) {
        return useFastInflatedLocking(options) && objectMonitorEntryListOffset(config) >= 0 && objectMonitorCxqOffset(config) >= 0 && monitorMask(config) >= 0 &&
                        objectMonitorOwnerOffset(config) >= 0 && objectMonitorRecursionsOffset(config) >= 0;
    }

    /**
     * Attempts to release an inflated monitor: if the current thread is the owner, there is no
     * recursion and no waiters, the owner field is cleared directly; otherwise the
     * {@code MONITOREXIT} stub is called.
     *
     * @return {@code true} if the inflated case was handled (fast path or stub); {@code false} if
     *         the lock is not inflated or inlining is unsupported
     */
    private static boolean tryExitInflated(Object object, Word mark, Word lock, Register threadRegister, boolean trace, Counters counters) {
        if (!inlineFastUnlockSupported(INJECTED_OPTIONVALUES)) {
            return false;
        }
        if (probability(SLOW_PATH_PROBABILITY, mark.and(monitorMask(INJECTED_VMCONFIG)).notEqual(0))) {
            // Inflated case
            // mark is a pointer to the ObjectMonitor + monitorMask
            Word monitor = mark.subtract(monitorMask(INJECTED_VMCONFIG));
            int ownerOffset = objectMonitorOwnerOffset(INJECTED_VMCONFIG);
            Word owner = monitor.readWord(ownerOffset, OBJECT_MONITOR_OWNER_LOCATION);
            int recursionsOffset = objectMonitorRecursionsOffset(INJECTED_VMCONFIG);
            Word recursions = monitor.readWord(recursionsOffset, OBJECT_MONITOR_RECURSION_LOCATION);
            Word thread = registerAsWord(threadRegister);
            if (probability(FAST_PATH_PROBABILITY, owner.xor(thread).or(recursions).equal(0))) {
                // owner == thread && recursions == 0
                int cxqOffset = objectMonitorCxqOffset(INJECTED_VMCONFIG);
                Word cxq = monitor.readWord(cxqOffset, OBJECT_MONITOR_CXQ_LOCATION);
                int entryListOffset = objectMonitorEntryListOffset(INJECTED_VMCONFIG);
                Word entryList = monitor.readWord(entryListOffset, OBJECT_MONITOR_ENTRY_LIST_LOCATION);
                if (probability(FREQUENT_PROBABILITY, cxq.or(entryList).equal(0))) {
                    // cxq == 0 && entryList == 0
                    // Nobody is waiting, success
                    // release_store
                    memoryBarrier(LOAD_STORE | STORE_STORE);
                    monitor.writeWord(ownerOffset, zero());
                    traceObject(trace, "-lock{inflated:simple}", object, false);
                    counters.unlockInflatedSimple.inc();
                    return true;
                }
            }
            counters.unlockStubInflated.inc();
            traceObject(trace, "-lock{stub:inflated}", object, false);
            monitorexitStubC(MONITOREXIT, object, lock);
            return true;
        }
        return false;
    }

    /**
     * Calls straight out to the monitorexit stub.
     */
    @Snippet
    public static void monitorexitStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace) {
        verifyOop(object);
        traceObject(trace, "-lock{stub}", object, false);
        final Word lock = CurrentLockNode.currentLock(lockDepth);
        monitorexitStubC(MONITOREXIT, object, lock);
        endLockScope();
        decCounter();
    }

    /**
     * Records a monitor event in the dynamic counters (when profiling) and optionally logs it.
     */
    public static void traceObject(boolean enabled, String action, Object object, boolean enter) {
        if (doProfile(INJECTED_OPTIONVALUES)) {
            DynamicCounterNode.counter(enter ? "number of monitor enters" : "number of monitor exits", action, 1, PROFILE_CONTEXT);
        }
        if (enabled) {
            Log.print(action);
            Log.print(' ');
            Log.printlnObject(object);
        }
    }

    /**
     * Logs a word value with the given printf-style format when tracing is enabled.
     */
    public static void trace(boolean enabled, String format, WordBase value) {
        if (enabled) {
            Log.printf(format, value.rawValue());
        }
    }

    /**
     * Leaving the breakpoint code in to provide an example of how to use the {@link BreakpointNode}
     * intrinsic.
     */
    private static final boolean ENABLE_BREAKPOINT = false;

    // Location identity for the balanced-monitor verification counter word.
    private static final LocationIdentity MONITOR_COUNTER_LOCATION = NamedLocationIdentity.mutable("MonitorCounter");

    @NodeIntrinsic(BreakpointNode.class)
    static native void bkpt(Object object, Word mark, Word tmp, Word value);

    /**
     * Folded at snippet-instantiation time: true iff the {@code VerifyBalancedMonitors} option is
     * set.
     */
    @Fold
    static boolean verifyBalancedMonitors(@Fold.InjectedParameter OptionValues options) {
        return VerifyBalancedMonitors.getValue(options);
    }

    // Increments the balanced-monitor counter on lock acquisition (verification mode only).
    static void incCounter() {
        if (verifyBalancedMonitors(INJECTED_OPTIONVALUES)) {
            final Word counter = MonitorCounterNode.counter();
            final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
            counter.writeInt(0, count + 1, MONITOR_COUNTER_LOCATION);
        }
    }

    // Decrements the balanced-monitor counter on lock release (verification mode only).
    public static void decCounter() {
        if (verifyBalancedMonitors(INJECTED_OPTIONVALUES)) {
            final Word counter = MonitorCounterNode.counter();
            final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
            counter.writeInt(0, count - 1, MONITOR_COUNTER_LOCATION);
        }
    }

    // Resets the balanced-monitor counter to zero (verification mode only).
    @Snippet
    private static void initCounter() {
        final Word counter = MonitorCounterNode.counter();
        counter.writeInt(0, 0, MONITOR_COUNTER_LOCATION);
    }

    // Reports a VM error if monitor enters and exits were not balanced (verification mode only).
    @Snippet
    private static void checkCounter(@ConstantParameter String errMsg) {
        final Word counter = MonitorCounterNode.counter();
        final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION);
        if (count != 0) {
            vmError(errMsg, count);
        }
    }
/**
 * Snippet counters recording which lock acquisition/release path was taken at runtime.
 */
public static class Counters {
    /**
     * Counters for the various paths for acquiring a lock. The counters whose names start with
     * {@code "lock"} are mutually exclusive. The other counters are for paths that may be
     * shared.
     */
    public final SnippetCounter lockBiasExisting;
    public final SnippetCounter lockBiasAcquired;
    public final SnippetCounter lockBiasTransfer;
    public final SnippetCounter lockCas;
    public final SnippetCounter lockCasRecursive;
    public final SnippetCounter lockStubEpochExpired;
    public final SnippetCounter lockStubRevoke;
    public final SnippetCounter lockStubFailedCas;
    public final SnippetCounter inflatedCas;
    public final SnippetCounter inflatedFailedCas;
    public final SnippetCounter inflatedOwned;
    public final SnippetCounter unbiasable;
    public final SnippetCounter revokeBias;

    /**
     * Counters for the various paths for releasing a lock. The counters whose names start with
     * {@code "unlock"} are mutually exclusive. The other counters are for paths that may be
     * shared.
     */
    public final SnippetCounter unlockBias;
    public final SnippetCounter unlockCas;
    public final SnippetCounter unlockCasRecursive;
    public final SnippetCounter unlockStub;
    public final SnippetCounter unlockStubInflated;
    public final SnippetCounter unlockInflatedSimple;

    public Counters(SnippetCounter.Group.Factory factory) {
        // Two groups: one for enter paths, one for exit paths.
        SnippetCounter.Group enter = factory.createSnippetCounterGroup("MonitorEnters");
        SnippetCounter.Group exit = factory.createSnippetCounterGroup("MonitorExits");
        lockBiasExisting = new SnippetCounter(enter, "lock{bias:existing}", "bias-locked previously biased object");
        lockBiasAcquired = new SnippetCounter(enter, "lock{bias:acquired}", "bias-locked newly biased object");
        lockBiasTransfer = new SnippetCounter(enter, "lock{bias:transfer}", "bias-locked, biased transferred");
        lockCas = new SnippetCounter(enter, "lock{cas}", "cas-locked an object");
        lockCasRecursive = new SnippetCounter(enter, "lock{cas:recursive}", "cas-locked, recursive");
        lockStubEpochExpired = new SnippetCounter(enter, "lock{stub:epoch-expired}", "stub-locked, epoch expired");
        lockStubRevoke = new SnippetCounter(enter, "lock{stub:revoke}", "stub-locked, biased revoked");
        lockStubFailedCas = new SnippetCounter(enter, "lock{stub:failed-cas/stack}", "stub-locked, failed cas and stack locking");
        inflatedCas = new SnippetCounter(enter, "lock{inflated:cas}", "heavyweight-locked, cas-locked");
        inflatedFailedCas = new SnippetCounter(enter, "lock{inflated:failed-cas}", "heavyweight-locked, failed cas");
        inflatedOwned = new SnippetCounter(enter, "lock{inflated:owned}", "heavyweight-locked, already owned");
        unbiasable = new SnippetCounter(enter, "unbiasable", "object with unbiasable type");
        revokeBias = new SnippetCounter(enter, "revokeBias", "object had bias revoked");

        unlockBias = new SnippetCounter(exit, "unlock{bias}", "bias-unlocked an object");
        unlockCas = new SnippetCounter(exit, "unlock{cas}", "cas-unlocked an object");
        unlockCasRecursive = new SnippetCounter(exit, "unlock{cas:recursive}", "cas-unlocked an object, recursive");
        unlockStub = new SnippetCounter(exit, "unlock{stub}", "stub-unlocked an object");
        unlockStubInflated = new SnippetCounter(exit, "unlock{stub:inflated}", "stub-unlocked an object with inflated monitor");
        unlockInflatedSimple = new SnippetCounter(exit, "unlock{inflated}", "unlocked an object monitor");
    }
}

/**
 * Lowers {@code RawMonitorEnterNode}/{@code MonitorExitNode} by instantiating the
 * monitorenter/monitorexit snippets (fast-locking variants or plain stub calls).
 */
public static class Templates extends AbstractTemplates {

    private final SnippetInfo monitorenter = snippet(MonitorSnippets.class, "monitorenter");
    private final SnippetInfo monitorexit = snippet(MonitorSnippets.class, "monitorexit");
    private final SnippetInfo monitorenterStub = snippet(MonitorSnippets.class, "monitorenterStub");
    private final SnippetInfo monitorexitStub = snippet(MonitorSnippets.class, "monitorexitStub");
    private final SnippetInfo initCounter = snippet(MonitorSnippets.class, "initCounter");
    private final SnippetInfo checkCounter = snippet(MonitorSnippets.class, "checkCounter");

    // Selects the fast-locking snippets (monitorenter/monitorexit) over the
    // stub-only variants in the lower(...) methods below.
    private final boolean useFastLocking;
    public final Counters counters;

    public Templates(OptionValues options, Iterable<DebugHandlersFactory> factories, SnippetCounter.Group.Factory factory, HotSpotProviders providers, TargetDescription target,
                    boolean useFastLocking) {
        super(options, factories, providers, providers.getSnippetReflection(), target);
        this.useFastLocking = useFastLocking;

        this.counters = new Counters(factory);
    }

    /**
     * Lowers a monitor enter by instantiating either the fast-locking snippet or the
     * stub-call snippet, depending on {@link #useFastLocking}.
     */
    public void lower(RawMonitorEnterNode monitorenterNode, HotSpotRegistersProvider registers, LoweringTool tool) {
        StructuredGraph graph = monitorenterNode.graph();
        // Inserts init/check counter invokes on first lowering if verification is on.
        checkBalancedMonitors(graph, tool);

        assert ((ObjectStamp) monitorenterNode.object().stamp(NodeView.DEFAULT)).nonNull();

        Arguments args;
        if (useFastLocking) {
            args = new Arguments(monitorenter, graph.getGuardsStage(), tool.getLoweringStage());
            args.add("object", monitorenterNode.object());
            args.add("hub", monitorenterNode.getHub());
            args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
            args.addConst("threadRegister", registers.getThreadRegister());
            args.addConst("stackPointerRegister", registers.getStackPointerRegister());
            args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph));
            args.addConst("counters", counters);
        } else {
            args = new Arguments(monitorenterStub, graph.getGuardsStage(), tool.getLoweringStage());
            args.add("object", monitorenterNode.object());
            args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth());
            args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph));
            args.addConst("counters", counters);
        }

        template(monitorenterNode, args).instantiate(providers.getMetaAccess(), monitorenterNode, DEFAULT_REPLACER, args);
    }

    /**
     * Lowers a monitor exit analogously to the enter case.
     */
    public void lower(MonitorExitNode monitorexitNode, HotSpotRegistersProvider registers, LoweringTool tool) {
        StructuredGraph graph = monitorexitNode.graph();

        Arguments args;
        if (useFastLocking) {
            args = new Arguments(monitorexit, graph.getGuardsStage(), tool.getLoweringStage());
        } else {
            args = new Arguments(monitorexitStub, graph.getGuardsStage(), tool.getLoweringStage());
        }
        // NOTE(review): "threadRegister" and "counters" are added on both paths, but the
        // visible monitorexitStub snippet only declares (object, lockDepth, trace) — verify
        // the !useFastLocking path binds its arguments correctly against that snippet.
        args.add("object", monitorexitNode.object());
        args.addConst("lockDepth", monitorexitNode.getMonitorId().getLockDepth());
        args.addConst("threadRegister", registers.getThreadRegister());
        args.addConst("trace", isTracingEnabledForType(monitorexitNode.object()) || isTracingEnabledForMethod(graph));
        args.addConst("counters", counters);

        template(monitorexitNode, args).instantiate(providers.getMetaAccess(), monitorexitNode, DEFAULT_REPLACER, args);
    }

    /**
     * Returns true if monitor tracing is enabled for {@code object}'s stamp type: the
     * {@code TraceMonitorsTypeFilter} option must be set, and either be empty (match all)
     * or be a substring of the type's name.
     */
    public static boolean isTracingEnabledForType(ValueNode object) {
        ResolvedJavaType type = StampTool.typeOrNull(object.stamp(NodeView.DEFAULT));
        String filter = TraceMonitorsTypeFilter.getValue(object.getOptions());
        if (filter == null) {
            return false;
        } else {
            if (filter.length() == 0) {
                // An empty (but set) filter matches every type.
                return true;
            }
            if (type == null) {
                return false;
            }
            return (type.getName().contains(filter));
        }
    }

    /**
     * Returns true if monitor tracing is enabled for the graph's method: the
     * {@code TraceMonitorsMethodFilter} option must be set, and either be empty (match all)
     * or be a substring of the method's {@code %H.%n} format.
     */
    public static boolean isTracingEnabledForMethod(StructuredGraph graph) {
        String filter = TraceMonitorsMethodFilter.getValue(graph.getOptions());
        if (filter == null) {
            return false;
        } else {
            if (filter.length() == 0) {
                // An empty (but set) filter matches every method.
                return true;
            }
            if (graph.method() == null) {
                return false;
            }
            return (graph.method().format("%H.%n").contains(filter));
        }
    }

    /**
     * If balanced monitor checking is enabled then nodes are inserted at the start and all
     * return points of the graph to initialize and check the monitor counter respectively.
     */
    private void checkBalancedMonitors(StructuredGraph graph, LoweringTool tool) {
        if (VerifyBalancedMonitors.getValue(options)) {
            NodeIterable<MonitorCounterNode> nodes = graph.getNodes().filter(MonitorCounterNode.class);
            if (nodes.isEmpty()) {
                // Only insert the nodes if this is the first monitorenter being lowered.
                JavaType returnType = initCounter.getMethod().getSignature().getReturnType(initCounter.getMethod().getDeclaringClass());
                StampPair returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
                MethodCallTargetNode callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, initCounter.getMethod(), new ValueNode[0], returnStamp, null));
                InvokeNode invoke = graph.add(new InvokeNode(callTarget, 0));
                invoke.setStateAfter(graph.start().stateAfter());
                graph.addAfterFixed(graph.start(), invoke);

                // Inline the initCounter snippet right after the graph start.
                StructuredGraph inlineeGraph = providers.getReplacements().getSnippet(initCounter.getMethod(), null, null, invoke.graph().trackNodeSourcePosition(), invoke.getNodeSourcePosition(),
                                invoke.getOptions());
                InliningUtil.inline(invoke, inlineeGraph, false, null);

                // Snapshot first: inlining below mutates the graph's node set.
                List<ReturnNode> rets = graph.getNodes(ReturnNode.TYPE).snapshot();
                for (ReturnNode ret : rets) {
                    returnType = checkCounter.getMethod().getSignature().getReturnType(checkCounter.getMethod().getDeclaringClass());
                    String msg = "unbalanced monitors in " + graph.method().format("%H.%n(%p)") + ", count = %d";
                    ConstantNode errMsg = ConstantNode.forConstant(tool.getConstantReflection().forString(msg), providers.getMetaAccess(), graph);
                    returnStamp = StampFactory.forDeclaredType(graph.getAssumptions(), returnType, false);
                    callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, checkCounter.getMethod(), new ValueNode[]{errMsg}, returnStamp, null));
                    invoke = graph.add(new InvokeNode(callTarget, 0));
                    // Synthetic state at AFTER_BCI: the check runs conceptually after the method body.
                    Bytecode code = new ResolvedJavaMethodBytecode(graph.method());
                    FrameState stateAfter = new FrameState(null, code, BytecodeFrame.AFTER_BCI, new ValueNode[0], new ValueNode[0], 0, new ValueNode[0], null, false, false);
                    invoke.setStateAfter(graph.add(stateAfter));
                    graph.addBeforeFixed(ret, invoke);

                    Arguments args = new Arguments(checkCounter, graph.getGuardsStage(), tool.getLoweringStage());
                    args.addConst("errMsg", msg);
                    inlineeGraph = template(invoke, args).copySpecializedGraph(graph.getDebug());
                    InliningUtil.inline(invoke, inlineeGraph, false, null);
                }
            }
        }
    }
}

// Foreign-call descriptors for the HotSpot monitorenter/monitorexit runtime stubs:
// void(Object, Word lock-slot address).
public static final ForeignCallDescriptor MONITORENTER = new ForeignCallDescriptor("monitorenter", void.class, Object.class, Word.class);
public static final ForeignCallDescriptor MONITOREXIT = new ForeignCallDescriptor("monitorexit", void.class, Object.class, Word.class);

@NodeIntrinsic(ForeignCallNode.class)
private static native void monitorenterStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);

@NodeIntrinsic(ForeignCallNode.class)
public static native void monitorexitStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);
}