src/hotspot/share/runtime/synchronizer.cpp

*** 127,136 ****
--- 127,149 ----
  static volatile intptr_t gListLock = 0;      // protects global monitor lists
  static volatile int gMonitorFreeCount  = 0;  // # on gFreeList
  static volatile int gMonitorPopulation = 0;  // # Extant -- in circulation
+ 
+ #define CHECK_THROW_NOSYNC_IMSE(obj)  \
+     if ((obj)->mark()->is_always_locked()) {  \
+       ResourceMark rm(THREAD);  \
+       THROW_MSG(vmSymbols::java_lang_IllegalMonitorStateException(), obj->klass()->external_name()); \
+     }
+ 
+ #define CHECK_THROW_NOSYNC_IMSE_0(obj)  \
+     if ((obj)->mark()->is_always_locked()) {  \
+       ResourceMark rm(THREAD);  \
+       THROW_MSG_0(vmSymbols::java_lang_IllegalMonitorStateException(), obj->klass()->external_name()); \
+     }
+ 
  #define CHAINMARKER (cast_to_oop<intptr_t>(-1))
  
  // =====================> Quick functions
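
The two CHECK_THROW_NOSYNC_IMSE macros above are the central guard introduced by this change: each throwing monitor entry point checks the mark word first and raises IllegalMonitorStateException for a value type before touching any locking state. Below is a minimal, self-contained C++ sketch of that shape; FakeKlass, FakeOop, mark_is_always_locked() and monitor_enter() are hypothetical stand-ins, not HotSpot types or APIs.

    // Hypothetical sketch only: models the guard's shape, not HotSpot internals.
    #include <exception>
    #include <stdexcept>
    #include <string>

    struct FakeKlass {                 // stand-in for Klass
      std::string name;
      bool        is_value;
    };

    struct FakeOop {                   // stand-in for oop
      FakeKlass* klass;
      // stand-in for markOop::is_always_locked(): true for value types
      bool mark_is_always_locked() const { return klass->is_value; }
    };

    void monitor_enter(FakeOop* obj) {
      if (obj->mark_is_always_locked()) {
        // Value type: refuse the monitor operation up front.
        throw std::runtime_error("IllegalMonitorStateException: " + obj->klass->name);
      }
      // ... a real implementation would continue with stack locking or inflation ...
    }

    int main() {
      FakeKlass point{"Point", /*is_value=*/true};
      FakeOop v{&point};
      try { monitor_enter(&v); } catch (const std::exception&) { /* expected for a value type */ }
    }
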
*** 158,167 ****
--- 171,181 ----
    assert(!SafepointSynchronize::is_at_safepoint(), "invariant");
    assert(self->is_Java_thread(), "invariant");
    assert(((JavaThread *) self)->thread_state() == _thread_in_Java, "invariant");
    NoSafepointVerifier nsv;
    if (obj == NULL) return false;  // slow-path for invalid obj
+   assert(!EnableValhalla || !obj->klass()->is_value(), "monitor op on value type");
    const markOop mark = obj->mark();
  
    if (mark->has_locker() && self->is_lock_owned((address)mark->locker())) {
      // Degenerate notify
      // stack-locked by caller so by definition the implied waitset is empty.
*** 208,217 ****
--- 222,232 ----
    assert(!SafepointSynchronize::is_at_safepoint(), "invariant");
    assert(Self->is_Java_thread(), "invariant");
    assert(((JavaThread *) Self)->thread_state() == _thread_in_Java, "invariant");
    NoSafepointVerifier nsv;
    if (obj == NULL) return false;       // Need to throw NPE
+   assert(!EnableValhalla || !obj->klass()->is_value(), "monitor op on value type");
    const markOop mark = obj->mark();
  
    if (mark->has_monitor()) {
      ObjectMonitor * const m = mark->monitor();
      assert(oopDesc::equals((oop) m->object(), obj), "invariant");
*** 263,272 ****
--- 278,288 ----
  // if the following function is changed. The implementation is
  // extremely sensitive to race condition. Be careful.
  
  void ObjectSynchronizer::fast_enter(Handle obj, BasicLock* lock,
                                      bool attempt_rebias, TRAPS) {
+   CHECK_THROW_NOSYNC_IMSE(obj);
    if (UseBiasedLocking) {
      if (!SafepointSynchronize::is_at_safepoint()) {
        BiasedLocking::Condition cond = BiasedLocking::revoke_and_rebias(obj, attempt_rebias, THREAD);
        if (cond == BiasedLocking::BIAS_REVOKED_AND_REBIASED) {
          return;
*** 281,290 ****
--- 297,310 ----
    slow_enter(obj, lock, THREAD);
  }
  
  void ObjectSynchronizer::fast_exit(oop object, BasicLock* lock, TRAPS) {
    markOop mark = object->mark();
+   if (EnableValhalla && mark->is_always_locked()) {
+     return;
+   }
+   assert(!EnableValhalla || !object->klass()->is_value(), "monitor op on value type");
    // We cannot check for Biased Locking if we are racing an inflation.
    assert(mark == markOopDesc::INFLATING() ||
           !mark->has_bias_pattern(), "should not see bias pattern here");
  
    markOop dhw = lock->displaced_header();
*** 334,343 ****
--- 354,364 ----
  // Interpreter/Compiler Slow Case
  // This routine is used to handle interpreter/compiler slow case
  // We don't need to use fast path here, because it must have been
  // failed in the interpreter/compiler code.
  void ObjectSynchronizer::slow_enter(Handle obj, BasicLock* lock, TRAPS) {
+   CHECK_THROW_NOSYNC_IMSE(obj);
    markOop mark = obj->mark();
    assert(!mark->has_bias_pattern(), "should not see bias pattern here");
  
    if (mark->is_neutral()) {
      // Anticipate successful CAS -- the ST of the displaced mark must
*** 382,391 ****
--- 403,413 ----
  //  3) when notified on lock2, unlock lock2
  //  4) reenter lock1 with original recursion count
  //  5) lock lock2
  // NOTE: must use heavy weight monitor to handle complete_exit/reenter()
  intptr_t ObjectSynchronizer::complete_exit(Handle obj, TRAPS) {
+   assert(!EnableValhalla || !obj->klass()->is_value(), "monitor op on value type");
    if (UseBiasedLocking) {
      BiasedLocking::revoke_and_rebias(obj, false, THREAD);
      assert(!obj->mark()->has_bias_pattern(), "biases should be revoked by now");
    }
*** 394,403 ****
--- 416,426 ----
    return monitor->complete_exit(THREAD);
  }
  
  // NOTE: must use heavy weight monitor to handle complete_exit/reenter()
  void ObjectSynchronizer::reenter(Handle obj, intptr_t recursion, TRAPS) {
+   assert(!EnableValhalla || !obj->klass()->is_value(), "monitor op on value type");
    if (UseBiasedLocking) {
      BiasedLocking::revoke_and_rebias(obj, false, THREAD);
      assert(!obj->mark()->has_bias_pattern(), "biases should be revoked by now");
    }
*** 408,417 ****
--- 431,441 ----
  // -----------------------------------------------------------------------------
  // JNI locks on java objects
  // NOTE: must use heavy weight monitor to handle jni monitor enter
  void ObjectSynchronizer::jni_enter(Handle obj, TRAPS) {
    // the current locking is from JNI instead of Java code
+   CHECK_THROW_NOSYNC_IMSE(obj);
    if (UseBiasedLocking) {
      BiasedLocking::revoke_and_rebias(obj, false, THREAD);
      assert(!obj->mark()->has_bias_pattern(), "biases should be revoked by now");
    }
    THREAD->set_current_pending_monitor_is_from_java(false);
*** 419,428 ****
--- 443,453 ----
    THREAD->set_current_pending_monitor_is_from_java(true);
  }
  
  // NOTE: must use heavy weight monitor to handle jni monitor exit
  void ObjectSynchronizer::jni_exit(oop obj, Thread* THREAD) {
+   CHECK_THROW_NOSYNC_IMSE(obj);
    if (UseBiasedLocking) {
      Handle h_obj(THREAD, obj);
      BiasedLocking::revoke_and_rebias(h_obj, false, THREAD);
      obj = h_obj();
    }
*** 459,468 ****
--- 484,494 ----
  // -----------------------------------------------------------------------------
  //  Wait/Notify/NotifyAll
  // NOTE: must use heavy weight monitor to handle wait()
  
  int ObjectSynchronizer::wait(Handle obj, jlong millis, TRAPS) {
+   CHECK_THROW_NOSYNC_IMSE_0(obj);
    if (UseBiasedLocking) {
      BiasedLocking::revoke_and_rebias(obj, false, THREAD);
      assert(!obj->mark()->has_bias_pattern(), "biases should be revoked by now");
    }
    if (millis < 0) {
*** 479,488 ****
--- 505,515 ----
    // DTRACE_MONITOR_PROBE(waited, monitor, obj(), THREAD);
    return dtrace_waited_probe(monitor, obj, THREAD);
  }
  
  void ObjectSynchronizer::waitUninterruptibly(Handle obj, jlong millis, TRAPS) {
+   CHECK_THROW_NOSYNC_IMSE(obj);
    if (UseBiasedLocking) {
      BiasedLocking::revoke_and_rebias(obj, false, THREAD);
      assert(!obj->mark()->has_bias_pattern(), "biases should be revoked by now");
    }
    if (millis < 0) {
*** 490,499 ****
--- 517,527 ----
    }
    inflate(THREAD, obj(), inflate_cause_wait)->wait(millis, false, THREAD);
  }
  
  void ObjectSynchronizer::notify(Handle obj, TRAPS) {
+   CHECK_THROW_NOSYNC_IMSE(obj);
    if (UseBiasedLocking) {
      BiasedLocking::revoke_and_rebias(obj, false, THREAD);
      assert(!obj->mark()->has_bias_pattern(), "biases should be revoked by now");
    }
*** 504,513 ****
--- 532,542 ----
    inflate(THREAD, obj(), inflate_cause_notify)->notify(THREAD);
  }
  
  // NOTE: see comment of notify()
  void ObjectSynchronizer::notifyall(Handle obj, TRAPS) {
+   CHECK_THROW_NOSYNC_IMSE(obj);
    if (UseBiasedLocking) {
      BiasedLocking::revoke_and_rebias(obj, false, THREAD);
      assert(!obj->mark()->has_bias_pattern(), "biases should be revoked by now");
    }
*** 677,686 ****
--- 706,723 ----
    assert(value != markOopDesc::no_hash, "invariant");
    return value;
  }
  
  intptr_t ObjectSynchronizer::FastHashCode(Thread * Self, oop obj) {
+   if (EnableValhalla && obj->klass()->is_value()) {
+     // Expected tooling to override hashCode for value type, just don't crash
+     if (log_is_enabled(Debug, monitorinflation)) {
+       ResourceMark rm;
+       log_debug(monitorinflation)("FastHashCode for value type: %s", obj->klass()->external_name());
+     }
+     return obj->klass()->java_mirror()->identity_hash();
+   }
    if (UseBiasedLocking) {
      // NOTE: many places throughout the JVM do not expect a safepoint
      // to be taken here, in particular most operations on perm gen
      // objects. However, we only ever bias Java instances and all of
      // the call sites of identity_hash that might revoke biases have
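
The new branch at the top of FastHashCode keeps identity-hash requests on value types from crashing: it falls back to the identity hash of the klass's java.lang.Class mirror, so every instance of a given value class reports the same hash. Below is a minimal, self-contained C++ sketch of that fallback; FakeMirror, FakeKlass, FakeOop and fast_hash_code() are hypothetical stand-ins, not HotSpot types.

    // Hypothetical sketch only: models the fallback's behavior, not HotSpot internals.
    #include <cstdio>
    #include <cstdint>

    struct FakeMirror { intptr_t identity_hash; };   // stand-in for the java.lang.Class mirror

    struct FakeKlass {                               // stand-in for Klass
      const char* name;
      bool        is_value;
      FakeMirror  mirror;
    };

    struct FakeOop { FakeKlass* klass; };            // stand-in for oop

    intptr_t fast_hash_code(const FakeOop& obj) {
      if (obj.klass->is_value) {
        // Value type: one shared hash per class (the mirror's identity hash).
        return obj.klass->mirror.identity_hash;
      }
      // Non-value object: a real per-instance identity hash would be computed here.
      return reinterpret_cast<intptr_t>(&obj);
    }

    int main() {
      FakeKlass point = { "Point", true, { 42 } };
      FakeOop a = { &point }, b = { &point };
      std::printf("%d\n", fast_hash_code(a) == fast_hash_code(b));   // prints 1
    }
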
*** 781,799 ****
    }
    // We finally get the hash
    return hash;
  }
  
- // Deprecated -- use FastHashCode() instead.
- 
- intptr_t ObjectSynchronizer::identity_hash_value_for(Handle obj) {
-   return FastHashCode(Thread::current(), obj());
- }
- 
  bool ObjectSynchronizer::current_thread_holds_lock(JavaThread* thread,
                                                     Handle h_obj) {
    if (UseBiasedLocking) {
      BiasedLocking::revoke_and_rebias(h_obj, false, thread);
      assert(!h_obj->mark()->has_bias_pattern(), "biases should be revoked by now");
    }
--- 818,833 ----
    }
    // We finally get the hash
    return hash;
  }
  
  bool ObjectSynchronizer::current_thread_holds_lock(JavaThread* thread,
                                                     Handle h_obj) {
+   if (EnableValhalla && h_obj->mark()->is_always_locked()) {
+     return false;
+   }
    if (UseBiasedLocking) {
      BiasedLocking::revoke_and_rebias(h_obj, false, thread);
      assert(!h_obj->mark()->has_bias_pattern(), "biases should be revoked by now");
    }
*** 1300,1309 ****
--- 1334,1347 ----
    // Inflate mutates the heap ...
    // Relaxing assertion for bug 6320749.
    assert(Universe::verify_in_progress() ||
           !SafepointSynchronize::is_at_safepoint(), "invariant");
  
+   if (EnableValhalla) {
+     guarantee(!object->klass()->is_value(), "Attempt to inflate value type");
+   }
+ 
    EventJavaMonitorInflate event;
  
    for (;;) {
      const markOop mark = object->mark();
      assert(!mark->has_bias_pattern(), "invariant");