< prev index next >

src/hotspot/share/code/codeCache.cpp

Print this page
rev 54838 : [mq]: 8221734-v2
rev 54839 : [mq]: 8221734-v3
rev 54840 : [mq]: 8221734-v3-stress-test

*** 1136,1167 **** // stopped during the safepoint so CodeCache will be safe to update without // holding the CodeCache_lock. // At least one nmethod has been marked for deoptimization ! // All this already happens inside a VM_Operation, so we'll do all the work here. ! // Stuff copied from VM_Deoptimize and modified slightly. ! ! // We do not want any GCs to happen while we are in the middle of this VM operation ! ResourceMark rm; ! DeoptimizationMarker dm; ! ! // Deoptimize all activations depending on marked nmethods ! Deoptimization::deoptimize_dependents(); ! ! // Make the dependent methods not entrant ! make_marked_nmethods_not_entrant(); } #endif // INCLUDE_JVMTI ! // Deoptimize all methods void CodeCache::mark_all_nmethods_for_deoptimization() { MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading); while(iter.next()) { CompiledMethod* nm = iter.method(); ! if (!nm->method()->is_method_handle_intrinsic()) { nm->mark_for_deoptimization(); } } } --- 1136,1164 ---- // stopped during the safepoint so CodeCache will be safe to update without // holding the CodeCache_lock. // At least one nmethod has been marked for deoptimization ! Deoptimization::deoptimize_all_marked(); } #endif // INCLUDE_JVMTI ! // Deoptimize all(most) methods void CodeCache::mark_all_nmethods_for_deoptimization() { MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading); while(iter.next()) { CompiledMethod* nm = iter.method(); ! // Not-installed nmethods are unsafe to mark for deopt, normally never deopted. ! // A not_entrant method may become a zombie at any time, ! // since we don't know on which side of the last safepoint it became not_entrant ! // (state must be in_use). ! // Native methods are unsafe to mark for deopt, normally never deopted. ! if (!nm->method()->is_method_handle_intrinsic() && ! 
!nm->is_not_installed() && ! nm->is_in_use() && ! !nm->is_native_method()) { nm->mark_for_deoptimization(); } } }
*** 1185,1195 **** void CodeCache::make_marked_nmethods_not_entrant() { assert_locked_or_safepoint(CodeCache_lock); CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading); while(iter.next()) { CompiledMethod* nm = iter.method(); ! if (nm->is_marked_for_deoptimization() && !nm->is_not_entrant()) { nm->make_not_entrant(); } } } --- 1182,1197 ---- void CodeCache::make_marked_nmethods_not_entrant() { assert_locked_or_safepoint(CodeCache_lock); CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading); while(iter.next()) { CompiledMethod* nm = iter.method(); ! // only_alive_and_not_unloading returns not_entrant nmethods. ! // A not_entrant method can become a zombie at any time, ! // if it was made not_entrant before the previous safepoint/handshake. ! // We check that it is not not_entrant and not zombie, ! // by checking is_in_use(). ! if (nm->is_marked_for_deoptimization() && nm->is_in_use()) { nm->make_not_entrant(); } } }
*** 1197,1245 **** void CodeCache::flush_dependents_on(InstanceKlass* dependee) { assert_lock_strong(Compile_lock); if (number_of_nmethods_with_dependencies() == 0) return; - // CodeCache can only be updated by a thread_in_VM and they will all be - // stopped during the safepoint so CodeCache will be safe to update without - // holding the CodeCache_lock. - KlassDepChange changes(dependee); // Compute the dependent nmethods if (mark_for_deoptimization(changes) > 0) { // At least one nmethod has been marked for deoptimization ! VM_Deoptimize op; ! VMThread::execute(&op); } } // Flushes compiled methods dependent on dependee void CodeCache::flush_dependents_on_method(const methodHandle& m_h) { // --- Compile_lock is not held. However we are at a safepoint. assert_locked_or_safepoint(Compile_lock); - // CodeCache can only be updated by a thread_in_VM and they will all be - // stopped during the safepoint so CodeCache will be safe to update without - // holding the CodeCache_lock. - // Compute the dependent nmethods if (mark_for_deoptimization(m_h()) > 0) { ! // At least one nmethod has been marked for deoptimization ! ! // All this already happens inside a VM_Operation, so we'll do all the work here. ! // Stuff copied from VM_Deoptimize and modified slightly. ! ! // We do not want any GCs to happen while we are in the middle of this VM operation ! ResourceMark rm; ! DeoptimizationMarker dm; ! ! // Deoptimize all activations depending on marked nmethods ! Deoptimization::deoptimize_dependents(); ! ! // Make the dependent methods not entrant ! 
make_marked_nmethods_not_entrant(); } } void CodeCache::verify() { assert_locked_or_safepoint(CodeCache_lock); --- 1199,1225 ---- void CodeCache::flush_dependents_on(InstanceKlass* dependee) { assert_lock_strong(Compile_lock); if (number_of_nmethods_with_dependencies() == 0) return; KlassDepChange changes(dependee); // Compute the dependent nmethods if (mark_for_deoptimization(changes) > 0) { // At least one nmethod has been marked for deoptimization ! Deoptimization::deoptimize_all_marked(); } } // Flushes compiled methods dependent on dependee void CodeCache::flush_dependents_on_method(const methodHandle& m_h) { // --- Compile_lock is not held. However we are at a safepoint. assert_locked_or_safepoint(Compile_lock); // Compute the dependent nmethods if (mark_for_deoptimization(m_h()) > 0) { ! Deoptimization::deoptimize_all_marked(); } } void CodeCache::verify() { assert_locked_or_safepoint(CodeCache_lock);
< prev index next >