
src/share/vm/code/codeCache.cpp


*** 1139,1149 ****
    return false;
  #endif
  }
  
  #ifdef HOTSWAP
! int CodeCache::mark_for_evol_deoptimization(instanceKlassHandle dependee) {
    MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    int number_of_marked_CodeBlobs = 0;
  
    // Deoptimize all methods of the evolving class itself
    Array<Method*>* old_methods = dependee->methods();
--- 1139,1149 ----
    return false;
  #endif
  }
  
  #ifdef HOTSWAP
! int CodeCache::mark_for_evol_deoptimization(InstanceKlass* dependee) {
    MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    int number_of_marked_CodeBlobs = 0;
  
    // Deoptimize all methods of the evolving class itself
    Array<Method*>* old_methods = dependee->methods();
*** 1160,1170 ****
    CompiledMethodIterator iter;
    while(iter.next_alive()) {
      CompiledMethod* nm = iter.method();
      if (nm->is_marked_for_deoptimization()) {
        // ...Already marked in the previous pass; don't count it again.
!     } else if (nm->is_evol_dependent_on(dependee())) {
        ResourceMark rm;
        nm->mark_for_deoptimization();
        number_of_marked_CodeBlobs++;
      } else {
        // flush caches in case they refer to a redefined Method*
--- 1160,1170 ----
    CompiledMethodIterator iter;
    while(iter.next_alive()) {
      CompiledMethod* nm = iter.method();
      if (nm->is_marked_for_deoptimization()) {
        // ...Already marked in the previous pass; don't count it again.
!     } else if (nm->is_evol_dependent_on(dependee)) {
        ResourceMark rm;
        nm->mark_for_deoptimization();
        number_of_marked_CodeBlobs++;
      } else {
        // flush caches in case they refer to a redefined Method*
*** 1216,1226 ****
      }
    }
  }
  
  // Flushes compiled methods dependent on dependee.
! void CodeCache::flush_dependents_on(instanceKlassHandle dependee) {
    assert_lock_strong(Compile_lock);
  
    if (number_of_nmethods_with_dependencies() == 0) return;
  
    // CodeCache can only be updated by a thread_in_VM and they will all be
--- 1216,1226 ----
      }
    }
  }
  
  // Flushes compiled methods dependent on dependee.
! void CodeCache::flush_dependents_on(InstanceKlass* dependee) {
    assert_lock_strong(Compile_lock);
  
    if (number_of_nmethods_with_dependencies() == 0) return;
  
    // CodeCache can only be updated by a thread_in_VM and they will all be
*** 1237,1257 ****
    }
  }
  
  #ifdef HOTSWAP
  // Flushes compiled methods dependent on dependee in the evolutionary sense
! void CodeCache::flush_evol_dependents_on(instanceKlassHandle ev_k_h) {
    // --- Compile_lock is not held. However we are at a safepoint.
    assert_locked_or_safepoint(Compile_lock);
    if (number_of_nmethods_with_dependencies() == 0 && !UseAOT) return;
  
    // CodeCache can only be updated by a thread_in_VM and they will all be
    // stopped during the safepoint so CodeCache will be safe to update without
    // holding the CodeCache_lock.
  
    // Compute the dependent nmethods
!   if (mark_for_evol_deoptimization(ev_k_h) > 0) {
      // At least one nmethod has been marked for deoptimization
  
      // All this already happens inside a VM_Operation, so we'll do all the work here.
      // Stuff copied from VM_Deoptimize and modified slightly.
--- 1237,1257 ----
    }
  }
  
  #ifdef HOTSWAP
  // Flushes compiled methods dependent on dependee in the evolutionary sense
! void CodeCache::flush_evol_dependents_on(InstanceKlass* ev_k) {
    // --- Compile_lock is not held. However we are at a safepoint.
    assert_locked_or_safepoint(Compile_lock);
    if (number_of_nmethods_with_dependencies() == 0 && !UseAOT) return;
  
    // CodeCache can only be updated by a thread_in_VM and they will all be
    // stopped during the safepoint so CodeCache will be safe to update without
    // holding the CodeCache_lock.
  
    // Compute the dependent nmethods
!   if (mark_for_evol_deoptimization(ev_k) > 0) {
      // At least one nmethod has been marked for deoptimization
  
      // All this already happens inside a VM_Operation, so we'll do all the work here.
      // Stuff copied from VM_Deoptimize and modified slightly.
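
Reviewer note (not part of the webrev itself): the hunks above change CodeCache::mark_for_evol_deoptimization, CodeCache::flush_dependents_on and CodeCache::flush_evol_dependents_on to take a raw InstanceKlass* instead of an instanceKlassHandle, and adjust the call sites: the handle dereference dependee() becomes plain dependee, the ev_k_h parameter is renamed ev_k, while member accesses such as dependee->methods() are unchanged. The standalone sketch below is a minimal mock (stand-in types only, not HotSpot headers; is_evol_dependent_on is a hypothetical stub) illustrating why call sites change shape in exactly this way: the old handle exposed the klass pointer through operator() and forwarded member access through operator->.

// Standalone mock sketch of the handle-to-raw-pointer conversion (not HotSpot code).
#include <cstdio>

struct InstanceKlass {                      // stand-in for HotSpot's InstanceKlass
  const char* name;
};

struct instanceKlassHandle {                // stand-in for the removed handle wrapper
  InstanceKlass* _k;
  explicit instanceKlassHandle(InstanceKlass* k) : _k(k) {}
  InstanceKlass* operator()() const { return _k; }   // why "dependee()" appeared at call sites
  InstanceKlass* operator->() const { return _k; }   // why "dependee->methods()" is unchanged
};

static bool is_evol_dependent_on(InstanceKlass* k) { // hypothetical stub for illustration
  return k != NULL;
}

// Old shape: handle parameter, klass reached via dependee()
static int mark_old(instanceKlassHandle dependee) {
  return is_evol_dependent_on(dependee()) ? 1 : 0;
}

// New shape: raw pointer parameter, passed through directly
static int mark_new(InstanceKlass* dependee) {
  return is_evol_dependent_on(dependee) ? 1 : 0;
}

int main() {
  InstanceKlass k = { "java/lang/Object" };
  std::printf("old=%d new=%d\n", mark_old(instanceKlassHandle(&k)), mark_new(&k));
  return 0;
}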