< prev index next >

src/hotspot/share/code/codeCache.cpp

Print this page
rev 54697 : imported patch 8221734-v2-merge


1120     if (!nm->method()->is_method_handle_intrinsic()) {
1121       nm->mark_for_deoptimization();
1122       if (nm->has_evol_metadata()) {
1123         add_to_old_table(nm);
1124       }
1125     }
1126   }
1127 }
1128 
1129 // Flushes compiled methods dependent on redefined classes that have already been
1130 // marked for deoptimization.
1131 void CodeCache::flush_evol_dependents() {
1132   assert(SafepointSynchronize::is_at_safepoint(), "Can only do this at a safepoint!");
1133 
1134   // CodeCache can only be updated by a thread_in_VM and they will all be
1135   // stopped during the safepoint so CodeCache will be safe to update without
1136   // holding the CodeCache_lock.
1137 
1138   // Precondition: at least one nmethod has been marked for deoptimization.
1139 
1140   // All this already happens inside a VM_Operation, so we'll do all the work here.
1141   // Stuff copied from VM_Deoptimize and modified slightly.
1142 
1143   // We do not want any GCs to happen while we are in the middle of this VM operation
1144   ResourceMark rm;
1145   DeoptimizationMarker dm;
1146 
1147   // Deoptimize all activations depending on marked nmethods
1148   Deoptimization::deoptimize_dependents();
1149 
1150   // Make the dependent methods not entrant
1151   make_marked_nmethods_not_entrant();
1152 }
1153 #endif // INCLUDE_JVMTI
1154 
1155 // Mark all alive, not-unloading compiled methods for deoptimization,
1156 // skipping method handle intrinsics.
1156 void CodeCache::mark_all_nmethods_for_deoptimization() {
1157   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1158   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1159   while(iter.next()) {
1160     CompiledMethod* nm = iter.method();
1161     if (!nm->method()->is_method_handle_intrinsic()) {
1162       nm->mark_for_deoptimization();
1163     }
1164   }
1165 }
1166 
1167 int CodeCache::mark_for_deoptimization(Method* dependee) {
1168   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1169   int number_of_marked_CodeBlobs = 0;
1170 
1171   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);


1190       nm->make_not_entrant();
1191     }
1192   }
1193 }
1194 
1195 // Flushes compiled methods dependent on dependee.
1196 void CodeCache::flush_dependents_on(InstanceKlass* dependee) {
1197   assert_lock_strong(Compile_lock);
1198 
     // Fast path: nothing to flush if no nmethod records a dependency.
1199   if (number_of_nmethods_with_dependencies() == 0) return;
1200 
1201   // CodeCache can only be updated by a thread_in_VM and they will all be
1202   // stopped during the safepoint so CodeCache will be safe to update without
1203   // holding the CodeCache_lock.
1204 
1205   KlassDepChange changes(dependee);
1206 
1207   // Compute the dependent nmethods
1208   if (mark_for_deoptimization(changes) > 0) {
1209     // At least one nmethod has been marked for deoptimization;
     // hand the deoptimization work to the VM thread as a VM operation.
1210     VM_Deoptimize op;
1211     VMThread::execute(&op);
1212   }
1213 }
1214 
1215 // Flushes compiled methods dependent on dependee
1216 void CodeCache::flush_dependents_on_method(const methodHandle& m_h) {
1217   // --- Compile_lock is not held. However we are at a safepoint.
1218   assert_locked_or_safepoint(Compile_lock);
1219 
1220   // CodeCache can only be updated by a thread_in_VM and they will all be
1221   // stopped during the safepoint so CodeCache will be safe to update without
1222   // holding the CodeCache_lock.
1223 
1224   // Compute the dependent nmethods
1225   if (mark_for_deoptimization(m_h()) > 0) {
1226     // At least one nmethod has been marked for deoptimization
1227 
1228     // All this already happens inside a VM_Operation, so we'll do all the work here.
1229     // Stuff copied from VM_Deoptimize and modified slightly.
1230 
1231     // We do not want any GCs to happen while we are in the middle of this VM operation
1232     ResourceMark rm;
1233     DeoptimizationMarker dm;
1234 
1235     // Deoptimize all activations depending on marked nmethods
1236     Deoptimization::deoptimize_dependents();
1237 
1238     // Make the dependent methods not entrant
1239     make_marked_nmethods_not_entrant();
1240   }
1241 }
1242 
// Verify every code heap and each live CodeBlob it contains.
1243 void CodeCache::verify() {
1244   assert_locked_or_safepoint(CodeCache_lock);
1245   FOR_ALL_HEAPS(heap) {
1246     (*heap)->verify();
1247     FOR_ALL_BLOBS(cb, *heap) {
1248       if (cb->is_alive()) {
1249         cb->verify();
1250       }
1251     }
1252   }
1253 }
1254 
1255 // A CodeHeap is full. Print out warning and report event.
1256 PRAGMA_DIAG_PUSH
1257 PRAGMA_FORMAT_NONLITERAL_IGNORED
1258 void CodeCache::report_codemem_full(int code_blob_type, bool print) {
1259   // Get nmethod heap for the given CodeBlobType and build CodeCacheFull event




1120     if (!nm->method()->is_method_handle_intrinsic()) {
1121       nm->mark_for_deoptimization();
1122       if (nm->has_evol_metadata()) {
1123         add_to_old_table(nm);
1124       }
1125     }
1126   }
1127 }
1128 
1129 // Flushes compiled methods dependent on redefined classes that have already been
1130 // marked for deoptimization.
1131 void CodeCache::flush_evol_dependents() {
1132   assert(SafepointSynchronize::is_at_safepoint(), "Can only do this at a safepoint!");
1133 
1134   // CodeCache can only be updated by a thread_in_VM and they will all be
1135   // stopped during the safepoint so CodeCache will be safe to update without
1136   // holding the CodeCache_lock.
1137 
1138   // Precondition: at least one nmethod has been marked for deoptimization.
1139 
     // Deoptimize activations and make marked nmethods not entrant in one call.
1140   Deoptimization::deoptimize_all_marked();
1141 }
1142 #endif // INCLUDE_JVMTI
1143 
1144 // Mark all alive, not-unloading compiled methods for deoptimization,
1145 // skipping method handle intrinsics.
1145 void CodeCache::mark_all_nmethods_for_deoptimization() {
1146   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1147   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1148   while(iter.next()) {
1149     CompiledMethod* nm = iter.method();
1150     if (!nm->method()->is_method_handle_intrinsic()) {
1151       nm->mark_for_deoptimization();
1152     }
1153   }
1154 }
1155 
1156 int CodeCache::mark_for_deoptimization(Method* dependee) {
1157   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1158   int number_of_marked_CodeBlobs = 0;
1159 
1160   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);


1179       nm->make_not_entrant();
1180     }
1181   }
1182 }
1183 
1184 // Flushes compiled methods dependent on dependee.
1185 void CodeCache::flush_dependents_on(InstanceKlass* dependee) {
1186   assert_lock_strong(Compile_lock);
1187 
     // Fast path: nothing to flush if no nmethod records a dependency.
1188   if (number_of_nmethods_with_dependencies() == 0) return;
1189 
1190   // CodeCache can only be updated by a thread_in_VM and they will all be
1191   // stopped during the safepoint so CodeCache will be safe to update without
1192   // holding the CodeCache_lock.
1193 
1194   KlassDepChange changes(dependee);
1195 
1196   // Compute the dependent nmethods
1197   if (mark_for_deoptimization(changes) > 0) {
1198     // At least one nmethod has been marked for deoptimization
1199     Deoptimization::deoptimize_all_marked();
1200   }
1201 }
1202 
1203 // Flushes compiled methods dependent on dependee
1204 void CodeCache::flush_dependents_on_method(const methodHandle& m_h) {
1205   // --- Compile_lock is not held. However we are at a safepoint.
1206   assert_locked_or_safepoint(Compile_lock);
1207 
1208   // CodeCache can only be updated by a thread_in_VM and they will all be
1209   // stopped during the safepoint so CodeCache will be safe to update without
1210   // holding the CodeCache_lock.
1211 
1212   // Compute the dependent nmethods
1213   if (mark_for_deoptimization(m_h()) > 0) {
     // At least one nmethod has been marked: deoptimize activations and
     // make the marked nmethods not entrant in one call.
1214     Deoptimization::deoptimize_all_marked();
1215   }
1216 }
1217 
// Verify every code heap and each live CodeBlob it contains.
1218 void CodeCache::verify() {
1219   assert_locked_or_safepoint(CodeCache_lock);
1220   FOR_ALL_HEAPS(heap) {
1221     (*heap)->verify();
1222     FOR_ALL_BLOBS(cb, *heap) {
1223       if (cb->is_alive()) {
1224         cb->verify();
1225       }
1226     }
1227   }
1228 }
1229 
1230 // A CodeHeap is full. Print out warning and report event.
1231 PRAGMA_DIAG_PUSH
1232 PRAGMA_FORMAT_NONLITERAL_IGNORED
1233 void CodeCache::report_codemem_full(int code_blob_type, bool print) {
1234   // Get nmethod heap for the given CodeBlobType and build CodeCacheFull event


< prev index next >