< prev index next >

src/hotspot/share/code/codeCache.cpp

Print this page
rev 54936 : [mq]: 8221734-v3
rev 54937 : [mq]: 8221734-v5


1125     if (!nm->method()->is_method_handle_intrinsic()) {
1126       nm->mark_for_deoptimization();
1127       if (nm->has_evol_metadata()) {
1128         add_to_old_table(nm);
1129       }
1130     }
1131   }
1132 }
1133 
1134 // Flushes compiled methods dependent on redefined classes, that have already been
1135 // marked for deoptimization.
1136 void CodeCache::flush_evol_dependents() {
1137   assert(SafepointSynchronize::is_at_safepoint(), "Can only do this at a safepoint!");
1138 
1139   // CodeCache can only be updated by a thread_in_VM and they will all be
1140   // stopped during the safepoint so CodeCache will be safe to update without
1141   // holding the CodeCache_lock.
1142 
1143   // Precondition: at least one nmethod has already been marked for deoptimization.
1144 
1145   // All this already happens inside a VM_Operation, so we'll do all the work here.
1146   // Stuff copied from VM_Deoptimize and modified slightly.
1147 
1148   // We do not want any GCs to happen while we are in the middle of this VM operation
1149   ResourceMark rm;
1150   DeoptimizationMarker dm;
1151 
1152   // Deoptimize all activations depending on marked nmethods
1153   Deoptimization::deoptimize_dependents();
1154 
1155   // Make the dependent methods not entrant
1156   make_marked_nmethods_not_entrant();
1157 }
1158 #endif // INCLUDE_JVMTI
1159 
1160 // Mark all compiled methods (except method handle intrinsics) for deoptimization.
1161 void CodeCache::mark_all_nmethods_for_deoptimization() {
1162   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1163   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1164   while(iter.next()) {
1165     CompiledMethod* nm = iter.method();
1166     if (!nm->method()->is_method_handle_intrinsic()) {








1167       nm->mark_for_deoptimization();
1168     }
1169   }
1170 }
1171 
// Marks all alive, not-unloading compiled methods that depend on dependee
// for deoptimization; returns the number of methods marked.
1172 int CodeCache::mark_for_deoptimization(Method* dependee) {
1173   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1174   int number_of_marked_CodeBlobs = 0;
1175 
1176   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1177   while(iter.next()) {
1178     CompiledMethod* nm = iter.method();
1179     if (nm->is_dependent_on_method(dependee)) {
1180       ResourceMark rm;
1181       nm->mark_for_deoptimization();
1182       number_of_marked_CodeBlobs++;
1183     }
1184   }
1185 
1186   return number_of_marked_CodeBlobs;
1187 }
1188 
// Make every compiled method that has been marked for deoptimization, and is
// not already not-entrant, not entrant.
1189 void CodeCache::make_marked_nmethods_not_entrant() {
1190   assert_locked_or_safepoint(CodeCache_lock);
1191   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1192   while(iter.next()) {
1193     CompiledMethod* nm = iter.method();
1194     if (nm->is_marked_for_deoptimization() && !nm->is_not_entrant()) {





1195       nm->make_not_entrant();
1196     }
1197   }
1198 }
1199 
1200 // Flushes compiled methods dependent on dependee.
1201 void CodeCache::flush_dependents_on(InstanceKlass* dependee) {
1202   assert_lock_strong(Compile_lock);
1203 
     // Nothing to do if no nmethod records any dependencies.
1204   if (number_of_nmethods_with_dependencies() == 0) return;
1205 
1206   // CodeCache can only be updated by a thread_in_VM and they will all be
1207   // stopped during the safepoint so CodeCache will be safe to update without
1208   // holding the CodeCache_lock.
1209 
1210   KlassDepChange changes(dependee);
1211 
1212   // Compute the dependent nmethods
1213   if (mark_for_deoptimization(changes) > 0) {
1214     // At least one nmethod has been marked for deoptimization
1215     VM_Deoptimize op;
1216     VMThread::execute(&op);
1217   }
1218 }
1219 
1220 // Flushes compiled methods dependent on dependee
1221 void CodeCache::flush_dependents_on_method(const methodHandle& m_h) {
1222   // --- Compile_lock is not held. However we are at a safepoint.
1223   assert_locked_or_safepoint(Compile_lock);
1224 
1225   // CodeCache can only be updated by a thread_in_VM and they will all be
1226   // stopped during the safepoint so CodeCache will be safe to update without
1227   // holding the CodeCache_lock.
1228 
1229   // Compute the dependent nmethods
1230   if (mark_for_deoptimization(m_h()) > 0) {
1231     // At least one nmethod has been marked for deoptimization
1232 
1233     // All this already happens inside a VM_Operation, so we'll do all the work here.
1234     // Stuff copied from VM_Deoptimize and modified slightly.
1235 
1236     // We do not want any GCs to happen while we are in the middle of this VM operation
1237     ResourceMark rm;
1238     DeoptimizationMarker dm;
1239 
1240     // Deoptimize all activations depending on marked nmethods
1241     Deoptimization::deoptimize_dependents();
1242 
1243     // Make the dependent methods not entrant
1244     make_marked_nmethods_not_entrant();
1245   }
1246 }
1247 
// Verify each code heap and every live CodeBlob it contains.
1248 void CodeCache::verify() {
1249   assert_locked_or_safepoint(CodeCache_lock);
1250   FOR_ALL_HEAPS(heap) {
1251     (*heap)->verify();
1252     FOR_ALL_BLOBS(cb, *heap) {
1253       if (cb->is_alive()) {
1254         cb->verify();
1255       }
1256     }
1257   }
1258 }
1259 
1260 // A CodeHeap is full. Print out warning and report event.
1261 PRAGMA_DIAG_PUSH
1262 PRAGMA_FORMAT_NONLITERAL_IGNORED
1263 void CodeCache::report_codemem_full(int code_blob_type, bool print) {
1264   // Get nmethod heap for the given CodeBlobType and build CodeCacheFull event




1125     if (!nm->method()->is_method_handle_intrinsic()) {
1126       nm->mark_for_deoptimization();
1127       if (nm->has_evol_metadata()) {
1128         add_to_old_table(nm);
1129       }
1130     }
1131   }
1132 }
1133 
1134 // Flushes compiled methods dependent on redefined classes, that have already been
1135 // marked for deoptimization.
1136 void CodeCache::flush_evol_dependents() {
1137   assert(SafepointSynchronize::is_at_safepoint(), "Can only do this at a safepoint!");
1138 
1139   // CodeCache can only be updated by a thread_in_VM and they will all be
1140   // stopped during the safepoint so CodeCache will be safe to update without
1141   // holding the CodeCache_lock.
1142 
1143   // Precondition: at least one nmethod has already been marked for deoptimization.
1144 
     // NOTE(review): deoptimize_all_marked() presumably deoptimizes dependent
     // activations and makes the marked nmethods not entrant in one step —
     // confirm against its definition in deoptimization.cpp.
1145   Deoptimization::deoptimize_all_marked();











1146 }
1147 #endif // INCLUDE_JVMTI
1148 
1149 // Mark all compiled methods for deoptimization, where it is safe and possible to deopt them.
1150 void CodeCache::mark_all_nmethods_for_deoptimization() {
1151   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1152   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1153   while(iter.next()) {
1154     CompiledMethod* nm = iter.method();
1155     if (!nm->method()->is_method_handle_intrinsic() &&
1156         !nm->is_not_installed() &&
1157         nm->is_in_use() &&
1158         !nm->is_native_method()) {
1159       // Intrinsics and native methods are never deopted. A method that is
1160       // not installed yet or is not in use is not safe to deopt; the
1161       // is_in_use() check covers the not_entrant and not zombie cases.
1162       // Note: A not_entrant method can become a zombie at anytime if it was
1163       // made not_entrant before the previous safepoint/handshake.
1164       nm->mark_for_deoptimization();
1165     }
1166   }
1167 }
1168 
// Marks all alive, not-unloading compiled methods that depend on dependee
// for deoptimization; returns the number of methods marked.
1169 int CodeCache::mark_for_deoptimization(Method* dependee) {
1170   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1171   int number_of_marked_CodeBlobs = 0;
1172 
1173   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1174   while(iter.next()) {
1175     CompiledMethod* nm = iter.method();
1176     if (nm->is_dependent_on_method(dependee)) {
1177       ResourceMark rm;
1178       nm->mark_for_deoptimization();
1179       number_of_marked_CodeBlobs++;
1180     }
1181   }
1182 
1183   return number_of_marked_CodeBlobs;
1184 }
1185 
// Make every nmethod that is marked for deoptimization, and still in use,
// not entrant.
1186 void CodeCache::make_marked_nmethods_not_entrant() {
1187   assert_locked_or_safepoint(CodeCache_lock);
1188   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1189   while(iter.next()) {
1190     CompiledMethod* nm = iter.method();
1191     if (nm->is_marked_for_deoptimization() && nm->is_in_use()) {
1192       // only_alive_and_not_unloading() can return not_entrant nmethods.
1193       // A not_entrant method can become a zombie at anytime if it was
1194       // made not_entrant before the previous safepoint/handshake. The
1195       // is_in_use() check covers the not_entrant and not zombie cases
1196       // that have become true after the method was marked for deopt.
1197       nm->make_not_entrant();
1198     }
1199   }
1200 }
1201 
1202 // Flushes compiled methods dependent on dependee.
1203 void CodeCache::flush_dependents_on(InstanceKlass* dependee) {
1204   assert_lock_strong(Compile_lock);
1205 
     // Nothing to do if no nmethod records any dependencies.
1206   if (number_of_nmethods_with_dependencies() == 0) return;
1207 




1208   KlassDepChange changes(dependee);
1209 
1210   // Compute the dependent nmethods
1211   if (mark_for_deoptimization(changes) > 0) {
1212     // At least one nmethod has been marked for deoptimization
1213     Deoptimization::deoptimize_all_marked();

1214   }
1215 }
1216 
1217 // Flushes compiled methods dependent on dependee
1218 void CodeCache::flush_dependents_on_method(const methodHandle& m_h) {
1219   // --- Compile_lock is not held. However we are at a safepoint.
1220   assert_locked_or_safepoint(Compile_lock);
1221 




1222   // Compute the dependent nmethods
1223   if (mark_for_deoptimization(m_h()) > 0) {
       // At least one nmethod has been marked for deoptimization;
       // deoptimize them all.
1224     Deoptimization::deoptimize_all_marked();













1225   }
1226 }
1227 
// Verify each code heap and every live CodeBlob it contains.
1228 void CodeCache::verify() {
1229   assert_locked_or_safepoint(CodeCache_lock);
1230   FOR_ALL_HEAPS(heap) {
1231     (*heap)->verify();
1232     FOR_ALL_BLOBS(cb, *heap) {
1233       if (cb->is_alive()) {
1234         cb->verify();
1235       }
1236     }
1237   }
1238 }
1239 
1240 // A CodeHeap is full. Print out warning and report event.
1241 PRAGMA_DIAG_PUSH
1242 PRAGMA_FORMAT_NONLITERAL_IGNORED
1243 void CodeCache::report_codemem_full(int code_blob_type, bool print) {
1244   // Get nmethod heap for the given CodeBlobType and build CodeCacheFull event


< prev index next >