< prev index next >

src/hotspot/share/code/codeCache.cpp

Print this page
rev 54838 : [mq]: 8221734-v2
rev 54839 : [mq]: 8221734-v3
rev 54840 : [mq]: 8221734-v3-stress-test


1121     if (!nm->method()->is_method_handle_intrinsic()) {
1122       nm->mark_for_deoptimization();
1123       if (nm->has_evol_metadata()) {
1124         add_to_old_table(nm);
1125       }
1126     }
1127   }
1128 }
1129 
1130 // Flushes compiled methods dependent on redefined classes, that have already been
1131 // marked for deoptimization.
1132 void CodeCache::flush_evol_dependents() {
1133   assert(SafepointSynchronize::is_at_safepoint(), "Can only do this at a safepoint!");
1134 
1135   // CodeCache can only be updated by a thread_in_VM and they will all be
1136   // stopped during the safepoint so CodeCache will be safe to update without
1137   // holding the CodeCache_lock.
1138 
1139   // At least one nmethod has been marked for deoptimization
1140 
1141   // All this already happens inside a VM_Operation, so we'll do all the work here.
1142   // Stuff copied from VM_Deoptimize and modified slightly.
1143 
1144   // We do not want any GCs to happen while we are in the middle of this VM operation
1145   ResourceMark rm;
1146   DeoptimizationMarker dm;
1147 
1148   // Deoptimize all activations depending on marked nmethods
1149   Deoptimization::deoptimize_dependents();
1150 
1151   // Make the dependent methods not entrant
1152   make_marked_nmethods_not_entrant();
1153 }
1154 #endif // INCLUDE_JVMTI
1155 
1156 // Deoptimize all methods
1157 void CodeCache::mark_all_nmethods_for_deoptimization() {
1158   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1159   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1160   while(iter.next()) {
1161     CompiledMethod* nm = iter.method();
1162     if (!nm->method()->is_method_handle_intrinsic()) {








1163       nm->mark_for_deoptimization();
1164     }
1165   }
1166 }
1167 
1168 int CodeCache::mark_for_deoptimization(Method* dependee) {
1169   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1170   int number_of_marked_CodeBlobs = 0;
1171 
1172   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1173   while(iter.next()) {
1174     CompiledMethod* nm = iter.method();
1175     if (nm->is_dependent_on_method(dependee)) {
1176       ResourceMark rm;
1177       nm->mark_for_deoptimization();
1178       number_of_marked_CodeBlobs++;
1179     }
1180   }
1181 
1182   return number_of_marked_CodeBlobs;
1183 }
1184 
1185 void CodeCache::make_marked_nmethods_not_entrant() {
1186   assert_locked_or_safepoint(CodeCache_lock);
1187   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1188   while(iter.next()) {
1189     CompiledMethod* nm = iter.method();
1190     if (nm->is_marked_for_deoptimization() && !nm->is_not_entrant()) {





1191       nm->make_not_entrant();
1192     }
1193   }
1194 }
1195 
1196 // Flushes compiled methods dependent on dependee.
1197 void CodeCache::flush_dependents_on(InstanceKlass* dependee) {
1198   assert_lock_strong(Compile_lock);
1199 
1200   if (number_of_nmethods_with_dependencies() == 0) return;
1201 
1202   // CodeCache can only be updated by a thread_in_VM and they will all be
1203   // stopped during the safepoint so CodeCache will be safe to update without
1204   // holding the CodeCache_lock.
1205 
1206   KlassDepChange changes(dependee);
1207 
1208   // Compute the dependent nmethods
1209   if (mark_for_deoptimization(changes) > 0) {
1210     // At least one nmethod has been marked for deoptimization
1211     VM_Deoptimize op;
1212     VMThread::execute(&op);
1213   }
1214 }
1215 
1216 // Flushes compiled methods dependent on dependee
1217 void CodeCache::flush_dependents_on_method(const methodHandle& m_h) {
1218   // --- Compile_lock is not held. However we are at a safepoint.
1219   assert_locked_or_safepoint(Compile_lock);
1220 
1221   // CodeCache can only be updated by a thread_in_VM and they will all be
1222   // stopped dring the safepoint so CodeCache will be safe to update without
1223   // holding the CodeCache_lock.
1224 
1225   // Compute the dependent nmethods
1226   if (mark_for_deoptimization(m_h()) > 0) {
1227     // At least one nmethod has been marked for deoptimization
1228 
1229     // All this already happens inside a VM_Operation, so we'll do all the work here.
1230     // Stuff copied from VM_Deoptimize and modified slightly.
1231 
1232     // We do not want any GCs to happen while we are in the middle of this VM operation
1233     ResourceMark rm;
1234     DeoptimizationMarker dm;
1235 
1236     // Deoptimize all activations depending on marked nmethods
1237     Deoptimization::deoptimize_dependents();
1238 
1239     // Make the dependent methods not entrant
1240     make_marked_nmethods_not_entrant();
1241   }
1242 }
1243 
1244 void CodeCache::verify() {
1245   assert_locked_or_safepoint(CodeCache_lock);
1246   FOR_ALL_HEAPS(heap) {
1247     (*heap)->verify();
1248     FOR_ALL_BLOBS(cb, *heap) {
1249       if (cb->is_alive()) {
1250         cb->verify();
1251       }
1252     }
1253   }
1254 }
1255 
1256 // A CodeHeap is full. Print out warning and report event.
1257 PRAGMA_DIAG_PUSH
1258 PRAGMA_FORMAT_NONLITERAL_IGNORED
1259 void CodeCache::report_codemem_full(int code_blob_type, bool print) {
1260   // Get nmethod heap for the given CodeBlobType and build CodeCacheFull event




1121     if (!nm->method()->is_method_handle_intrinsic()) {
1122       nm->mark_for_deoptimization();
1123       if (nm->has_evol_metadata()) {
1124         add_to_old_table(nm);
1125       }
1126     }
1127   }
1128 }
1129 
// Flushes compiled methods dependent on redefined classes, that have already been
// marked for deoptimization.
void CodeCache::flush_evol_dependents() {
  assert(SafepointSynchronize::is_at_safepoint(), "Can only do this at a safepoint!");

  // CodeCache can only be updated by a thread_in_VM and they will all be
  // stopped during the safepoint so CodeCache will be safe to update without
  // holding the CodeCache_lock.

  // At least one nmethod has been marked for deoptimization

  // Deoptimize all activations of the marked nmethods and make them
  // not entrant in one consistent operation.
  Deoptimization::deoptimize_all_marked();
}
#endif // INCLUDE_JVMTI
1144 
1145 // Deoptimize all(most) methods
1146 void CodeCache::mark_all_nmethods_for_deoptimization() {
1147   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1148   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1149   while(iter.next()) {
1150     CompiledMethod* nm = iter.method();
1151     // Not installed are unsafe to mark for deopt, normally never deopted.
1152     // A not_entrant method may become a zombie at any time,
1153     // since we don't know on which side of last safepoint it became not_entrant
1154     // (state must be in_use).
1155     // Native method are unsafe to mark for deopt, normally never deopted.
1156     if (!nm->method()->is_method_handle_intrinsic() &&
1157         !nm->is_not_installed() &&
1158         nm->is_in_use() &&
1159         !nm->is_native_method()) {
1160       nm->mark_for_deoptimization();
1161     }
1162   }
1163 }
1164 
1165 int CodeCache::mark_for_deoptimization(Method* dependee) {
1166   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1167   int number_of_marked_CodeBlobs = 0;
1168 
1169   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1170   while(iter.next()) {
1171     CompiledMethod* nm = iter.method();
1172     if (nm->is_dependent_on_method(dependee)) {
1173       ResourceMark rm;
1174       nm->mark_for_deoptimization();
1175       number_of_marked_CodeBlobs++;
1176     }
1177   }
1178 
1179   return number_of_marked_CodeBlobs;
1180 }
1181 
1182 void CodeCache::make_marked_nmethods_not_entrant() {
1183   assert_locked_or_safepoint(CodeCache_lock);
1184   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1185   while(iter.next()) {
1186     CompiledMethod* nm = iter.method();
1187     // only_alive_and_not_unloading returns not_entrant nmethods.
1188     // A not_entrant can become a zombie at anytime,
1189     // if it was made not_entrant before previous safepoint/handshake.
1190     // We check that it is not not_entrant and not zombie,
1191     // by checking is_in_use().
1192     if (nm->is_marked_for_deoptimization() && nm->is_in_use()) {
1193       nm->make_not_entrant();
1194     }
1195   }
1196 }
1197 
1198 // Flushes compiled methods dependent on dependee.
1199 void CodeCache::flush_dependents_on(InstanceKlass* dependee) {
1200   assert_lock_strong(Compile_lock);
1201 
1202   if (number_of_nmethods_with_dependencies() == 0) return;
1203 




1204   KlassDepChange changes(dependee);
1205 
1206   // Compute the dependent nmethods
1207   if (mark_for_deoptimization(changes) > 0) {
1208     // At least one nmethod has been marked for deoptimization
1209     Deoptimization::deoptimize_all_marked();

1210   }
1211 }
1212 
1213 // Flushes compiled methods dependent on dependee
1214 void CodeCache::flush_dependents_on_method(const methodHandle& m_h) {
1215   // --- Compile_lock is not held. However we are at a safepoint.
1216   assert_locked_or_safepoint(Compile_lock);
1217 




1218   // Compute the dependent nmethods
1219   if (mark_for_deoptimization(m_h()) > 0) {
1220     Deoptimization::deoptimize_all_marked();













1221   }
1222 }
1223 
1224 void CodeCache::verify() {
1225   assert_locked_or_safepoint(CodeCache_lock);
1226   FOR_ALL_HEAPS(heap) {
1227     (*heap)->verify();
1228     FOR_ALL_BLOBS(cb, *heap) {
1229       if (cb->is_alive()) {
1230         cb->verify();
1231       }
1232     }
1233   }
1234 }
1235 
1236 // A CodeHeap is full. Print out warning and report event.
1237 PRAGMA_DIAG_PUSH
1238 PRAGMA_FORMAT_NONLITERAL_IGNORED
1239 void CodeCache::report_codemem_full(int code_blob_type, bool print) {
1240   // Get nmethod heap for the given CodeBlobType and build CodeCacheFull event


< prev index next >