< prev index next >

src/hotspot/share/code/codeCache.cpp

Print this page
rev 56098 : imported patch 8226705-8221734-baseline


1126     if (!nm->method()->is_method_handle_intrinsic()) {
1127       nm->mark_for_deoptimization();
1128       if (nm->has_evol_metadata()) {
1129         add_to_old_table(nm);
1130       }
1131     }
1132   }
1133 }
1134 
1135 // Flushes compiled methods dependent on redefined classes that have already been
1136 // marked for deoptimization.
1137 void CodeCache::flush_evol_dependents() {
1138   assert(SafepointSynchronize::is_at_safepoint(), "Can only do this at a safepoint!");
1139 
1140   // CodeCache can only be updated by a thread_in_VM and they will all be
1141   // stopped during the safepoint so CodeCache will be safe to update without
1142   // holding the CodeCache_lock.
1143 
1144   // At least one nmethod has been marked for deoptimization
1145 
1146   // All this already happens inside a VM_Operation, so we'll do all the work here.
1147   // Stuff copied from VM_Deoptimize and modified slightly.
1148 
1149   // We do not want any GCs to happen while we are in the middle of this VM operation
1150   ResourceMark rm;
1151   DeoptimizationMarker dm;
1152 
1153   // Deoptimize all activations depending on marked nmethods
1154   Deoptimization::deoptimize_dependents();
1155 
1156   // Make the dependent methods not entrant
1157   make_marked_nmethods_not_entrant();
1158 }
1159 #endif // INCLUDE_JVMTI
1160 
1161 // Marks every alive, not-unloading compiled method for deoptimization;
1162 // method handle intrinsics are skipped.
1162 void CodeCache::mark_all_nmethods_for_deoptimization() {
1163   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1164   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1165   while(iter.next()) {
1166     CompiledMethod* nm = iter.method();
1167     if (!nm->method()->is_method_handle_intrinsic()) { // MH intrinsics are never deopted
1168       nm->mark_for_deoptimization();
1169     }
1170   }
1171 }
1172 
1173 int CodeCache::mark_for_deoptimization(Method* dependee) { // returns number of nmethods marked
1174   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1175   int number_of_marked_CodeBlobs = 0;
1176 
1177   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1178   while(iter.next()) {
1179     CompiledMethod* nm = iter.method();
1180     if (nm->is_dependent_on_method(dependee)) {
1181       ResourceMark rm; // scope for resource allocations during marking — assumed; TODO confirm
1182       nm->mark_for_deoptimization();
1183       number_of_marked_CodeBlobs++;
1184     }
1185   }
1186 
1187   return number_of_marked_CodeBlobs;
1188 }
1189 
1190 void CodeCache::make_marked_nmethods_not_entrant() { // transition marked-for-deopt nmethods to not_entrant
1191   assert_locked_or_safepoint(CodeCache_lock);
1192   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1193   while(iter.next()) {
1194     CompiledMethod* nm = iter.method();
1195     if (nm->is_marked_for_deoptimization() && !nm->is_not_entrant()) {




1196       nm->make_not_entrant();
1197     }
1198   }
1199 }
1200 
1201 // Flushes compiled methods dependent on dependee.
1202 void CodeCache::flush_dependents_on(InstanceKlass* dependee) {
1203   assert_lock_strong(Compile_lock);
1204 
1205   if (number_of_nmethods_with_dependencies() == 0) return;
1206 
1207   // CodeCache can only be updated by a thread_in_VM and they will all be
1208   // stopped during the safepoint so CodeCache will be safe to update without
1209   // holding the CodeCache_lock.
1210 
1211   KlassDepChange changes(dependee);
1212 
1213   // Compute the dependent nmethods
1214   if (mark_for_deoptimization(changes) > 0) {
1215     // At least one nmethod has been marked for deoptimization
1216     VM_Deoptimize op; // VM operation: deoptimizes dependents and makes marked nmethods not entrant
1217     VMThread::execute(&op);
1218   }
1219 }
1220 
1221 // Flushes compiled methods dependent on dependee
1222 void CodeCache::flush_dependents_on_method(const methodHandle& m_h) {
1223   // --- Compile_lock is not held. However we are at a safepoint.
1224   assert_locked_or_safepoint(Compile_lock);
1225 
1226   // CodeCache can only be updated by a thread_in_VM and they will all be
1227   // stopped during the safepoint so CodeCache will be safe to update without
1228   // holding the CodeCache_lock.
1229 
1230   // Compute the dependent nmethods
1231   if (mark_for_deoptimization(m_h()) > 0) {
1232     // At least one nmethod has been marked for deoptimization
1233 
1234     // All this already happens inside a VM_Operation, so we'll do all the work here.
1235     // Stuff copied from VM_Deoptimize and modified slightly.
1236 
1237     // We do not want any GCs to happen while we are in the middle of this VM operation
1238     ResourceMark rm;
1239     DeoptimizationMarker dm;
1240 
1241     // Deoptimize all activations depending on marked nmethods
1242     Deoptimization::deoptimize_dependents();
1243 
1244     // Make the dependent methods not entrant
1245     make_marked_nmethods_not_entrant();
1246   }
1247 }
1248 
1249 void CodeCache::verify() { // verify every heap and each alive blob it contains
1250   assert_locked_or_safepoint(CodeCache_lock);
1251   FOR_ALL_HEAPS(heap) {
1252     (*heap)->verify();
1253     FOR_ALL_BLOBS(cb, *heap) {
1254       if (cb->is_alive()) {
1255         cb->verify();
1256       }
1257     }
1258   }
1259 }
1260 
1261 // A CodeHeap is full. Print out warning and report event.
1262 PRAGMA_DIAG_PUSH
1263 PRAGMA_FORMAT_NONLITERAL_IGNORED
1264 void CodeCache::report_codemem_full(int code_blob_type, bool print) {
1265   // Get nmethod heap for the given CodeBlobType and build CodeCacheFull event




1126     if (!nm->method()->is_method_handle_intrinsic()) {
1127       nm->mark_for_deoptimization();
1128       if (nm->has_evol_metadata()) {
1129         add_to_old_table(nm);
1130       }
1131     }
1132   }
1133 }
1134 
1135 // Flushes compiled methods dependent on redefined classes that have already been
1136 // marked for deoptimization.
1137 void CodeCache::flush_evol_dependents() {
1138   assert(SafepointSynchronize::is_at_safepoint(), "Can only do this at a safepoint!");
1139 
1140   // CodeCache can only be updated by a thread_in_VM and they will all be
1141   // stopped during the safepoint so CodeCache will be safe to update without
1142   // holding the CodeCache_lock.
1143 
1144   // At least one nmethod has been marked for deoptimization
1145 
1146   Deoptimization::deoptimize_all_marked(); // NOTE(review): assumed to deopt activations and process marked nmethods — confirm in deoptimization.cpp











1147 }
1148 #endif // INCLUDE_JVMTI
1149 
1150 // Mark methods for deopt (if safe or possible).
1151 void CodeCache::mark_all_nmethods_for_deoptimization() {
1152   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1153   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1154   while(iter.next()) {
1155     CompiledMethod* nm = iter.method();
1156     if (!nm->method()->is_method_handle_intrinsic() &&
1157         !nm->is_not_installed() &&
1158         nm->is_in_use() &&
1159         !nm->is_native_method()) {
1160       // Intrinsics and native methods are never deopted. A method that is
1161       // not installed yet or is not in use is not safe to deopt; the
1162       // is_in_use() check covers the not_entrant and not zombie cases.
1163       // Note: A not_entrant method can become a zombie at any time if it was
1164       // made not_entrant before the previous safepoint/handshake.
1165       nm->mark_for_deoptimization();
1166     }
1167   }
1168 }
1169 
1170 int CodeCache::mark_for_deoptimization(Method* dependee) { // returns number of nmethods marked
1171   MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1172   int number_of_marked_CodeBlobs = 0;
1173 
1174   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1175   while(iter.next()) {
1176     CompiledMethod* nm = iter.method();
1177     if (nm->is_dependent_on_method(dependee)) {
1178       ResourceMark rm; // scope for resource allocations during marking — assumed; TODO confirm
1179       nm->mark_for_deoptimization();
1180       number_of_marked_CodeBlobs++;
1181     }
1182   }
1183 
1184   return number_of_marked_CodeBlobs;
1185 }
1186 
1187 void CodeCache::make_marked_nmethods_not_entrant() { // transition marked-for-deopt nmethods to not_entrant
1188   assert_locked_or_safepoint(CodeCache_lock);
1189   CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
1190   while(iter.next()) {
1191     CompiledMethod* nm = iter.method();
1192     if (nm->is_marked_for_deoptimization() && nm->is_in_use()) {
1193       // only_alive_and_not_unloading() can return not_entrant nmethods.
1194       // A not_entrant method can become a zombie at any time if it was
1195       // made not_entrant before the previous safepoint/handshake. The
1196       // is_in_use() check covers the not_entrant and not zombie cases
1197       // that have become true after the method was marked for deopt.
1198       nm->make_not_entrant();
1199     }
1200   }
1201 }
1202 
1203 // Flushes compiled methods dependent on dependee.
1204 void CodeCache::flush_dependents_on(InstanceKlass* dependee) {
1205   assert_lock_strong(Compile_lock);
1206 
1207   if (number_of_nmethods_with_dependencies() == 0) return;
1208 




1209   KlassDepChange changes(dependee);
1210 
1211   // Compute the dependent nmethods
1212   if (mark_for_deoptimization(changes) > 0) {
1213     // At least one nmethod has been marked for deoptimization
1214     Deoptimization::deoptimize_all_marked(); // NOTE(review): assumed to deopt activations and process marked nmethods — confirm in deoptimization.cpp

1215   }
1216 }
1217 
1218 // Flushes compiled methods dependent on dependee
1219 void CodeCache::flush_dependents_on_method(const methodHandle& m_h) {
1220   // --- Compile_lock is not held. However we are at a safepoint.
1221   assert_locked_or_safepoint(Compile_lock);
1222 




1223   // Compute the dependent nmethods
1224   if (mark_for_deoptimization(m_h()) > 0) {
1225     Deoptimization::deoptimize_all_marked(); // at least one nmethod was marked; deoptimize them all













1226   }
1227 }
1228 
1229 void CodeCache::verify() { // verify every heap and each alive blob it contains
1230   assert_locked_or_safepoint(CodeCache_lock);
1231   FOR_ALL_HEAPS(heap) {
1232     (*heap)->verify();
1233     FOR_ALL_BLOBS(cb, *heap) {
1234       if (cb->is_alive()) {
1235         cb->verify();
1236       }
1237     }
1238   }
1239 }
1240 
1241 // A CodeHeap is full. Print out warning and report event.
1242 PRAGMA_DIAG_PUSH
1243 PRAGMA_FORMAT_NONLITERAL_IGNORED
1244 void CodeCache::report_codemem_full(int code_blob_type, bool print) {
1245   // Get nmethod heap for the given CodeBlobType and build CodeCacheFull event


< prev index next >