  return number_of_marked_CodeBlobs;
}

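// Walks all alive, not-unloading compiled methods and makes every one that
// was marked for deoptimization not entrant, so no new activations enter it.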
void CodeCache::make_marked_nmethods_not_entrant() {
  assert_locked_or_safepoint(CodeCache_lock);
  CompiledMethodIterator iter(CompiledMethodIterator::only_alive_and_not_unloading);
  while(iter.next()) {
    CompiledMethod* nm = iter.method();
    if (nm->is_marked_for_deoptimization() && !nm->is_not_entrant()) {
      nm->make_not_entrant();
    }
  }
}

// Flushes compiled methods dependent on dependee.
void CodeCache::flush_dependents_on(InstanceKlass* dependee) {
  assert_lock_strong(Compile_lock);

  if (number_of_nmethods_with_dependencies() == 0) return;

  // CodeCache can only be updated by a thread_in_VM and they will all be
  // stopped during the safepoint so CodeCache will be safe to update without
  // holding the CodeCache_lock.

  KlassDepChange changes(dependee);

  // Compute the dependent nmethods
  if (mark_for_deoptimization(changes) > 0) {
    // At least one nmethod has been marked for deoptimization
    Deoptimization::deoptimize_all_marked();
  }
}

// Flushes compiled methods dependent on dependee
void CodeCache::flush_dependents_on_method(const methodHandle& m_h) {
  // --- Compile_lock is not held. However we are at a safepoint.
  assert_locked_or_safepoint(Compile_lock);

  // CodeCache can only be updated by a thread_in_VM and they will all be
  // stopped during the safepoint so CodeCache will be safe to update without
  // holding the CodeCache_lock.

  // Compute the dependent nmethods
  if (mark_for_deoptimization(m_h()) > 0) {
    Deoptimization::deoptimize_all_marked();
  }
}

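// Verifies each code heap and every live CodeBlob it contains.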
void CodeCache::verify() {
  assert_locked_or_safepoint(CodeCache_lock);
  FOR_ALL_HEAPS(heap) {
    (*heap)->verify();
    FOR_ALL_BLOBS(cb, *heap) {
      if (cb->is_alive()) {
        cb->verify();
      }
    }
  }
}

// A CodeHeap is full. Print out warning and report event.