< prev index next >

src/share/vm/code/codeCache.cpp

Print this page




1124 
1125   return number_of_marked_CodeBlobs;
1126 }
1127 
1128 CompiledMethod* CodeCache::find_compiled(void* start) {
       // Locate the CodeBlob containing address 'start' and return it as a
       // CompiledMethod*.  Returns NULL when no blob covers the address.
1129   CodeBlob *cb = find_blob(start);
       // Invariant relied on by the unchecked cast below: a blob found at a
       // compiled-code address is always a CompiledMethod.
       // Fix: assert message grammar ("an compiled_method" -> "a compiled method").
1130   assert(cb == NULL || cb->is_compiled(), "did not find a compiled method");
1131   return (CompiledMethod*)cb;
1132 }
1133 
1134 bool CodeCache::is_far_target(address target) {
       // Returns true when 'target' may be out of reach of a near (direct)
       // call from some location in the code cache.  Both bounds are checked
       // so a call site at either extreme of the cache can still reach it.
1135 #if INCLUDE_AOT
1136   return NativeCall::is_far_call(_low_bound,  target) ||
1137          NativeCall::is_far_call(_high_bound, target);
1138 #else
       // Without AOT, all code presumably lives within the contiguous code
       // cache and is always near-call reachable -- TODO confirm per platform.
1139   return false;
1140 #endif
1141 }
1142 
1143 #ifdef HOTSWAP
1144 int CodeCache::mark_for_evol_deoptimization(instanceKlassHandle dependee) {
       // Mark for deoptimization all compiled code invalidated by an
       // evolution (HotSwap class redefinition) of 'dependee'.
       // Returns the number of code blobs newly marked.
1145   MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1146   int number_of_marked_CodeBlobs = 0;
1147 
1148   // Deoptimize all methods of the evolving class itself
1149   Array<Method*>* old_methods = dependee->methods();
1150   for (int i = 0; i < old_methods->length(); i++) {
1151     ResourceMark rm;
1152     Method* old_method = old_methods->at(i);
1153     CompiledMethod* nm = old_method->code();
1154     if (nm != NULL) {
         // Only methods that currently have installed compiled code count.
1155       nm->mark_for_deoptimization();
1156       number_of_marked_CodeBlobs++;
1157     }
1158   }
1159 
       // Second pass: walk every live compiled method in the code cache and
       // mark those with an evolution dependency on the redefined class.
1160   CompiledMethodIterator iter;
1161   while(iter.next_alive()) {
1162     CompiledMethod* nm = iter.method();
1163     if (nm->is_marked_for_deoptimization()) {
1164       // Already marked in the first pass; don't count it twice.
1165     } else if (nm->is_evol_dependent_on(dependee())) {
1166       ResourceMark rm;
1167       nm->mark_for_deoptimization();
1168       number_of_marked_CodeBlobs++;
1169     } else  {
1170       // flush caches in case they refer to a redefined Method*
1171       nm->clear_inline_caches();
1172     }
1173   }
1174 
1175   return number_of_marked_CodeBlobs;
1176 }
1177 #endif // HOTSWAP
1178 
1179 
1180 // Deoptimize all methods
1181 void CodeCache::mark_all_nmethods_for_deoptimization() {
1182   MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1183   CompiledMethodIterator iter;
1184   while(iter.next_alive()) {
1185     CompiledMethod* nm = iter.method();


1201       nm->mark_for_deoptimization();
1202       number_of_marked_CodeBlobs++;
1203     }
1204   }
1205 
1206   return number_of_marked_CodeBlobs;
1207 }
1208 
1209 void CodeCache::make_marked_nmethods_not_entrant() {
       // Transition every live compiled method previously marked for
       // deoptimization to the not-entrant state, so no new activations can
       // enter it.  Existing activations are dealt with by deoptimization
       // (see callers such as flush_evol_dependents_on).
1210   assert_locked_or_safepoint(CodeCache_lock);
1211   CompiledMethodIterator iter;
1212   while(iter.next_alive()) {
1213     CompiledMethod* nm = iter.method();
       // Skip methods already not-entrant to avoid a redundant transition.
1214     if (nm->is_marked_for_deoptimization() && !nm->is_not_entrant()) {
1215       nm->make_not_entrant();
1216     }
1217   }
1218 }
1219 
1220 // Flushes compiled methods dependent on dependee.
1221 void CodeCache::flush_dependents_on(instanceKlassHandle dependee) {
       // Deoptimize all compiled methods whose recorded dependencies are
       // invalidated by a change involving 'dependee'.
1222   assert_lock_strong(Compile_lock);
1223 
       // Fast path: no nmethod registered any dependencies, nothing to flush.
1224   if (number_of_nmethods_with_dependencies() == 0) return;
1225 
1226   // CodeCache can only be updated by a thread_in_VM and they will all be
1227   // stopped during the safepoint so CodeCache will be safe to update without
1228   // holding the CodeCache_lock.
1229 
1230   KlassDepChange changes(dependee);
1231 
1232   // Compute the dependent nmethods
1233   if (mark_for_deoptimization(changes) > 0) {
1234     // At least one nmethod has been marked for deoptimization
       // Run the actual deoptimization as a VM operation (at a safepoint).
1235     VM_Deoptimize op;
1236     VMThread::execute(&op);
1237   }
1238 }
1239 
1240 #ifdef HOTSWAP
1241 // Flushes compiled methods dependent on dependee in the evolutionary sense
1242 void CodeCache::flush_evol_dependents_on(instanceKlassHandle ev_k_h) {
1243   // --- Compile_lock is not held. However we are at a safepoint.
1244   assert_locked_or_safepoint(Compile_lock);
       // NOTE(review): the !UseAOT guard presumably exists because AOT code
       // can be evol-dependent even when no nmethod dependencies are
       // registered -- confirm against mark_for_evol_deoptimization/AOT code.
1245   if (number_of_nmethods_with_dependencies() == 0 && !UseAOT) return;
1246 
1247   // CodeCache can only be updated by a thread_in_VM and they will all be
1248   // stopped during the safepoint so CodeCache will be safe to update without
1249   // holding the CodeCache_lock.
1250 
1251   // Compute the dependent nmethods
1252   if (mark_for_evol_deoptimization(ev_k_h) > 0) {
1253     // At least one nmethod has been marked for deoptimization
1254 
1255     // All this already happens inside a VM_Operation, so we'll do all the work here.
1256     // Stuff copied from VM_Deoptimize and modified slightly.
1257 
1258     // We do not want any GCs to happen while we are in the middle of this VM operation
1259     ResourceMark rm;
1260     DeoptimizationMarker dm;
1261 
1262     // Deoptimize all activations depending on marked nmethods
1263     Deoptimization::deoptimize_dependents();
1264 
1265     // Make the dependent methods not entrant
1266     make_marked_nmethods_not_entrant();
1267   }
1268 }
1269 #endif // HOTSWAP
1270 
1271 
1272 // Flushes compiled methods dependent on dependee




1124 
1125   return number_of_marked_CodeBlobs;
1126 }
1127 
1128 CompiledMethod* CodeCache::find_compiled(void* start) {
       // Locate the CodeBlob containing address 'start' and return it as a
       // CompiledMethod*.  Returns NULL when no blob covers the address.
1129   CodeBlob *cb = find_blob(start);
       // Invariant relied on by the unchecked cast below: a blob found at a
       // compiled-code address is always a CompiledMethod.
       // Fix: assert message grammar ("an compiled_method" -> "a compiled method").
1130   assert(cb == NULL || cb->is_compiled(), "did not find a compiled method");
1131   return (CompiledMethod*)cb;
1132 }
1133 
1134 bool CodeCache::is_far_target(address target) {
       // Returns true when 'target' may be out of reach of a near (direct)
       // call from some location in the code cache.  Both bounds are checked
       // so a call site at either extreme of the cache can still reach it.
1135 #if INCLUDE_AOT
1136   return NativeCall::is_far_call(_low_bound,  target) ||
1137          NativeCall::is_far_call(_high_bound, target);
1138 #else
       // Without AOT, all code presumably lives within the contiguous code
       // cache and is always near-call reachable -- TODO confirm per platform.
1139   return false;
1140 #endif
1141 }
1142 
1143 #ifdef HOTSWAP
1144 int CodeCache::mark_for_evol_deoptimization(InstanceKlass* dependee) {
       // Mark for deoptimization all compiled code invalidated by an
       // evolution (HotSwap class redefinition) of 'dependee'.
       // Returns the number of code blobs newly marked.
1145   MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1146   int number_of_marked_CodeBlobs = 0;
1147 
1148   // Deoptimize all methods of the evolving class itself
1149   Array<Method*>* old_methods = dependee->methods();
1150   for (int i = 0; i < old_methods->length(); i++) {
1151     ResourceMark rm;
1152     Method* old_method = old_methods->at(i);
1153     CompiledMethod* nm = old_method->code();
1154     if (nm != NULL) {
         // Only methods that currently have installed compiled code count.
1155       nm->mark_for_deoptimization();
1156       number_of_marked_CodeBlobs++;
1157     }
1158   }
1159 
       // Second pass: walk every live compiled method in the code cache and
       // mark those with an evolution dependency on the redefined class.
1160   CompiledMethodIterator iter;
1161   while(iter.next_alive()) {
1162     CompiledMethod* nm = iter.method();
1163     if (nm->is_marked_for_deoptimization()) {
1164       // Already marked in the first pass; don't count it twice.
1165     } else if (nm->is_evol_dependent_on(dependee)) {
1166       ResourceMark rm;
1167       nm->mark_for_deoptimization();
1168       number_of_marked_CodeBlobs++;
1169     } else  {
1170       // flush caches in case they refer to a redefined Method*
1171       nm->clear_inline_caches();
1172     }
1173   }
1174 
1175   return number_of_marked_CodeBlobs;
1176 }
1177 #endif // HOTSWAP
1178 
1179 
1180 // Deoptimize all methods
1181 void CodeCache::mark_all_nmethods_for_deoptimization() {
1182   MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1183   CompiledMethodIterator iter;
1184   while(iter.next_alive()) {
1185     CompiledMethod* nm = iter.method();


1201       nm->mark_for_deoptimization();
1202       number_of_marked_CodeBlobs++;
1203     }
1204   }
1205 
1206   return number_of_marked_CodeBlobs;
1207 }
1208 
1209 void CodeCache::make_marked_nmethods_not_entrant() {
       // Transition every live compiled method previously marked for
       // deoptimization to the not-entrant state, so no new activations can
       // enter it.  Existing activations are dealt with by deoptimization
       // (see callers such as flush_evol_dependents_on).
1210   assert_locked_or_safepoint(CodeCache_lock);
1211   CompiledMethodIterator iter;
1212   while(iter.next_alive()) {
1213     CompiledMethod* nm = iter.method();
       // Skip methods already not-entrant to avoid a redundant transition.
1214     if (nm->is_marked_for_deoptimization() && !nm->is_not_entrant()) {
1215       nm->make_not_entrant();
1216     }
1217   }
1218 }
1219 
1220 // Flushes compiled methods dependent on dependee.
1221 void CodeCache::flush_dependents_on(InstanceKlass* dependee) {
       // Deoptimize all compiled methods whose recorded dependencies are
       // invalidated by a change involving 'dependee'.
1222   assert_lock_strong(Compile_lock);
1223 
       // Fast path: no nmethod registered any dependencies, nothing to flush.
1224   if (number_of_nmethods_with_dependencies() == 0) return;
1225 
1226   // CodeCache can only be updated by a thread_in_VM and they will all be
1227   // stopped during the safepoint so CodeCache will be safe to update without
1228   // holding the CodeCache_lock.
1229 
1230   KlassDepChange changes(dependee);
1231 
1232   // Compute the dependent nmethods
1233   if (mark_for_deoptimization(changes) > 0) {
1234     // At least one nmethod has been marked for deoptimization
       // Run the actual deoptimization as a VM operation (at a safepoint).
1235     VM_Deoptimize op;
1236     VMThread::execute(&op);
1237   }
1238 }
1239 
1240 #ifdef HOTSWAP
1241 // Flushes compiled methods dependent on dependee in the evolutionary sense
1242 void CodeCache::flush_evol_dependents_on(InstanceKlass* ev_k) {
1243   // --- Compile_lock is not held. However we are at a safepoint.
1244   assert_locked_or_safepoint(Compile_lock);
       // NOTE(review): the !UseAOT guard presumably exists because AOT code
       // can be evol-dependent even when no nmethod dependencies are
       // registered -- confirm against mark_for_evol_deoptimization/AOT code.
1245   if (number_of_nmethods_with_dependencies() == 0 && !UseAOT) return;
1246 
1247   // CodeCache can only be updated by a thread_in_VM and they will all be
1248   // stopped during the safepoint so CodeCache will be safe to update without
1249   // holding the CodeCache_lock.
1250 
1251   // Compute the dependent nmethods
1252   if (mark_for_evol_deoptimization(ev_k) > 0) {
1253     // At least one nmethod has been marked for deoptimization
1254 
1255     // All this already happens inside a VM_Operation, so we'll do all the work here.
1256     // Stuff copied from VM_Deoptimize and modified slightly.
1257 
1258     // We do not want any GCs to happen while we are in the middle of this VM operation
1259     ResourceMark rm;
1260     DeoptimizationMarker dm;
1261 
1262     // Deoptimize all activations depending on marked nmethods
1263     Deoptimization::deoptimize_dependents();
1264 
1265     // Make the dependent methods not entrant
1266     make_marked_nmethods_not_entrant();
1267   }
1268 }
1269 #endif // HOTSWAP
1270 
1271 
1272 // Flushes compiled methods dependent on dependee


< prev index next >