639 return NULL;
640 }
641
642 nmethod* CodeCache::find_nmethod(void* start) {
643 CodeBlob* cb = find_blob(start);
644 assert(cb->is_nmethod(), "did not find an nmethod");
645 return (nmethod*)cb;
646 }
647
648 void CodeCache::blobs_do(void f(CodeBlob* nm)) {
649 assert_locked_or_safepoint(CodeCache_lock);
650 FOR_ALL_HEAPS(heap) {
651 FOR_ALL_BLOBS(cb, *heap) {
652 f(cb);
653 }
654 }
655 }
656
657 void CodeCache::nmethods_do(void f(nmethod* nm)) {
658 assert_locked_or_safepoint(CodeCache_lock);
659 NMethodIterator iter;
660 while(iter.next()) {
661 f(iter.method());
662 }
663 }
664
665 void CodeCache::metadata_do(void f(Metadata* m)) {
666 assert_locked_or_safepoint(CodeCache_lock);
667 NMethodIterator iter;
668 while(iter.next_alive()) {
669 iter.method()->metadata_do(f);
670 }
671 AOTLoader::metadata_do(f);
672 }
673
674 int CodeCache::alignment_unit() {
675 return (int)_heaps->first()->alignment_unit();
676 }
677
678 int CodeCache::alignment_offset() {
679 return (int)_heaps->first()->alignment_offset();
680 }
681
682 // Mark nmethods for unloading if they contain otherwise unreachable oops.
683 void CodeCache::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
684 assert_locked_or_safepoint(CodeCache_lock);
685 UnloadingScope scope(is_alive);
686 CompiledMethodIterator iter;
687 while(iter.next_alive()) {
688 iter.method()->do_unloading(unloading_occurred);
689 }
690 }
691
692 void CodeCache::blobs_do(CodeBlobClosure* f) {
693 assert_locked_or_safepoint(CodeCache_lock);
694 FOR_ALL_ALLOCABLE_HEAPS(heap) {
695 FOR_ALL_BLOBS(cb, *heap) {
696 if (cb->is_alive()) {
697 f->do_code_blob(cb);
698 #ifdef ASSERT
699 if (cb->is_nmethod()) {
700 Universe::heap()->verify_nmethod((nmethod*)cb);
701 }
702 #endif //ASSERT
703 }
704 }
705 }
706 }
|
639 return NULL;
640 }
641
// NOTE(review): "after" side of the hunk for original lines 642-706. The
// iterators now take explicit (only_alive, only_not_unloading) constructor
// flags (per the inline /* */ labels) and every loop uniformly calls
// next(); the flag values mirror the old next()/next_alive() usage, except
// where noted. Keep code bytes unchanged: this is a comparison record.

642 nmethod* CodeCache::find_nmethod(void* start) {
643 CodeBlob* cb = find_blob(start);
644 assert(cb->is_nmethod(), "did not find an nmethod");
645 return (nmethod*)cb;
646 }
647
648 void CodeCache::blobs_do(void f(CodeBlob* nm)) {
649 assert_locked_or_safepoint(CodeCache_lock);
650 FOR_ALL_HEAPS(heap) {
651 FOR_ALL_BLOBS(cb, *heap) {
652 f(cb);
653 }
654 }
655 }
656
// (false, false) == no filtering: equivalent to the old plain next() walk.
657 void CodeCache::nmethods_do(void f(nmethod* nm)) {
658 assert_locked_or_safepoint(CodeCache_lock);
659 NMethodIterator iter(false /* only_alive */, false /* only_not_unloading */);
660 while(iter.next()) {
661 f(iter.method());
662 }
663 }
664
// Was next_alive(); now additionally only_not_unloading == true — a
// deliberate tightening in this changeset (skips is_unloading nmethods).
// NOTE(review): confirm against NMethodIterator's definition in codeCache.hpp.
665 void CodeCache::metadata_do(void f(Metadata* m)) {
666 assert_locked_or_safepoint(CodeCache_lock);
667 NMethodIterator iter(true /* only_alive */, true /* only_not_unloading */);
668 while(iter.next()) {
669 iter.method()->metadata_do(f);
670 }
671 AOTLoader::metadata_do(f);
672 }
673
674 int CodeCache::alignment_unit() {
675 return (int)_heaps->first()->alignment_unit();
676 }
677
678 int CodeCache::alignment_offset() {
679 return (int)_heaps->first()->alignment_offset();
680 }
681
// Alive-only, but unloading nmethods are still visited (only_not_unloading
// == false) — they are exactly the ones this pass must process.
682 // Mark nmethods for unloading if they contain otherwise unreachable oops.
683 void CodeCache::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
684 assert_locked_or_safepoint(CodeCache_lock);
685 UnloadingScope scope(is_alive);
686 CompiledMethodIterator iter(true /* only_alive */, false /* only_not_unloading */);
687 while(iter.next()) {
688 iter.method()->do_unloading(unloading_occurred);
689 }
690 }
691
692 void CodeCache::blobs_do(CodeBlobClosure* f) {
693 assert_locked_or_safepoint(CodeCache_lock);
694 FOR_ALL_ALLOCABLE_HEAPS(heap) {
695 FOR_ALL_BLOBS(cb, *heap) {
696 if (cb->is_alive()) {
697 f->do_code_blob(cb);
698 #ifdef ASSERT
699 if (cb->is_nmethod()) {
700 Universe::heap()->verify_nmethod((nmethod*)cb);
701 }
702 #endif //ASSERT
703 }
704 }
705 }
706 }
|
824 cur = next;
825 }
826
827 // Check for stray marks.
828 debug_only(verify_perm_nmethods(NULL));
829 }
830
// NOTE(review): "before" side of the hunk for original lines 831-879
// (non-product scavenge-root list verification). Old iterator style:
// default-constructed NMethodIterator + next_alive().

831 #ifndef PRODUCT
// Debug aid: temporarily marks the scavenge-root list, checks every listed
// nmethod carries the mark, clears it, then verifies the unlisted ones.
832 void CodeCache::asserted_non_scavengable_nmethods_do(CodeBlobClosure* f) {
833 // While we are here, verify the integrity of the list.
834 mark_scavenge_root_nmethods();
835 for (nmethod* cur = scavenge_root_nmethods(); cur != NULL; cur = cur->scavenge_root_link()) {
836 assert(cur->on_scavenge_root_list(), "else shouldn't be on this list");
837 cur->clear_scavenge_root_marked();
838 }
839 verify_perm_nmethods(f);
840 }
841
842 // Temporarily mark nmethods that are claimed to be on the scavenge list.
843 void CodeCache::mark_scavenge_root_nmethods() {
844 NMethodIterator iter;
845 while(iter.next_alive()) {
846 nmethod* nm = iter.method();
847 assert(nm->scavenge_root_not_marked(), "clean state");
848 if (nm->on_scavenge_root_list())
849 nm->set_scavenge_root_marked();
850 }
851 }
852
853 // If the closure is given, run it on the unlisted nmethods.
854 // Also make sure that the effects of mark_scavenge_root_nmethods is gone.
855 void CodeCache::verify_perm_nmethods(CodeBlobClosure* f_or_null) {
856 NMethodIterator iter;
857 while(iter.next_alive()) {
858 nmethod* nm = iter.method();
859 bool call_f = (f_or_null != NULL);
860 assert(nm->scavenge_root_not_marked(), "must be already processed");
861 if (nm->on_scavenge_root_list())
862 call_f = false; // don't show this one to the client
863 Universe::heap()->verify_nmethod(nm);
864 if (call_f) f_or_null->do_code_blob(nm);
865 }
866 }
867 #endif //PRODUCT
868
// Debug-build-only sweep: every alive nmethod must have clean inline caches.
869 void CodeCache::verify_clean_inline_caches() {
870 #ifdef ASSERT
871 NMethodIterator iter;
872 while(iter.next_alive()) {
873 nmethod* nm = iter.method();
874 assert(!nm->is_unloaded(), "Tautology");
875 nm->verify_clean_inline_caches();
876 nm->verify();
877 }
878 #endif
879 }
880
881 void CodeCache::verify_icholder_relocations() {
882 #ifdef ASSERT
883 // make sure that we aren't leaking icholders
884 int count = 0;
885 FOR_ALL_HEAPS(heap) {
886 FOR_ALL_BLOBS(cb, *heap) {
887 CompiledMethod *nm = cb->as_compiled_method_or_null();
888 if (nm != NULL) {
889 count += nm->verify_icholder_relocations();
890 }
891 }
|
824 cur = next;
825 }
826
827 // Check for stray marks.
828 debug_only(verify_perm_nmethods(NULL));
829 }
830
// NOTE(review): "after" side of the hunk for original lines 831-879.
// next_alive() loops became explicit-flag iterators + next(): the two
// scavenge-root walks keep the old semantics with (true, false), while
// verify_clean_inline_caches tightens to (true, true), consistent with the
// is_unloaded() assert it performs.

831 #ifndef PRODUCT
832 void CodeCache::asserted_non_scavengable_nmethods_do(CodeBlobClosure* f) {
833 // While we are here, verify the integrity of the list.
834 mark_scavenge_root_nmethods();
835 for (nmethod* cur = scavenge_root_nmethods(); cur != NULL; cur = cur->scavenge_root_link()) {
836 assert(cur->on_scavenge_root_list(), "else shouldn't be on this list");
837 cur->clear_scavenge_root_marked();
838 }
839 verify_perm_nmethods(f);
840 }
841
842 // Temporarily mark nmethods that are claimed to be on the scavenge list.
843 void CodeCache::mark_scavenge_root_nmethods() {
844 NMethodIterator iter(true /* only_alive */, false /* only_not_unloading */);
845 while(iter.next()) {
846 nmethod* nm = iter.method();
847 assert(nm->scavenge_root_not_marked(), "clean state");
848 if (nm->on_scavenge_root_list())
849 nm->set_scavenge_root_marked();
850 }
851 }
852
853 // If the closure is given, run it on the unlisted nmethods.
854 // Also make sure that the effects of mark_scavenge_root_nmethods is gone.
855 void CodeCache::verify_perm_nmethods(CodeBlobClosure* f_or_null) {
856 NMethodIterator iter(true /* only_alive */, false /* only_not_unloading */);
857 while(iter.next()) {
858 nmethod* nm = iter.method();
859 bool call_f = (f_or_null != NULL);
860 assert(nm->scavenge_root_not_marked(), "must be already processed");
861 if (nm->on_scavenge_root_list())
862 call_f = false; // don't show this one to the client
863 Universe::heap()->verify_nmethod(nm);
864 if (call_f) f_or_null->do_code_blob(nm);
865 }
866 }
867 #endif //PRODUCT
868
869 void CodeCache::verify_clean_inline_caches() {
870 #ifdef ASSERT
871 NMethodIterator iter(true /* only_alive */, true /* only_not_unloading */);
872 while(iter.next()) {
873 nmethod* nm = iter.method();
874 assert(!nm->is_unloaded(), "Tautology");
875 nm->verify_clean_inline_caches();
876 nm->verify();
877 }
878 #endif
879 }
880
881 void CodeCache::verify_icholder_relocations() {
882 #ifdef ASSERT
883 // make sure that we aren't leaking icholders
884 int count = 0;
885 FOR_ALL_HEAPS(heap) {
886 FOR_ALL_BLOBS(cb, *heap) {
887 CompiledMethod *nm = cb->as_compiled_method_or_null();
888 if (nm != NULL) {
889 count += nm->verify_icholder_relocations();
890 }
891 }
|
925
// NOTE(review): "before" side of the hunk for original lines 926-964.

// GC hooks: prologue is a no-op; epilogue prunes the scavenge-root list.
926 void CodeCache::gc_prologue() { }
927
928 void CodeCache::gc_epilogue() {
929 prune_scavenge_root_nmethods();
930 }
931
// Unloading cycle counter toggles between 1 and 2 (never 0) so a stored
// cycle value can always be distinguished from the current one.
932 uint8_t CodeCache::_unloading_cycle = 1;
933
934 void CodeCache::increment_unloading_cycle() {
935 if (_unloading_cycle == 1) {
936 _unloading_cycle = 2;
937 } else {
938 _unloading_cycle = 1;
939 }
940 }
941
// Verify oops and oop relocations of alive nmethods under CodeCache_lock.
942 void CodeCache::verify_oops() {
943 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
944 VerifyOopClosure voc;
945 NMethodIterator iter;
946 while(iter.next_alive()) {
947 nmethod* nm = iter.method();
948 nm->oops_do(&voc);
949 nm->verify_oop_relocations();
950 }
951 }
952
// Blob counts: per-heap-type (0 if that heap doesn't exist) and total.
953 int CodeCache::blob_count(int code_blob_type) {
954 CodeHeap* heap = get_code_heap(code_blob_type);
955 return (heap != NULL) ? heap->blob_count() : 0;
956 }
957
958 int CodeCache::blob_count() {
959 int count = 0;
960 FOR_ALL_HEAPS(heap) {
961 count += (*heap)->blob_count();
962 }
963 return count;
964 }
965
|
925
// NOTE(review): "after" side of the hunk for original lines 926-964. Only
// verify_oops changed: next_alive() became an explicit (true, true)
// iterator + next(), i.e. it now also skips is_unloading nmethods.

926 void CodeCache::gc_prologue() { }
927
928 void CodeCache::gc_epilogue() {
929 prune_scavenge_root_nmethods();
930 }
931
932 uint8_t CodeCache::_unloading_cycle = 1;
933
934 void CodeCache::increment_unloading_cycle() {
935 if (_unloading_cycle == 1) {
936 _unloading_cycle = 2;
937 } else {
938 _unloading_cycle = 1;
939 }
940 }
941
942 void CodeCache::verify_oops() {
943 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
944 VerifyOopClosure voc;
945 NMethodIterator iter(true /* only_alive */, true /* only_not_unloading */);
946 while(iter.next()) {
947 nmethod* nm = iter.method();
948 nm->oops_do(&voc);
949 nm->verify_oop_relocations();
950 }
951 }
952
953 int CodeCache::blob_count(int code_blob_type) {
954 CodeHeap* heap = get_code_heap(code_blob_type);
955 return (heap != NULL) ? heap->blob_count() : 0;
956 }
957
958 int CodeCache::blob_count() {
959 int count = 0;
960 FOR_ALL_HEAPS(heap) {
961 count += (*heap)->blob_count();
962 }
963 return count;
964 }
965
|
1102 // Give OS a chance to register generated code area.
1103 // This is used on Windows 64 bit platforms to register
1104 // Structured Exception Handlers for our generated code.
1105 os::register_code_area((char*)low_bound(), (char*)high_bound());
1106 }
1107
// NOTE(review): "before" side of the hunk for original lines 1108-1137.

// VM startup entry point: initialize the code cache, then AOT heaps.
1108 void codeCache_init() {
1109 CodeCache::initialize();
1110 // Load AOT libraries and add AOT code heaps.
1111 AOTLoader::initialize();
1112 }
1113
1114 //------------------------------------------------------------------------------------------------
1115
1116 int CodeCache::number_of_nmethods_with_dependencies() {
1117 return _number_of_nmethods_with_dependencies;
1118 }
1119
// Clear inline caches of all alive compiled methods (lock/safepoint held).
1120 void CodeCache::clear_inline_caches() {
1121 assert_locked_or_safepoint(CodeCache_lock);
1122 CompiledMethodIterator iter;
1123 while(iter.next_alive()) {
1124 iter.method()->clear_inline_caches();
1125 }
1126 }
1127
// Clean (not just clear) inline caches of all alive nmethods.
1128 void CodeCache::cleanup_inline_caches() {
1129 assert_locked_or_safepoint(CodeCache_lock);
1130 NMethodIterator iter;
1131 while(iter.next_alive()) {
1132 iter.method()->cleanup_inline_caches(/*clean_all=*/true);
1133 }
1134 }
1135
1136 // Keeps track of time spent for checking dependencies
1137 NOT_PRODUCT(static elapsedTimer dependentCheckTime;)
1138
1139 int CodeCache::mark_for_deoptimization(KlassDepChange& changes) {
1140 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1141 int number_of_marked_CodeBlobs = 0;
1142
1143 // search the hierarchy looking for nmethods which are affected by the loading of this class
1144
1145 // then search the interfaces this class implements looking for nmethods
1146 // which might be dependent of the fact that an interface only had one
1147 // implementor.
1148 // nmethod::check_all_dependencies works only correctly, if no safepoint
1149 // can happen
1150 NoSafepointVerifier nsv;
|
1102 // Give OS a chance to register generated code area.
1103 // This is used on Windows 64 bit platforms to register
1104 // Structured Exception Handlers for our generated code.
1105 os::register_code_area((char*)low_bound(), (char*)high_bound());
1106 }
1107
// NOTE(review): "after" side of the hunk for original lines 1108-1137.
// Both inline-cache walks moved from next_alive() to explicit (true, true)
// iterators + next(), additionally skipping is_unloading methods.

1108 void codeCache_init() {
1109 CodeCache::initialize();
1110 // Load AOT libraries and add AOT code heaps.
1111 AOTLoader::initialize();
1112 }
1113
1114 //------------------------------------------------------------------------------------------------
1115
1116 int CodeCache::number_of_nmethods_with_dependencies() {
1117 return _number_of_nmethods_with_dependencies;
1118 }
1119
1120 void CodeCache::clear_inline_caches() {
1121 assert_locked_or_safepoint(CodeCache_lock);
1122 CompiledMethodIterator iter(true /* only_alive */, true /* only_not_unloading */);
1123 while(iter.next()) {
1124 iter.method()->clear_inline_caches();
1125 }
1126 }
1127
1128 void CodeCache::cleanup_inline_caches() {
1129 assert_locked_or_safepoint(CodeCache_lock);
1130 NMethodIterator iter(true /* only_alive */, true /* only_not_unloading */);
1131 while(iter.next()) {
1132 iter.method()->cleanup_inline_caches(/*clean_all=*/true);
1133 }
1134 }
1135
1136 // Keeps track of time spent for checking dependencies
1137 NOT_PRODUCT(static elapsedTimer dependentCheckTime;)
1138
1139 int CodeCache::mark_for_deoptimization(KlassDepChange& changes) {
1140 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1141 int number_of_marked_CodeBlobs = 0;
1142
1143 // search the hierarchy looking for nmethods which are affected by the loading of this class
1144
1145 // then search the interfaces this class implements looking for nmethods
1146 // which might be dependent of the fact that an interface only had one
1147 // implementor.
1148 // nmethod::check_all_dependencies works only correctly, if no safepoint
1149 // can happen
1150 NoSafepointVerifier nsv;
|
1181 #endif
1182 }
1183
// NOTE(review): "before" side of the hunk for original lines 1184-1259
// (deoptimization marking). Old iterator style throughout: default
// constructor + next_alive().

1184 #ifdef HOTSWAP
// Class-redefinition support: mark for deopt every compiled method of the
// evolving class plus every alive method dependent on it; others get their
// inline caches flushed. Returns the number of methods marked.
1185 int CodeCache::mark_for_evol_deoptimization(InstanceKlass* dependee) {
1186 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1187 int number_of_marked_CodeBlobs = 0;
1188
1189 // Deoptimize all methods of the evolving class itself
1190 Array<Method*>* old_methods = dependee->methods();
1191 for (int i = 0; i < old_methods->length(); i++) {
1192 ResourceMark rm;
1193 Method* old_method = old_methods->at(i);
1194 CompiledMethod* nm = old_method->code();
1195 if (nm != NULL) {
1196 nm->mark_for_deoptimization();
1197 number_of_marked_CodeBlobs++;
1198 }
1199 }
1200
1201 CompiledMethodIterator iter;
1202 while(iter.next_alive()) {
1203 CompiledMethod* nm = iter.method();
1204 if (nm->is_marked_for_deoptimization()) {
1205 // ...Already marked in the previous pass; don't count it again.
1206 } else if (nm->is_evol_dependent_on(dependee)) {
1207 ResourceMark rm;
1208 nm->mark_for_deoptimization();
1209 number_of_marked_CodeBlobs++;
1210 } else {
1211 // flush caches in case they refer to a redefined Method*
1212 nm->clear_inline_caches();
1213 }
1214 }
1215
1216 return number_of_marked_CodeBlobs;
1217 }
1218 #endif // HOTSWAP
1219
1220
// Mark every alive compiled method except method-handle intrinsics.
1221 // Deoptimize all methods
1222 void CodeCache::mark_all_nmethods_for_deoptimization() {
1223 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1224 CompiledMethodIterator iter;
1225 while(iter.next_alive()) {
1226 CompiledMethod* nm = iter.method();
1227 if (!nm->method()->is_method_handle_intrinsic()) {
1228 nm->mark_for_deoptimization();
1229 }
1230 }
1231 }
1232
// Mark (and count) alive compiled methods that depend on 'dependee'.
1233 int CodeCache::mark_for_deoptimization(Method* dependee) {
1234 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1235 int number_of_marked_CodeBlobs = 0;
1236
1237 CompiledMethodIterator iter;
1238 while(iter.next_alive()) {
1239 CompiledMethod* nm = iter.method();
1240 if (nm->is_dependent_on_method(dependee)) {
1241 ResourceMark rm;
1242 nm->mark_for_deoptimization();
1243 number_of_marked_CodeBlobs++;
1244 }
1245 }
1246
1247 return number_of_marked_CodeBlobs;
1248 }
1249
// Second phase: transition every marked (and not yet not-entrant) method.
1250 void CodeCache::make_marked_nmethods_not_entrant() {
1251 assert_locked_or_safepoint(CodeCache_lock);
1252 CompiledMethodIterator iter;
1253 while(iter.next_alive()) {
1254 CompiledMethod* nm = iter.method();
1255 if (nm->is_marked_for_deoptimization() && !nm->is_not_entrant()) {
1256 nm->make_not_entrant();
1257 }
1258 }
1259 }
1260
1261 // Flushes compiled methods dependent on dependee.
1262 void CodeCache::flush_dependents_on(InstanceKlass* dependee) {
1263 assert_lock_strong(Compile_lock);
1264
1265 if (number_of_nmethods_with_dependencies() == 0) return;
1266
1267 // CodeCache can only be updated by a thread_in_VM and they will all be
1268 // stopped during the safepoint so CodeCache will be safe to update without
1269 // holding the CodeCache_lock.
1270
1271 KlassDepChange changes(dependee);
1272
|
1181 #endif
1182 }
1183
// NOTE(review): "after" side of the hunk for original lines 1184-1259. All
// four next_alive() loops became explicit (true, true) iterators + next(),
// i.e. deopt marking now also skips is_unloading methods.

1184 #ifdef HOTSWAP
1185 int CodeCache::mark_for_evol_deoptimization(InstanceKlass* dependee) {
1186 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1187 int number_of_marked_CodeBlobs = 0;
1188
1189 // Deoptimize all methods of the evolving class itself
1190 Array<Method*>* old_methods = dependee->methods();
1191 for (int i = 0; i < old_methods->length(); i++) {
1192 ResourceMark rm;
1193 Method* old_method = old_methods->at(i);
1194 CompiledMethod* nm = old_method->code();
1195 if (nm != NULL) {
1196 nm->mark_for_deoptimization();
1197 number_of_marked_CodeBlobs++;
1198 }
1199 }
1200
1201 CompiledMethodIterator iter(true /* only_alive */, true /* only_not_unloading */);
1202 while(iter.next()) {
1203 CompiledMethod* nm = iter.method();
1204 if (nm->is_marked_for_deoptimization()) {
1205 // ...Already marked in the previous pass; don't count it again.
1206 } else if (nm->is_evol_dependent_on(dependee)) {
1207 ResourceMark rm;
1208 nm->mark_for_deoptimization();
1209 number_of_marked_CodeBlobs++;
1210 } else {
1211 // flush caches in case they refer to a redefined Method*
1212 nm->clear_inline_caches();
1213 }
1214 }
1215
1216 return number_of_marked_CodeBlobs;
1217 }
1218 #endif // HOTSWAP
1219
1220
1221 // Deoptimize all methods
1222 void CodeCache::mark_all_nmethods_for_deoptimization() {
1223 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1224 CompiledMethodIterator iter(true /* only_alive */, true /* only_not_unloading */);
1225 while(iter.next()) {
1226 CompiledMethod* nm = iter.method();
1227 if (!nm->method()->is_method_handle_intrinsic()) {
1228 nm->mark_for_deoptimization();
1229 }
1230 }
1231 }
1232
1233 int CodeCache::mark_for_deoptimization(Method* dependee) {
1234 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1235 int number_of_marked_CodeBlobs = 0;
1236
1237 CompiledMethodIterator iter(true /* only_alive */, true /* only_not_unloading */);
1238 while(iter.next()) {
1239 CompiledMethod* nm = iter.method();
1240 if (nm->is_dependent_on_method(dependee)) {
1241 ResourceMark rm;
1242 nm->mark_for_deoptimization();
1243 number_of_marked_CodeBlobs++;
1244 }
1245 }
1246
1247 return number_of_marked_CodeBlobs;
1248 }
1249
1250 void CodeCache::make_marked_nmethods_not_entrant() {
1251 assert_locked_or_safepoint(CodeCache_lock);
1252 CompiledMethodIterator iter(true /* only_alive */, true /* only_not_unloading */);
1253 while(iter.next()) {
1254 CompiledMethod* nm = iter.method();
1255 if (nm->is_marked_for_deoptimization() && !nm->is_not_entrant()) {
1256 nm->make_not_entrant();
1257 }
1258 }
1259 }
1260
1261 // Flushes compiled methods dependent on dependee.
1262 void CodeCache::flush_dependents_on(InstanceKlass* dependee) {
1263 assert_lock_strong(Compile_lock);
1264
1265 if (number_of_nmethods_with_dependencies() == 0) return;
1266
1267 // CodeCache can only be updated by a thread_in_VM and they will all be
1268 // stopped during the safepoint so CodeCache will be safe to update without
1269 // holding the CodeCache_lock.
1270
1271 KlassDepChange changes(dependee);
1272
|
1501 }
1502 } else if (cb->is_runtime_stub()) {
1503 runtimeStubCount++;
1504 } else if (cb->is_deoptimization_stub()) {
1505 deoptimizationStubCount++;
1506 } else if (cb->is_uncommon_trap_stub()) {
1507 uncommonTrapStubCount++;
1508 } else if (cb->is_adapter_blob()) {
1509 adapterCount++;
1510 } else if (cb->is_buffer_blob()) {
1511 bufferBlobCount++;
1512 }
1513 }
1514 }
1515
1516 int bucketSize = 512;
1517 int bucketLimit = max_nm_size / bucketSize + 1;
1518 int *buckets = NEW_C_HEAP_ARRAY(int, bucketLimit, mtCode);
1519 memset(buckets, 0, sizeof(int) * bucketLimit);
1520
1521 NMethodIterator iter;
1522 while(iter.next()) {
1523 nmethod* nm = iter.method();
1524 if(nm->method() != NULL && nm->is_java_method()) {
1525 buckets[nm->size() / bucketSize]++;
1526 }
1527 }
1528
1529 tty->print_cr("Code Cache Entries (total of %d)",total);
1530 tty->print_cr("-------------------------------------------------");
1531 tty->print_cr("nmethods: %d",nmethodCount);
1532 tty->print_cr("\talive: %d",nmethodAlive);
1533 tty->print_cr("\tnot_entrant: %d",nmethodNotEntrant);
1534 tty->print_cr("\tzombie: %d",nmethodZombie);
1535 tty->print_cr("\tunloaded: %d",nmethodUnloaded);
1536 tty->print_cr("\tjava: %d",nmethodJava);
1537 tty->print_cr("\tnative: %d",nmethodNative);
1538 tty->print_cr("runtime_stubs: %d",runtimeStubCount);
1539 tty->print_cr("adapters: %d",adapterCount);
1540 tty->print_cr("buffer blobs: %d",bufferBlobCount);
|
1501 }
1502 } else if (cb->is_runtime_stub()) {
1503 runtimeStubCount++;
1504 } else if (cb->is_deoptimization_stub()) {
1505 deoptimizationStubCount++;
1506 } else if (cb->is_uncommon_trap_stub()) {
1507 uncommonTrapStubCount++;
1508 } else if (cb->is_adapter_blob()) {
1509 adapterCount++;
1510 } else if (cb->is_buffer_blob()) {
1511 bufferBlobCount++;
1512 }
1513 }
1514 }
1515
1516 int bucketSize = 512;
1517 int bucketLimit = max_nm_size / bucketSize + 1;
1518 int *buckets = NEW_C_HEAP_ARRAY(int, bucketLimit, mtCode);
1519 memset(buckets, 0, sizeof(int) * bucketLimit);
1520
1521 NMethodIterator iter(false /* only_alive */, false /* only_not_unloading */);
1522 while(iter.next()) {
1523 nmethod* nm = iter.method();
1524 if(nm->method() != NULL && nm->is_java_method()) {
1525 buckets[nm->size() / bucketSize]++;
1526 }
1527 }
1528
1529 tty->print_cr("Code Cache Entries (total of %d)",total);
1530 tty->print_cr("-------------------------------------------------");
1531 tty->print_cr("nmethods: %d",nmethodCount);
1532 tty->print_cr("\talive: %d",nmethodAlive);
1533 tty->print_cr("\tnot_entrant: %d",nmethodNotEntrant);
1534 tty->print_cr("\tzombie: %d",nmethodZombie);
1535 tty->print_cr("\tunloaded: %d",nmethodUnloaded);
1536 tty->print_cr("\tjava: %d",nmethodJava);
1537 tty->print_cr("\tnative: %d",nmethodNative);
1538 tty->print_cr("runtime_stubs: %d",runtimeStubCount);
1539 tty->print_cr("adapters: %d",adapterCount);
1540 tty->print_cr("buffer blobs: %d",bufferBlobCount);
|
1641 }
1642
1643 if (detailed) {
1644 st->print_cr(" total_blobs=" UINT32_FORMAT " nmethods=" UINT32_FORMAT
1645 " adapters=" UINT32_FORMAT,
1646 blob_count(), nmethod_count(), adapter_count());
1647 st->print_cr(" compilation: %s", CompileBroker::should_compile_new_jobs() ?
1648 "enabled" : Arguments::mode() == Arguments::_int ?
1649 "disabled (interpreter mode)" :
1650 "disabled (not enough contiguous free space left)");
1651 st->print_cr(" stopped_count=%d, restarted_count=%d",
1652 CompileBroker::get_total_compiler_stopped_count(),
1653 CompileBroker::get_total_compiler_restarted_count());
1654 st->print_cr(" full_count=%d", full_count);
1655 }
1656 }
1657
// NOTE(review): "before" side of the hunk for original lines 1658-1677.

// Print one line per alive compiled method: ids, level, state, name, and
// header/code begin-end addresses.
1658 void CodeCache::print_codelist(outputStream* st) {
1659 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1660
1661 CompiledMethodIterator iter;
1662 while (iter.next_alive()) {
1663 CompiledMethod* cm = iter.method();
1664 ResourceMark rm;
1665 char* method_name = cm->method()->name_and_sig_as_C_string();
1666 st->print_cr("%d %d %d %s [" INTPTR_FORMAT ", " INTPTR_FORMAT " - " INTPTR_FORMAT "]",
1667 cm->compile_id(), cm->comp_level(), cm->get_state(),
1668 method_name,
1669 (intptr_t)cm->header_begin(), (intptr_t)cm->code_begin(), (intptr_t)cm->code_end());
1670 }
1671 }
1672
// Layout dump is just the detailed summary under the cache lock.
1673 void CodeCache::print_layout(outputStream* st) {
1674 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1675 ResourceMark rm;
1676 print_summary(st, true);
1677 }
1678
1679 void CodeCache::log_state(outputStream* st) {
1680 st->print(" total_blobs='" UINT32_FORMAT "' nmethods='" UINT32_FORMAT "'"
1681 " adapters='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
|
1641 }
1642
1643 if (detailed) {
1644 st->print_cr(" total_blobs=" UINT32_FORMAT " nmethods=" UINT32_FORMAT
1645 " adapters=" UINT32_FORMAT,
1646 blob_count(), nmethod_count(), adapter_count());
1647 st->print_cr(" compilation: %s", CompileBroker::should_compile_new_jobs() ?
1648 "enabled" : Arguments::mode() == Arguments::_int ?
1649 "disabled (interpreter mode)" :
1650 "disabled (not enough contiguous free space left)");
1651 st->print_cr(" stopped_count=%d, restarted_count=%d",
1652 CompileBroker::get_total_compiler_stopped_count(),
1653 CompileBroker::get_total_compiler_restarted_count());
1654 st->print_cr(" full_count=%d", full_count);
1655 }
1656 }
1657
// NOTE(review): "after" side of the hunk for original lines 1658-1677.
// print_codelist's next_alive() loop became an explicit (true, true)
// iterator + next(); print_layout is unchanged.

1658 void CodeCache::print_codelist(outputStream* st) {
1659 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1660
1661 CompiledMethodIterator iter(true /* only_alive */, true /* only_not_unloading */);
1662 while (iter.next()) {
1663 CompiledMethod* cm = iter.method();
1664 ResourceMark rm;
1665 char* method_name = cm->method()->name_and_sig_as_C_string();
1666 st->print_cr("%d %d %d %s [" INTPTR_FORMAT ", " INTPTR_FORMAT " - " INTPTR_FORMAT "]",
1667 cm->compile_id(), cm->comp_level(), cm->get_state(),
1668 method_name,
1669 (intptr_t)cm->header_begin(), (intptr_t)cm->code_begin(), (intptr_t)cm->code_end());
1670 }
1671 }
1672
1673 void CodeCache::print_layout(outputStream* st) {
1674 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1675 ResourceMark rm;
1676 print_summary(st, true);
1677 }
1678
1679 void CodeCache::log_state(outputStream* st) {
1680 st->print(" total_blobs='" UINT32_FORMAT "' nmethods='" UINT32_FORMAT "'"
1681 " adapters='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
|