59 #include "runtime/atomic.hpp"
60 #include "runtime/commandLineFlagConstraintList.hpp"
61 #include "runtime/deoptimization.hpp"
62 #include "runtime/fprofiler.hpp"
63 #include "runtime/handles.inline.hpp"
64 #include "runtime/init.hpp"
65 #include "runtime/java.hpp"
66 #include "runtime/javaCalls.hpp"
67 #include "runtime/sharedRuntime.hpp"
68 #include "runtime/synchronizer.hpp"
69 #include "runtime/thread.inline.hpp"
70 #include "runtime/timerTrace.hpp"
71 #include "runtime/vm_operations.hpp"
72 #include "services/memoryService.hpp"
73 #include "utilities/copy.hpp"
74 #include "utilities/events.hpp"
75 #include "utilities/hashtable.inline.hpp"
76 #include "utilities/macros.hpp"
77 #include "utilities/ostream.hpp"
78 #include "utilities/preserveException.hpp"
79 #if INCLUDE_ALL_GCS
80 #include "gc/cms/cmsCollectorPolicy.hpp"
81 #include "gc/g1/g1CollectedHeap.inline.hpp"
82 #include "gc/g1/g1CollectorPolicy.hpp"
83 #include "gc/parallel/parallelScavengeHeap.hpp"
84 #include "gc/shared/adaptiveSizePolicy.hpp"
85 #endif // INCLUDE_ALL_GCS
86 #if INCLUDE_CDS
87 #include "classfile/sharedClassUtil.hpp"
88 #endif
89
// Known objects
// Canonical Klass* for each primitive array type plus Object[], and the
// mirror oops for the primitive types (judging by the names, the
// java.lang.Class instances — initialization code is outside this chunk).
Klass* Universe::_boolArrayKlassObj = NULL;
Klass* Universe::_byteArrayKlassObj = NULL;
Klass* Universe::_charArrayKlassObj = NULL;
Klass* Universe::_intArrayKlassObj = NULL;
Klass* Universe::_shortArrayKlassObj = NULL;
Klass* Universe::_longArrayKlassObj = NULL;
Klass* Universe::_singleArrayKlassObj = NULL;
Klass* Universe::_doubleArrayKlassObj = NULL;
// Sized T_VOID+1 so it can be indexed directly by BasicType constants.
Klass* Universe::_typeArrayKlassObjs[T_VOID+1] = { NULL /*, NULL...*/ };
Klass* Universe::_objectArrayKlassObj = NULL;
oop Universe::_int_mirror = NULL;
oop Universe::_float_mirror = NULL;
oop Universe::_double_mirror = NULL;
oop Universe::_byte_mirror = NULL;
oop Universe::_bool_mirror = NULL;
144
// These variables are guarded by FullGCALot_lock.
debug_only(objArrayOop Universe::_fullgc_alot_dummy_array = NULL;)
debug_only(int Universe::_fullgc_alot_dummy_next = 0;)

// Heap
int Universe::_verify_count = 0;  // incremented once per verification pass (see verify())

// Oop verification (see MacroAssembler::verify_oop)
// {mask=0, bits=all-ones} is the "not yet computed" state;
// calculate_verify_data() derives the real values from the heap range.
uintptr_t Universe::_verify_oop_mask = 0;
uintptr_t Universe::_verify_oop_bits = (uintptr_t) -1;

int Universe::_base_vtable_size = 0;  // vtable size of java.lang.Object (compute_base_vtable_size)
bool Universe::_bootstrapping = false;
bool Universe::_module_initialized = false;
bool Universe::_fully_initialized = false;

// Heap occupancy snapshot taken at the last GC (see update_heap_info_at_gc()).
size_t Universe::_heap_capacity_at_last_gc;
size_t Universe::_heap_used_at_last_gc = 0;

CollectedHeap* Universe::_collectedHeap = NULL;  // the single heap instance

// Compressed oop/klass encoding state: { base, shift, use_implicit_null_checks }
// (field meaning inferred from the narrow_oop_base/shift/... accessors used below).
NarrowPtrStruct Universe::_narrow_oop = { NULL, 0, true };
NarrowPtrStruct Universe::_narrow_klass = { NULL, 0, true };
address Universe::_narrow_ptrs_base;
// Invoke f on each of the eight primitive-type array klasses.
// The object array klass is not visited here.
void Universe::basic_type_classes_do(void f(Klass*)) {
  f(boolArrayKlassObj());
  f(byteArrayKlassObj());
  f(charArrayKlassObj());
  f(intArrayKlassObj());
  f(shortArrayKlassObj());
  f(longArrayKlassObj());
  f(singleArrayKlassObj());
  f(doubleArrayKlassObj());
}
180
181 void Universe::oops_do(OopClosure* f, bool do_all) {
182
183 f->do_oop((oop*) &_int_mirror);
184 f->do_oop((oop*) &_float_mirror);
185 f->do_oop((oop*) &_double_mirror);
647
648 if (_non_oop_bits == 0) {
649 _non_oop_bits = (intptr_t)os::non_memory_address_word() | 1;
650 }
651
652 return (void*)_non_oop_bits;
653 }
654
655 jint universe_init() {
656 assert(!Universe::_fully_initialized, "called after initialize_vtables");
657 guarantee(1 << LogHeapWordSize == sizeof(HeapWord),
658 "LogHeapWordSize is incorrect.");
659 guarantee(sizeof(oop) >= sizeof(HeapWord), "HeapWord larger than oop?");
660 guarantee(sizeof(oop) % sizeof(HeapWord) == 0,
661 "oop size is not not a multiple of HeapWord size");
662
663 TraceTime timer("Genesis", TRACETIME_LOG(Info, startuptime));
664
665 JavaClasses::compute_hard_coded_offsets();
666
667 jint status = Universe::initialize_heap();
668 if (status != JNI_OK) {
669 return status;
670 }
671
672 Metaspace::global_initialize();
673
674 // Checks 'AfterMemoryInit' constraints.
675 if (!CommandLineFlagConstraintList::check_constraints(CommandLineFlagConstraint::AfterMemoryInit)) {
676 return JNI_EINVAL;
677 }
678
679 // Create memory for metadata. Must be after initializing heap for
680 // DumpSharedSpaces.
681 ClassLoaderData::init_null_class_loader_data();
682
683 // We have a heap so create the Method* caches before
684 // Metaspace::initialize_shared_spaces() tries to populate them.
685 Universe::_finalizer_register_cache = new LatestMethodCache();
686 Universe::_loader_addClass_cache = new LatestMethodCache();
687 Universe::_pd_implies_cache = new LatestMethodCache();
694 // the file (other than the mapped regions) is no longer needed, and
695 // the file is closed. Closing the file does not affect the
696 // currently mapped regions.
697 MetaspaceShared::initialize_shared_spaces();
698 StringTable::create_table();
699 } else {
700 SymbolTable::create_table();
701 StringTable::create_table();
702
703 if (DumpSharedSpaces) {
704 MetaspaceShared::prepare_for_dumping();
705 }
706 }
707 if (strlen(VerifySubSet) > 0) {
708 Universe::initialize_verify_flags();
709 }
710
711 return JNI_OK;
712 }
713
// Instantiate the CollectedHeap selected by the GC command-line flags.
// In a build without INCLUDE_ALL_GCS only the serial collector is available
// and requesting any other collector is a fatal error.
// Note: the trailing "} else if (UseSerialGC)" arm is shared by both sides
// of the #if -- each preprocessor arm deliberately leaves its last
// "else if" branch open.
CollectedHeap* Universe::create_heap() {
  assert(_collectedHeap == NULL, "Heap already created");
#if !INCLUDE_ALL_GCS
  if (UseParallelGC) {
    fatal("UseParallelGC not supported in this VM.");
  } else if (UseG1GC) {
    fatal("UseG1GC not supported in this VM.");
  } else if (UseConcMarkSweepGC) {
    fatal("UseConcMarkSweepGC not supported in this VM.");
#else
  if (UseParallelGC) {
    return Universe::create_heap_with_policy<ParallelScavengeHeap, GenerationSizer>();
  } else if (UseG1GC) {
    return Universe::create_heap_with_policy<G1CollectedHeap, G1CollectorPolicy>();
  } else if (UseConcMarkSweepGC) {
    return Universe::create_heap_with_policy<GenCollectedHeap, ConcurrentMarkSweepPolicy>();
#endif
  } else if (UseSerialGC) {
    return Universe::create_heap_with_policy<GenCollectedHeap, MarkSweepPolicy>();
  }

  ShouldNotReachHere();
  return NULL;
}
738
739 // Choose the heap base address and oop encoding mode
740 // when compressed oops are used:
//    Unscaled  - Use 32-bit oops without encoding when
742 // NarrowOopHeapBaseMin + heap_size < 4Gb
743 // ZeroBased - Use zero based compressed oops with encoding when
744 // NarrowOopHeapBaseMin + heap_size < 32Gb
745 // HeapBased - Use compressed oops with heap base + encoding.
746
// Create and initialize the (single) CollectedHeap, then configure TLAB
// sizing and the compressed-oops encoding around its reserved range.
// Returns JNI_OK on success, or the heap's own initialization status.
jint Universe::initialize_heap() {
  jint status = JNI_ERR;

  // create_heap_ext() may supply a heap (presumably an extension hook,
  // defined elsewhere); fall back to the flag-driven selection otherwise.
  _collectedHeap = create_heap_ext();
  if (_collectedHeap == NULL) {
    _collectedHeap = create_heap();
  }

  status = _collectedHeap->initialize();
  if (status != JNI_OK) {
    return status;
  }
  log_info(gc)("Using %s", _collectedHeap->name());

  ThreadLocalAllocBuffer::set_max_size(Universe::heap()->max_tlab_size());

#ifdef _LP64
  if (UseCompressedOops) {
    // Subtract a page because something can get allocated at heap base.
    // This also makes implicit null checking work, because the
    // memory+1 page below heap_base needs to cause a signal.
    // See needs_explicit_null_check.
    // Only set the heap base for compressed oops because it indicates
    // compressed oops for pstack code.
    if ((uint64_t)Universe::heap()->reserved_region().end() > UnscaledOopHeapMax) {
      // Didn't reserve heap below 4Gb. Must shift.
      Universe::set_narrow_oop_shift(LogMinObjAlignmentInBytes);
    }
    if ((uint64_t)Universe::heap()->reserved_region().end() <= OopEncodingHeapMax) {
      // Did reserve heap below 32Gb. Can use base == 0;
      Universe::set_narrow_oop_base(0);
    }

    Universe::set_narrow_ptrs_base(Universe::narrow_oop_base());

    if (log_is_enabled(Info, gc, heap, coops)) {
      ResourceMark rm;
      outputStream* logst = Log(gc, heap, coops)::info_stream();
      Universe::print_compressed_oops_mode(logst);
    }

    // Tell tests in which mode we run.
    Arguments::PropertyList_add(new SystemProperty("java.vm.compressedOopsMode",
                                                   narrow_oop_mode_to_string(narrow_oop_mode()),
                                                   false));
  }
  // Universe::narrow_oop_base() is one page below the heap.
  assert((intptr_t)Universe::narrow_oop_base() <= (intptr_t)(Universe::heap()->base() -
         os::vm_page_size()) ||
         Universe::narrow_oop_base() == NULL, "invalid value");
  assert(Universe::narrow_oop_shift() == LogMinObjAlignmentInBytes ||
         Universe::narrow_oop_shift() == 0, "invalid value");
#endif

  // We will never reach the CATCH below since Exceptions::_throw will cause
  // the VM to exit if an exception is thrown during initialization

  if (UseTLAB) {
    assert(Universe::heap()->supports_tlab_allocation(),
           "Should support thread-local allocation buffers");
    ThreadLocalAllocBuffer::startup_initialization();
  }
  return JNI_OK;
}
811
// Print a one-line summary of the compressed-oops configuration to st:
// heap address/size, encoding mode, and (only when non-trivial) the
// base, shift, and implicit-null-check status.
void Universe::print_compressed_oops_mode(outputStream* st) {
  st->print("Heap address: " PTR_FORMAT ", size: " SIZE_FORMAT " MB",
            p2i(Universe::heap()->base()), Universe::heap()->reserved_region().byte_size()/M);

  st->print(", Compressed Oops mode: %s", narrow_oop_mode_to_string(narrow_oop_mode()));

  // Base and shift are only printed when non-zero.
  if (Universe::narrow_oop_base() != 0) {
    st->print(": " PTR_FORMAT, p2i(Universe::narrow_oop_base()));
  }

  if (Universe::narrow_oop_shift() != 0) {
    st->print(", Oop shift amount: %d", Universe::narrow_oop_shift());
  }

  if (!Universe::narrow_oop_use_implicit_null_checks()) {
    st->print(", no protected page in front of the heap");
  }
  st->cr();
}
831
832 ReservedSpace Universe::reserve_heap(size_t heap_size, size_t alignment) {
833
834 assert(alignment <= Arguments::conservative_max_heap_alignment(),
    // Else heap start and base MUST differ, so that NULL can be encoded unambiguously.
859 Universe::set_narrow_oop_base((address)total_rs.compressed_oop_base());
860 }
861
862 return total_rs;
863 }
864
865 vm_exit_during_initialization(
866 err_msg("Could not reserve enough space for " SIZE_FORMAT "KB object heap",
867 total_reserved/K));
868
869 // satisfy compiler
870 ShouldNotReachHere();
871 return ReservedHeapSpace(0, 0, false);
872 }
873
874
875 // It's the caller's responsibility to ensure glitch-freedom
876 // (if required).
877 void Universe::update_heap_info_at_gc() {
878 _heap_capacity_at_last_gc = heap()->capacity();
879 _heap_used_at_last_gc = heap()->used();
880 }
881
882
883 const char* Universe::narrow_oop_mode_to_string(Universe::NARROW_OOP_MODE mode) {
884 switch (mode) {
885 case UnscaledNarrowOop:
886 return "32-bit";
887 case ZeroBasedNarrowOop:
888 return "Zero based";
889 case DisjointBaseNarrowOop:
890 return "Non-zero disjoint base";
891 case HeapBasedNarrowOop:
892 return "Non-zero based";
893 }
894
895 ShouldNotReachHere();
896 return "";
897 }
898
899
1065 Universe::_preallocated_out_of_memory_error_array = oopFactory::new_objArray(k_h(), len, CHECK_false);
1066 for (int i=0; i<len; i++) {
1067 oop err = k_h->allocate_instance(CHECK_false);
1068 Handle err_h = Handle(THREAD, err);
1069 java_lang_Throwable::allocate_backtrace(err_h, CHECK_false);
1070 Universe::preallocated_out_of_memory_errors()->obj_at_put(i, err_h());
1071 }
1072 Universe::_preallocated_out_of_memory_error_avail_count = (jint)len;
1073 }
1074
1075 Universe::initialize_known_methods(CHECK_false);
1076
1077 // This needs to be done before the first scavenge/gc, since
1078 // it's an input to soft ref clearing policy.
1079 {
1080 MutexLocker x(Heap_lock);
1081 Universe::update_heap_info_at_gc();
1082 }
1083
1084 // ("weak") refs processing infrastructure initialization
1085 Universe::heap()->post_initialize();
1086
1087 // Initialize performance counters for metaspaces
1088 MetaspaceCounters::initialize_performance_counters();
1089 CompressedClassSpaceCounters::initialize_performance_counters();
1090
1091 MemoryService::add_metaspace_memory_pools();
1092
1093 MemoryService::set_universe_heap(Universe::heap());
1094 #if INCLUDE_CDS
1095 SharedClassUtil::initialize(CHECK_false);
1096 #endif
1097 return true;
1098 }
1099
1100
// Compute and cache the vtable size of java.lang.Object.
void Universe::compute_base_vtable_size() {
  _base_vtable_size = ClassLoader::compute_Object_vtable();
}
1104
// Print a heap summary to st under Heap_lock.
void Universe::print_on(outputStream* st) {
  GCMutexLocker hl(Heap_lock); // Heap_lock might be locked by caller thread.
  st->print_cr("Heap");
  heap()->print_on(st);
}
1110
1111 void Universe::print_heap_at_SIGBREAK() {
1112 if (PrintHeapAtSIGBREAK) {
1113 print_on(tty);
1114 tty->cr();
1115 tty->flush();
1116 }
1117 }
1118
1119 void Universe::print_heap_before_gc() {
1120 Log(gc, heap) log;
1121 if (log.is_debug()) {
1122 log.debug("Heap before GC invocations=%u (full %u):", heap()->total_collections(), heap()->total_full_collections());
1123 ResourceMark rm;
1124 heap()->print_on(log.debug_stream());
1125 }
1126 }
1127
1128 void Universe::print_heap_after_gc() {
1129 Log(gc, heap) log;
1130 if (log.is_debug()) {
1131 log.debug("Heap after GC invocations=%u (full %u):", heap()->total_collections(), heap()->total_full_collections());
1132 ResourceMark rm;
1133 heap()->print_on(log.debug_stream());
1134 }
1135 }
1136
1137 void Universe::initialize_verify_flags() {
1138 verify_flags = 0;
1139 const char delimiter[] = " ,";
1140
1141 size_t length = strlen(VerifySubSet);
1142 char* subset_list = NEW_C_HEAP_ARRAY(char, length + 1, mtInternal);
1143 strncpy(subset_list, VerifySubSet, length + 1);
1144
1145 char* token = strtok(subset_list, delimiter);
1146 while (token != NULL) {
1147 if (strcmp(token, "threads") == 0) {
1148 verify_flags |= Verify_Threads;
1149 } else if (strcmp(token, "heap") == 0) {
1150 verify_flags |= Verify_Heap;
1151 } else if (strcmp(token, "symbol_table") == 0) {
1152 verify_flags |= Verify_SymbolTable;
1153 } else if (strcmp(token, "string_table") == 0) {
1187 _verify_in_progress = true;
1188
1189 COMPILER2_PRESENT(
1190 assert(!DerivedPointerTable::is_active(),
1191 "DPT should not be active during verification "
1192 "(of thread stacks below)");
1193 )
1194
1195 ResourceMark rm;
1196 HandleMark hm; // Handles created during verification can be zapped
1197 _verify_count++;
1198
1199 FormatBuffer<> title("Verifying %s", prefix);
1200 GCTraceTime(Info, gc, verify) tm(title.buffer());
1201 if (should_verify_subset(Verify_Threads)) {
1202 log_debug(gc, verify)("Threads");
1203 Threads::verify();
1204 }
1205 if (should_verify_subset(Verify_Heap)) {
1206 log_debug(gc, verify)("Heap");
1207 heap()->verify(option);
1208 }
1209 if (should_verify_subset(Verify_SymbolTable)) {
1210 log_debug(gc, verify)("SymbolTable");
1211 SymbolTable::verify();
1212 }
1213 if (should_verify_subset(Verify_StringTable)) {
1214 log_debug(gc, verify)("StringTable");
1215 StringTable::verify();
1216 }
1217 if (should_verify_subset(Verify_CodeCache)) {
1218 {
1219 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1220 log_debug(gc, verify)("CodeCache");
1221 CodeCache::verify();
1222 }
1223 }
1224 if (should_verify_subset(Verify_SystemDictionary)) {
1225 log_debug(gc, verify)("SystemDictionary");
1226 SystemDictionary::verify();
1227 }
1267 while ((mask & diff) != 0)
1268 mask <<= 1;
1269 uintptr_t bits = (min & mask);
1270 assert(bits == (max & mask), "correct mask");
1271 // check an intermediate value between min and max, just to make sure:
1272 assert(bits == ((min + (max-min)/2) & mask), "correct mask");
1273
1274 // require address alignment, too:
1275 mask |= (alignSize - 1);
1276
1277 if (!(_verify_oop_mask == 0 && _verify_oop_bits == (uintptr_t)-1)) {
1278 assert(_verify_oop_mask == mask && _verify_oop_bits == bits, "mask stability");
1279 }
1280 _verify_oop_mask = mask;
1281 _verify_oop_bits = bits;
1282 }
1283
1284 // Oop verification (see MacroAssembler::verify_oop)
1285
// Recompute the oop-verification mask from the heap's reserved range
// and return it (see MacroAssembler::verify_oop).
uintptr_t Universe::verify_oop_mask() {
  MemRegion m = heap()->reserved_region();
  calculate_verify_data(m.start(), m.end());
  return _verify_oop_mask;
}
1291
// Recompute the oop-verification data from the heap's reserved range
// and return the expected bits under the mask.
uintptr_t Universe::verify_oop_bits() {
  MemRegion m = heap()->reserved_region();
  calculate_verify_data(m.start(), m.end());
  return _verify_oop_bits;
}
1297
// Mask selecting the lock bits of a mark word, for header verification.
uintptr_t Universe::verify_mark_mask() {
  return markOopDesc::lock_mask_in_place;
}
1301
// Expected bits of a prototype (freshly-initialized) mark word under
// verify_mark_mask(); asserts the prototype has no bits outside the mask.
uintptr_t Universe::verify_mark_bits() {
  intptr_t mask = verify_mark_mask();
  intptr_t bits = (intptr_t)markOopDesc::prototype();
  assert((bits & ~mask) == 0, "no stray header bits");
  return bits;
}
1308 #endif // PRODUCT
1309
1310
1311 void Universe::compute_verify_oop_data() {
1312 verify_oop_mask();
1313 verify_oop_bits();
|
59 #include "runtime/atomic.hpp"
60 #include "runtime/commandLineFlagConstraintList.hpp"
61 #include "runtime/deoptimization.hpp"
62 #include "runtime/fprofiler.hpp"
63 #include "runtime/handles.inline.hpp"
64 #include "runtime/init.hpp"
65 #include "runtime/java.hpp"
66 #include "runtime/javaCalls.hpp"
67 #include "runtime/sharedRuntime.hpp"
68 #include "runtime/synchronizer.hpp"
69 #include "runtime/thread.inline.hpp"
70 #include "runtime/timerTrace.hpp"
71 #include "runtime/vm_operations.hpp"
72 #include "services/memoryService.hpp"
73 #include "utilities/copy.hpp"
74 #include "utilities/events.hpp"
75 #include "utilities/hashtable.inline.hpp"
76 #include "utilities/macros.hpp"
77 #include "utilities/ostream.hpp"
78 #include "utilities/preserveException.hpp"
79 #if INCLUDE_CDS
80 #include "classfile/sharedClassUtil.hpp"
81 #endif
82
// Known objects
// Canonical Klass* for each primitive array type plus Object[], and the
// mirror oops for the primitive types (judging by the names, the
// java.lang.Class instances — initialization code is outside this chunk).
Klass* Universe::_boolArrayKlassObj = NULL;
Klass* Universe::_byteArrayKlassObj = NULL;
Klass* Universe::_charArrayKlassObj = NULL;
Klass* Universe::_intArrayKlassObj = NULL;
Klass* Universe::_shortArrayKlassObj = NULL;
Klass* Universe::_longArrayKlassObj = NULL;
Klass* Universe::_singleArrayKlassObj = NULL;
Klass* Universe::_doubleArrayKlassObj = NULL;
// Sized T_VOID+1 so it can be indexed directly by BasicType constants.
Klass* Universe::_typeArrayKlassObjs[T_VOID+1] = { NULL /*, NULL...*/ };
Klass* Universe::_objectArrayKlassObj = NULL;
oop Universe::_int_mirror = NULL;
oop Universe::_float_mirror = NULL;
oop Universe::_double_mirror = NULL;
oop Universe::_byte_mirror = NULL;
oop Universe::_bool_mirror = NULL;
137
// These variables are guarded by FullGCALot_lock.
debug_only(objArrayOop Universe::_fullgc_alot_dummy_array = NULL;)
debug_only(int Universe::_fullgc_alot_dummy_next = 0;)

// Heap
int Universe::_verify_count = 0;  // incremented once per verification pass (see verify())

// Oop verification (see MacroAssembler::verify_oop)
// {mask=0, bits=all-ones} is the "not yet computed" state;
// calculate_verify_data() derives the real values from the heap range.
uintptr_t Universe::_verify_oop_mask = 0;
uintptr_t Universe::_verify_oop_bits = (uintptr_t) -1;

int Universe::_base_vtable_size = 0;  // vtable size of java.lang.Object (compute_base_vtable_size)
bool Universe::_bootstrapping = false;
bool Universe::_module_initialized = false;
bool Universe::_fully_initialized = false;

// Heap occupancy snapshot taken at the last GC (see update_heap_info_at_gc()).
size_t Universe::_heap_capacity_at_last_gc;
size_t Universe::_heap_used_at_last_gc = 0;

// Compressed oop/klass encoding state: { base, shift, use_implicit_null_checks }
// (field meaning inferred from the narrow_oop_base/shift/... accessors used below).
NarrowPtrStruct Universe::_narrow_oop = { NULL, 0, true };
NarrowPtrStruct Universe::_narrow_klass = { NULL, 0, true };
address Universe::_narrow_ptrs_base;
// Invoke f on each of the eight primitive-type array klasses.
// The object array klass is not visited here.
void Universe::basic_type_classes_do(void f(Klass*)) {
  f(boolArrayKlassObj());
  f(byteArrayKlassObj());
  f(charArrayKlassObj());
  f(intArrayKlassObj());
  f(shortArrayKlassObj());
  f(longArrayKlassObj());
  f(singleArrayKlassObj());
  f(doubleArrayKlassObj());
}
171
172 void Universe::oops_do(OopClosure* f, bool do_all) {
173
174 f->do_oop((oop*) &_int_mirror);
175 f->do_oop((oop*) &_float_mirror);
176 f->do_oop((oop*) &_double_mirror);
638
639 if (_non_oop_bits == 0) {
640 _non_oop_bits = (intptr_t)os::non_memory_address_word() | 1;
641 }
642
643 return (void*)_non_oop_bits;
644 }
645
646 jint universe_init() {
647 assert(!Universe::_fully_initialized, "called after initialize_vtables");
648 guarantee(1 << LogHeapWordSize == sizeof(HeapWord),
649 "LogHeapWordSize is incorrect.");
650 guarantee(sizeof(oop) >= sizeof(HeapWord), "HeapWord larger than oop?");
651 guarantee(sizeof(oop) % sizeof(HeapWord) == 0,
652 "oop size is not not a multiple of HeapWord size");
653
654 TraceTime timer("Genesis", TRACETIME_LOG(Info, startuptime));
655
656 JavaClasses::compute_hard_coded_offsets();
657
658 assert(GC::is_initialized(), "needs to be initialized here");
659 jint status = GC::gc()->initialize_heap();
660 if (status != JNI_OK) {
661 return status;
662 }
663
664 status = Universe::initialize_heap();
665 if (status != JNI_OK) {
666 return status;
667 }
668
669 Metaspace::global_initialize();
670
671 // Checks 'AfterMemoryInit' constraints.
672 if (!CommandLineFlagConstraintList::check_constraints(CommandLineFlagConstraint::AfterMemoryInit)) {
673 return JNI_EINVAL;
674 }
675
676 // Create memory for metadata. Must be after initializing heap for
677 // DumpSharedSpaces.
678 ClassLoaderData::init_null_class_loader_data();
679
680 // We have a heap so create the Method* caches before
681 // Metaspace::initialize_shared_spaces() tries to populate them.
682 Universe::_finalizer_register_cache = new LatestMethodCache();
683 Universe::_loader_addClass_cache = new LatestMethodCache();
684 Universe::_pd_implies_cache = new LatestMethodCache();
691 // the file (other than the mapped regions) is no longer needed, and
692 // the file is closed. Closing the file does not affect the
693 // currently mapped regions.
694 MetaspaceShared::initialize_shared_spaces();
695 StringTable::create_table();
696 } else {
697 SymbolTable::create_table();
698 StringTable::create_table();
699
700 if (DumpSharedSpaces) {
701 MetaspaceShared::prepare_for_dumping();
702 }
703 }
704 if (strlen(VerifySubSet) > 0) {
705 Universe::initialize_verify_flags();
706 }
707
708 return JNI_OK;
709 }
710
711 // Choose the heap base address and oop encoding mode
712 // when compressed oops are used:
//    Unscaled  - Use 32-bit oops without encoding when
714 // NarrowOopHeapBaseMin + heap_size < 4Gb
715 // ZeroBased - Use zero based compressed oops with encoding when
716 // NarrowOopHeapBaseMin + heap_size < 32Gb
717 // HeapBased - Use compressed oops with heap base + encoding.
718
719 jint Universe::initialize_heap() {
720 jint status = JNI_ERR;
721
722 GC* gc = GC::gc();
723 CollectedHeap* heap = gc->heap();
724 ThreadLocalAllocBuffer::set_max_size(heap->max_tlab_size());
725
726 #ifdef _LP64
727 if (UseCompressedOops) {
728 // Subtract a page because something can get allocated at heap base.
729 // This also makes implicit null checking work, because the
730 // memory+1 page below heap_base needs to cause a signal.
731 // See needs_explicit_null_check.
732 // Only set the heap base for compressed oops because it indicates
733 // compressed oops for pstack code.
734 if ((uint64_t) heap->reserved_region().end() > UnscaledOopHeapMax) {
735 // Didn't reserve heap below 4Gb. Must shift.
736 Universe::set_narrow_oop_shift(LogMinObjAlignmentInBytes);
737 }
738 if ((uint64_t) heap->reserved_region().end() <= OopEncodingHeapMax) {
739 // Did reserve heap below 32Gb. Can use base == 0;
740 Universe::set_narrow_oop_base(0);
741 }
742
743 Universe::set_narrow_ptrs_base(Universe::narrow_oop_base());
744
745 if (log_is_enabled(Info, gc, heap, coops)) {
746 ResourceMark rm;
747 outputStream* logst = Log(gc, heap, coops)::info_stream();
748 Universe::print_compressed_oops_mode(logst);
749 }
750
751 // Tell tests in which mode we run.
752 Arguments::PropertyList_add(new SystemProperty("java.vm.compressedOopsMode",
753 narrow_oop_mode_to_string(narrow_oop_mode()),
754 false));
755 }
756 // Universe::narrow_oop_base() is one page below the heap.
757 assert((intptr_t)Universe::narrow_oop_base() <= (intptr_t)(heap->base() -
758 os::vm_page_size()) ||
759 Universe::narrow_oop_base() == NULL, "invalid value");
760 assert(Universe::narrow_oop_shift() == LogMinObjAlignmentInBytes ||
761 Universe::narrow_oop_shift() == 0, "invalid value");
762 #endif
763
764 // We will never reach the CATCH below since Exceptions::_throw will cause
765 // the VM to exit if an exception is thrown during initialization
766
767 if (UseTLAB) {
768 assert(heap->supports_tlab_allocation(),
769 "Should support thread-local allocation buffers");
770 ThreadLocalAllocBuffer::startup_initialization();
771 }
772 return JNI_OK;
773 }
774
775 void Universe::print_compressed_oops_mode(outputStream* st) {
776 st->print("Heap address: " PTR_FORMAT ", size: " SIZE_FORMAT " MB",
777 p2i(GC::gc()->heap()->base()), GC::gc()->heap()->reserved_region().byte_size()/M);
778
779 st->print(", Compressed Oops mode: %s", narrow_oop_mode_to_string(narrow_oop_mode()));
780
781 if (Universe::narrow_oop_base() != 0) {
782 st->print(": " PTR_FORMAT, p2i(Universe::narrow_oop_base()));
783 }
784
785 if (Universe::narrow_oop_shift() != 0) {
786 st->print(", Oop shift amount: %d", Universe::narrow_oop_shift());
787 }
788
789 if (!Universe::narrow_oop_use_implicit_null_checks()) {
790 st->print(", no protected page in front of the heap");
791 }
792 st->cr();
793 }
794
795 ReservedSpace Universe::reserve_heap(size_t heap_size, size_t alignment) {
796
797 assert(alignment <= Arguments::conservative_max_heap_alignment(),
    // Else heap start and base MUST differ, so that NULL can be encoded unambiguously.
822 Universe::set_narrow_oop_base((address)total_rs.compressed_oop_base());
823 }
824
825 return total_rs;
826 }
827
828 vm_exit_during_initialization(
829 err_msg("Could not reserve enough space for " SIZE_FORMAT "KB object heap",
830 total_reserved/K));
831
832 // satisfy compiler
833 ShouldNotReachHere();
834 return ReservedHeapSpace(0, 0, false);
835 }
836
837
838 // It's the caller's responsibility to ensure glitch-freedom
839 // (if required).
840 void Universe::update_heap_info_at_gc() {
841 _heap_capacity_at_last_gc = GC::gc()->heap()->capacity();
842 _heap_used_at_last_gc = GC::gc()->heap()->used();
843 }
844
845
// Map a compressed-oops encoding mode to its human-readable name.
const char* Universe::narrow_oop_mode_to_string(Universe::NARROW_OOP_MODE mode) {
  switch (mode) {
    case UnscaledNarrowOop:
      return "32-bit";
    case ZeroBasedNarrowOop:
      return "Zero based";
    case DisjointBaseNarrowOop:
      return "Non-zero disjoint base";
    case HeapBasedNarrowOop:
      return "Non-zero based";
  }

  // All enumerators are handled above; reaching here is a programming error.
  ShouldNotReachHere();
  return "";
}
861
862
1028 Universe::_preallocated_out_of_memory_error_array = oopFactory::new_objArray(k_h(), len, CHECK_false);
1029 for (int i=0; i<len; i++) {
1030 oop err = k_h->allocate_instance(CHECK_false);
1031 Handle err_h = Handle(THREAD, err);
1032 java_lang_Throwable::allocate_backtrace(err_h, CHECK_false);
1033 Universe::preallocated_out_of_memory_errors()->obj_at_put(i, err_h());
1034 }
1035 Universe::_preallocated_out_of_memory_error_avail_count = (jint)len;
1036 }
1037
1038 Universe::initialize_known_methods(CHECK_false);
1039
1040 // This needs to be done before the first scavenge/gc, since
1041 // it's an input to soft ref clearing policy.
1042 {
1043 MutexLocker x(Heap_lock);
1044 Universe::update_heap_info_at_gc();
1045 }
1046
1047 // ("weak") refs processing infrastructure initialization
1048 GC::gc()->heap()->post_initialize();
1049
1050 // Initialize performance counters for metaspaces
1051 MetaspaceCounters::initialize_performance_counters();
1052 CompressedClassSpaceCounters::initialize_performance_counters();
1053
1054 MemoryService::add_metaspace_memory_pools();
1055
1056 MemoryService::set_universe_heap(GC::gc()->heap());
1057 #if INCLUDE_CDS
1058 SharedClassUtil::initialize(CHECK_false);
1059 #endif
1060 return true;
1061 }
1062
1063
// Compute and cache the vtable size of java.lang.Object.
void Universe::compute_base_vtable_size() {
  _base_vtable_size = ClassLoader::compute_Object_vtable();
}
1067
// Print a heap summary to st under Heap_lock.
void Universe::print_on(outputStream* st) {
  GCMutexLocker hl(Heap_lock); // Heap_lock might be locked by caller thread.
  st->print_cr("Heap");
  GC::gc()->heap()->print_on(st);
}
1073
// SIGBREAK hook: dump a heap summary to tty when PrintHeapAtSIGBREAK is set.
void Universe::print_heap_at_SIGBREAK() {
  if (PrintHeapAtSIGBREAK) {
    print_on(tty);
    tty->cr();
    tty->flush();
  }
}
1081
1082 void Universe::print_heap_before_gc() {
1083 Log(gc, heap) log;
1084 if (log.is_debug()) {
1085 log.debug("Heap before GC invocations=%u (full %u):", GC::gc()->heap()->total_collections(), GC::gc()->heap()->total_full_collections());
1086 ResourceMark rm;
1087 GC::gc()->heap()->print_on(log.debug_stream());
1088 }
1089 }
1090
1091 void Universe::print_heap_after_gc() {
1092 Log(gc, heap) log;
1093 if (log.is_debug()) {
1094 log.debug("Heap after GC invocations=%u (full %u):", GC::gc()->heap()->total_collections(), GC::gc()->heap()->total_full_collections());
1095 ResourceMark rm;
1096 GC::gc()->heap()->print_on(log.debug_stream());
1097 }
1098 }
1099
1100 void Universe::initialize_verify_flags() {
1101 verify_flags = 0;
1102 const char delimiter[] = " ,";
1103
1104 size_t length = strlen(VerifySubSet);
1105 char* subset_list = NEW_C_HEAP_ARRAY(char, length + 1, mtInternal);
1106 strncpy(subset_list, VerifySubSet, length + 1);
1107
1108 char* token = strtok(subset_list, delimiter);
1109 while (token != NULL) {
1110 if (strcmp(token, "threads") == 0) {
1111 verify_flags |= Verify_Threads;
1112 } else if (strcmp(token, "heap") == 0) {
1113 verify_flags |= Verify_Heap;
1114 } else if (strcmp(token, "symbol_table") == 0) {
1115 verify_flags |= Verify_SymbolTable;
1116 } else if (strcmp(token, "string_table") == 0) {
1150 _verify_in_progress = true;
1151
1152 COMPILER2_PRESENT(
1153 assert(!DerivedPointerTable::is_active(),
1154 "DPT should not be active during verification "
1155 "(of thread stacks below)");
1156 )
1157
1158 ResourceMark rm;
1159 HandleMark hm; // Handles created during verification can be zapped
1160 _verify_count++;
1161
1162 FormatBuffer<> title("Verifying %s", prefix);
1163 GCTraceTime(Info, gc, verify) tm(title.buffer());
1164 if (should_verify_subset(Verify_Threads)) {
1165 log_debug(gc, verify)("Threads");
1166 Threads::verify();
1167 }
1168 if (should_verify_subset(Verify_Heap)) {
1169 log_debug(gc, verify)("Heap");
1170 GC::gc()->heap()->verify(option);
1171 }
1172 if (should_verify_subset(Verify_SymbolTable)) {
1173 log_debug(gc, verify)("SymbolTable");
1174 SymbolTable::verify();
1175 }
1176 if (should_verify_subset(Verify_StringTable)) {
1177 log_debug(gc, verify)("StringTable");
1178 StringTable::verify();
1179 }
1180 if (should_verify_subset(Verify_CodeCache)) {
1181 {
1182 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1183 log_debug(gc, verify)("CodeCache");
1184 CodeCache::verify();
1185 }
1186 }
1187 if (should_verify_subset(Verify_SystemDictionary)) {
1188 log_debug(gc, verify)("SystemDictionary");
1189 SystemDictionary::verify();
1190 }
1230 while ((mask & diff) != 0)
1231 mask <<= 1;
1232 uintptr_t bits = (min & mask);
1233 assert(bits == (max & mask), "correct mask");
1234 // check an intermediate value between min and max, just to make sure:
1235 assert(bits == ((min + (max-min)/2) & mask), "correct mask");
1236
1237 // require address alignment, too:
1238 mask |= (alignSize - 1);
1239
1240 if (!(_verify_oop_mask == 0 && _verify_oop_bits == (uintptr_t)-1)) {
1241 assert(_verify_oop_mask == mask && _verify_oop_bits == bits, "mask stability");
1242 }
1243 _verify_oop_mask = mask;
1244 _verify_oop_bits = bits;
1245 }
1246
1247 // Oop verification (see MacroAssembler::verify_oop)
1248
// Recompute the oop-verification mask from the heap's reserved range
// and return it (see MacroAssembler::verify_oop).
uintptr_t Universe::verify_oop_mask() {
  MemRegion m = GC::gc()->heap()->reserved_region();
  calculate_verify_data(m.start(), m.end());
  return _verify_oop_mask;
}
1254
// Recompute the oop-verification data from the heap's reserved range
// and return the expected bits under the mask.
uintptr_t Universe::verify_oop_bits() {
  MemRegion m = GC::gc()->heap()->reserved_region();
  calculate_verify_data(m.start(), m.end());
  return _verify_oop_bits;
}
1260
// Mask selecting the lock bits of a mark word, for header verification.
uintptr_t Universe::verify_mark_mask() {
  return markOopDesc::lock_mask_in_place;
}
1264
// Expected bits of a prototype (freshly-initialized) mark word under
// verify_mark_mask(); asserts the prototype has no bits outside the mask.
uintptr_t Universe::verify_mark_bits() {
  intptr_t mask = verify_mark_mask();
  intptr_t bits = (intptr_t)markOopDesc::prototype();
  assert((bits & ~mask) == 0, "no stray header bits");
  return bits;
}
1271 #endif // PRODUCT
1272
1273
1274 void Universe::compute_verify_oop_data() {
1275 verify_oop_mask();
1276 verify_oop_bits();
|