// Interfaces implemented by all array klasses — NOTE(review): inferred from
// the name (presumably Cloneable/Serializable); confirm in universe.cpp.
164 static Array<Klass*>* _the_array_interfaces_array;
165
166 // array of preallocated error objects with backtrace
167 static objArrayOop _preallocated_out_of_memory_error_array;
168
169 // number of preallocated error objects available for use
170 static volatile jint _preallocated_out_of_memory_error_avail_count;
171
172 static oop _null_ptr_exception_instance; // preallocated exception object
173 static oop _arithmetic_exception_instance; // preallocated exception object
174 static oop _virtual_machine_error_instance; // preallocated exception object
175 // The object used as an exception dummy when exceptions are thrown for
176 // the vm thread.
177 static oop _vm_exception;
178
179 // The particular choice of collected heap.
180 static CollectedHeap* _collectedHeap;
181
182 // For UseCompressedOops.
183 static struct NarrowPtrStruct _narrow_oop;
184 // For UseCompressedKlassPointers.
185 static struct NarrowPtrStruct _narrow_klass;
// Base address whose address is handed out via narrow_ptrs_base_addr() below;
// NOTE(review): presumably shared by oop and klass decoding in generated code — confirm.
186 static address _narrow_ptrs_base;
187
188 // array of dummy objects used with +FullGCAlot
189 debug_only(static objArrayOop _fullgc_alot_dummy_array;)
190 // index of next entry to clear
191 debug_only(static int _fullgc_alot_dummy_next;)
192
193 // Compiler/dispatch support
194 static int _base_vtable_size; // Java vtbl size of klass Object (in words)
195
196 // Initialization
197 static bool _bootstrapping; // true during genesis
198 static bool _fully_initialized; // true after universe_init and initialize_vtables called
200 // the array of preallocated errors with backtraces
201 static objArrayOop preallocated_out_of_memory_errors() { return _preallocated_out_of_memory_error_array; }
202
203 // generate an out of memory error; if possible using an error with preallocated backtrace;
204 // otherwise return the given default error.
// NOTE(review): the declaration the two comment lines above describe (listing
// lines 205-211) is not visible in this excerpt.
212 static void initialize_basic_type_mirrors(TRAPS);
213 static void fixup_mirrors(TRAPS);
214
215 static void reinitialize_vtable_of(KlassHandle h_k, TRAPS);
216 static void reinitialize_itables(TRAPS);
217 static void compute_base_vtable_size(); // compute vtable size of class Object
218
219 static void genesis(TRAPS); // Create the initial world
220
221 // Mirrors for primitive classes (created eagerly)
// Checked pass-through: asserts the mirror is non-NULL (i.e. already
// initialized), then returns the argument unchanged.
222 static oop check_mirror(oop m) {
223 assert(m != NULL, "mirror not initialized");
224 return m;
225 }
226
// Setters for the compressed-pointer encoding parameters; each asserts that
// its controlling flag is enabled before writing the shared struct.
227 static void set_narrow_oop_base(address base) {
228 assert(UseCompressedOops, "no compressed oops?");
229 _narrow_oop._base = base;
230 }
231 static void set_narrow_klass_base(address base) {
232 assert(UseCompressedKlassPointers, "no compressed klass ptrs?");
233 _narrow_klass._base = base;
234 }
// Controls whether generated code may rely on implicit null checks when
// decoding narrow oops.
235 static void set_narrow_oop_use_implicit_null_checks(bool use) {
236 assert(UseCompressedOops, "no compressed ptrs?");
237 _narrow_oop._use_implicit_null_checks = use;
238 }
239
240 // Debugging
241 static int _verify_count; // number of verifies done
242 // True during call to verify(). Should only be set/cleared in verify().
243 static bool _verify_in_progress;
244
245 static void compute_verify_oop_data();
246
247 public:
248 // Known classes in the VM
// Accessors for the preallocated primitive-array klasses. The backing
// _xxxArrayKlassObj fields are declared outside this excerpt.
249 static Klass* boolArrayKlassObj() { return _boolArrayKlassObj; }
250 static Klass* byteArrayKlassObj() { return _byteArrayKlassObj; }
251 static Klass* charArrayKlassObj() { return _charArrayKlassObj; }
252 static Klass* intArrayKlassObj() { return _intArrayKlassObj; }
336 // Narrow Oop encoding mode:
337 // 0 - Use 32-bits oops without encoding when
338 // NarrowOopHeapBaseMin + heap_size < 4Gb
339 // 1 - Use zero based compressed oops with encoding when
340 // NarrowOopHeapBaseMin + heap_size < 32Gb
341 // 2 - Use compressed oops with heap base + encoding.
342 enum NARROW_OOP_MODE {
343 UnscaledNarrowOop = 0,
344 ZeroBasedNarrowOop = 1,
345 HeapBasedNarrowOop = 2
346 };
347 static NARROW_OOP_MODE narrow_oop_mode();
348 static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
// Compute a preferred attach address for the Java heap / class metaspace so
// the requested encoding mode can be used.
349 static char* preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
350 static char* preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
// Read-only accessors for the compressed-oop encoding parameters.
351 static address narrow_oop_base() { return _narrow_oop._base; }
352 static bool is_narrow_oop_base(void* addr) { return (narrow_oop_base() == (address)addr); }
353 static int narrow_oop_shift() { return _narrow_oop._shift; }
354 static bool narrow_oop_use_implicit_null_checks() { return _narrow_oop._use_implicit_null_checks; }
355
356 // For UseCompressedKlassPointers
357 static address narrow_klass_base() { return _narrow_klass._base; }
358 static bool is_narrow_klass_base(void* addr) { return (narrow_klass_base() == (address)addr); }
359 static int narrow_klass_shift() { return _narrow_klass._shift; }
360 static bool narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; }
361
// The address of _narrow_ptrs_base is exported so generated code can load the
// current base indirectly.
362 static address* narrow_ptrs_base_addr() { return &_narrow_ptrs_base; }
363 static void set_narrow_ptrs_base(address a) { _narrow_ptrs_base = a; }
364 static address narrow_ptrs_base() { return _narrow_ptrs_base; }
365
366 // this is set in vm_version on sparc (and then reset in universe afaict)
// NOTE(review): unlike the base setters above, no flag assert here —
// presumably because vm_version may run before flag ergonomics are final; confirm.
367 static void set_narrow_oop_shift(int shift) {
368 _narrow_oop._shift = shift;
369 }
370
// Klass-pointer shift is constrained to 0 or LogKlassAlignmentInBytes.
371 static void set_narrow_klass_shift(int shift) {
372 assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs");
373 _narrow_klass._shift = shift;
374 }
375
376 // Reserve Java heap and determine CompressedOops mode
|
// Interfaces implemented by all array klasses — NOTE(review): inferred from
// the name (presumably Cloneable/Serializable); confirm in universe.cpp.
164 static Array<Klass*>* _the_array_interfaces_array;
165
166 // array of preallocated error objects with backtrace
167 static objArrayOop _preallocated_out_of_memory_error_array;
168
169 // number of preallocated error objects available for use
170 static volatile jint _preallocated_out_of_memory_error_avail_count;
171
172 static oop _null_ptr_exception_instance; // preallocated exception object
173 static oop _arithmetic_exception_instance; // preallocated exception object
174 static oop _virtual_machine_error_instance; // preallocated exception object
175 // The object used as an exception dummy when exceptions are thrown for
176 // the vm thread.
177 static oop _vm_exception;
178
179 // The particular choice of collected heap.
180 static CollectedHeap* _collectedHeap;
181
182 // For UseCompressedOops.
183 static struct NarrowPtrStruct _narrow_oop;
184 // For UseCompressedClassPointers.
185 static struct NarrowPtrStruct _narrow_klass;
// Base address whose address is handed out via narrow_ptrs_base_addr() below;
// NOTE(review): presumably shared by oop and klass decoding in generated code — confirm.
186 static address _narrow_ptrs_base;
187
188 // array of dummy objects used with +FullGCAlot
189 debug_only(static objArrayOop _fullgc_alot_dummy_array;)
190 // index of next entry to clear
191 debug_only(static int _fullgc_alot_dummy_next;)
192
193 // Compiler/dispatch support
194 static int _base_vtable_size; // Java vtbl size of klass Object (in words)
195
196 // Initialization
197 static bool _bootstrapping; // true during genesis
198 static bool _fully_initialized; // true after universe_init and initialize_vtables called
200 // the array of preallocated errors with backtraces
201 static objArrayOop preallocated_out_of_memory_errors() { return _preallocated_out_of_memory_error_array; }
202
203 // generate an out of memory error; if possible using an error with preallocated backtrace;
204 // otherwise return the given default error.
// NOTE(review): the declaration the two comment lines above describe (listing
// lines 205-211) is not visible in this excerpt.
212 static void initialize_basic_type_mirrors(TRAPS);
213 static void fixup_mirrors(TRAPS);
214
215 static void reinitialize_vtable_of(KlassHandle h_k, TRAPS);
216 static void reinitialize_itables(TRAPS);
217 static void compute_base_vtable_size(); // compute vtable size of class Object
218
219 static void genesis(TRAPS); // Create the initial world
220
221 // Mirrors for primitive classes (created eagerly)
// Checked pass-through: asserts the mirror is non-NULL (i.e. already
// initialized), then returns the argument unchanged.
222 static oop check_mirror(oop m) {
223 assert(m != NULL, "mirror not initialized");
224 return m;
225 }
226
// Setters for the compressed-pointer encoding parameters; each asserts that
// its controlling flag is enabled before writing the shared struct.
227 static void set_narrow_oop_base(address base) {
228 assert(UseCompressedOops, "no compressed oops?");
229 _narrow_oop._base = base;
230 }
231 static void set_narrow_klass_base(address base) {
232 assert(UseCompressedClassPointers, "no compressed klass ptrs?");
233 _narrow_klass._base = base;
234 }
// Controls whether generated code may rely on implicit null checks when
// decoding narrow oops.
235 static void set_narrow_oop_use_implicit_null_checks(bool use) {
236 assert(UseCompressedOops, "no compressed ptrs?");
237 _narrow_oop._use_implicit_null_checks = use;
238 }
239
240 // Debugging
241 static int _verify_count; // number of verifies done
242 // True during call to verify(). Should only be set/cleared in verify().
243 static bool _verify_in_progress;
244
245 static void compute_verify_oop_data();
246
247 public:
248 // Known classes in the VM
// Accessors for the preallocated primitive-array klasses. The backing
// _xxxArrayKlassObj fields are declared outside this excerpt.
249 static Klass* boolArrayKlassObj() { return _boolArrayKlassObj; }
250 static Klass* byteArrayKlassObj() { return _byteArrayKlassObj; }
251 static Klass* charArrayKlassObj() { return _charArrayKlassObj; }
252 static Klass* intArrayKlassObj() { return _intArrayKlassObj; }
336 // Narrow Oop encoding mode:
337 // 0 - Use 32-bits oops without encoding when
338 // NarrowOopHeapBaseMin + heap_size < 4Gb
339 // 1 - Use zero based compressed oops with encoding when
340 // NarrowOopHeapBaseMin + heap_size < 32Gb
341 // 2 - Use compressed oops with heap base + encoding.
342 enum NARROW_OOP_MODE {
343 UnscaledNarrowOop = 0,
344 ZeroBasedNarrowOop = 1,
345 HeapBasedNarrowOop = 2
346 };
347 static NARROW_OOP_MODE narrow_oop_mode();
348 static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
// Compute a preferred attach address for the Java heap / class metaspace so
// the requested encoding mode can be used.
349 static char* preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
350 static char* preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
// Read-only accessors for the compressed-oop encoding parameters.
351 static address narrow_oop_base() { return _narrow_oop._base; }
352 static bool is_narrow_oop_base(void* addr) { return (narrow_oop_base() == (address)addr); }
353 static int narrow_oop_shift() { return _narrow_oop._shift; }
354 static bool narrow_oop_use_implicit_null_checks() { return _narrow_oop._use_implicit_null_checks; }
355
356 // For UseCompressedClassPointers
357 static address narrow_klass_base() { return _narrow_klass._base; }
358 static bool is_narrow_klass_base(void* addr) { return (narrow_klass_base() == (address)addr); }
359 static int narrow_klass_shift() { return _narrow_klass._shift; }
360 static bool narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; }
361
// The address of _narrow_ptrs_base is exported so generated code can load the
// current base indirectly.
362 static address* narrow_ptrs_base_addr() { return &_narrow_ptrs_base; }
363 static void set_narrow_ptrs_base(address a) { _narrow_ptrs_base = a; }
364 static address narrow_ptrs_base() { return _narrow_ptrs_base; }
365
366 // this is set in vm_version on sparc (and then reset in universe afaict)
// NOTE(review): unlike the base setters above, no flag assert here —
// presumably because vm_version may run before flag ergonomics are final; confirm.
367 static void set_narrow_oop_shift(int shift) {
368 _narrow_oop._shift = shift;
369 }
370
// Klass-pointer shift is constrained to 0 or LogKlassAlignmentInBytes.
371 static void set_narrow_klass_shift(int shift) {
372 assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs");
373 _narrow_klass._shift = shift;
374 }
375
376 // Reserve Java heap and determine CompressedOops mode
|