178 static oop _virtual_machine_error_instance; // preallocated exception object
179 // The object used as an exception dummy when exceptions are thrown for
180 // the vm thread.
181 static oop _vm_exception;
182
// NOTE(review): notification object for allocation-context changes; exact
// consumer not visible here — confirm against the runtime code that posts it.
183 static oop _allocation_context_notification_obj;
184
185 // References waiting to be transferred to the ReferenceHandler
186 static oop _reference_pending_list;
187
188 // The particular choice of collected heap.
189 static CollectedHeap* _collectedHeap;
190
// Bit pattern guaranteed not to collide with a real oop value
// (NOTE(review): initialized elsewhere — confirm where it is set).
191 static intptr_t _non_oop_bits;
192
193 // For UseCompressedOops.
194 static struct NarrowPtrStruct _narrow_oop;
195 // For UseCompressedClassPointers.
196 static struct NarrowPtrStruct _narrow_klass;
// Shared base address used for compressed-pointer decoding (see the
// narrow_ptrs_base() accessors below).
197 static address _narrow_ptrs_base;
198
199 // array of dummy objects used with +FullGCAlot
200 debug_only(static objArrayOop _fullgc_alot_dummy_array;)
201 // index of next entry to clear
202 debug_only(static int _fullgc_alot_dummy_next;)
203
204 // Compiler/dispatch support
205 static int _base_vtable_size; // Java vtbl size of klass Object (in words)
206
207 // Initialization
208 static bool _bootstrapping; // true during genesis
209 static bool _module_initialized; // true after call_initPhase2 called
210 static bool _fully_initialized; // true after universe_init and initialize_vtables called
211
212 // the array of preallocated errors with backtraces
213 static objArrayOop preallocated_out_of_memory_errors() { return _preallocated_out_of_memory_error_array; }
214
215 // generate an out of memory error; if possible using an error with preallocated backtrace;
216 // otherwise return the given default error.
217 static oop gen_out_of_memory_error(oop default_err);
421 // For UseCompressedClassPointers
// Accessors for the compressed-klass-pointer encoding parameters held in
// _narrow_klass (base, shift, implicit-null-check capability).
422 static address narrow_klass_base() { return _narrow_klass._base; }
423 static bool is_narrow_klass_base(void* addr) { return (narrow_klass_base() == (address)addr); }
424 static int narrow_klass_shift() { return _narrow_klass._shift; }
425 static bool narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; }
426
// Shared base pointer used by compressed oops/klass decoding; the address
// form is exposed so generated code can patch/load it directly.
427 static address* narrow_ptrs_base_addr() { return &_narrow_ptrs_base; }
428 static void set_narrow_ptrs_base(address a) { _narrow_ptrs_base = a; }
429 static address narrow_ptrs_base() { return _narrow_ptrs_base; }
430
431 static void print_compressed_oops_mode(outputStream* st);
432
433 // this is set in vm_version on sparc (and then reset in universe afaict)
434 static void set_narrow_oop_shift(int shift) {
435 _narrow_oop._shift = shift;
436 }
437
// Sets the compressed-klass-pointer shift; only 0 or
// LogKlassAlignmentInBytes are valid (enforced by the assert).
438 static void set_narrow_klass_shift(int shift) {
439 assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs");
440 _narrow_klass._shift = shift;
441 }
442
443 // Reserve Java heap and determine CompressedOops mode
444 static ReservedSpace reserve_heap(size_t heap_size, size_t alignment);
445
446 // Historic gc information
447 static size_t get_heap_capacity_at_last_gc() { return _heap_capacity_at_last_gc; }
448 static size_t get_heap_free_at_last_gc() { return _heap_capacity_at_last_gc - _heap_used_at_last_gc; }
449 static size_t get_heap_used_at_last_gc() { return _heap_used_at_last_gc; }
450 static void update_heap_info_at_gc();
451
452 // Testers
453 static bool is_bootstrapping() { return _bootstrapping; }
454 static bool is_module_initialized() { return _module_initialized; }
455 static bool is_fully_initialized() { return _fully_initialized; }
456
457 static inline bool element_type_should_be_aligned(BasicType type);
458 static inline bool field_type_should_be_aligned(BasicType type);
459 static bool on_page_boundary(void* addr);
460 static bool should_fill_in_stack_trace(Handle throwable);
461 static void check_alignment(uintx size, uintx alignment, const char* name);
|
178 static oop _virtual_machine_error_instance; // preallocated exception object
179 // The object used as an exception dummy when exceptions are thrown for
180 // the vm thread.
181 static oop _vm_exception;
182
// NOTE(review): notification object for allocation-context changes; exact
// consumer not visible here — confirm against the runtime code that posts it.
183 static oop _allocation_context_notification_obj;
184
185 // References waiting to be transferred to the ReferenceHandler
186 static oop _reference_pending_list;
187
188 // The particular choice of collected heap.
189 static CollectedHeap* _collectedHeap;
190
// Bit pattern guaranteed not to collide with a real oop value
// (NOTE(review): initialized elsewhere — confirm where it is set).
191 static intptr_t _non_oop_bits;
192
193 // For UseCompressedOops.
194 static struct NarrowPtrStruct _narrow_oop;
195 // For UseCompressedClassPointers.
196 static struct NarrowPtrStruct _narrow_klass;
// Shared base address used for compressed-pointer decoding.
197 static address _narrow_ptrs_base;
// VBC-only mask kept in sync with the klass-pointer shift by
// set_narrow_klass_shift() below; semantics of the mask values
// (1 vs. KlassPtrEvenOddMask) are defined elsewhere — TODO confirm.
198 #if INCLUDE_VBC
199 static int _oop_metadata_odd_mask;
200 #endif
201 // array of dummy objects used with +FullGCAlot
202 debug_only(static objArrayOop _fullgc_alot_dummy_array;)
203 // index of next entry to clear
204 debug_only(static int _fullgc_alot_dummy_next;)
205
206 // Compiler/dispatch support
207 static int _base_vtable_size; // Java vtbl size of klass Object (in words)
208
209 // Initialization
210 static bool _bootstrapping; // true during genesis
211 static bool _module_initialized; // true after call_initPhase2 called
212 static bool _fully_initialized; // true after universe_init and initialize_vtables called
213
214 // the array of preallocated errors with backtraces
215 static objArrayOop preallocated_out_of_memory_errors() { return _preallocated_out_of_memory_error_array; }
216
217 // generate an out of memory error; if possible using an error with preallocated backtrace;
218 // otherwise return the given default error.
219 static oop gen_out_of_memory_error(oop default_err);
423 // For UseCompressedClassPointers
// Accessors for the compressed-klass-pointer encoding parameters held in
// _narrow_klass (base, shift, implicit-null-check capability).
424 static address narrow_klass_base() { return _narrow_klass._base; }
425 static bool is_narrow_klass_base(void* addr) { return (narrow_klass_base() == (address)addr); }
426 static int narrow_klass_shift() { return _narrow_klass._shift; }
427 static bool narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; }
428
// Shared base pointer used by compressed oops/klass decoding; the address
// form is exposed so generated code can patch/load it directly.
429 static address* narrow_ptrs_base_addr() { return &_narrow_ptrs_base; }
430 static void set_narrow_ptrs_base(address a) { _narrow_ptrs_base = a; }
431 static address narrow_ptrs_base() { return _narrow_ptrs_base; }
432
433 static void print_compressed_oops_mode(outputStream* st);
434
435 // this is set in vm_version on sparc (and then reset in universe afaict)
436 static void set_narrow_oop_shift(int shift) {
437 _narrow_oop._shift = shift;
438 }
439
// Sets the compressed-klass-pointer shift (0 or LogKlassAlignmentInBytes
// only, enforced by the assert). Under INCLUDE_VBC the metadata odd-mask
// is derived from the shift here so the two can never disagree
// (NOTE(review): meaning of mask value 1 vs. KlassPtrEvenOddMask is
// defined elsewhere — confirm against the VBC decoding code).
440 static void set_narrow_klass_shift(int shift) {
441 assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs");
442 _narrow_klass._shift = shift;
443 #if INCLUDE_VBC
444 _oop_metadata_odd_mask = (shift) ? 1 : KlassPtrEvenOddMask;
445 #endif
446 }
447
448 #if INCLUDE_VBC
449 static int oop_metadata_odd_mask() { return _oop_metadata_odd_mask; }
450 #endif
451
452
453 // Reserve Java heap and determine CompressedOops mode
454 static ReservedSpace reserve_heap(size_t heap_size, size_t alignment);
455
456 // Historic gc information
457 static size_t get_heap_capacity_at_last_gc() { return _heap_capacity_at_last_gc; }
458 static size_t get_heap_free_at_last_gc() { return _heap_capacity_at_last_gc - _heap_used_at_last_gc; }
459 static size_t get_heap_used_at_last_gc() { return _heap_used_at_last_gc; }
460 static void update_heap_info_at_gc();
461
462 // Testers
463 static bool is_bootstrapping() { return _bootstrapping; }
464 static bool is_module_initialized() { return _module_initialized; }
465 static bool is_fully_initialized() { return _fully_initialized; }
466
467 static inline bool element_type_should_be_aligned(BasicType type);
468 static inline bool field_type_should_be_aligned(BasicType type);
469 static bool on_page_boundary(void* addr);
470 static bool should_fill_in_stack_trace(Handle throwable);
471 static void check_alignment(uintx size, uintx alignment, const char* name);
|