85 // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
86 int _shift;
87 // Generate code with implicit null checks for narrow ptrs.
88 bool _use_implicit_null_checks;
89 };
90
// Selector for heap verification. NOTE(review): presumably consumed by the
// heap/collector verification code - confirm at the call sites.
91 enum VerifyOption {
92 VerifyOption_Default = 0,
93
94 // G1
// For G1 the default verifies against the "previous" marking information;
// the other options select the "next" marking or the object mark word.
95 VerifyOption_G1UsePrevMarking = VerifyOption_Default,
96 VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
97 VerifyOption_G1UseMarkWord = VerifyOption_G1UseNextMarking + 1
98 };
99
100 class Universe: AllStatic {
101 // Ugh. Universe is much too friendly.
102 friend class MarkSweep;
103 friend class oopDesc;
104 friend class ClassLoader;
105 friend class Arguments;
106 friend class SystemDictionary;
107 friend class VMStructs;
108 friend class VM_PopulateDumpSharedSpace;
109 friend class Metaspace;
110
111 friend jint universe_init();
112 friend void universe2_init();
113 friend bool universe_post_init();
114
115 private:
116 // Known classes in the VM
// Cached Klass* for each primitive array type.
// NOTE(review): assumed to be filled in once during VM bootstrap (see the
// friend universe_init() above) - confirm in universe.cpp.
117 static Klass* _boolArrayKlassObj;
118 static Klass* _byteArrayKlassObj;
119 static Klass* _charArrayKlassObj;
120 static Klass* _intArrayKlassObj;
121 static Klass* _shortArrayKlassObj;
122 static Klass* _longArrayKlassObj;
123 static Klass* _singleArrayKlassObj;
124 static Klass* _doubleArrayKlassObj;
// Same klasses in a table indexed by BasicType; sized so T_VOID is the last slot.
125 static Klass* _typeArrayKlassObjs[T_VOID+1];
126
332 // Accessors needed for fast allocation
// Each returns the ADDRESS of the cached Klass* slot, not its value.
// NOTE(review): presumably so generated allocation code can load the klass
// indirectly through a fixed slot address - confirm against the JIT users.
333 static Klass** boolArrayKlassObj_addr() { return &_boolArrayKlassObj; }
334 static Klass** byteArrayKlassObj_addr() { return &_byteArrayKlassObj; }
335 static Klass** charArrayKlassObj_addr() { return &_charArrayKlassObj; }
336 static Klass** intArrayKlassObj_addr() { return &_intArrayKlassObj; }
337 static Klass** shortArrayKlassObj_addr() { return &_shortArrayKlassObj; }
338 static Klass** longArrayKlassObj_addr() { return &_longArrayKlassObj; }
339 static Klass** singleArrayKlassObj_addr() { return &_singleArrayKlassObj; }
340 static Klass** doubleArrayKlassObj_addr() { return &_doubleArrayKlassObj; }
// _objectArrayKlassObj is declared in a part of the class not shown here.
341 static Klass** objectArrayKlassObj_addr() { return &_objectArrayKlassObj; }
342
343 // The particular choice of collected heap.
344 static CollectedHeap* heap() { return _collectedHeap; }
345
346 // For UseCompressedOops
347 // Narrow Oop encoding mode:
348 // 0 - Use 32-bits oops without encoding when
349 // NarrowOopHeapBaseMin + heap_size < 4Gb
350 // 1 - Use zero based compressed oops with encoding when
351 // NarrowOopHeapBaseMin + heap_size < 32Gb
352 // 2 - Use compressed oops with heap base + encoding.
353 enum NARROW_OOP_MODE {
354 UnscaledNarrowOop = 0,
355 ZeroBasedNarrowOop = 1,
356 HeapBasedNarrowOop = 2
357 };
// Reports the mode chosen for the current heap placement.
358 static NARROW_OOP_MODE narrow_oop_mode();
359 static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
// Suggest a base address that permits the requested encoding mode.
360 static char* preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
361 static char* preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
// Accessors for the narrow-oop encoding parameters (base, shift, null checks).
362 static address narrow_oop_base() { return _narrow_oop._base; }
363 static bool is_narrow_oop_base(void* addr) { return (narrow_oop_base() == (address)addr); }
364 static int narrow_oop_shift() { return _narrow_oop._shift; }
365 static bool narrow_oop_use_implicit_null_checks() { return _narrow_oop._use_implicit_null_checks; }
366
366
367 // For UseCompressedClassPointers
// Accessors for the narrow-klass encoding parameters, mirroring the
// narrow-oop accessors above.
368 static address narrow_klass_base() { return _narrow_klass._base; }
369 static bool is_narrow_klass_base(void* addr) { return (narrow_klass_base() == (address)addr); }
370 static int narrow_klass_shift() { return _narrow_klass._shift; }
371 static bool narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; }
372
// Shared base for narrow pointers; the *_addr variant exposes the slot
// address itself (NOTE(review): presumably for code generation - confirm).
373 static address* narrow_ptrs_base_addr() { return &_narrow_ptrs_base; }
374 static void set_narrow_ptrs_base(address a) { _narrow_ptrs_base = a; }
375 static address narrow_ptrs_base() { return _narrow_ptrs_base; }
376
377 static void print_compressed_oops_mode();
378
379 // this is set in vm_version on sparc (and then reset in universe afaict)
380 static void set_narrow_oop_shift(int shift) {
381 _narrow_oop._shift = shift;
382 }
|
85 // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
86 int _shift;
87 // Generate code with implicit null checks for narrow ptrs.
88 bool _use_implicit_null_checks;
89 };
90
// Selector for heap verification. NOTE(review): presumably consumed by the
// heap/collector verification code - confirm at the call sites.
91 enum VerifyOption {
92 VerifyOption_Default = 0,
93
94 // G1
// For G1 the default verifies against the "previous" marking information;
// the other options select the "next" marking or the object mark word.
95 VerifyOption_G1UsePrevMarking = VerifyOption_Default,
96 VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
97 VerifyOption_G1UseMarkWord = VerifyOption_G1UseNextMarking + 1
98 };
99
100 class Universe: AllStatic {
101 // Ugh. Universe is much too friendly.
102 friend class MarkSweep;
103 friend class oopDesc;
104 friend class ClassLoader;
105 friend class SystemDictionary;
106 friend class ReservedSpace;
107 friend class VMStructs;
108 friend class VM_PopulateDumpSharedSpace;
109 friend class Metaspace;
110
111 friend jint universe_init();
112 friend void universe2_init();
113 friend bool universe_post_init();
114
115 private:
116 // Known classes in the VM
// Cached Klass* for each primitive array type.
// NOTE(review): assumed to be filled in once during VM bootstrap (see the
// friend universe_init() above) - confirm in universe.cpp.
117 static Klass* _boolArrayKlassObj;
118 static Klass* _byteArrayKlassObj;
119 static Klass* _charArrayKlassObj;
120 static Klass* _intArrayKlassObj;
121 static Klass* _shortArrayKlassObj;
122 static Klass* _longArrayKlassObj;
123 static Klass* _singleArrayKlassObj;
124 static Klass* _doubleArrayKlassObj;
// Same klasses in a table indexed by BasicType; sized so T_VOID is the last slot.
125 static Klass* _typeArrayKlassObjs[T_VOID+1];
126
332 // Accessors needed for fast allocation
// Each returns the ADDRESS of the cached Klass* slot, not its value.
// NOTE(review): presumably so generated allocation code can load the klass
// indirectly through a fixed slot address - confirm against the JIT users.
333 static Klass** boolArrayKlassObj_addr() { return &_boolArrayKlassObj; }
334 static Klass** byteArrayKlassObj_addr() { return &_byteArrayKlassObj; }
335 static Klass** charArrayKlassObj_addr() { return &_charArrayKlassObj; }
336 static Klass** intArrayKlassObj_addr() { return &_intArrayKlassObj; }
337 static Klass** shortArrayKlassObj_addr() { return &_shortArrayKlassObj; }
338 static Klass** longArrayKlassObj_addr() { return &_longArrayKlassObj; }
339 static Klass** singleArrayKlassObj_addr() { return &_singleArrayKlassObj; }
340 static Klass** doubleArrayKlassObj_addr() { return &_doubleArrayKlassObj; }
// _objectArrayKlassObj is declared in a part of the class not shown here.
341 static Klass** objectArrayKlassObj_addr() { return &_objectArrayKlassObj; }
342
343 // The particular choice of collected heap.
344 static CollectedHeap* heap() { return _collectedHeap; }
345
346 // For UseCompressedOops
347 // Narrow Oop encoding mode:
348 // 0 - Use 32-bits oops without encoding when
349 // NarrowOopHeapBaseMin + heap_size < 4Gb
350 // 1 - Use zero based compressed oops with encoding when
351 // NarrowOopHeapBaseMin + heap_size < 32Gb
352 // 2 - Use compressed oops with disjoint heap base if
353 // base is 32G-aligned and base > 0. This allows certain
354 // optimizations in encoding/decoding.
355 // 3 - Use compressed oops with heap base + encoding.
356 enum NARROW_OOP_MODE {
357 UnscaledNarrowOop = 0,
358 ZeroBasedNarrowOop = 1,
359 DisjointBaseNarrowOop = 2,
360 HeapBasedNarrowOop = 3,
// Sentinel meaning "no preference"; not an actual encoding.
361 AnyNarrowOopMode = 4
362 };
// Reports the mode chosen for the current heap placement.
363 static NARROW_OOP_MODE narrow_oop_mode();
364 static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
// Suggest a base address that permits the requested encoding mode.
365 static char* preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
366 static char* preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
367 static address narrow_oop_base() { return _narrow_oop._base; }
368 // Test whether bits of addr and possible offsets into the heap overlap.
// The mask (all-ones >> (32 - LogMinObjAlignmentInBytes)) selects the low
// (32 + LogMinObjAlignmentInBytes) bits - the bits a decoded heap offset
// (a 32-bit narrow oop shifted left) can occupy, assuming the oop shift is
// at most LogMinObjAlignmentInBytes. A base is "disjoint" when it has no
// bits in that range, so decode can OR the base in instead of adding it.
369 static bool is_disjoint_heap_base_address(address addr) {
370 return (((uint64_t)(intptr_t)addr) &
371 (((uint64_t)UCONST64(0xFFFFffffFFFFffff)) >> (32-LogMinObjAlignmentInBytes))) == 0;
372 }
373 static char** get_attach_addresses_for_disjoint_mode();
374 // Check for disjoint base compressed oops.
375 static bool narrow_oop_base_disjoint() {
376 return _narrow_oop._base != NULL && is_disjoint_heap_base_address(_narrow_oop._base);
377 }
378 // Check for real heapbased compressed oops.
379 // We must subtract the base as the bits overlap.
380 // If we negate above function, we also get unscaled and zerobased.
381 static bool narrow_oop_base_overlaps() {
382 return _narrow_oop._base != NULL && !is_disjoint_heap_base_address(_narrow_oop._base);
383 }
384 static bool is_narrow_oop_base(void* addr) { return (narrow_oop_base() == (address)addr); }
385 static int narrow_oop_shift() { return _narrow_oop._shift; }
386 static bool narrow_oop_use_implicit_null_checks() { return _narrow_oop._use_implicit_null_checks; }
387
387
388 // For UseCompressedClassPointers
// Accessors for the narrow-klass encoding parameters, mirroring the
// narrow-oop accessors above.
389 static address narrow_klass_base() { return _narrow_klass._base; }
390 static bool is_narrow_klass_base(void* addr) { return (narrow_klass_base() == (address)addr); }
391 static int narrow_klass_shift() { return _narrow_klass._shift; }
392 static bool narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; }
393
// Shared base for narrow pointers; the *_addr variant exposes the slot
// address itself (NOTE(review): presumably for code generation - confirm).
394 static address* narrow_ptrs_base_addr() { return &_narrow_ptrs_base; }
395 static void set_narrow_ptrs_base(address a) { _narrow_ptrs_base = a; }
396 static address narrow_ptrs_base() { return _narrow_ptrs_base; }
397
398 static void print_compressed_oops_mode();
399
400 // this is set in vm_version on sparc (and then reset in universe afaict)
401 static void set_narrow_oop_shift(int shift) {
402 _narrow_oop._shift = shift;
403 }
|