85 // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
86 int _shift;
87 // Generate code with implicit null checks for narrow ptrs.
88 bool _use_implicit_null_checks;
89 };
90
91 enum VerifyOption {
     // Selects which marking information heap verification reads.
     // Only G1 distinguishes the non-default values below.
92   VerifyOption_Default = 0,
93
94   // G1
     // Verify against the "previous" marking information (the G1 default).
95   VerifyOption_G1UsePrevMarking = VerifyOption_Default,
     // Verify against the "next" marking information.
96   VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
     // Verify using each object's mark word.
97   VerifyOption_G1UseMarkWord = VerifyOption_G1UseNextMarking + 1
98 };
99
100 class Universe: AllStatic {
101 // Ugh. Universe is much too friendly.
102 friend class MarkSweep;
103 friend class oopDesc;
104 friend class ClassLoader;
105 friend class Arguments;
106 friend class SystemDictionary;
107 friend class VMStructs;
108 friend class VM_PopulateDumpSharedSpace;
109 friend class Metaspace;
110
111 friend jint universe_init();
112 friend void universe2_init();
113 friend bool universe_post_init();
114
115 private:
116 // Known classes in the VM
117 static Klass* _boolArrayKlassObj;
118 static Klass* _byteArrayKlassObj;
119 static Klass* _charArrayKlassObj;
120 static Klass* _intArrayKlassObj;
121 static Klass* _shortArrayKlassObj;
122 static Klass* _longArrayKlassObj;
123 static Klass* _singleArrayKlassObj;
124 static Klass* _doubleArrayKlassObj;
125 static Klass* _typeArrayKlassObjs[T_VOID+1];
126
334   // Accessors needed for fast allocation
      // Each returns the address of the corresponding well-known array-klass
      // root, so generated allocation fast paths can load the Klass* through
      // a fixed address instead of embedding the pointer value itself.
335   static Klass** boolArrayKlassObj_addr() { return &_boolArrayKlassObj; }
336   static Klass** byteArrayKlassObj_addr() { return &_byteArrayKlassObj; }
337   static Klass** charArrayKlassObj_addr() { return &_charArrayKlassObj; }
338   static Klass** intArrayKlassObj_addr() { return &_intArrayKlassObj; }
339   static Klass** shortArrayKlassObj_addr() { return &_shortArrayKlassObj; }
340   static Klass** longArrayKlassObj_addr() { return &_longArrayKlassObj; }
341   static Klass** singleArrayKlassObj_addr() { return &_singleArrayKlassObj; }
342   static Klass** doubleArrayKlassObj_addr() { return &_doubleArrayKlassObj; }
343   static Klass** objectArrayKlassObj_addr() { return &_objectArrayKlassObj; }
344
345   // The particular choice of collected heap.
346   static CollectedHeap* heap() { return _collectedHeap; }  // the VM's heap instance
347
348   // For UseCompressedOops
349   // Narrow Oop encoding mode:
350   // 0 - Use 32-bits oops without encoding when
351   //     NarrowOopHeapBaseMin + heap_size < 4Gb
352   // 1 - Use zero based compressed oops with encoding when
353   //     NarrowOopHeapBaseMin + heap_size < 32Gb
354   // 2 - Use compressed oops with heap base + encoding.
355   enum NARROW_OOP_MODE {
356     UnscaledNarrowOop = 0,   // mode 0 above: no base, no shift
357     ZeroBasedNarrowOop = 1,  // mode 1 above: NULL base, shifted oops
358     HeapBasedNarrowOop = 2   // mode 2 above: non-NULL base plus shift
359   };
360 static NARROW_OOP_MODE narrow_oop_mode();
361 static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
362 static char* preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
363 static char* preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
      // Base address added when decoding a narrow oop (NULL in the
      // unscaled/zero-based modes described above).
364   static address narrow_oop_base() { return _narrow_oop._base; }
      // True iff addr is exactly the current narrow-oop base.
365   static bool is_narrow_oop_base(void* addr) { return (narrow_oop_base() == (address)addr); }
      // Left-shift applied when decoding a narrow oop.
366   static int narrow_oop_shift() { return _narrow_oop._shift; }
      // Whether generated code may use implicit null checks for narrow oops.
367   static bool narrow_oop_use_implicit_null_checks() { return _narrow_oop._use_implicit_null_checks; }
368
369   // For UseCompressedClassPointers
      // Same shape of accessors as the narrow-oop ones, but for the
      // compressed Klass-pointer encoding (_narrow_klass).
370   static address narrow_klass_base() { return _narrow_klass._base; }
371   static bool is_narrow_klass_base(void* addr) { return (narrow_klass_base() == (address)addr); }
372   static int narrow_klass_shift() { return _narrow_klass._shift; }
373   static bool narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; }
374
      // _narrow_ptrs_base accessors; the *_addr() variant exposes the
      // variable's location so generated code can load it indirectly.
      // NOTE(review): exact relationship to _narrow_oop/_narrow_klass
      // bases is not visible in this header fragment -- confirm in the .cpp.
375   static address* narrow_ptrs_base_addr() { return &_narrow_ptrs_base; }
376   static void set_narrow_ptrs_base(address a) { _narrow_ptrs_base = a; }
377   static address narrow_ptrs_base() { return _narrow_ptrs_base; }
378
379 static void print_compressed_oops_mode();
380
381   // Set the narrow-oop decode shift.  Per the original note: set early
      // in vm_version on SPARC, then reset during universe initialization.
382   static void set_narrow_oop_shift(int shift) {
383     _narrow_oop._shift = shift;
384   }
|
85 // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
86 int _shift;
87 // Generate code with implicit null checks for narrow ptrs.
88 bool _use_implicit_null_checks;
89 };
90
91 enum VerifyOption {
     // Selects which marking information heap verification reads.
     // Only G1 distinguishes the non-default values below.
92   VerifyOption_Default = 0,
93
94   // G1
     // Verify against the "previous" marking information (the G1 default).
95   VerifyOption_G1UsePrevMarking = VerifyOption_Default,
     // Verify against the "next" marking information.
96   VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
     // Verify using each object's mark word.
97   VerifyOption_G1UseMarkWord = VerifyOption_G1UseNextMarking + 1
98 };
99
100 class Universe: AllStatic {
101 // Ugh. Universe is much too friendly.
102 friend class MarkSweep;
103 friend class oopDesc;
104 friend class ClassLoader;
105 friend class SystemDictionary;
106 friend class ReservedHeapSpace;
107 friend class VMStructs;
108 friend class VM_PopulateDumpSharedSpace;
109 friend class Metaspace;
110
111 friend jint universe_init();
112 friend void universe2_init();
113 friend bool universe_post_init();
114
115 private:
116 // Known classes in the VM
117 static Klass* _boolArrayKlassObj;
118 static Klass* _byteArrayKlassObj;
119 static Klass* _charArrayKlassObj;
120 static Klass* _intArrayKlassObj;
121 static Klass* _shortArrayKlassObj;
122 static Klass* _longArrayKlassObj;
123 static Klass* _singleArrayKlassObj;
124 static Klass* _doubleArrayKlassObj;
125 static Klass* _typeArrayKlassObjs[T_VOID+1];
126
334   // Accessors needed for fast allocation
      // Each returns the address of the corresponding well-known array-klass
      // root, so generated allocation fast paths can load the Klass* through
      // a fixed address instead of embedding the pointer value itself.
335   static Klass** boolArrayKlassObj_addr() { return &_boolArrayKlassObj; }
336   static Klass** byteArrayKlassObj_addr() { return &_byteArrayKlassObj; }
337   static Klass** charArrayKlassObj_addr() { return &_charArrayKlassObj; }
338   static Klass** intArrayKlassObj_addr() { return &_intArrayKlassObj; }
339   static Klass** shortArrayKlassObj_addr() { return &_shortArrayKlassObj; }
340   static Klass** longArrayKlassObj_addr() { return &_longArrayKlassObj; }
341   static Klass** singleArrayKlassObj_addr() { return &_singleArrayKlassObj; }
342   static Klass** doubleArrayKlassObj_addr() { return &_doubleArrayKlassObj; }
343   static Klass** objectArrayKlassObj_addr() { return &_objectArrayKlassObj; }
344
345   // The particular choice of collected heap.
346   static CollectedHeap* heap() { return _collectedHeap; }  // the VM's heap instance
347
348   // For UseCompressedOops
349   // Narrow Oop encoding mode:
350   // 0 - Use 32-bits oops without encoding when
351   //     NarrowOopHeapBaseMin + heap_size < 4Gb
352   // 1 - Use zero based compressed oops with encoding when
353   //     NarrowOopHeapBaseMin + heap_size < 32Gb
354   // 2 - Use compressed oops with disjoint heap base if
355   //     base is 32G-aligned and base > 0. This allows certain
356   //     optimizations in encoding/decoding.
357   //     Disjoint: the bits used by the base are disjoint from the
358   //     bits a shifted oop can occupy, so decoding is a simple OR:
359   //     oop = (cOop << 3) | base -- an oop splits cleanly into
      //     base bits and compressed-oop bits.
360   // 3 - Use compressed oops with heap base + encoding.
361   enum NARROW_OOP_MODE {
362     UnscaledNarrowOop = 0,     // mode 0: no base, no shift
363     ZeroBasedNarrowOop = 1,    // mode 1: NULL base, shifted oops
364     DisjointBaseNarrowOop = 2, // mode 2: 32G-aligned base, bits disjoint from oop bits
365     HeapBasedNarrowOop = 3,    // mode 3: base bits overlap oop bits; decode must add base
366     AnyNarrowOopMode = 4       // NOTE(review): presumably a wildcard ("any mode acceptable") -- confirm at call sites
367   };
368 static NARROW_OOP_MODE narrow_oop_mode();
369 static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
370 static char* preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
371 static char* preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
372   static address narrow_oop_base() { return _narrow_oop._base; }
373   // Test whether the bits of addr overlap any bit a shifted 32-bit
      // compressed oop can occupy.  The mask keeps the low
      // (32 + LogMinObjAlignmentInBytes) bits, so addr qualifies iff it is
      // aligned to 4G << LogMinObjAlignmentInBytes (32G with the default
      // 8-byte object alignment -- see DisjointBaseNarrowOop).
374   static bool is_disjoint_heap_base_address(address addr) {
375     return (((uint64_t)(intptr_t)addr) &
376         (((uint64_t)UCONST64(0xFFFFffffFFFFffff)) >> (32-LogMinObjAlignmentInBytes))) == 0;
377   }
378   // True iff we run with disjoint-base compressed oops: a non-NULL base
      // whose bits do not overlap the shifted-oop bits (DisjointBaseNarrowOop).
379   static bool narrow_oop_base_disjoint() {
380     return _narrow_oop._base != NULL && is_disjoint_heap_base_address(_narrow_oop._base);
381   }
382   // True iff we run with real heap-based compressed oops: a non-NULL base
      // whose bits overlap the shifted-oop bits, so decoding must add (and
      // encoding subtract) the base.  Note this is NOT !narrow_oop_base_disjoint():
      // both predicates are false for a NULL base (unscaled/zero-based modes).
385   static bool narrow_oop_base_overlaps() {
386     return _narrow_oop._base != NULL && !is_disjoint_heap_base_address(_narrow_oop._base);
387   }
      // True iff addr is exactly the current narrow-oop base.
388   static bool is_narrow_oop_base(void* addr) { return (narrow_oop_base() == (address)addr); }
      // Left-shift applied when decoding a narrow oop.
389   static int narrow_oop_shift() { return _narrow_oop._shift; }
      // Whether generated code may use implicit null checks for narrow oops.
390   static bool narrow_oop_use_implicit_null_checks() { return _narrow_oop._use_implicit_null_checks; }
391
392   // For UseCompressedClassPointers
      // Same shape of accessors as the narrow-oop ones, but for the
      // compressed Klass-pointer encoding (_narrow_klass).
393   static address narrow_klass_base() { return _narrow_klass._base; }
394   static bool is_narrow_klass_base(void* addr) { return (narrow_klass_base() == (address)addr); }
395   static int narrow_klass_shift() { return _narrow_klass._shift; }
396   static bool narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; }
397
      // _narrow_ptrs_base accessors; the *_addr() variant exposes the
      // variable's location so generated code can load it indirectly.
      // NOTE(review): exact relationship to _narrow_oop/_narrow_klass
      // bases is not visible in this header fragment -- confirm in the .cpp.
398   static address* narrow_ptrs_base_addr() { return &_narrow_ptrs_base; }
399   static void set_narrow_ptrs_base(address a) { _narrow_ptrs_base = a; }
400   static address narrow_ptrs_base() { return _narrow_ptrs_base; }
401
402 static void print_compressed_oops_mode();
403
404   // Set the narrow-oop decode shift.  Per the original note: set early
      // in vm_version on SPARC, then reset during universe initialization.
405   static void set_narrow_oop_shift(int shift) {
406     _narrow_oop._shift = shift;
407   }
|