
src/share/vm/memory/universe.hpp

rev 7280 : 8064457: Introduce compressed oops mode "disjoint base" and improve compressed heap handling.

Old version:

  73     f->do_ptr((void**)&_klass);
  74   }
  75 };
  76 
  77 
  78 // For UseCompressedOops.
  79 struct NarrowPtrStruct {
  80   // Base address for oop-within-java-object materialization.
  81   // NULL if using wide oops or zero based narrow oops.
  82   address _base;
  83   // Number of shift bits for encoding/decoding narrow ptrs.
  84   // 0 if using wide ptrs or zero based unscaled narrow ptrs,
  85   // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
  86   int     _shift;
  87   // Generate code with implicit null checks for narrow ptrs.
  88   bool    _use_implicit_null_checks;
  89 };
  90 
  91 enum VerifyOption {
  92       VerifyOption_Default = 0,
  93 
  94       // G1
  95       VerifyOption_G1UsePrevMarking = VerifyOption_Default,
  96       VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
  97       VerifyOption_G1UseMarkWord    = VerifyOption_G1UseNextMarking + 1
  98 };
  99 
 100 class Universe: AllStatic {
 101   // Ugh.  Universe is much too friendly.
 102   friend class MarkSweep;
 103   friend class oopDesc;
 104   friend class ClassLoader;
 105   friend class Arguments;
 106   friend class SystemDictionary;
 107   friend class VMStructs;
 108   friend class VM_PopulateDumpSharedSpace;
 109   friend class Metaspace;
 110 
 111   friend jint  universe_init();
 112   friend void  universe2_init();
 113   friend bool  universe_post_init();
 114 
 115  private:
 116   // Known classes in the VM
 117   static Klass* _boolArrayKlassObj;
 118   static Klass* _byteArrayKlassObj;
 119   static Klass* _charArrayKlassObj;
 120   static Klass* _intArrayKlassObj;
 121   static Klass* _shortArrayKlassObj;
 122   static Klass* _longArrayKlassObj;
 123   static Klass* _singleArrayKlassObj;
 124   static Klass* _doubleArrayKlassObj;
 125   static Klass* _typeArrayKlassObjs[T_VOID+1];
 126 


 332   // Accessors needed for fast allocation
 333   static Klass** boolArrayKlassObj_addr()           { return &_boolArrayKlassObj;   }
 334   static Klass** byteArrayKlassObj_addr()           { return &_byteArrayKlassObj;   }
 335   static Klass** charArrayKlassObj_addr()           { return &_charArrayKlassObj;   }
 336   static Klass** intArrayKlassObj_addr()            { return &_intArrayKlassObj;    }
 337   static Klass** shortArrayKlassObj_addr()          { return &_shortArrayKlassObj;  }
 338   static Klass** longArrayKlassObj_addr()           { return &_longArrayKlassObj;   }
 339   static Klass** singleArrayKlassObj_addr()         { return &_singleArrayKlassObj; }
 340   static Klass** doubleArrayKlassObj_addr()         { return &_doubleArrayKlassObj; }
 341   static Klass** objectArrayKlassObj_addr()         { return &_objectArrayKlassObj; }
 342 
 343   // The particular choice of collected heap.
 344   static CollectedHeap* heap() { return _collectedHeap; }
 345 
 346   // For UseCompressedOops
 347   // Narrow Oop encoding mode:
 348   // 0 - Use 32-bit oops without encoding when
 349   //     NarrowOopHeapBaseMin + heap_size < 4GB
 350   // 1 - Use zero-based compressed oops with encoding when
 351   //     NarrowOopHeapBaseMin + heap_size < 32GB
 352   // 2 - Use compressed oops with heap base + encoding.
 353   enum NARROW_OOP_MODE {
 354     UnscaledNarrowOop  = 0,
 355     ZeroBasedNarrowOop = 1,
 356     HeapBasedNarrowOop = 2
 357   };
 358   static NARROW_OOP_MODE narrow_oop_mode();
 359   static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
 360   static char*    preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
 361   static char*    preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
 362   static address  narrow_oop_base()                       { return  _narrow_oop._base; }
 363   static bool  is_narrow_oop_base(void* addr)             { return (narrow_oop_base() == (address)addr); }
 364   static int      narrow_oop_shift()                      { return  _narrow_oop._shift; }
 365   static bool     narrow_oop_use_implicit_null_checks()   { return  _narrow_oop._use_implicit_null_checks; }
 366 
 367   // For UseCompressedClassPointers
 368   static address  narrow_klass_base()                     { return  _narrow_klass._base; }
 369   static bool  is_narrow_klass_base(void* addr)           { return (narrow_klass_base() == (address)addr); }
 370   static int      narrow_klass_shift()                    { return  _narrow_klass._shift; }
 371   static bool     narrow_klass_use_implicit_null_checks() { return  _narrow_klass._use_implicit_null_checks; }
 372 
 373   static address* narrow_ptrs_base_addr()                 { return &_narrow_ptrs_base; }
 374   static void     set_narrow_ptrs_base(address a)         { _narrow_ptrs_base = a; }
 375   static address  narrow_ptrs_base()                      { return _narrow_ptrs_base; }
 376 
 377   static void     print_compressed_oops_mode();
 378 
 379   // This is set in vm_version on sparc (and then reset in universe, as far as I can tell).
 380   static void     set_narrow_oop_shift(int shift)         {
 381     _narrow_oop._shift   = shift;
 382   }

New version (with the patch applied):

  73     f->do_ptr((void**)&_klass);
  74   }
  75 };
  76 
  77 
  78 // For UseCompressedOops.
  79 struct NarrowPtrStruct {
  80   // Base address for oop-within-java-object materialization.
  81   // NULL if using wide oops or zero based narrow oops.
  82   address _base;
  83   // Number of shift bits for encoding/decoding narrow ptrs.
  84   // 0 if using wide ptrs or zero based unscaled narrow ptrs,
  85   // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
  86   int     _shift;
  87   // Generate code with implicit null checks for narrow ptrs.
  88   bool    _use_implicit_null_checks;
  89 };
  90 
  91 enum VerifyOption {
  92       VerifyOption_Default = 0,
  93       // G1
  94       VerifyOption_G1UsePrevMarking = VerifyOption_Default,
  95       VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
  96       VerifyOption_G1UseMarkWord    = VerifyOption_G1UseNextMarking + 1
  97 };
  98 
  99 class Universe: AllStatic {
 100   // Ugh.  Universe is much too friendly.
 101   friend class MarkSweep;
 102   friend class oopDesc;
 103   friend class ClassLoader;
 104   friend class SystemDictionary;
 105   friend class ReservedSpace;
 106   friend class VMStructs;
 107   friend class VM_PopulateDumpSharedSpace;
 108   friend class Metaspace;
 109 
 110   friend jint  universe_init();
 111   friend void  universe2_init();
 112   friend bool  universe_post_init();
 113 
 114  private:
 115   // Known classes in the VM
 116   static Klass* _boolArrayKlassObj;
 117   static Klass* _byteArrayKlassObj;
 118   static Klass* _charArrayKlassObj;
 119   static Klass* _intArrayKlassObj;
 120   static Klass* _shortArrayKlassObj;
 121   static Klass* _longArrayKlassObj;
 122   static Klass* _singleArrayKlassObj;
 123   static Klass* _doubleArrayKlassObj;
 124   static Klass* _typeArrayKlassObjs[T_VOID+1];
 125 


 331   // Accessors needed for fast allocation
 332   static Klass** boolArrayKlassObj_addr()           { return &_boolArrayKlassObj;   }
 333   static Klass** byteArrayKlassObj_addr()           { return &_byteArrayKlassObj;   }
 334   static Klass** charArrayKlassObj_addr()           { return &_charArrayKlassObj;   }
 335   static Klass** intArrayKlassObj_addr()            { return &_intArrayKlassObj;    }
 336   static Klass** shortArrayKlassObj_addr()          { return &_shortArrayKlassObj;  }
 337   static Klass** longArrayKlassObj_addr()           { return &_longArrayKlassObj;   }
 338   static Klass** singleArrayKlassObj_addr()         { return &_singleArrayKlassObj; }
 339   static Klass** doubleArrayKlassObj_addr()         { return &_doubleArrayKlassObj; }
 340   static Klass** objectArrayKlassObj_addr()         { return &_objectArrayKlassObj; }
 341 
 342   // The particular choice of collected heap.
 343   static CollectedHeap* heap() { return _collectedHeap; }
 344 
 345   // For UseCompressedOops
 346   // Narrow Oop encoding mode:
 347   // 0 - Use 32-bit oops without encoding when
 348   //     NarrowOopHeapBaseMin + heap_size < 4GB
 349   // 1 - Use zero-based compressed oops with encoding when
 350   //     NarrowOopHeapBaseMin + heap_size < 32GB
 351   // 2 - Use compressed oops with disjoint heap base if
 352   //     base is 32GB-aligned and base > 0. This allows certain
 353   //     optimizations in encoding/decoding.
 354   // 3 - Use compressed oops with heap base + encoding.
 355   enum NARROW_OOP_MODE {
 356     UnscaledNarrowOop  = 0,
 357     ZeroBasedNarrowOop = 1,
 358     DisjointBaseNarrowOop = 2,
 359     HeapBasedNarrowOop = 3,
 360     AnyNarrowOopMode = 4
 361   };
 362   static NARROW_OOP_MODE narrow_oop_mode();
 363   static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
 364   static char*    preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
 365   static char*    preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
 366   static address  narrow_oop_base()                  { return  _narrow_oop._base; }
 367   // Test whether the bits of addr are disjoint from (i.e. do not overlap) any possible offset into the heap.
 368   static bool     is_disjoint_heap_base_address(address addr) {
 369     return (((uint64_t)(intptr_t)addr) &
 370             (((uint64_t)UCONST64(0xFFFFffffFFFFffff)) >> (32-LogMinObjAlignmentInBytes))) == 0;
 371   }
 372   // Check for disjoint base compressed oops.
 373   static bool     narrow_oop_base_disjoint()        {
 374     return _narrow_oop._base != NULL && is_disjoint_heap_base_address(_narrow_oop._base);
 375   }
 376   // Check for real heap-based compressed oops.
 377   // We must subtract the base, as the bits overlap.
 378   // (Negating this predicate also covers the unscaled and zero-based modes.)
 379   static bool     narrow_oop_base_overlaps()          {
 380     return _narrow_oop._base != NULL && !is_disjoint_heap_base_address(_narrow_oop._base);
 381   }
 382   static bool  is_narrow_oop_base(void* addr)             { return (narrow_oop_base() == (address)addr); }
 383   static int      narrow_oop_shift()                      { return  _narrow_oop._shift; }
 384   static bool     narrow_oop_use_implicit_null_checks()   { return  _narrow_oop._use_implicit_null_checks; }
 385 
 386   // For UseCompressedClassPointers
 387   static address  narrow_klass_base()                     { return  _narrow_klass._base; }
 388   static bool  is_narrow_klass_base(void* addr)           { return (narrow_klass_base() == (address)addr); }
 389   static int      narrow_klass_shift()                    { return  _narrow_klass._shift; }
 390   static bool     narrow_klass_use_implicit_null_checks() { return  _narrow_klass._use_implicit_null_checks; }
 391 
 392   static address* narrow_ptrs_base_addr()                 { return &_narrow_ptrs_base; }
 393   static void     set_narrow_ptrs_base(address a)         { _narrow_ptrs_base = a; }
 394   static address  narrow_ptrs_base()                      { return _narrow_ptrs_base; }
 395 
 396   static void     print_compressed_oops_mode();
 397 
 398   // This is set in vm_version on sparc (and then reset in universe, as far as I can tell).
 399   static void     set_narrow_oop_shift(int shift)         {
 400     _narrow_oop._shift   = shift;
 401   }
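For illustration, here is a minimal sketch of the arithmetic that NarrowPtrStruct's _base and _shift describe for the modes above. It is not the HotSpot implementation; the function name decode_narrow_oop is hypothetical, and a shift of LogMinObjAlignmentInBytes == 3 (the default 8-byte object alignment) is assumed for the scaled modes:

    #include <cstdint>

    typedef uint32_t narrowOop;

    // Illustrative only -- the arithmetic described by NarrowPtrStruct,
    // not the HotSpot implementation.
    //   UnscaledNarrowOop:  base == 0, shift == 0 -> the 32-bit value is the address
    //   ZeroBasedNarrowOop: base == 0, shift == LogMinObjAlignmentInBytes (3 assumed)
    //   Disjoint/HeapBased: base != 0, shift == LogMinObjAlignmentInBytes
    static inline char* decode_narrow_oop(narrowOop v, uintptr_t base, int shift) {
      return (char*)(base + ((uintptr_t)v << shift));  // base and shift may both be 0
    }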

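The 4GB and 32GB limits in the mode comment follow from the 32-bit width of a narrow oop: unshifted it can address 2^32 bytes, and shifted by 3 bits it can address 2^35 bytes. A self-contained check of that arithmetic, again assuming 8-byte object alignment:

    #include <cstdint>
    #include <cstdio>

    int main() {
      uint64_t unscaled_max   = UINT64_C(1) << 32;        // 2^32 bytes = 4GB  (shift 0)
      uint64_t zero_based_max = UINT64_C(1) << (32 + 3);  // 2^35 bytes = 32GB (shift 3)
      printf("%llu GB, %llu GB\n",
             (unsigned long long)(unscaled_max   >> 30),
             (unsigned long long)(zero_based_max >> 30)); // prints "4 GB, 32 GB"
      return 0;
    }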

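Likewise, the mask in is_disjoint_heap_base_address() keeps the low 64 - (32 - LogMinObjAlignmentInBytes) bits; with 8-byte alignment that is the low 35 bits, so a disjoint base must be 32GB-aligned. The sketch below (assumptions: 8-byte alignment, a base of exactly 32GB) verifies the property the comments claim: base bits and shifted-oop bits never overlap, so encoding/decoding may, for example, combine them with a bitwise OR rather than a full add.

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int log_min_obj_alignment = 3;                        // assumed: 8-byte alignment
      uint64_t mask = UINT64_MAX >> (32 - log_min_obj_alignment); // low 35 bits set
      uint64_t base = UINT64_C(1) << 35;                          // a 32GB-aligned heap base
      uint64_t max_offset = ((UINT64_C(1) << 32) - 1) << 3;       // largest shifted narrow oop

      printf("base is disjoint: %d\n", (base & mask) == 0);                         // prints 1
      printf("OR equals ADD:    %d\n", (base | max_offset) == (base + max_offset)); // prints 1
      return 0;
    }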