src/share/vm/memory/universe.hpp

Print this page
rev 4773 : 8005849: JEP 167: Event-Based JVM Tracing
Reviewed-by: acorn, coleenp, sla
Contributed-by: Karen Kinnear <karen.kinnear@oracle.com>, Bengt Rutisson <bengt.rutisson@oracle.com>, Calvin Cheung <calvin.cheung@oracle.com>, Erik Gahlin <erik.gahlin@oracle.com>, Erik Helin <erik.helin@oracle.com>, Jesper Wilhelmsson <jesper.wilhelmsson@oracle.com>, Keith McGuigan <keith.mcguigan@oracle.com>, Mattias Tobiasson <mattias.tobiasson@oracle.com>, Markus Gronlund <markus.gronlund@oracle.com>, Mikael Auno <mikael.auno@oracle.com>, Nils Eliasson <nils.eliasson@oracle.com>, Nils Loodin <nils.loodin@oracle.com>, Rickard Backman <rickard.backman@oracle.com>, Staffan Larsen <staffan.larsen@oracle.com>, Stefan Karlsson <stefan.karlsson@oracle.com>, Yekaterina Kantserova <yekaterina.kantserova@oracle.com>


 236   // Historic gc information
 237   static size_t _heap_capacity_at_last_gc;
 238   static size_t _heap_used_at_last_gc;
 239 
       // One-time heap creation; returns a jint status code (presumably
       // JNI_OK / JNI_ERR -- confirm against universe.cpp).
 240   static jint initialize_heap();
 241   static void initialize_basic_type_mirrors(TRAPS);
 242   static void fixup_mirrors(TRAPS);
 243 
       // Re-run vtable/itable setup after bootstrapping; TRAPS means these
       // may pend an exception for the caller to check.
 244   static void reinitialize_vtable_of(KlassHandle h_k, TRAPS);
 245   static void reinitialize_itables(TRAPS);
 246   static void compute_base_vtable_size();             // compute vtable size of class Object
 247 
 248   static void genesis(TRAPS);                         // Create the initial world
 249 
 250   // Mirrors for primitive classes (created eagerly)
       // Debug-checked pass-through: asserts the mirror has already been
       // initialized (non-NULL) and returns it unchanged.
 251   static oop check_mirror(oop m) {
 252     assert(m != NULL, "mirror not initialized");
 253     return m;
 254   }
 255 
 255 
 256   // Narrow Oop encoding mode:
 257   // 0 - Use 32-bits oops without encoding when
 258   //     NarrowOopHeapBaseMin + heap_size < 4Gb
 259   // 1 - Use zero based compressed oops with encoding when
 260   //     NarrowOopHeapBaseMin + heap_size < 32Gb
 261   // 2 - Use compressed oops with heap base + encoding.
 262   enum NARROW_OOP_MODE {
 263     UnscaledNarrowOop  = 0,
 264     ZeroBasedNarrowOop = 1,
 265     HeapBasedNarrowOop = 2
 266   };
       // Suggested reservation address for the given encoding mode
       // (presumably may return NULL when no preference -- confirm in
       // universe.cpp).
 267   static char*    preferred_heap_base(size_t heap_size, NARROW_OOP_MODE mode);
 268   static char*    preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
       // Each setter asserts that the corresponding compressed-pointer flag
       // is enabled before mutating the shared encoding state.
 269   static void     set_narrow_oop_base(address base) {
 270     assert(UseCompressedOops, "no compressed oops?");
 271     _narrow_oop._base    = base;
 272   }
 273   static void     set_narrow_klass_base(address base) {
 274     assert(UseCompressedKlassPointers, "no compressed klass ptrs?");
 275     _narrow_klass._base   = base;
 276   }
 277   static void     set_narrow_oop_use_implicit_null_checks(bool use) {
 278     assert(UseCompressedOops, "no compressed ptrs?");
 279     _narrow_oop._use_implicit_null_checks   = use;
 280   }
 281   static bool     reserve_metaspace_helper(bool with_base = false);
 282   static ReservedHeapSpace reserve_heap_metaspace(size_t heap_size, size_t alignment, bool& contiguous);
 283 
       // Plain accessor for the cached class-metaspace size.
 284   static size_t  class_metaspace_size() {
 285     return _class_metaspace_size;
 286   }
 287   static void    set_class_metaspace_size(size_t metaspace_size) {
 288     _class_metaspace_size = metaspace_size;


       // Preallocated OutOfMemoryError variants; gen_out_of_memory_error
       // presumably fills in the stack trace lazily -- confirm in
       // universe.cpp.
 363   static oop out_of_memory_error_java_heap()          { return gen_out_of_memory_error(_out_of_memory_error_java_heap);  }
 364   static oop out_of_memory_error_perm_gen()           { return gen_out_of_memory_error(_out_of_memory_error_perm_gen);   }
 365   static oop out_of_memory_error_array_size()         { return gen_out_of_memory_error(_out_of_memory_error_array_size); }
 366   static oop out_of_memory_error_gc_overhead_limit()  { return gen_out_of_memory_error(_out_of_memory_error_gc_overhead_limit);  }
 367 
 368   // Accessors needed for fast allocation
       // Each returns the address of the cached Klass* so generated code can
       // load it directly rather than calling back into the VM.
 369   static Klass** boolArrayKlassObj_addr()           { return &_boolArrayKlassObj;   }
 370   static Klass** byteArrayKlassObj_addr()           { return &_byteArrayKlassObj;   }
 371   static Klass** charArrayKlassObj_addr()           { return &_charArrayKlassObj;   }
 372   static Klass** intArrayKlassObj_addr()            { return &_intArrayKlassObj;    }
 373   static Klass** shortArrayKlassObj_addr()          { return &_shortArrayKlassObj;  }
 374   static Klass** longArrayKlassObj_addr()           { return &_longArrayKlassObj;   }
 375   static Klass** singleArrayKlassObj_addr()         { return &_singleArrayKlassObj; }
 376   static Klass** doubleArrayKlassObj_addr()         { return &_doubleArrayKlassObj; }
 377   static Klass** objectArrayKlassObj_addr()         { return &_objectArrayKlassObj; }
 378 
 379   // The particular choice of collected heap.
 380   static CollectedHeap* heap() { return _collectedHeap; }
 381 
 382   // For UseCompressedOops
 381 
 382   // For UseCompressedOops















       // Read-only views of the compressed-oop encoding state (base, shift,
       // implicit-null-check flag).
 383   static address  narrow_oop_base()                       { return  _narrow_oop._base; }
 384   static bool  is_narrow_oop_base(void* addr)             { return (narrow_oop_base() == (address)addr); }
 385   static int      narrow_oop_shift()                      { return  _narrow_oop._shift; }
 386   static bool     narrow_oop_use_implicit_null_checks()   { return  _narrow_oop._use_implicit_null_checks; }
 387 
 388   // For UseCompressedKlassPointers
 389   static address  narrow_klass_base()                     { return  _narrow_klass._base; }
 390   static bool  is_narrow_klass_base(void* addr)           { return (narrow_klass_base() == (address)addr); }
 391   static int      narrow_klass_shift()                    { return  _narrow_klass._shift; }
 392   static bool     narrow_klass_use_implicit_null_checks() { return  _narrow_klass._use_implicit_null_checks; }
 393 
 394   static address* narrow_ptrs_base_addr()                 { return &_narrow_ptrs_base; }
 395   static void     set_narrow_ptrs_base(address a)         { _narrow_ptrs_base = a; }
 396   static address  narrow_ptrs_base()                      { return _narrow_ptrs_base; }
 397 
 398   // this is set in vm_version on sparc (and then reset in universe afaict)
       // NOTE: unlike set_narrow_oop_base above, this setter does not assert
       // UseCompressedOops.
 399   static void     set_narrow_oop_shift(int shift)         {
 400     _narrow_oop._shift   = shift;
 401   }
 402 




 236   // Historic gc information
 237   static size_t _heap_capacity_at_last_gc;
 238   static size_t _heap_used_at_last_gc;
 239 
       // One-time heap creation; returns a jint status code (presumably
       // JNI_OK / JNI_ERR -- confirm against universe.cpp).
 240   static jint initialize_heap();
 241   static void initialize_basic_type_mirrors(TRAPS);
 242   static void fixup_mirrors(TRAPS);
 243 
       // Re-run vtable/itable setup after bootstrapping; TRAPS means these
       // may pend an exception for the caller to check.
 244   static void reinitialize_vtable_of(KlassHandle h_k, TRAPS);
 245   static void reinitialize_itables(TRAPS);
 246   static void compute_base_vtable_size();             // compute vtable size of class Object
 247 
 248   static void genesis(TRAPS);                         // Create the initial world
 249 
 250   // Mirrors for primitive classes (created eagerly)
       // Debug-checked pass-through: asserts the mirror has already been
       // initialized (non-NULL) and returns it unchanged.
 251   static oop check_mirror(oop m) {
 252     assert(m != NULL, "mirror not initialized");
 253     return m;
 254   }
 255 
 255 













       // Each setter asserts that the corresponding compressed-pointer flag
       // is enabled before mutating the shared encoding state.
 256   static void     set_narrow_oop_base(address base) {
 257     assert(UseCompressedOops, "no compressed oops?");
 258     _narrow_oop._base    = base;
 259   }
 260   static void     set_narrow_klass_base(address base) {
 261     assert(UseCompressedKlassPointers, "no compressed klass ptrs?");
 262     _narrow_klass._base   = base;
 263   }
 264   static void     set_narrow_oop_use_implicit_null_checks(bool use) {
 265     assert(UseCompressedOops, "no compressed ptrs?");
 266     _narrow_oop._use_implicit_null_checks   = use;
 267   }
 268   static bool     reserve_metaspace_helper(bool with_base = false);
 269   static ReservedHeapSpace reserve_heap_metaspace(size_t heap_size, size_t alignment, bool& contiguous);
 270 
       // Plain accessor for the cached class-metaspace size.
 271   static size_t  class_metaspace_size() {
 272     return _class_metaspace_size;
 273   }
 274   static void    set_class_metaspace_size(size_t metaspace_size) {
 275     _class_metaspace_size = metaspace_size;


       // Preallocated OutOfMemoryError variants; gen_out_of_memory_error
       // presumably fills in the stack trace lazily -- confirm in
       // universe.cpp.
 350   static oop out_of_memory_error_java_heap()          { return gen_out_of_memory_error(_out_of_memory_error_java_heap);  }
 351   static oop out_of_memory_error_perm_gen()           { return gen_out_of_memory_error(_out_of_memory_error_perm_gen);   }
 352   static oop out_of_memory_error_array_size()         { return gen_out_of_memory_error(_out_of_memory_error_array_size); }
 353   static oop out_of_memory_error_gc_overhead_limit()  { return gen_out_of_memory_error(_out_of_memory_error_gc_overhead_limit);  }
 354 
 355   // Accessors needed for fast allocation
       // Each returns the address of the cached Klass* so generated code can
       // load it directly rather than calling back into the VM.
 356   static Klass** boolArrayKlassObj_addr()           { return &_boolArrayKlassObj;   }
 357   static Klass** byteArrayKlassObj_addr()           { return &_byteArrayKlassObj;   }
 358   static Klass** charArrayKlassObj_addr()           { return &_charArrayKlassObj;   }
 359   static Klass** intArrayKlassObj_addr()            { return &_intArrayKlassObj;    }
 360   static Klass** shortArrayKlassObj_addr()          { return &_shortArrayKlassObj;  }
 361   static Klass** longArrayKlassObj_addr()           { return &_longArrayKlassObj;   }
 362   static Klass** singleArrayKlassObj_addr()         { return &_singleArrayKlassObj; }
 363   static Klass** doubleArrayKlassObj_addr()         { return &_doubleArrayKlassObj; }
 364   static Klass** objectArrayKlassObj_addr()         { return &_objectArrayKlassObj; }
 365 
 366   // The particular choice of collected heap.
 367   static CollectedHeap* heap() { return _collectedHeap; }
 368 
 369   // For UseCompressedOops
 370   // Narrow Oop encoding mode:
 371   // 0 - Use 32-bits oops without encoding when
 372   //     NarrowOopHeapBaseMin + heap_size < 4Gb
 373   // 1 - Use zero based compressed oops with encoding when
 374   //     NarrowOopHeapBaseMin + heap_size < 32Gb
 375   // 2 - Use compressed oops with heap base + encoding.
 376   enum NARROW_OOP_MODE {
 377     UnscaledNarrowOop  = 0,
 378     ZeroBasedNarrowOop = 1,
 379     HeapBasedNarrowOop = 2
 380   };
       // New in this revision: query the encoding mode currently in effect,
       // and map a mode to a human-readable name (presumably for event/trace
       // output -- confirm in universe.cpp).
 381   static NARROW_OOP_MODE narrow_oop_mode();
 382   static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
       // Suggested reservation address for the given encoding mode
       // (presumably may return NULL when no preference -- confirm in
       // universe.cpp).
 383   static char*    preferred_heap_base(size_t heap_size, NARROW_OOP_MODE mode);
 384   static char*    preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
       // Read-only views of the compressed-oop encoding state (base, shift,
       // implicit-null-check flag).
 385   static address  narrow_oop_base()                       { return  _narrow_oop._base; }
 386   static bool  is_narrow_oop_base(void* addr)             { return (narrow_oop_base() == (address)addr); }
 387   static int      narrow_oop_shift()                      { return  _narrow_oop._shift; }
 388   static bool     narrow_oop_use_implicit_null_checks()   { return  _narrow_oop._use_implicit_null_checks; }
 389 
 390   // For UseCompressedKlassPointers
 391   static address  narrow_klass_base()                     { return  _narrow_klass._base; }
 392   static bool  is_narrow_klass_base(void* addr)           { return (narrow_klass_base() == (address)addr); }
 393   static int      narrow_klass_shift()                    { return  _narrow_klass._shift; }
 394   static bool     narrow_klass_use_implicit_null_checks() { return  _narrow_klass._use_implicit_null_checks; }
 395 
 396   static address* narrow_ptrs_base_addr()                 { return &_narrow_ptrs_base; }
 397   static void     set_narrow_ptrs_base(address a)         { _narrow_ptrs_base = a; }
 398   static address  narrow_ptrs_base()                      { return _narrow_ptrs_base; }
 399 
 400   // this is set in vm_version on sparc (and then reset in universe afaict)
       // NOTE: unlike set_narrow_oop_base above, this setter does not assert
       // UseCompressedOops.
 401   static void     set_narrow_oop_shift(int shift)         {
 402     _narrow_oop._shift   = shift;
 403   }
 404