/*
 * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_UNIVERSE_HPP
#define SHARE_VM_MEMORY_UNIVERSE_HPP

#include "oops/array.hpp"
#include "runtime/handles.hpp"
#include "utilities/growableArray.hpp"

// Universe is a name space holding known system classes and objects in the VM.
//
// Loaded classes are accessible through the SystemDictionary.
//
// The object heap is allocated and accessed through Universe, and various allocation
// support is provided. Allocation by the interpreter and compiled code is done inline
// and bails out to Scavenge::invoke_and_allocate.

class CollectedHeap;
class DeferredObjAllocEvent;


// A helper class for caching a Method* when the user of the cache
// only cares about the latest version of the Method*.  This cache safely
// interacts with the RedefineClasses API.

class LatestMethodCache : public CHeapObj<mtClass> {
  // We save the Klass* and the idnum of Method* in order to get
  // the current cached Method*.
 private:
  Klass*                _klass;
  int                   _method_idnum;

 public:
  LatestMethodCache()   { _klass = NULL; _method_idnum = -1; }
  ~LatestMethodCache()  { _klass = NULL; _method_idnum = -1; }

  void   init(Klass* k, Method* m);
  Klass* klass() const           { return _klass; }
  int    method_idnum() const    { return _method_idnum; }

  Method* get_method();

  // CDS support.  Replace the klass in this cache with the archived version;
  // this could also be used for Enhanced Class Redefinition.
  void serialize(SerializeClosure* f) {
    f->do_ptr((void**)&_klass);
  }
  void metaspace_pointers_do(MetaspaceClosure* it);
};
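
// Hedged usage sketch (illustrative only; 'k' and 'm' stand for a resolved
// Klass* and Method* and are not names from this file).  The cache is typically
// initialized once and then queried for whichever version of the method is
// current:
//
//   LatestMethodCache* cache = new LatestMethodCache();
//   cache->init(k, m);                      // remembers k and m's method idnum
//   Method* current = cache->get_method();  // latest version, even after
//                                           // RedefineClasses has replaced m
//
// Because only the Klass* and the method idnum are stored, the cache never
// returns a stale Method* from an old class version.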


// For UseCompressedOops.
struct NarrowPtrStruct {
  // Base address for oop-within-java-object materialization.
  // NULL if using wide oops or zero based narrow oops.
  address _base;
  // Number of shift bits for encoding/decoding narrow ptrs.
  // 0 if using wide ptrs or zero based unscaled narrow ptrs,
  // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
  int     _shift;
  // Generate code with implicit null checks for narrow ptrs.
  bool    _use_implicit_null_checks;
};
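
// Minimal sketch of how these fields are used (an assumption-laden illustration,
// not the VM's actual encode/decode helpers, which live with the oop code):
//
//   oop       decode(narrowOop v) { return (oop)(_base + ((uintptr_t)v << _shift)); }
//   narrowOop encode(oop o)       { return (narrowOop)(((uintptr_t)o - (uintptr_t)_base) >> _shift); }
//
// For example, with _base == NULL and _shift == LogMinObjAlignmentInBytes (3),
// 32-bit narrow oops can address a heap of up to 32GB; with _base == NULL and
// _shift == 0 the whole heap must lie below 4GB.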

enum VerifyOption {
      VerifyOption_Default = 0,

      // G1
      VerifyOption_G1UsePrevMarking = VerifyOption_Default,
      VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
      VerifyOption_G1UseFullMarking = VerifyOption_G1UseNextMarking + 1
};

class Universe: AllStatic {
  // Ugh.  Universe is much too friendly.
  friend class MarkSweep;
  friend class oopDesc;
  friend class ClassLoader;
  friend class SystemDictionary;
  friend class ReservedHeapSpace;
  friend class VMStructs;
  friend class VM_PopulateDumpSharedSpace;
  friend class Metaspace;
  friend class MetaspaceShared;

  friend jint  universe_init();
  friend void  universe2_init();
  friend bool  universe_post_init();
  friend void  universe_post_module_init();

 private:
  // Known classes in the VM
  static Klass* _boolArrayKlassObj;
  static Klass* _byteArrayKlassObj;
  static Klass* _charArrayKlassObj;
  static Klass* _intArrayKlassObj;
  static Klass* _shortArrayKlassObj;
  static Klass* _longArrayKlassObj;
  static Klass* _singleArrayKlassObj;
  static Klass* _doubleArrayKlassObj;
  static Klass* _typeArrayKlassObjs[T_VOID+1];

  static Klass* _objectArrayKlassObj;

  // Known objects in the VM

  // Primitive objects
  static oop _int_mirror;
  static oop _float_mirror;
  static oop _double_mirror;
  static oop _byte_mirror;
  static oop _bool_mirror;
  static oop _char_mirror;
  static oop _long_mirror;
  static oop _short_mirror;
  static oop _void_mirror;

  static oop          _main_thread_group;             // Reference to the main thread group object
  static oop          _system_thread_group;           // Reference to the system thread group object

  static objArrayOop  _the_empty_class_klass_array;   // Canonicalized obj array of type java.lang.Class
  static oop          _the_null_string;               // A cache of "null" as a Java string
  static oop          _the_min_jint_string;           // A cache of "-2147483648" as a Java string
  static LatestMethodCache* _finalizer_register_cache; // static method for registering finalizable objects
  static LatestMethodCache* _loader_addClass_cache;    // method for registering loaded classes in class loader vector
  static LatestMethodCache* _pd_implies_cache;         // method for checking protection domain attributes
  static LatestMethodCache* _throw_illegal_access_error_cache; // Unsafe.throwIllegalAccessError() method
  static LatestMethodCache* _do_stack_walk_cache;      // method for stack walker callback

  // preallocated error objects (no backtrace)
  static oop          _out_of_memory_error_java_heap;
  static oop          _out_of_memory_error_metaspace;
  static oop          _out_of_memory_error_class_metaspace;
  static oop          _out_of_memory_error_array_size;
  static oop          _out_of_memory_error_gc_overhead_limit;
  static oop          _out_of_memory_error_realloc_objects;

  // preallocated cause message for delayed StackOverflowError
  static oop          _delayed_stack_overflow_error_message;

  static Array<int>*       _the_empty_int_array;    // Canonicalized int array
  static Array<u2>*        _the_empty_short_array;  // Canonicalized short array
  static Array<Klass*>*  _the_empty_klass_array;  // Canonicalized klass obj array
  static Array<Method*>* _the_empty_method_array; // Canonicalized method obj array

  static Array<Klass*>*  _the_array_interfaces_array;

  // array of preallocated error objects with backtrace
  static objArrayOop   _preallocated_out_of_memory_error_array;

  // number of preallocated error objects available for use
  static volatile jint _preallocated_out_of_memory_error_avail_count;

  static oop          _null_ptr_exception_instance;   // preallocated exception object
  static oop          _arithmetic_exception_instance; // preallocated exception object
  static oop          _virtual_machine_error_instance; // preallocated exception object
  // The object used as an exception dummy when exceptions are thrown for
  // the vm thread.
  static oop          _vm_exception;

  static oop          _allocation_context_notification_obj;

  // References waiting to be transferred to the ReferenceHandler
  static oop          _reference_pending_list;

  // The particular choice of collected heap.
  static CollectedHeap* _collectedHeap;

  static intptr_t _non_oop_bits;

  // For UseCompressedOops.
  static struct NarrowPtrStruct _narrow_oop;
  // For UseCompressedClassPointers.
  static struct NarrowPtrStruct _narrow_klass;
  static address _narrow_ptrs_base;

  // array of dummy objects used with +FullGCAlot
  debug_only(static objArrayOop _fullgc_alot_dummy_array;)
  // index of next entry to clear
  debug_only(static int         _fullgc_alot_dummy_next;)

  // Compiler/dispatch support
  static int  _base_vtable_size;                      // Java vtbl size of klass Object (in words)

  // Initialization
  static bool _bootstrapping;                         // true during genesis
  static bool _module_initialized;                    // true after call_initPhase2 called
  static bool _fully_initialized;                     // true after universe_init and initialize_vtables called

  // the array of preallocated errors with backtraces
  static objArrayOop  preallocated_out_of_memory_errors()     { return _preallocated_out_of_memory_error_array; }

  // generate an out of memory error; if possible using an error with preallocated backtrace;
  // otherwise return the given default error.
  static oop        gen_out_of_memory_error(oop default_err);

  // Historic gc information
  static size_t _heap_capacity_at_last_gc;
  static size_t _heap_used_at_last_gc;

  static CollectedHeap* create_heap();
  static CollectedHeap* create_heap_ext();
  static jint initialize_heap();
  static void initialize_basic_type_mirrors(TRAPS);
  static void fixup_mirrors(TRAPS);

  static void reinitialize_vtable_of(Klass* k, TRAPS);
  static void reinitialize_itables(TRAPS);
  static void compute_base_vtable_size();             // compute vtable size of class Object

  static void genesis(TRAPS);                         // Create the initial world

  // Mirrors for primitive classes (created eagerly)
  static oop check_mirror(oop m) {
    assert(m != NULL, "mirror not initialized");
    return m;
  }

  static void     set_narrow_oop_base(address base) {
    assert(UseCompressedOops, "no compressed oops?");
    _narrow_oop._base    = base;
  }
  static void     set_narrow_klass_base(address base) {
    assert(UseCompressedClassPointers, "no compressed klass ptrs?");
    _narrow_klass._base   = base;
  }
  static void     set_narrow_oop_use_implicit_null_checks(bool use) {
    assert(UseCompressedOops, "no compressed ptrs?");
    _narrow_oop._use_implicit_null_checks   = use;
  }

  // Debugging
  static int _verify_count;                           // number of verifies done

  // True during call to verify().  Should only be set/cleared in verify().
  static bool _verify_in_progress;
  static long verify_flags;

  static uintptr_t _verify_oop_mask;
  static uintptr_t _verify_oop_bits;

  static void calculate_verify_data(HeapWord* low_boundary, HeapWord* high_boundary) PRODUCT_RETURN;
  static void compute_verify_oop_data();

 public:
  // Known classes in the VM
  static Klass* boolArrayKlassObj()                 { return _boolArrayKlassObj;   }
  static Klass* byteArrayKlassObj()                 { return _byteArrayKlassObj;   }
  static Klass* charArrayKlassObj()                 { return _charArrayKlassObj;   }
  static Klass* intArrayKlassObj()                  { return _intArrayKlassObj;    }
  static Klass* shortArrayKlassObj()                { return _shortArrayKlassObj;  }
  static Klass* longArrayKlassObj()                 { return _longArrayKlassObj;   }
  static Klass* singleArrayKlassObj()               { return _singleArrayKlassObj; }
  static Klass* doubleArrayKlassObj()               { return _doubleArrayKlassObj; }

  static Klass* objectArrayKlassObj() {
    return _objectArrayKlassObj;
  }

  static Klass* typeArrayKlassObj(BasicType t) {
    assert((uint)t < T_VOID+1, "range check for type: %s", type2name(t));
    assert(_typeArrayKlassObjs[t] != NULL, "domain check");
    return _typeArrayKlassObjs[t];
  }

  // Known objects in the VM
  static oop int_mirror()                   { return check_mirror(_int_mirror); }
  static oop float_mirror()                 { return check_mirror(_float_mirror); }
  static oop double_mirror()                { return check_mirror(_double_mirror); }
  static oop byte_mirror()                  { return check_mirror(_byte_mirror); }
  static oop bool_mirror()                  { return check_mirror(_bool_mirror); }
  static oop char_mirror()                  { return check_mirror(_char_mirror); }
  static oop long_mirror()                  { return check_mirror(_long_mirror); }
  static oop short_mirror()                 { return check_mirror(_short_mirror); }
  static oop void_mirror()                  { return check_mirror(_void_mirror); }

  // table of same
  static oop _mirrors[T_VOID+1];

  static oop java_mirror(BasicType t) {
    assert((uint)t < T_VOID+1, "range check");
    return check_mirror(_mirrors[t]);
  }
  static oop      main_thread_group()                 { return _main_thread_group; }
  static void set_main_thread_group(oop group)        { _main_thread_group = group;}

  static oop      system_thread_group()               { return _system_thread_group; }
  static void set_system_thread_group(oop group)      { _system_thread_group = group;}

  static objArrayOop  the_empty_class_klass_array ()  { return _the_empty_class_klass_array;   }
  static Array<Klass*>* the_array_interfaces_array() { return _the_array_interfaces_array;   }
  static oop          the_null_string()               { return _the_null_string;               }
  static oop          the_min_jint_string()           { return _the_min_jint_string;           }

  static Method*      finalizer_register_method()     { return _finalizer_register_cache->get_method(); }
  static Method*      loader_addClass_method()        { return _loader_addClass_cache->get_method(); }

  static Method*      protection_domain_implies_method() { return _pd_implies_cache->get_method(); }
  static Method*      throw_illegal_access_error()    { return _throw_illegal_access_error_cache->get_method(); }

  static Method*      do_stack_walk_method()          { return _do_stack_walk_cache->get_method(); }

  // Function to initialize these
  static void initialize_known_methods(TRAPS);

  static oop          null_ptr_exception_instance()   { return _null_ptr_exception_instance;   }
  static oop          arithmetic_exception_instance() { return _arithmetic_exception_instance; }
  static oop          virtual_machine_error_instance() { return _virtual_machine_error_instance; }
  static oop          vm_exception()                  { return _vm_exception; }

  static inline oop   allocation_context_notification_obj();
  static inline void  set_allocation_context_notification_obj(oop obj);

  // Reference pending list manipulation.  Access is protected by
  // Heap_lock.  The getter, setter and predicate require that the caller
  // owns the lock.  Swap is used by parallel non-concurrent reference
  // processing threads, where some higher level controller owns
  // Heap_lock, so it requires that the lock is held, but not necessarily
  // by the current thread.
  static oop          reference_pending_list();
  static void         set_reference_pending_list(oop list);
  static bool         has_reference_pending_list();
  static oop          swap_reference_pending_list(oop list);
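
  // Hedged usage sketch (illustrative only): while some controller thread holds
  // Heap_lock, a parallel (non-concurrent) reference processing worker may detach
  // the whole list in one step:
  //
  //   oop list = Universe::swap_reference_pending_list(NULL);  // lock held by the controller
  //
  // whereas reference_pending_list(), set_reference_pending_list() and
  // has_reference_pending_list() must be called by the thread that owns Heap_lock.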

  static Array<int>*       the_empty_int_array()    { return _the_empty_int_array; }
  static Array<u2>*        the_empty_short_array()  { return _the_empty_short_array; }
  static Array<Method*>* the_empty_method_array() { return _the_empty_method_array; }
  static Array<Klass*>*  the_empty_klass_array()  { return _the_empty_klass_array; }

  // OutOfMemoryError support. Returns an error with the required message. The returned error
  // may or may not have a backtrace. If the error has a backtrace then the stack trace is
  // already filled in.
  static oop out_of_memory_error_java_heap()          { return gen_out_of_memory_error(_out_of_memory_error_java_heap);  }
  static oop out_of_memory_error_metaspace()          { return gen_out_of_memory_error(_out_of_memory_error_metaspace);   }
  static oop out_of_memory_error_class_metaspace()    { return gen_out_of_memory_error(_out_of_memory_error_class_metaspace);   }
  static oop out_of_memory_error_array_size()         { return gen_out_of_memory_error(_out_of_memory_error_array_size); }
  static oop out_of_memory_error_gc_overhead_limit()  { return gen_out_of_memory_error(_out_of_memory_error_gc_overhead_limit);  }
  static oop out_of_memory_error_realloc_objects()    { return gen_out_of_memory_error(_out_of_memory_error_realloc_objects);  }
  static oop delayed_stack_overflow_error_message()   { return _delayed_stack_overflow_error_message; }

  // Accessors needed for fast allocation
  static Klass** boolArrayKlassObj_addr()           { return &_boolArrayKlassObj;   }
  static Klass** byteArrayKlassObj_addr()           { return &_byteArrayKlassObj;   }
  static Klass** charArrayKlassObj_addr()           { return &_charArrayKlassObj;   }
  static Klass** intArrayKlassObj_addr()            { return &_intArrayKlassObj;    }
  static Klass** shortArrayKlassObj_addr()          { return &_shortArrayKlassObj;  }
  static Klass** longArrayKlassObj_addr()           { return &_longArrayKlassObj;   }
  static Klass** singleArrayKlassObj_addr()         { return &_singleArrayKlassObj; }
  static Klass** doubleArrayKlassObj_addr()         { return &_doubleArrayKlassObj; }
  static Klass** objectArrayKlassObj_addr()         { return &_objectArrayKlassObj; }

  // The particular choice of collected heap.
  static CollectedHeap* heap() { return _collectedHeap; }

  // For UseCompressedOops
  // Narrow Oop encoding mode:
  // 0 - Use 32-bit oops without encoding when
  //     NarrowOopHeapBaseMin + heap_size < 4GB
  // 1 - Use zero based compressed oops with encoding when
  //     NarrowOopHeapBaseMin + heap_size < 32GB
  // 2 - Use compressed oops with disjoint heap base if
  //     base is 32GB-aligned and base > 0. This allows certain
  //     optimizations in encoding/decoding.
  //     Disjoint: the bits used in the base are disjoint from the bits used
  //     for oops ==> oop = (cOop << 3) | base.  In other words, the bits of
  //     an oop split cleanly into a base part and a compressed-oop part.
  // 3 - Use compressed oops with heap base + encoding.
  enum NARROW_OOP_MODE {
    UnscaledNarrowOop  = 0,
    ZeroBasedNarrowOop = 1,
    DisjointBaseNarrowOop = 2,
    HeapBasedNarrowOop = 3,
    AnyNarrowOopMode = 4
  };
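  // Illustrative examples (not exhaustive, and assuming the default
  // NarrowOopHeapBaseMin and 8-byte object alignment): a 3GB heap mapped below
  // 4GB can run unscaled (base == 0, shift == 0); a 26GB heap mapped below 32GB
  // can run zero based (base == 0, shift == 3); a heap whose base the OS places
  // at a 32GB-aligned address above zero can use the disjoint-base mode; any
  // other placement falls back to the heap-based mode, where the base must be
  // subtracted before shifting.
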
  static NARROW_OOP_MODE narrow_oop_mode();
  static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
  static char*    preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
  static char*    preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
  static address  narrow_oop_base()                  { return  _narrow_oop._base; }
  // Test whether bits of addr and possible offsets into the heap overlap.
  static bool     is_disjoint_heap_base_address(address addr) {
    return (((uint64_t)(intptr_t)addr) &
            (((uint64_t)UCONST64(0xFFFFffffFFFFffff)) >> (32-LogMinObjAlignmentInBytes))) == 0;
  }
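  // Worked example (assuming LogMinObjAlignmentInBytes == 3): the mask above is
  // 0xFFFFffffFFFFffff >> 29, i.e. the low 35 bits.  An address is "disjoint"
  // iff those 35 bits are all zero, i.e. iff it is a multiple of 2^35 = 32GB,
  // so a narrow oop shifted left by 3 can simply be OR'ed onto the base.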
  // Check for disjoint base compressed oops.
  static bool     narrow_oop_base_disjoint()        {
    return _narrow_oop._base != NULL && is_disjoint_heap_base_address(_narrow_oop._base);
  }
  // Check for real heap-based compressed oops.
  // We must subtract the base because the bits overlap.
  // If we negate this predicate, the unscaled and zero-based modes are included as well.
  static bool     narrow_oop_base_overlaps()          {
    return _narrow_oop._base != NULL && !is_disjoint_heap_base_address(_narrow_oop._base);
  }
  static bool  is_narrow_oop_base(void* addr)             { return (narrow_oop_base() == (address)addr); }
  static int      narrow_oop_shift()                      { return  _narrow_oop._shift; }
  static bool     narrow_oop_use_implicit_null_checks()   { return  _narrow_oop._use_implicit_null_checks; }

  // For UseCompressedClassPointers
  static address  narrow_klass_base()                     { return  _narrow_klass._base; }
  static bool  is_narrow_klass_base(void* addr)           { return (narrow_klass_base() == (address)addr); }
  static int      narrow_klass_shift()                    { return  _narrow_klass._shift; }
  static bool     narrow_klass_use_implicit_null_checks() { return  _narrow_klass._use_implicit_null_checks; }

  static address* narrow_ptrs_base_addr()                 { return &_narrow_ptrs_base; }
  static void     set_narrow_ptrs_base(address a)         { _narrow_ptrs_base = a; }
  static address  narrow_ptrs_base()                      { return _narrow_ptrs_base; }

  static void     print_compressed_oops_mode(outputStream* st);

  // This is set in vm_version on sparc (and then reset in universe, as far as I can tell).
  static void     set_narrow_oop_shift(int shift)         {
    _narrow_oop._shift   = shift;
  }

  static void     set_narrow_klass_shift(int shift)       {
    assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs");
    _narrow_klass._shift   = shift;
  }

  // Reserve Java heap and determine CompressedOops mode
  static ReservedSpace reserve_heap(size_t heap_size, size_t alignment);

  // Historic gc information
  static size_t get_heap_capacity_at_last_gc()         { return _heap_capacity_at_last_gc; }
  static size_t get_heap_free_at_last_gc()             { return _heap_capacity_at_last_gc - _heap_used_at_last_gc; }
  static size_t get_heap_used_at_last_gc()             { return _heap_used_at_last_gc; }
  static void update_heap_info_at_gc();

  // Testers
  static bool is_bootstrapping()                      { return _bootstrapping; }
  static bool is_module_initialized()                 { return _module_initialized; }
  static bool is_fully_initialized()                  { return _fully_initialized; }

  static inline bool element_type_should_be_aligned(BasicType type);
  static inline bool field_type_should_be_aligned(BasicType type);
  static bool        on_page_boundary(void* addr);
  static bool        should_fill_in_stack_trace(Handle throwable);
  static void check_alignment(uintx size, uintx alignment, const char* name);

  // Finalizer support.
  static void run_finalizers_on_exit();

  // Iteration

  // Apply "f" to the addresses of all the direct heap pointers maintained
  // as static fields of "Universe".
  static void oops_do(OopClosure* f, bool do_all = false);

  // CDS support
  static void serialize(SerializeClosure* f, bool do_all = false);

  // Apply "f" to all klasses for basic types (classes not present in
  // SystemDictionary).
  static void basic_type_classes_do(void f(Klass*));
  static void metaspace_pointers_do(MetaspaceClosure* it);

  // Debugging
  enum VERIFY_FLAGS {
    Verify_Threads = 1,
    Verify_Heap = 2,
    Verify_SymbolTable = 4,
    Verify_StringTable = 8,
    Verify_CodeCache = 16,
    Verify_SystemDictionary = 32,
    Verify_ClassLoaderDataGraph = 64,
    Verify_MetaspaceAux = 128,
    Verify_JNIHandles = 256,
    Verify_CodeCacheOops = 512,
    Verify_All = -1
  };
  static void initialize_verify_flags();
  static bool should_verify_subset(uint subset);
  static bool verify_in_progress() { return _verify_in_progress; }
  static void verify(VerifyOption option, const char* prefix);
  static void verify(const char* prefix) {
    verify(VerifyOption_Default, prefix);
  }
  static void verify() {
    verify("");
  }

  static int  verify_count()       { return _verify_count; }
  static void print_on(outputStream* st);
  static void print_heap_at_SIGBREAK();
  static void print_heap_before_gc();
  static void print_heap_after_gc();

  // Change the number of dummy objects kept reachable by the full gc dummy
  // array; this should trigger relocation in a sliding compaction collector.
  debug_only(static bool release_fullgc_alot_dummy();)
  // The non-oop pattern (see compiledIC.hpp, etc)
  static void*   non_oop_word();

  // Oop verification (see MacroAssembler::verify_oop)
  static uintptr_t verify_oop_mask()          PRODUCT_RETURN0;
  static uintptr_t verify_oop_bits()          PRODUCT_RETURN0;
  static uintptr_t verify_mark_bits()         PRODUCT_RETURN0;
  static uintptr_t verify_mark_mask()         PRODUCT_RETURN0;

  // Compiler support
  static int base_vtable_size()               { return _base_vtable_size; }
};

class DeferredObjAllocEvent : public CHeapObj<mtInternal> {
  private:
    oop    _oop;
    size_t _bytesize;
    jint   _arena_id;

  public:
    DeferredObjAllocEvent(const oop o, const size_t s, const jint id) {
      _oop      = o;
      _bytesize = s;
      _arena_id = id;
    }

    ~DeferredObjAllocEvent() {
    }

    jint   arena_id() { return _arena_id; }
    size_t bytesize() { return _bytesize; }
    oop    get_oop()  { return _oop; }
};

#endif // SHARE_VM_MEMORY_UNIVERSE_HPP