/*
 * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_UNIVERSE_HPP
#define SHARE_VM_MEMORY_UNIVERSE_HPP

#include "runtime/handles.hpp"
#include "utilities/array.hpp"
#include "utilities/growableArray.hpp"

// Universe is a name space holding known system classes and objects in the VM.
//
// Loaded classes are accessible through the SystemDictionary.
//
// The object heap is allocated and accessed through Universe, and various allocation
// support is provided. Allocation by the interpreter and compiled code is done inline
// and bails out to Scavenge::invoke_and_allocate.

class CollectedHeap;
class DeferredObjAllocEvent;


// A helper class for caching a Method* when the user of the cache
// only cares about the latest version of the Method*.  This cache safely
// interacts with the RedefineClasses API.

class LatestMethodCache : public CHeapObj<mtClass> {
  // We save the Klass* and the idnum of Method* in order to get
  // the current cached Method*.
 private:
  Klass*                _klass;
  int                   _method_idnum;

 public:
  LatestMethodCache()   { _klass = NULL; _method_idnum = -1; }
  ~LatestMethodCache()  { _klass = NULL; _method_idnum = -1; }

  void   init(Klass* k, Method* m);
  Klass* klass() const           { return _klass; }
  int    method_idnum() const    { return _method_idnum; }

  Method* get_method();

  // CDS support.  Replace the klass in this with the archive version;
  // this could also be used for Enhanced Class Redefinition.
  void serialize(SerializeClosure* f) {
    f->do_ptr((void**)&_klass);
  }
};
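
// A minimal usage sketch (hypothetical caller; the real users are the
// LatestMethodCache* fields of Universe below).  Only the Klass* and the
// method idnum are stored, so get_method() always re-resolves to the latest
// version of the method, even after RedefineClasses:
//
//   LatestMethodCache* cache = new LatestMethodCache();
//   cache->init(k, m);                      // k and m resolved during startup
//   Method* latest = cache->get_method();   // safe across class redefinition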


// For UseCompressedOops.
struct NarrowPtrStruct {
  // Base address for oop-within-java-object materialization.
  // NULL if using wide oops or zero based narrow oops.
  address _base;
  // Number of shift bits for encoding/decoding narrow ptrs.
  // 0 if using wide ptrs or zero based unscaled narrow ptrs,
  // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
  int     _shift;
  // Generate code with implicit null checks for narrow ptrs.
  bool    _use_implicit_null_checks;
};

enum VerifyOption {
      VerifyOption_Default = 0,

      // G1
      VerifyOption_G1UsePrevMarking = VerifyOption_Default,
      VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
      VerifyOption_G1UseMarkWord    = VerifyOption_G1UseNextMarking + 1
};

class Universe: AllStatic {
  // Ugh.  Universe is much too friendly.
  friend class MarkSweep;
  friend class oopDesc;
  friend class ClassLoader;
  friend class SystemDictionary;
  friend class ReservedHeapSpace;
  friend class VMStructs;
  friend class VM_PopulateDumpSharedSpace;
  friend class Metaspace;

  friend jint  universe_init();
  friend void  universe2_init();
  friend bool  universe_post_init();
  friend void  universe_post_module_init();

 private:
  // Known classes in the VM
  static Klass* _boolArrayKlassObj;
  static Klass* _byteArrayKlassObj;
  static Klass* _charArrayKlassObj;
  static Klass* _intArrayKlassObj;
  static Klass* _shortArrayKlassObj;
  static Klass* _longArrayKlassObj;
  static Klass* _singleArrayKlassObj;
  static Klass* _doubleArrayKlassObj;
  static Klass* _typeArrayKlassObjs[T_VOID+1];

  static Klass* _objectArrayKlassObj;

  // Known objects in the VM

  // Primitive objects
  static oop _int_mirror;
  static oop _float_mirror;
  static oop _double_mirror;
  static oop _byte_mirror;
  static oop _bool_mirror;
  static oop _char_mirror;
  static oop _long_mirror;
  static oop _short_mirror;
  static oop _void_mirror;

  static oop          _main_thread_group;             // Reference to the main thread group object
  static oop          _system_thread_group;           // Reference to the system thread group object

  static objArrayOop  _the_empty_class_klass_array;   // Canonicalized obj array of type java.lang.Class
  static oop          _the_null_string;               // A cache of "null" as a Java string
  static oop          _the_min_jint_string;          // A cache of "-2147483648" as a Java string
  static LatestMethodCache* _finalizer_register_cache; // static method for registering finalizable objects
  static LatestMethodCache* _loader_addClass_cache;    // method for registering loaded classes in class loader vector
  static LatestMethodCache* _pd_implies_cache;         // method for checking protection domain attributes
  static LatestMethodCache* _throw_illegal_access_error_cache; // Unsafe.throwIllegalAccessError() method
  static LatestMethodCache* _do_stack_walk_cache;      // method for stack walker callback

  // preallocated error objects (no backtrace)
  static oop          _out_of_memory_error_java_heap;
  static oop          _out_of_memory_error_metaspace;
  static oop          _out_of_memory_error_class_metaspace;
  static oop          _out_of_memory_error_array_size;
  static oop          _out_of_memory_error_gc_overhead_limit;
  static oop          _out_of_memory_error_realloc_objects;

  // preallocated cause message for delayed StackOverflowError
  static oop          _delayed_stack_overflow_error_message;

  static Array<int>*       _the_empty_int_array;    // Canonicalized int array
  static Array<u2>*        _the_empty_short_array;  // Canonicalized short array
  static Array<Klass*>*  _the_empty_klass_array;  // Canonicalized klass obj array
  static Array<Method*>* _the_empty_method_array; // Canonicalized method obj array

  static Array<Klass*>*  _the_array_interfaces_array;

  // array of preallocated error objects with backtrace
  static objArrayOop   _preallocated_out_of_memory_error_array;

  // number of preallocated error objects available for use
  static volatile jint _preallocated_out_of_memory_error_avail_count;

  static oop          _null_ptr_exception_instance;   // preallocated exception object
  static oop          _arithmetic_exception_instance; // preallocated exception object
  static oop          _virtual_machine_error_instance; // preallocated exception object
  // The object used as an exception dummy when exceptions are thrown for
  // the vm thread.
  static oop          _vm_exception;

  static oop          _allocation_context_notification_obj;

  // References waiting to be transferred to the ReferenceHandler
  static oop          _reference_pending_list;

  // The particular choice of collected heap.
  static CollectedHeap* _collectedHeap;

  static intptr_t _non_oop_bits;

  // For UseCompressedOops.
  static struct NarrowPtrStruct _narrow_oop;
  // For UseCompressedClassPointers.
  static struct NarrowPtrStruct _narrow_klass;
  static address _narrow_ptrs_base;

  // array of dummy objects used with +FullGCAlot
  debug_only(static objArrayOop _fullgc_alot_dummy_array;)
  // index of next entry to clear
  debug_only(static int         _fullgc_alot_dummy_next;)

  // Compiler/dispatch support
  static int  _base_vtable_size;                      // Java vtbl size of klass Object (in words)

  // Initialization
  static bool _bootstrapping;                         // true during genesis
  static bool _module_initialized;                    // true after call_initPhase2 called
  static bool _fully_initialized;                     // true after universe_init and initialize_vtables called

  // the array of preallocated errors with backtraces
  static objArrayOop  preallocated_out_of_memory_errors()     { return _preallocated_out_of_memory_error_array; }

  // Generate an out of memory error; if possible, use an error with a
  // preallocated backtrace; otherwise return the given default error.
  static oop        gen_out_of_memory_error(oop default_err);

  // Historic gc information
  static size_t _heap_capacity_at_last_gc;
  static size_t _heap_used_at_last_gc;

  template <class Heap, class Policy> static CollectedHeap* create_heap_with_policy();
  static CollectedHeap* create_heap();
  static CollectedHeap* create_heap_ext();
  static jint initialize_heap();
  static void initialize_basic_type_mirrors(TRAPS);
  static void fixup_mirrors(TRAPS);

  static void reinitialize_vtable_of(KlassHandle h_k, TRAPS);
  static void reinitialize_itables(TRAPS);
  static void compute_base_vtable_size();             // compute vtable size of class Object

  static void genesis(TRAPS);                         // Create the initial world

  // Mirrors for primitive classes (created eagerly)
  static oop check_mirror(oop m) {
    assert(m != NULL, "mirror not initialized");
    return m;
  }

  static void     set_narrow_oop_base(address base) {
    assert(UseCompressedOops, "no compressed oops?");
    _narrow_oop._base    = base;
  }
  static void     set_narrow_klass_base(address base) {
    assert(UseCompressedClassPointers, "no compressed klass ptrs?");
    _narrow_klass._base   = base;
  }
  static void     set_narrow_oop_use_implicit_null_checks(bool use) {
    assert(UseCompressedOops, "no compressed ptrs?");
    _narrow_oop._use_implicit_null_checks   = use;
  }

  // Debugging
  static int _verify_count;                           // number of verifies done

  // True during call to verify().  Should only be set/cleared in verify().
  static bool _verify_in_progress;
  static long verify_flags;

  static uintptr_t _verify_oop_mask;
  static uintptr_t _verify_oop_bits;

  static void calculate_verify_data(HeapWord* low_boundary, HeapWord* high_boundary) PRODUCT_RETURN;
  static void compute_verify_oop_data();

 public:
  // Known classes in the VM
  static Klass* boolArrayKlassObj()                 { return _boolArrayKlassObj;   }
  static Klass* byteArrayKlassObj()                 { return _byteArrayKlassObj;   }
  static Klass* charArrayKlassObj()                 { return _charArrayKlassObj;   }
  static Klass* intArrayKlassObj()                  { return _intArrayKlassObj;    }
  static Klass* shortArrayKlassObj()                { return _shortArrayKlassObj;  }
  static Klass* longArrayKlassObj()                 { return _longArrayKlassObj;   }
  static Klass* singleArrayKlassObj()               { return _singleArrayKlassObj; }
  static Klass* doubleArrayKlassObj()               { return _doubleArrayKlassObj; }

  static Klass* objectArrayKlassObj() {
    return _objectArrayKlassObj;
  }

  static Klass* typeArrayKlassObj(BasicType t) {
    assert((uint)t < T_VOID+1, "range check for type: %s", type2name(t));
    assert(_typeArrayKlassObjs[t] != NULL, "domain check");
    return _typeArrayKlassObjs[t];
  }

  // Known objects in the VM
  static oop int_mirror()                   { return check_mirror(_int_mirror); }
  static oop float_mirror()                 { return check_mirror(_float_mirror); }
  static oop double_mirror()                { return check_mirror(_double_mirror); }
  static oop byte_mirror()                  { return check_mirror(_byte_mirror); }
  static oop bool_mirror()                  { return check_mirror(_bool_mirror); }
  static oop char_mirror()                  { return check_mirror(_char_mirror); }
  static oop long_mirror()                  { return check_mirror(_long_mirror); }
  static oop short_mirror()                 { return check_mirror(_short_mirror); }
  static oop void_mirror()                  { return check_mirror(_void_mirror); }

  // table of same
  static oop _mirrors[T_VOID+1];

  static oop java_mirror(BasicType t) {
    assert((uint)t < T_VOID+1, "range check");
    return check_mirror(_mirrors[t]);
  }
  static oop      main_thread_group()                 { return _main_thread_group; }
  static void set_main_thread_group(oop group)        { _main_thread_group = group;}

  static oop      system_thread_group()               { return _system_thread_group; }
  static void set_system_thread_group(oop group)      { _system_thread_group = group;}

  static objArrayOop  the_empty_class_klass_array ()  { return _the_empty_class_klass_array;   }
  static Array<Klass*>* the_array_interfaces_array() { return _the_array_interfaces_array;   }
  static oop          the_null_string()               { return _the_null_string;               }
  static oop          the_min_jint_string()          { return _the_min_jint_string;          }

  static Method*      finalizer_register_method()     { return _finalizer_register_cache->get_method(); }
  static Method*      loader_addClass_method()        { return _loader_addClass_cache->get_method(); }

  static Method*      protection_domain_implies_method() { return _pd_implies_cache->get_method(); }
  static Method*      throw_illegal_access_error()    { return _throw_illegal_access_error_cache->get_method(); }

  static Method*      do_stack_walk_method()          { return _do_stack_walk_cache->get_method(); }

  // Function to initialize these
  static void initialize_known_methods(TRAPS);

  static oop          null_ptr_exception_instance()   { return _null_ptr_exception_instance;   }
  static oop          arithmetic_exception_instance() { return _arithmetic_exception_instance; }
  static oop          virtual_machine_error_instance() { return _virtual_machine_error_instance; }
  static oop          vm_exception()                  { return _vm_exception; }

  static inline oop   allocation_context_notification_obj();
  static inline void  set_allocation_context_notification_obj(oop obj);

  // Reference pending list manipulation.  Access is protected by
  // Heap_lock.  The getter, setter and predicate require that the caller
  // owns the lock.  Swap is used by parallel non-concurrent reference
  // processing threads, where some higher level controller owns
  // Heap_lock, so the lock must be held, but not necessarily by the
  // current thread.
  static oop          reference_pending_list();
  static void         set_reference_pending_list(oop list);
  static bool         has_reference_pending_list();
  static oop          swap_reference_pending_list(oop list);
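
  // A minimal sketch of the intended usage (hypothetical caller; the real
  // callers are the GC reference-processing code and the JVM_* entry points
  // backing java.lang.ref.Reference):
  //
  //   MutexLocker ml(Heap_lock);
  //   if (Universe::has_reference_pending_list()) {
  //     // Detach the whole list for hand-off to the ReferenceHandler thread.
  //     oop pending = Universe::swap_reference_pending_list(NULL);
  //     ...
  //   }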

  static Array<int>*       the_empty_int_array()    { return _the_empty_int_array; }
  static Array<u2>*        the_empty_short_array()  { return _the_empty_short_array; }
  static Array<Method*>* the_empty_method_array() { return _the_empty_method_array; }
  static Array<Klass*>*  the_empty_klass_array()  { return _the_empty_klass_array; }

  // OutOfMemoryError support. Returns an error with the required message. The returned error
  // may or may not have a backtrace. If the error has a backtrace, then the stack trace is
  // already filled in.
  static oop out_of_memory_error_java_heap()          { return gen_out_of_memory_error(_out_of_memory_error_java_heap);  }
  static oop out_of_memory_error_metaspace()          { return gen_out_of_memory_error(_out_of_memory_error_metaspace);   }
  static oop out_of_memory_error_class_metaspace()    { return gen_out_of_memory_error(_out_of_memory_error_class_metaspace);   }
  static oop out_of_memory_error_array_size()         { return gen_out_of_memory_error(_out_of_memory_error_array_size); }
  static oop out_of_memory_error_gc_overhead_limit()  { return gen_out_of_memory_error(_out_of_memory_error_gc_overhead_limit);  }
  static oop out_of_memory_error_realloc_objects()    { return gen_out_of_memory_error(_out_of_memory_error_realloc_objects);  }
  static oop delayed_stack_overflow_error_message()   { return _delayed_stack_overflow_error_message; }
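
  // A hypothetical use site (sketch only): VM code that detects heap
  // exhaustion can throw one of the preallocated errors, e.g.
  //   THROW_OOP(Universe::out_of_memory_error_java_heap());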

  // Accessors needed for fast allocation
  static Klass** boolArrayKlassObj_addr()           { return &_boolArrayKlassObj;   }
  static Klass** byteArrayKlassObj_addr()           { return &_byteArrayKlassObj;   }
  static Klass** charArrayKlassObj_addr()           { return &_charArrayKlassObj;   }
  static Klass** intArrayKlassObj_addr()            { return &_intArrayKlassObj;    }
  static Klass** shortArrayKlassObj_addr()          { return &_shortArrayKlassObj;  }
  static Klass** longArrayKlassObj_addr()           { return &_longArrayKlassObj;   }
  static Klass** singleArrayKlassObj_addr()         { return &_singleArrayKlassObj; }
  static Klass** doubleArrayKlassObj_addr()         { return &_doubleArrayKlassObj; }
  static Klass** objectArrayKlassObj_addr()         { return &_objectArrayKlassObj; }

  // The particular choice of collected heap.
  static CollectedHeap* heap() { return _collectedHeap; }

  // For UseCompressedOops
  // Narrow Oop encoding mode:
  // 0 - Use 32-bit oops without encoding when
  //     NarrowOopHeapBaseMin + heap_size < 4Gb
  // 1 - Use zero based compressed oops with encoding when
  //     NarrowOopHeapBaseMin + heap_size < 32Gb
  // 2 - Use compressed oops with disjoint heap base if
  //     base is 32G-aligned and base > 0. This allows certain
  //     optimizations in encoding/decoding.
  //     Disjoint: Bits used in the base are disjoint from bits used
  //     for oops ==> oop = (cOop << 3) | base, i.e. the bits of an oop
  //     split cleanly into a base part and a compressed-oop part.
  // 3 - Use compressed oops with heap base + encoding.
  //     (See the decoding sketch below the enum.)
  enum NARROW_OOP_MODE {
    UnscaledNarrowOop  = 0,
    ZeroBasedNarrowOop = 1,
    DisjointBaseNarrowOop = 2,
    HeapBasedNarrowOop = 3,
    AnyNarrowOopMode = 4
  };
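  // Rough decoding sketch for the modes above (illustrative only; the actual
  // decode logic lives with the oop code and the compilers), where base and
  // shift are the fields of _narrow_oop:
  //
  //   UnscaledNarrowOop:     oop = (address)narrow_oop                  // base == NULL, shift == 0
  //   ZeroBasedNarrowOop:    oop = (address)(narrow_oop << shift)       // base == NULL
  //   DisjointBaseNarrowOop: oop = base | (narrow_oop << shift)         // base bits don't overlap
  //   HeapBasedNarrowOop:    oop = base + (narrow_oop << shift)         // general case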
  static NARROW_OOP_MODE narrow_oop_mode();
  static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
  static char*    preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
  static char*    preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
  static address  narrow_oop_base()                  { return  _narrow_oop._base; }
  // Test whether bits of addr and possible offsets into the heap overlap.
  static bool     is_disjoint_heap_base_address(address addr) {
    return (((uint64_t)(intptr_t)addr) &
            (((uint64_t)UCONST64(0xFFFFffffFFFFffff)) >> (32-LogMinObjAlignmentInBytes))) == 0;
  }
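  // Worked example (assuming LogMinObjAlignmentInBytes == 3): the mask above
  // covers the low 32 + 3 = 35 bits, i.e. every bit a shifted 32-bit narrow
  // oop can occupy.  A base such as 0x800000000 (32G) has none of those bits
  // set and is therefore disjoint, so decoding may OR in the base; a base
  // such as 0x900000000 overlaps the offset range and is not disjoint.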
  // Check for disjoint base compressed oops.
  static bool     narrow_oop_base_disjoint()        {
    return _narrow_oop._base != NULL && is_disjoint_heap_base_address(_narrow_oop._base);
  }
  // Check for real heap-based compressed oops.
  // The base must be subtracted during decoding because its bits overlap the oop bits.
  // Note that simply negating the function above would also match the unscaled
  // and zero-based modes, hence the explicit base != NULL check.
  static bool     narrow_oop_base_overlaps()          {
    return _narrow_oop._base != NULL && !is_disjoint_heap_base_address(_narrow_oop._base);
  }
  static bool  is_narrow_oop_base(void* addr)             { return (narrow_oop_base() == (address)addr); }
  static int      narrow_oop_shift()                      { return  _narrow_oop._shift; }
  static bool     narrow_oop_use_implicit_null_checks()   { return  _narrow_oop._use_implicit_null_checks; }

  // For UseCompressedClassPointers
  static address  narrow_klass_base()                     { return  _narrow_klass._base; }
  static bool  is_narrow_klass_base(void* addr)           { return (narrow_klass_base() == (address)addr); }
  static int      narrow_klass_shift()                    { return  _narrow_klass._shift; }
  static bool     narrow_klass_use_implicit_null_checks() { return  _narrow_klass._use_implicit_null_checks; }

  static address* narrow_ptrs_base_addr()                 { return &_narrow_ptrs_base; }
  static void     set_narrow_ptrs_base(address a)         { _narrow_ptrs_base = a; }
  static address  narrow_ptrs_base()                      { return _narrow_ptrs_base; }

  static void     print_compressed_oops_mode(outputStream* st);

  // this is set in vm_version on sparc (and then reset in universe afaict)
  static void     set_narrow_oop_shift(int shift)         {
    _narrow_oop._shift   = shift;
  }

  static void     set_narrow_klass_shift(int shift)       {
    assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs");
    _narrow_klass._shift   = shift;
  }

  // Reserve Java heap and determine CompressedOops mode
  static ReservedSpace reserve_heap(size_t heap_size, size_t alignment);

  // Historic gc information
  static size_t get_heap_capacity_at_last_gc()         { return _heap_capacity_at_last_gc; }
  static size_t get_heap_free_at_last_gc()             { return _heap_capacity_at_last_gc - _heap_used_at_last_gc; }
  static size_t get_heap_used_at_last_gc()             { return _heap_used_at_last_gc; }
  static void update_heap_info_at_gc();

  // Testers
  static bool is_bootstrapping()                      { return _bootstrapping; }
  static bool is_module_initialized()                 { return _module_initialized; }
  static bool is_fully_initialized()                  { return _fully_initialized; }

  static inline bool element_type_should_be_aligned(BasicType type);
  static inline bool field_type_should_be_aligned(BasicType type);
  static bool        on_page_boundary(void* addr);
  static bool        should_fill_in_stack_trace(Handle throwable);
  static void check_alignment(uintx size, uintx alignment, const char* name);

  // Finalizer support.
  static void run_finalizers_on_exit();

  // Iteration

  // Apply "f" to the addresses of all the direct heap pointers maintained
  // as static fields of "Universe".
  static void oops_do(OopClosure* f, bool do_all = false);

  // CDS support
  static void serialize(SerializeClosure* f, bool do_all = false);

  // Apply "f" to all klasses for basic types (classes not present in
  // SystemDictionary).
  static void basic_type_classes_do(void f(Klass*));

  // For sharing -- fill in a list of known vtable pointers.
  static void init_self_patching_vtbl_list(void** list, int count);

  // Debugging
  enum VERIFY_FLAGS {
    Verify_Threads = 1,
    Verify_Heap = 2,
    Verify_SymbolTable = 4,
    Verify_StringTable = 8,
    Verify_CodeCache = 16,
    Verify_SystemDictionary = 32,
    Verify_ClassLoaderDataGraph = 64,
    Verify_MetaspaceAux = 128,
    Verify_JNIHandles = 256,
    Verify_CodeCacheOops = 512,
    Verify_All = -1
  };
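  // Sketch of how these flags are meant to be consumed (hypothetical call
  // site; the actual wiring is in initialize_verify_flags() and the
  // verification passes): verify_flags holds a bitmask of the values above,
  // and each subsystem guards its own verification step with, e.g.
  //   if (Universe::should_verify_subset(Universe::Verify_Heap)) { ... }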
  static void initialize_verify_flags();
  static bool should_verify_subset(uint subset);
  static bool verify_in_progress() { return _verify_in_progress; }
  static void verify(VerifyOption option, const char* prefix);
  static void verify(const char* prefix) {
    verify(VerifyOption_Default, prefix);
  }
  static void verify() {
    verify("");
  }

  static int  verify_count()       { return _verify_count; }
  static void print_on(outputStream* st);
  static void print_heap_at_SIGBREAK();
  static void print_heap_before_gc();
  static void print_heap_after_gc();

  // Change the number of dummy objects kept reachable by the full gc dummy
  // array; this should trigger relocation in a sliding compaction collector.
  debug_only(static bool release_fullgc_alot_dummy();)
  // The non-oop pattern (see compiledIC.hpp, etc)
  static void*   non_oop_word();

  // Oop verification (see MacroAssembler::verify_oop)
  static uintptr_t verify_oop_mask()          PRODUCT_RETURN0;
  static uintptr_t verify_oop_bits()          PRODUCT_RETURN0;
  static uintptr_t verify_mark_bits()         PRODUCT_RETURN0;
  static uintptr_t verify_mark_mask()         PRODUCT_RETURN0;

  // Compiler support
  static int base_vtable_size()               { return _base_vtable_size; }
};

class DeferredObjAllocEvent : public CHeapObj<mtInternal> {
  private:
    oop    _oop;
    size_t _bytesize;
    jint   _arena_id;

  public:
    DeferredObjAllocEvent(const oop o, const size_t s, const jint id) {
      _oop      = o;
      _bytesize = s;
      _arena_id = id;
    }

    ~DeferredObjAllocEvent() {
    }

    jint   arena_id() { return _arena_id; }
    size_t bytesize() { return _bytesize; }
    oop    get_oop()  { return _oop; }
};

#endif // SHARE_VM_MEMORY_UNIVERSE_HPP