/*
 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_UNIVERSE_HPP
#define SHARE_VM_MEMORY_UNIVERSE_HPP

#include "oops/array.hpp"
#include "runtime/handles.hpp"
#include "utilities/growableArray.hpp"

// Universe is a name space holding known system classes and objects in the VM.
//
// Loaded classes are accessible through the SystemDictionary.
//
// The object heap is allocated and accessed through Universe, and various allocation
// support is provided. Allocation by the interpreter and compiled code is done inline
// and bails out to Scavenge::invoke_and_allocate.

class CollectedHeap;
class DeferredObjAllocEvent;


// A helper class for caching a Method* when the user of the cache
// only cares about the latest version of the Method*. This cache safely
// interacts with the RedefineClasses API.

class LatestMethodCache : public CHeapObj<mtClass> {
  // We save the Klass* and the idnum of Method* in order to get
  // the current cached Method*.
 private:
  Klass*                _klass;
  int                   _method_idnum;

 public:
  LatestMethodCache()   { _klass = NULL; _method_idnum = -1; }
  ~LatestMethodCache()  { _klass = NULL; _method_idnum = -1; }

  void   init(Klass* k, Method* m);
  Klass* klass() const           { return _klass; }
  int    method_idnum() const    { return _method_idnum; }

  Method* get_method();

  // CDS support.  Replace the klass in this with the archive version
  // could use this for Enhanced Class Redefinition also.
  void serialize(SerializeClosure* f) {
    f->do_ptr((void**)&_klass);
  }
  void metaspace_pointers_do(MetaspaceClosure* it);
};


// For UseCompressedOops.
struct NarrowPtrStruct {
  // Base address for oop-within-java-object materialization.
  // NULL if using wide oops or zero based narrow oops.
  address _base;
  // Number of shift bits for encoding/decoding narrow ptrs.
  // 0 if using wide ptrs or zero based unscaled narrow ptrs,
  // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
  int     _shift;
  // Generate code with implicit null checks for narrow ptrs.
  bool    _use_implicit_null_checks;
};

enum VerifyOption {
      VerifyOption_Default = 0,

      // G1
      VerifyOption_G1UsePrevMarking = VerifyOption_Default,
      VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
      VerifyOption_G1UseFullMarking = VerifyOption_G1UseNextMarking + 1
};

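// Illustrative only: a hedged sketch of how LatestMethodCache (declared above)
// is meant to be used. The cache stores a Klass* plus a method idnum rather
// than a raw Method*, so get_method() always resolves to the current version
// of the method, even after RedefineClasses has replaced it. The local names
// below are made up for the example; init() and get_method() are the real
// entry points.
//
//   LatestMethodCache* cache = new LatestMethodCache();
//   cache->init(holder_klass, initial_method);  // remember klass + idnum
//   ...
//   Method* latest = cache->get_method();       // latest version, post-redefinition
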
class Universe: AllStatic {
  // Ugh.  Universe is much too friendly.
  friend class MarkSweep;
  friend class oopDesc;
  friend class ClassLoader;
  friend class SystemDictionary;
  friend class ReservedHeapSpace;
  friend class VMStructs;
  friend class VM_PopulateDumpSharedSpace;
  friend class Metaspace;
  friend class MetaspaceShared;

  friend jint  universe_init();
  friend void  universe2_init();
  friend bool  universe_post_init();
  friend void  universe_post_module_init();

 private:
  // Known classes in the VM
  static Klass* _boolArrayKlassObj;
  static Klass* _byteArrayKlassObj;
  static Klass* _charArrayKlassObj;
  static Klass* _intArrayKlassObj;
  static Klass* _shortArrayKlassObj;
  static Klass* _longArrayKlassObj;
  static Klass* _singleArrayKlassObj;
  static Klass* _doubleArrayKlassObj;
  static Klass* _typeArrayKlassObjs[T_VOID+1];

  static Klass* _objectArrayKlassObj;

  // Known objects in the VM

  // Primitive objects
  static oop _int_mirror;
  static oop _float_mirror;
  static oop _double_mirror;
  static oop _byte_mirror;
  static oop _bool_mirror;
  static oop _char_mirror;
  static oop _long_mirror;
  static oop _short_mirror;
  static oop _void_mirror;

  static oop          _main_thread_group;             // Reference to the main thread group object
  static oop          _system_thread_group;           // Reference to the system thread group object

  static objArrayOop  _the_empty_class_klass_array;   // Canonicalized obj array of type java.lang.Class
  static oop          _the_null_sentinel;             // A unique object pointer unused except as a sentinel for null.
  static oop          _the_null_string;               // A cache of "null" as a Java string
  static oop          _the_min_jint_string;           // A cache of "-2147483648" as a Java string

  static LatestMethodCache* _finalizer_register_cache;          // static method for registering finalizable objects
  static LatestMethodCache* _loader_addClass_cache;             // method for registering loaded classes in class loader vector
  static LatestMethodCache* _pd_implies_cache;                  // method for checking protection domain attributes
  static LatestMethodCache* _throw_illegal_access_error_cache;  // Unsafe.throwIllegalAccessError() method
  static LatestMethodCache* _do_stack_walk_cache;               // method for stack walker callback

  // preallocated error objects (no backtrace)
  static oop          _out_of_memory_error_java_heap;
  static oop          _out_of_memory_error_metaspace;
  static oop          _out_of_memory_error_class_metaspace;
  static oop          _out_of_memory_error_array_size;
  static oop          _out_of_memory_error_gc_overhead_limit;
  static oop          _out_of_memory_error_realloc_objects;

  // preallocated cause message for delayed StackOverflowError
  static oop          _delayed_stack_overflow_error_message;

  static Array<int>*       _the_empty_int_array;     // Canonicalized int array
  static Array<u2>*        _the_empty_short_array;   // Canonicalized short array
  static Array<Klass*>*    _the_empty_klass_array;   // Canonicalized klass obj array
  static Array<Method*>*   _the_empty_method_array;  // Canonicalized method obj array

  static Array<Klass*>*    _the_array_interfaces_array;

  // array of preallocated error objects with backtrace
  static objArrayOop   _preallocated_out_of_memory_error_array;

  // number of preallocated error objects available for use
  static volatile jint _preallocated_out_of_memory_error_avail_count;

  static oop          _null_ptr_exception_instance;   // preallocated exception object
  static oop          _arithmetic_exception_instance; // preallocated exception object
  static oop          _virtual_machine_error_instance; // preallocated exception object
  // The object used as an exception dummy when exceptions are thrown for
  // the vm thread.
  static oop          _vm_exception;

  // References waiting to be transferred to the ReferenceHandler
  static oop          _reference_pending_list;

  // The particular choice of collected heap.
  static CollectedHeap* _collectedHeap;

  static intptr_t _non_oop_bits;

  // For UseCompressedOops.
  static struct NarrowPtrStruct _narrow_oop;
  // For UseCompressedClassPointers.
  static struct NarrowPtrStruct _narrow_klass;
  static address _narrow_ptrs_base;

  // array of dummy objects used with +FullGCAlot
  debug_only(static objArrayOop _fullgc_alot_dummy_array;)
  // index of next entry to clear
  debug_only(static int         _fullgc_alot_dummy_next;)

  // Compiler/dispatch support
  static int  _base_vtable_size;        // Java vtbl size of klass Object (in words)

  // Initialization
  static bool _bootstrapping;           // true during genesis
  static bool _module_initialized;      // true after call_initPhase2 called
  static bool _fully_initialized;       // true after universe_init and initialize_vtables called

  // the array of preallocated errors with backtraces
  static objArrayOop  preallocated_out_of_memory_errors() { return _preallocated_out_of_memory_error_array; }

  // generate an out of memory error; if possible using an error with preallocated backtrace;
  // otherwise return the given default error.
  static oop        gen_out_of_memory_error(oop default_err);

  // Historic gc information
  static size_t _heap_capacity_at_last_gc;
  static size_t _heap_used_at_last_gc;

  static CollectedHeap* create_heap();
  static jint initialize_heap();
  static void initialize_basic_type_mirrors(TRAPS);
  static void fixup_mirrors(TRAPS);

  static void reinitialize_vtable_of(Klass* k, TRAPS);
  static void reinitialize_itables(TRAPS);
  static void compute_base_vtable_size();             // compute vtable size of class Object

  static void genesis(TRAPS);                         // Create the initial world

  // Mirrors for primitive classes (created eagerly)
  static oop check_mirror(oop m) {
    assert(m != NULL, "mirror not initialized");
    return m;
  }

  static void set_narrow_oop_base(address base) {
    assert(UseCompressedOops, "no compressed oops?");
    _narrow_oop._base = base;
  }
  static void set_narrow_klass_base(address base) {
    assert(UseCompressedClassPointers, "no compressed klass ptrs?");
    _narrow_klass._base = base;
  }
  static void set_narrow_oop_use_implicit_null_checks(bool use) {
    assert(UseCompressedOops, "no compressed ptrs?");
    _narrow_oop._use_implicit_null_checks = use;
  }

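  // Illustrative only: a hedged sketch of the decode arithmetic implied by
  // _narrow_oop above (the production decoders live elsewhere in the VM, not
  // in this header). A wide oop is reconstructed as base + (narrow << shift),
  // where base is NULL and/or shift is 0 in the zero-based and unscaled modes:
  //
  //   oop example_decode(narrowOop v) {
  //     return (oop)(void*)((uintptr_t)Universe::narrow_oop_base() +
  //                         ((uintptr_t)v << Universe::narrow_oop_shift()));
  //   }
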
  // Debugging
  static int _verify_count;                           // number of verifies done

  // True during call to verify().  Should only be set/cleared in verify().
  static bool _verify_in_progress;
  static long verify_flags;

  static uintptr_t _verify_oop_mask;
  static uintptr_t _verify_oop_bits;

  static void calculate_verify_data(HeapWord* low_boundary, HeapWord* high_boundary) PRODUCT_RETURN;
  static void compute_verify_oop_data();

 public:
  // Known classes in the VM
  static Klass* boolArrayKlassObj()                 { return _boolArrayKlassObj;   }
  static Klass* byteArrayKlassObj()                 { return _byteArrayKlassObj;   }
  static Klass* charArrayKlassObj()                 { return _charArrayKlassObj;   }
  static Klass* intArrayKlassObj()                  { return _intArrayKlassObj;    }
  static Klass* shortArrayKlassObj()                { return _shortArrayKlassObj;  }
  static Klass* longArrayKlassObj()                 { return _longArrayKlassObj;   }
  static Klass* singleArrayKlassObj()               { return _singleArrayKlassObj; }
  static Klass* doubleArrayKlassObj()               { return _doubleArrayKlassObj; }

  static Klass* objectArrayKlassObj() {
    return _objectArrayKlassObj;
  }

  static Klass* typeArrayKlassObj(BasicType t) {
    assert((uint)t < T_VOID+1, "range check for type: %s", type2name(t));
    assert(_typeArrayKlassObjs[t] != NULL, "domain check");
    return _typeArrayKlassObjs[t];
  }

  // Known objects in the VM
  static oop int_mirror()                   { return check_mirror(_int_mirror);    }
  static oop float_mirror()                 { return check_mirror(_float_mirror);  }
  static oop double_mirror()                { return check_mirror(_double_mirror); }
  static oop byte_mirror()                  { return check_mirror(_byte_mirror);   }
  static oop bool_mirror()                  { return check_mirror(_bool_mirror);   }
  static oop char_mirror()                  { return check_mirror(_char_mirror);   }
  static oop long_mirror()                  { return check_mirror(_long_mirror);   }
  static oop short_mirror()                 { return check_mirror(_short_mirror);  }
  static oop void_mirror()                  { return check_mirror(_void_mirror);   }

  static void set_int_mirror(oop m)         { _int_mirror = m;    }
  static void set_float_mirror(oop m)       { _float_mirror = m;  }
  static void set_double_mirror(oop m)      { _double_mirror = m; }
  static void set_byte_mirror(oop m)        { _byte_mirror = m;   }
  static void set_bool_mirror(oop m)        { _bool_mirror = m;   }
  static void set_char_mirror(oop m)        { _char_mirror = m;   }
  static void set_long_mirror(oop m)        { _long_mirror = m;   }
  static void set_short_mirror(oop m)       { _short_mirror = m;  }
  static void set_void_mirror(oop m)        { _void_mirror = m;   }

  // table of the same mirrors, indexed by BasicType
  static oop _mirrors[T_VOID+1];

  static oop java_mirror(BasicType t) {
    assert((uint)t < T_VOID+1, "range check");
    return check_mirror(_mirrors[t]);
  }
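  // Illustrative only: a hedged sketch pairing a BasicType with its canonical
  // array klass and its primitive mirror via the accessors declared above
  // (the local names are made up for the example):
  //
  //   Klass* int_array_klass = Universe::typeArrayKlassObj(T_INT); // klass of int[]
  //   oop    int_class       = Universe::java_mirror(T_INT);       // java.lang.Class for 'int'
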
  static oop          main_thread_group()              { return _main_thread_group; }
  static void     set_main_thread_group(oop group)     { _main_thread_group = group; }

  static oop          system_thread_group()            { return _system_thread_group; }
  static void     set_system_thread_group(oop group)   { _system_thread_group = group; }

  static objArrayOop    the_empty_class_klass_array()  { return _the_empty_class_klass_array; }
  static Array<Klass*>* the_array_interfaces_array()   { return _the_array_interfaces_array;  }
  static oop            the_null_string()              { return _the_null_string;             }
  static oop            the_min_jint_string()          { return _the_min_jint_string;         }

  static Method*      finalizer_register_method()      { return _finalizer_register_cache->get_method(); }
  static Method*      loader_addClass_method()         { return _loader_addClass_cache->get_method(); }

  static Method*      protection_domain_implies_method() { return _pd_implies_cache->get_method(); }
  static Method*      throw_illegal_access_error()     { return _throw_illegal_access_error_cache->get_method(); }

  static Method*      do_stack_walk_method()           { return _do_stack_walk_cache->get_method(); }

  static oop          the_null_sentinel()              { return _the_null_sentinel; }
  static address      the_null_sentinel_addr()         { return (address) &_the_null_sentinel; }

  // Function to initialize these
  static void initialize_known_methods(TRAPS);

  static oop          null_ptr_exception_instance()    { return _null_ptr_exception_instance;   }
  static oop          arithmetic_exception_instance()  { return _arithmetic_exception_instance; }
  static oop          virtual_machine_error_instance() { return _virtual_machine_error_instance; }
  static oop          vm_exception()                   { return _vm_exception; }

  // Reference pending list manipulation.  Access is protected by
  // Heap_lock.  The getter, setter and predicate require that the caller
  // owns the lock.  Swap is used by parallel non-concurrent reference
  // processing threads, where some higher level controller owns
  // Heap_lock, so it requires that the lock be held, but not necessarily
  // by the current thread.
  static oop          reference_pending_list();
  static void         set_reference_pending_list(oop list);
  static bool         has_reference_pending_list();
  static oop          swap_reference_pending_list(oop list);

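  // Illustrative only: a hedged sketch of the locking discipline described
  // above (the real users are the GC reference processors and the
  // ReferenceHandler support code, not this header):
  //
  //   {
  //     MutexLocker ml(Heap_lock);                   // getter/setter require Heap_lock
  //     if (Universe::has_reference_pending_list()) {
  //       oop head = Universe::reference_pending_list();
  //       ...
  //     }
  //   }
  //
  //   // A parallel reference-processing worker may instead call
  //   // swap_reference_pending_list(), relying on a higher-level controller
  //   // to hold Heap_lock on its behalf.
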
  static Array<int>*       the_empty_int_array()    { return _the_empty_int_array;    }
  static Array<u2>*        the_empty_short_array()  { return _the_empty_short_array;  }
  static Array<Method*>*   the_empty_method_array() { return _the_empty_method_array; }
  static Array<Klass*>*    the_empty_klass_array()  { return _the_empty_klass_array;  }

  // OutOfMemoryError support. Returns an error with the required message. The
  // returned error may or may not have a backtrace. If the error has a
  // backtrace then the stack trace is already filled in.
  static oop out_of_memory_error_java_heap()          { return gen_out_of_memory_error(_out_of_memory_error_java_heap);         }
  static oop out_of_memory_error_metaspace()          { return gen_out_of_memory_error(_out_of_memory_error_metaspace);         }
  static oop out_of_memory_error_class_metaspace()    { return gen_out_of_memory_error(_out_of_memory_error_class_metaspace);   }
  static oop out_of_memory_error_array_size()         { return gen_out_of_memory_error(_out_of_memory_error_array_size);        }
  static oop out_of_memory_error_gc_overhead_limit()  { return gen_out_of_memory_error(_out_of_memory_error_gc_overhead_limit); }
  static oop out_of_memory_error_realloc_objects()    { return gen_out_of_memory_error(_out_of_memory_error_realloc_objects);   }
  static oop delayed_stack_overflow_error_message()   { return _delayed_stack_overflow_error_message; }

  // Accessors needed for fast allocation
  static Klass** boolArrayKlassObj_addr()             { return &_boolArrayKlassObj;   }
  static Klass** byteArrayKlassObj_addr()             { return &_byteArrayKlassObj;   }
  static Klass** charArrayKlassObj_addr()             { return &_charArrayKlassObj;   }
  static Klass** intArrayKlassObj_addr()              { return &_intArrayKlassObj;    }
  static Klass** shortArrayKlassObj_addr()            { return &_shortArrayKlassObj;  }
  static Klass** longArrayKlassObj_addr()             { return &_longArrayKlassObj;   }
  static Klass** singleArrayKlassObj_addr()           { return &_singleArrayKlassObj; }
  static Klass** doubleArrayKlassObj_addr()           { return &_doubleArrayKlassObj; }
  static Klass** objectArrayKlassObj_addr()           { return &_objectArrayKlassObj; }

  // The particular choice of collected heap.
  static CollectedHeap* heap() { return _collectedHeap; }

  // For UseCompressedOops
  // Narrow Oop encoding mode:
  // 0 - Use 32-bit oops without encoding when
  //     NarrowOopHeapBaseMin + heap_size < 4Gb
  // 1 - Use zero based compressed oops with encoding when
  //     NarrowOopHeapBaseMin + heap_size < 32Gb
  // 2 - Use compressed oops with disjoint heap base if
  //     base is 32G-aligned and base > 0. This allows certain
  //     optimizations in encoding/decoding.
  //     Disjoint: Bits used in the base are disjoint from bits used
  //     for oops ==> oop = (cOop << 3) | base.  One can split the bits
  //     of an oop into a base part and a compressed-oop part.
  // 3 - Use compressed oops with heap base + encoding.
  enum NARROW_OOP_MODE {
    UnscaledNarrowOop     = 0,
    ZeroBasedNarrowOop    = 1,
    DisjointBaseNarrowOop = 2,
    HeapBasedNarrowOop    = 3,
    AnyNarrowOopMode      = 4
  };
  static NARROW_OOP_MODE narrow_oop_mode();
  static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
  static char*    preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
  static char*    preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
  static address  narrow_oop_base()                    { return _narrow_oop._base; }
  // Test whether bits of addr and possible offsets into the heap overlap.
  static bool     is_disjoint_heap_base_address(address addr) {
    return (((uint64_t)(intptr_t)addr) &
            (((uint64_t)UCONST64(0xFFFFffffFFFFffff)) >> (32-LogMinObjAlignmentInBytes))) == 0;
  }
  // Check for disjoint base compressed oops.
  static bool     narrow_oop_base_disjoint() {
    return _narrow_oop._base != NULL && is_disjoint_heap_base_address(_narrow_oop._base);
  }
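  // Illustrative only: a hedged, worked example of the disjoint-base test
  // above. With LogMinObjAlignmentInBytes == 3 the mask is
  // 0xFFFFffffFFFFffff >> 29 == 0x00000007ffffFFFF, so a 32G-aligned base
  // such as 0x0000000800000000 shares no bits with any (cOop << 3) value,
  // and decoding can use a plain OR instead of an add. The base value is an
  // assumption made up for the example:
  //
  //   address example_base = (address)CONST64(0x0000000800000000);
  //   assert(Universe::is_disjoint_heap_base_address(example_base),
  //          "32G-aligned base is disjoint from the shifted-oop bits");
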
  // Check for real heap-based compressed oops.
  // We must subtract the base as the bits overlap.
  // If we negate the above function, we also get the unscaled and zero-based cases.
  static bool     narrow_oop_base_overlaps() {
    return _narrow_oop._base != NULL && !is_disjoint_heap_base_address(_narrow_oop._base);
  }
  static bool  is_narrow_oop_base(void* addr)          { return (narrow_oop_base() == (address)addr); }
  static int      narrow_oop_shift()                   { return _narrow_oop._shift; }
  static bool     narrow_oop_use_implicit_null_checks() { return _narrow_oop._use_implicit_null_checks; }

  // For UseCompressedClassPointers
  static address  narrow_klass_base()                  { return _narrow_klass._base; }
  static bool  is_narrow_klass_base(void* addr)        { return (narrow_klass_base() == (address)addr); }
  static int      narrow_klass_shift()                 { return _narrow_klass._shift; }
  static bool     narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; }

  static address* narrow_ptrs_base_addr()              { return &_narrow_ptrs_base; }
  static void set_narrow_ptrs_base(address a)          { _narrow_ptrs_base = a; }
  static address  narrow_ptrs_base()                   { return _narrow_ptrs_base; }

  static void print_compressed_oops_mode(outputStream* st);

  // this is set in vm_version on sparc (and then reset in universe afaict)
  static void     set_narrow_oop_shift(int shift) {
    _narrow_oop._shift = shift;
  }

  static void     set_narrow_klass_shift(int shift) {
    assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs");
    _narrow_klass._shift = shift;
  }

  // Reserve Java heap and determine CompressedOops mode
  static ReservedSpace reserve_heap(size_t heap_size, size_t alignment);

  // Historic gc information
  static size_t get_heap_capacity_at_last_gc()         { return _heap_capacity_at_last_gc; }
  static size_t get_heap_free_at_last_gc()             { return _heap_capacity_at_last_gc - _heap_used_at_last_gc; }
  static size_t get_heap_used_at_last_gc()             { return _heap_used_at_last_gc; }
  static void update_heap_info_at_gc();

  // Testers
  static bool is_bootstrapping()                       { return _bootstrapping; }
  static bool is_module_initialized()                  { return _module_initialized; }
  static bool is_fully_initialized()                   { return _fully_initialized; }

  static inline bool element_type_should_be_aligned(BasicType type);
  static inline bool field_type_should_be_aligned(BasicType type);
  static bool        on_page_boundary(void* addr);
  static bool        should_fill_in_stack_trace(Handle throwable);
  static void check_alignment(uintx size, uintx alignment, const char* name);

  // Iteration

  // Apply "f" to the addresses of all the direct heap pointers maintained
  // as static fields of "Universe".
  static void oops_do(OopClosure* f, bool do_all = false);

  // CDS support
  static void serialize(SerializeClosure* f, bool do_all = false);

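  // Illustrative only: a hedged sketch of how a GC-side caller would walk the
  // static oop roots with oops_do() above. MyRootClosure is a made-up name;
  // any OopClosure subclass would do:
  //
  //   class MyRootClosure : public OopClosure {
  //    public:
  //     virtual void do_oop(oop* p)       { /* visit the wide oop slot */ }
  //     virtual void do_oop(narrowOop* p) { /* visit the compressed slot */ }
  //   };
  //
  //   MyRootClosure cl;
  //   Universe::oops_do(&cl);  // visits e.g. &_main_thread_group, &_the_null_string, ...
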
  // Apply "f" to all klasses for basic types (classes not present in
  // SystemDictionary).
  static void basic_type_classes_do(void f(Klass*));
  static void metaspace_pointers_do(MetaspaceClosure* it);

  // Debugging
  enum VERIFY_FLAGS {
    Verify_Threads = 1,
    Verify_Heap = 2,
    Verify_SymbolTable = 4,
    Verify_StringTable = 8,
    Verify_CodeCache = 16,
    Verify_SystemDictionary = 32,
    Verify_ClassLoaderDataGraph = 64,
    Verify_MetaspaceAux = 128,
    Verify_JNIHandles = 256,
    Verify_CodeCacheOops = 512,
    Verify_All = -1
  };
  static void initialize_verify_flags();
  static bool should_verify_subset(uint subset);
  static bool verify_in_progress() { return _verify_in_progress; }
  static void verify(VerifyOption option, const char* prefix);
  static void verify(const char* prefix) {
    verify(VerifyOption_Default, prefix);
  }
  static void verify() {
    verify("");
  }

  static int  verify_count()       { return _verify_count; }
  static void print_on(outputStream* st);
  static void print_heap_at_SIGBREAK();
  static void print_heap_before_gc();
  static void print_heap_after_gc();

  // Change the number of dummy objects kept reachable by the full gc dummy
  // array; this should trigger relocation in a sliding compaction collector.
  debug_only(static bool release_fullgc_alot_dummy();)
  // The non-oop pattern (see compiledIC.hpp, etc)
  static void*   non_oop_word();

  // Oop verification (see MacroAssembler::verify_oop)
  static uintptr_t verify_oop_mask()          PRODUCT_RETURN0;
  static uintptr_t verify_oop_bits()          PRODUCT_RETURN0;
  static uintptr_t verify_mark_bits()         PRODUCT_RETURN0;
  static uintptr_t verify_mark_mask()         PRODUCT_RETURN0;

  // Compiler support
  static int base_vtable_size()               { return _base_vtable_size; }
};

class DeferredObjAllocEvent : public CHeapObj<mtInternal> {
  private:
    oop    _oop;
    size_t _bytesize;
    jint   _arena_id;

  public:
    DeferredObjAllocEvent(const oop o, const size_t s, const jint id) {
      _oop      = o;
      _bytesize = s;
      _arena_id = id;
    }

    ~DeferredObjAllocEvent() {
    }

    jint   arena_id() { return _arena_id; }
    size_t bytesize() { return _bytesize; }
    oop    get_oop()  { return _oop; }
};

#endif // SHARE_VM_MEMORY_UNIVERSE_HPP