/*
 * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_UNIVERSE_HPP
#define SHARE_VM_MEMORY_UNIVERSE_HPP

#include "runtime/handles.hpp"
#include "utilities/array.hpp"
#include "utilities/growableArray.hpp"

// Universe is a name space holding known system classes and objects in the VM.
//
// Loaded classes are accessible through the SystemDictionary.
//
// The object heap is allocated and accessed through Universe, and various allocation
// support is provided. Allocation by the interpreter and compiled code is done inline
// and bails out to Scavenge::invoke_and_allocate.

class CollectedHeap;
class DeferredObjAllocEvent;


// A helper class for caching a Method* when the user of the cache
// only cares about the latest version of the Method*. This cache safely
// interacts with the RedefineClasses API.

class LatestMethodCache : public CHeapObj<mtClass> {
  // We save the Klass* and the idnum of Method* in order to get
  // the current cached Method*.
 private:
  Klass*                _klass;
  int                   _method_idnum;

 public:
  LatestMethodCache()   { _klass = NULL; _method_idnum = -1; }
  ~LatestMethodCache()  { _klass = NULL; _method_idnum = -1; }

  void   init(Klass* k, Method* m);
  Klass* klass() const           { return _klass; }
  int    method_idnum() const    { return _method_idnum; }

  Method* get_method();

  // Enhanced Class Redefinition support
  void classes_do(void f(Klass*)) {
    f(_klass);
  }
  // CDS support. Replace the klass in this cache with the archived version;
  // this could be used for Enhanced Class Redefinition as well.
  void serialize(SerializeClosure* f) {
    f->do_ptr((void**)&_klass);
  }
};
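
// Typical use of LatestMethodCache (a hypothetical sketch, not a call site
// from this file): the cache is initialized once with a known method during
// bootstrap, and get_method() then resolves the current version via the
// saved idnum, so callers keep working after RedefineClasses replaces the
// method. Here 'ik' and 'm' stand for a Klass* and Method* obtained
// elsewhere:
//
//   LatestMethodCache* cache = new LatestMethodCache();
//   cache->init(ik, m);                   // remember klass + method idnum
//   Method* latest = cache->get_method(); // always the latest version of m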


// For UseCompressedOops.
struct NarrowPtrStruct {
  // Base address for oop-within-java-object materialization.
  // NULL if using wide oops or zero based narrow oops.
  address _base;
  // Number of shift bits for encoding/decoding narrow ptrs.
  // 0 if using wide ptrs or zero based unscaled narrow ptrs,
  // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
  int     _shift;
  // Generate code with implicit null checks for narrow ptrs.
  bool    _use_implicit_null_checks;
};
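
// Decoding a narrow pointer with these parameters, as a sketch of the
// arithmetic (see the NARROW_OOP_MODE comment further below for the mode
// variants):
//
//   oop decode(narrowOop v) {
//     return (oop)(void*)(_base + ((uintptr_t)v << _shift));
//   }
//
// With _base == NULL and _shift == 0, the narrow value is the address itself.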

enum VerifyOption {
  VerifyOption_Default = 0,

  // G1
  VerifyOption_G1UsePrevMarking = VerifyOption_Default,
  VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
  VerifyOption_G1UseMarkWord    = VerifyOption_G1UseNextMarking + 1
};

class Universe: AllStatic {
  // Ugh.  Universe is much too friendly.
  friend class MarkSweep;
  friend class oopDesc;
  friend class ClassLoader;
  friend class SystemDictionary;
  friend class ReservedHeapSpace;
  friend class VMStructs;
  friend class VM_PopulateDumpSharedSpace;
  friend class Metaspace;

  friend jint  universe_init();
  friend void  universe2_init();
  friend bool  universe_post_init();
  friend void  universe_post_module_init();

 private:
  // Known classes in the VM
  static Klass* _boolArrayKlassObj;
  static Klass* _byteArrayKlassObj;
  static Klass* _charArrayKlassObj;
  static Klass* _intArrayKlassObj;
  static Klass* _shortArrayKlassObj;
  static Klass* _longArrayKlassObj;
  static Klass* _singleArrayKlassObj;
  static Klass* _doubleArrayKlassObj;
  static Klass* _typeArrayKlassObjs[T_VOID+1];

  static Klass* _objectArrayKlassObj;

  // Known objects in the VM

  // Primitive objects
  static oop _int_mirror;
  static oop _float_mirror;
  static oop _double_mirror;
  static oop _byte_mirror;
  static oop _bool_mirror;
  static oop _char_mirror;
  static oop _long_mirror;
  static oop _short_mirror;
  static oop _void_mirror;

  static oop          _main_thread_group;             // Reference to the main thread group object
  static oop          _system_thread_group;           // Reference to the system thread group object

  static objArrayOop  _the_empty_class_klass_array;   // Canonicalized obj array of type java.lang.Class
  static oop          _the_null_string;               // A cache of "null" as a Java string
  static oop          _the_min_jint_string;           // A cache of "-2147483648" as a Java string
  static LatestMethodCache* _finalizer_register_cache; // static method for registering finalizable objects
  static LatestMethodCache* _loader_addClass_cache;    // method for registering loaded classes in class loader vector
  static LatestMethodCache* _pd_implies_cache;         // method for checking protection domain attributes
  static LatestMethodCache* _throw_illegal_access_error_cache; // Unsafe.throwIllegalAccessError() method
  static LatestMethodCache* _do_stack_walk_cache;      // method for stack walker callback

  // preallocated error objects (no backtrace)
  static oop          _out_of_memory_error_java_heap;
  static oop          _out_of_memory_error_metaspace;
  static oop          _out_of_memory_error_class_metaspace;
  static oop          _out_of_memory_error_array_size;
  static oop          _out_of_memory_error_gc_overhead_limit;
  static oop          _out_of_memory_error_realloc_objects;

  // preallocated cause message for delayed StackOverflowError
  static oop          _delayed_stack_overflow_error_message;

  static Array<int>*       _the_empty_int_array;      // Canonicalized int array
  static Array<u2>*        _the_empty_short_array;    // Canonicalized short array
  static Array<Klass*>*    _the_empty_klass_array;    // Canonicalized klass obj array
  static Array<Method*>*   _the_empty_method_array;   // Canonicalized method obj array

  static Array<Klass*>*    _the_array_interfaces_array;

  // array of preallocated error objects with backtrace
  static objArrayOop   _preallocated_out_of_memory_error_array;

  // number of preallocated error objects available for use
  static volatile jint _preallocated_out_of_memory_error_avail_count;

  static oop          _null_ptr_exception_instance;    // preallocated exception object
  static oop          _arithmetic_exception_instance;  // preallocated exception object
  static oop          _virtual_machine_error_instance; // preallocated exception object
  // The object used as an exception dummy when exceptions are thrown for
  // the vm thread.
  static oop          _vm_exception;

  static oop          _allocation_context_notification_obj;

  // References waiting to be transferred to the ReferenceHandler
  static oop          _reference_pending_list;

  // The particular choice of collected heap.
  static CollectedHeap* _collectedHeap;

  static intptr_t _non_oop_bits;

  // For UseCompressedOops.
  static struct NarrowPtrStruct _narrow_oop;
  // For UseCompressedClassPointers.
  static struct NarrowPtrStruct _narrow_klass;
  static address _narrow_ptrs_base;

  // array of dummy objects used with +FullGCAlot
  debug_only(static objArrayOop _fullgc_alot_dummy_array;)
  // index of next entry to clear
  debug_only(static int         _fullgc_alot_dummy_next;)

  // Compiler/dispatch support
  static int  _base_vtable_size;                      // Java vtbl size of klass Object (in words)

  // Initialization
  static bool _bootstrapping;                         // true during genesis
  static bool _module_initialized;                    // true after call_initPhase2 has been called
  static bool _fully_initialized;                     // true after universe_init and initialize_vtables have been called

  // the array of preallocated errors with backtraces
  static objArrayOop  preallocated_out_of_memory_errors() { return _preallocated_out_of_memory_error_array; }

  // Generate an out-of-memory error; if possible use an error with a
  // preallocated backtrace, otherwise return the given default error.
  static oop        gen_out_of_memory_error(oop default_err);

  // Historic gc information
  static size_t _heap_capacity_at_last_gc;
  static size_t _heap_used_at_last_gc;

  template <class Heap, class Policy> static CollectedHeap* create_heap_with_policy();
  static CollectedHeap* create_heap();
  static CollectedHeap* create_heap_ext();
  static jint initialize_heap();
  static void initialize_basic_type_mirrors(TRAPS);
  static void fixup_mirrors(TRAPS);

  static void reinitialize_vtable_of(KlassHandle h_k, TRAPS);
  static void reinitialize_itables(TRAPS);
  static void compute_base_vtable_size();             // compute vtable size of class Object

  static void genesis(TRAPS);                         // Create the initial world

  // Mirrors for primitive classes (created eagerly)
  static oop check_mirror(oop m) {
    assert(m != NULL, "mirror not initialized");
    return m;
  }

  static void     set_narrow_oop_base(address base) {
    assert(UseCompressedOops, "no compressed oops?");
    _narrow_oop._base = base;
  }
  static void     set_narrow_klass_base(address base) {
    assert(UseCompressedClassPointers, "no compressed klass ptrs?");
    _narrow_klass._base = base;
  }
  static void     set_narrow_oop_use_implicit_null_checks(bool use) {
    assert(UseCompressedOops, "no compressed ptrs?");
    _narrow_oop._use_implicit_null_checks = use;
  }

  // Debugging
  static int _verify_count;                           // number of verifies done

  // True during call to verify().  Should only be set/cleared in verify().
  static bool _verify_in_progress;
  static long verify_flags;

  static uintptr_t _verify_oop_mask;
  static uintptr_t _verify_oop_bits;

  static void calculate_verify_data(HeapWord* low_boundary, HeapWord* high_boundary) PRODUCT_RETURN;
  static void compute_verify_oop_data();

 public:
  // Known classes in the VM
  static Klass* boolArrayKlassObj()                 { return _boolArrayKlassObj;   }
  static Klass* byteArrayKlassObj()                 { return _byteArrayKlassObj;   }
  static Klass* charArrayKlassObj()                 { return _charArrayKlassObj;   }
  static Klass* intArrayKlassObj()                  { return _intArrayKlassObj;    }
  static Klass* shortArrayKlassObj()                { return _shortArrayKlassObj;  }
  static Klass* longArrayKlassObj()                 { return _longArrayKlassObj;   }
  static Klass* singleArrayKlassObj()               { return _singleArrayKlassObj; }
  static Klass* doubleArrayKlassObj()               { return _doubleArrayKlassObj; }

  static Klass* objectArrayKlassObj() {
    return _objectArrayKlassObj;
  }

  static Klass* typeArrayKlassObj(BasicType t) {
    assert((uint)t < T_VOID+1, "range check for type: %s", type2name(t));
    assert(_typeArrayKlassObjs[t] != NULL, "domain check");
    return _typeArrayKlassObjs[t];
  }

  // Known objects in the VM
  static oop int_mirror()                   { return check_mirror(_int_mirror);    }
  static oop float_mirror()                 { return check_mirror(_float_mirror);  }
  static oop double_mirror()                { return check_mirror(_double_mirror); }
  static oop byte_mirror()                  { return check_mirror(_byte_mirror);   }
  static oop bool_mirror()                  { return check_mirror(_bool_mirror);   }
  static oop char_mirror()                  { return check_mirror(_char_mirror);   }
  static oop long_mirror()                  { return check_mirror(_long_mirror);   }
  static oop short_mirror()                 { return check_mirror(_short_mirror);  }
  static oop void_mirror()                  { return check_mirror(_void_mirror);   }

  // table of same
  static oop _mirrors[T_VOID+1];

  static oop java_mirror(BasicType t) {
    assert((uint)t < T_VOID+1, "range check");
    return check_mirror(_mirrors[t]);
  }
  static oop      main_thread_group()                 { return _main_thread_group; }
  static void set_main_thread_group(oop group)        { _main_thread_group = group; }

  static oop      system_thread_group()               { return _system_thread_group; }
  static void set_system_thread_group(oop group)      { _system_thread_group = group; }

  static objArrayOop  the_empty_class_klass_array()   { return _the_empty_class_klass_array; }
  static Array<Klass*>* the_array_interfaces_array()  { return _the_array_interfaces_array; }
  static oop          the_null_string()               { return _the_null_string; }
  static oop          the_min_jint_string()           { return _the_min_jint_string; }

  static Method*      finalizer_register_method()     { return _finalizer_register_cache->get_method(); }
  static Method*      loader_addClass_method()        { return _loader_addClass_cache->get_method(); }

  static Method*      protection_domain_implies_method() { return _pd_implies_cache->get_method(); }
  static Method*      throw_illegal_access_error()    { return _throw_illegal_access_error_cache->get_method(); }

  static Method*      do_stack_walk_method()          { return _do_stack_walk_cache->get_method(); }

  // Function to initialize these
  static void initialize_known_methods(TRAPS);

  static oop          null_ptr_exception_instance()   { return _null_ptr_exception_instance; }
  static oop          arithmetic_exception_instance() { return _arithmetic_exception_instance; }
  static oop          virtual_machine_error_instance() { return _virtual_machine_error_instance; }
  static oop          vm_exception()                  { return _vm_exception; }

  static inline oop   allocation_context_notification_obj();
  static inline void  set_allocation_context_notification_obj(oop obj);

  // Reference pending list manipulation.  Access is protected by
  // Heap_lock.  The getter, setter and predicate require that the caller
  // owns the lock.  Swap is used by parallel non-concurrent reference
  // processing threads, where some higher-level controller owns
  // Heap_lock, so it requires that the lock is held, but not necessarily by
  // the current thread.
  static oop          reference_pending_list();
  static void         set_reference_pending_list(oop list);
  static bool         has_reference_pending_list();
  static oop          swap_reference_pending_list(oop list);
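
  // A hypothetical caller sketch (not a call site from this file), assuming
  // the usual MutexLocker idiom from runtime/mutexLocker.hpp; the swap with
  // NULL detaches the whole pending list atomically under the lock:
  //
  //   {
  //     MutexLocker ml(Heap_lock);
  //     if (Universe::has_reference_pending_list()) {
  //       oop pending = Universe::swap_reference_pending_list(NULL);
  //       // ... hand 'pending' over to the ReferenceHandler ...
  //     }
  //   }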

  static Array<int>*       the_empty_int_array()    { return _the_empty_int_array; }
  static Array<u2>*        the_empty_short_array()  { return _the_empty_short_array; }
  static Array<Method*>*   the_empty_method_array() { return _the_empty_method_array; }
  static Array<Klass*>*    the_empty_klass_array()  { return _the_empty_klass_array; }

  // OutOfMemoryError support. Returns an error with the required message. The returned
  // error may or may not have a backtrace. If the error has a backtrace then the stack
  // trace is already filled in.
  static oop out_of_memory_error_java_heap()          { return gen_out_of_memory_error(_out_of_memory_error_java_heap); }
  static oop out_of_memory_error_metaspace()          { return gen_out_of_memory_error(_out_of_memory_error_metaspace); }
  static oop out_of_memory_error_class_metaspace()    { return gen_out_of_memory_error(_out_of_memory_error_class_metaspace); }
  static oop out_of_memory_error_array_size()         { return gen_out_of_memory_error(_out_of_memory_error_array_size); }
  static oop out_of_memory_error_gc_overhead_limit()  { return gen_out_of_memory_error(_out_of_memory_error_gc_overhead_limit); }
  static oop out_of_memory_error_realloc_objects()    { return gen_out_of_memory_error(_out_of_memory_error_realloc_objects); }
  static oop delayed_stack_overflow_error_message()   { return _delayed_stack_overflow_error_message; }
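
  // A hypothetical throw site (assuming the THROW_OOP macro from
  // utilities/exceptions.hpp); a preallocated error lets the VM raise
  // OutOfMemoryError even when the heap is too exhausted to allocate a
  // fresh exception object:
  //
  //   if (result == NULL) {
  //     THROW_OOP(Universe::out_of_memory_error_java_heap());
  //   }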

  // Accessors needed for fast allocation
  static Klass** boolArrayKlassObj_addr()           { return &_boolArrayKlassObj;   }
  static Klass** byteArrayKlassObj_addr()           { return &_byteArrayKlassObj;   }
  static Klass** charArrayKlassObj_addr()           { return &_charArrayKlassObj;   }
  static Klass** intArrayKlassObj_addr()            { return &_intArrayKlassObj;    }
  static Klass** shortArrayKlassObj_addr()          { return &_shortArrayKlassObj;  }
  static Klass** longArrayKlassObj_addr()           { return &_longArrayKlassObj;   }
  static Klass** singleArrayKlassObj_addr()         { return &_singleArrayKlassObj; }
  static Klass** doubleArrayKlassObj_addr()         { return &_doubleArrayKlassObj; }
  static Klass** objectArrayKlassObj_addr()         { return &_objectArrayKlassObj; }

  // The particular choice of collected heap.
  static CollectedHeap* heap() { return _collectedHeap; }

  // For UseCompressedOops
  // Narrow Oop encoding mode:
  // 0 - Use 32-bit oops without encoding when
  //     NarrowOopHeapBaseMin + heap_size < 4Gb
  // 1 - Use zero-based compressed oops with encoding when
  //     NarrowOopHeapBaseMin + heap_size < 32Gb
  // 2 - Use compressed oops with disjoint heap base if
  //     base is 32G-aligned and base > 0. This allows certain
  //     optimizations in encoding/decoding.
  //     Disjoint: Bits used in the base are disjoint from the bits used
  //     for oops ==> oop = (cOop << 3) | base. The bits of an oop can
  //     thus be split into a base part and a compressed-oop part.
  // 3 - Use compressed oops with heap base + encoding.
  enum NARROW_OOP_MODE {
    UnscaledNarrowOop  = 0,
    ZeroBasedNarrowOop = 1,
    DisjointBaseNarrowOop = 2,
    HeapBasedNarrowOop = 3,
    AnyNarrowOopMode = 4
  };
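
  // Decoding per mode, as a sketch derived from the comment above (assuming
  // the default 8-byte object alignment, so the shift is 3; cOop is the
  // 32-bit compressed value):
  //
  //   UnscaledNarrowOop:     oop = (address)(uintptr_t)cOop
  //   ZeroBasedNarrowOop:    oop = (address)((uintptr_t)cOop << 3)
  //   DisjointBaseNarrowOop: oop = base | ((uintptr_t)cOop << 3)  // base bits don't overlap
  //   HeapBasedNarrowOop:    oop = base + ((uintptr_t)cOop << 3)  // needs a real add
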
  static NARROW_OOP_MODE narrow_oop_mode();
  static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
  static char*    preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
  static char*    preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
  static address  narrow_oop_base()                  { return _narrow_oop._base; }
  // Test whether bits of addr and possible offsets into the heap overlap.
  static bool     is_disjoint_heap_base_address(address addr) {
    return (((uint64_t)(intptr_t)addr) &
            (((uint64_t)UCONST64(0xFFFFffffFFFFffff)) >> (32-LogMinObjAlignmentInBytes))) == 0;
  }
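
  // Worked example of the mask above, assuming the default
  // LogMinObjAlignmentInBytes == 3: the shift count is 32 - 3 == 29, so the
  // mask is 2^64-1 >> 29 == 2^35 - 1. A heap offset is a 32-bit cOop shifted
  // left by 3, so offsets occupy bits 0..34; the base is disjoint exactly
  // when all of its low 35 bits are zero, i.e. when it is 32G-aligned.
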
  // Check for disjoint base compressed oops.
  static bool     narrow_oop_base_disjoint() {
    return _narrow_oop._base != NULL && is_disjoint_heap_base_address(_narrow_oop._base);
  }
  // Check for real heap-based compressed oops.
  // We must subtract the base as the bits overlap.
  // Negating the function above also covers the unscaled and zero-based modes.
  static bool     narrow_oop_base_overlaps() {
    return _narrow_oop._base != NULL && !is_disjoint_heap_base_address(_narrow_oop._base);
  }
  static bool  is_narrow_oop_base(void* addr)        { return (narrow_oop_base() == (address)addr); }
  static int      narrow_oop_shift()                 { return _narrow_oop._shift; }
  static bool     narrow_oop_use_implicit_null_checks() { return _narrow_oop._use_implicit_null_checks; }

  // For UseCompressedClassPointers
  static address  narrow_klass_base()                { return _narrow_klass._base; }
  static bool  is_narrow_klass_base(void* addr)      { return (narrow_klass_base() == (address)addr); }
  static int      narrow_klass_shift()               { return _narrow_klass._shift; }
  static bool     narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; }

  static address* narrow_ptrs_base_addr()            { return &_narrow_ptrs_base; }
  static void     set_narrow_ptrs_base(address a)    { _narrow_ptrs_base = a; }
  static address  narrow_ptrs_base()                 { return _narrow_ptrs_base; }

  static void     print_compressed_oops_mode(outputStream* st);

  // This is set in vm_version on sparc (and then reset in universe, as far
  // as we can tell).
  static void     set_narrow_oop_shift(int shift) {
    _narrow_oop._shift = shift;
  }

  static void     set_narrow_klass_shift(int shift) {
    assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs");
    _narrow_klass._shift = shift;
  }

  // Reserve Java heap and determine CompressedOops mode
  static ReservedSpace reserve_heap(size_t heap_size, size_t alignment);

  // Historic gc information
  static size_t get_heap_capacity_at_last_gc()        { return _heap_capacity_at_last_gc; }
  static size_t get_heap_free_at_last_gc()            { return _heap_capacity_at_last_gc - _heap_used_at_last_gc; }
  static size_t get_heap_used_at_last_gc()            { return _heap_used_at_last_gc; }
  static void update_heap_info_at_gc();

  // Testers
  static bool is_bootstrapping()                      { return _bootstrapping; }
  static bool is_module_initialized()                 { return _module_initialized; }
  static bool is_fully_initialized()                  { return _fully_initialized; }

  static inline bool element_type_should_be_aligned(BasicType type);
  static inline bool field_type_should_be_aligned(BasicType type);
  static bool        on_page_boundary(void* addr);
  static bool        should_fill_in_stack_trace(Handle throwable);
  static void check_alignment(uintx size, uintx alignment, const char* name);

  // Finalizer support.
  static void run_finalizers_on_exit();

  // Iteration

  // Apply "f" to the addresses of all the direct heap pointers maintained
  // as static fields of "Universe".
  static void oops_do(OopClosure* f, bool do_all = false);

  // CDS support
  static void serialize(SerializeClosure* f, bool do_all = false);

  // Apply "f" to all klasses for basic types (classes not present in
  // SystemDictionary).
  static void basic_type_classes_do(void f(Klass*));

  // For sharing -- fill in a list of known vtable pointers.
  static void init_self_patching_vtbl_list(void** list, int count);

  // Debugging
  enum VERIFY_FLAGS {
    Verify_Threads = 1,
    Verify_Heap = 2,
    Verify_SymbolTable = 4,
    Verify_StringTable = 8,
    Verify_CodeCache = 16,
    Verify_SystemDictionary = 32,
    Verify_ClassLoaderDataGraph = 64,
    Verify_MetaspaceAux = 128,
    Verify_JNIHandles = 256,
    Verify_CodeCacheOops = 512,
    Verify_All = -1
  };
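
  // The values above are powers of two, so the flags form a bit mask held in
  // verify_flags. A hypothetical check (assuming verify_flags has already
  // been populated by initialize_verify_flags()):
  //
  //   if (Universe::should_verify_subset(Verify_Heap)) {
  //     // heap verification was requested for this run
  //   }
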
  static void initialize_verify_flags();
  static bool should_verify_subset(uint subset);
  static bool verify_in_progress() { return _verify_in_progress; }
  static void verify(VerifyOption option, const char* prefix);
  static void verify(const char* prefix) {
    verify(VerifyOption_Default, prefix);
  }
  static void verify() {
    verify("");
  }

  static int  verify_count()       { return _verify_count; }
  static void print_on(outputStream* st);
  static void print_heap_at_SIGBREAK();
  static void print_heap_before_gc();
  static void print_heap_after_gc();

  // Change the number of dummy objects kept reachable by the full gc dummy
  // array; this should trigger relocation in a sliding compaction collector.
  debug_only(static bool release_fullgc_alot_dummy();)
  // The non-oop pattern (see compiledIC.hpp, etc)
  static void*   non_oop_word();

  // Oop verification (see MacroAssembler::verify_oop)
  static uintptr_t verify_oop_mask()          PRODUCT_RETURN0;
  static uintptr_t verify_oop_bits()          PRODUCT_RETURN0;
  static uintptr_t verify_mark_bits()         PRODUCT_RETURN0;
  static uintptr_t verify_mark_mask()         PRODUCT_RETURN0;

  // Compiler support
  static int base_vtable_size()               { return _base_vtable_size; }
};

class DeferredObjAllocEvent : public CHeapObj<mtInternal> {
 private:
  oop    _oop;
  size_t _bytesize;
  jint   _arena_id;

 public:
  DeferredObjAllocEvent(const oop o, const size_t s, const jint id) {
    _oop      = o;
    _bytesize = s;
    _arena_id = id;
  }

  ~DeferredObjAllocEvent() {
  }

  jint   arena_id() { return _arena_id; }
  size_t bytesize() { return _bytesize; }
  oop    get_oop()  { return _oop; }
};

#endif // SHARE_VM_MEMORY_UNIVERSE_HPP
--- EOF ---