19 * or visit www.oracle.com if you need additional information or have any
20 * questions.
21 *
22 */
23
24 #ifndef SHARE_VM_MEMORY_UNIVERSE_HPP
25 #define SHARE_VM_MEMORY_UNIVERSE_HPP
26
27 #include "oops/array.hpp"
28 #include "runtime/handles.hpp"
29 #include "utilities/growableArray.hpp"
30
31 // Universe is a name space holding known system classes and objects in the VM.
32 //
33 // Loaded classes are accessible through the SystemDictionary.
34 //
35 // The object heap is allocated and accessed through Universe, and various allocation
36 // support is provided. Allocation by the interpreter and compiled code is done inline
37 // and bails out to Scavenge::invoke_and_allocate.
38
39 class CollectedHeap;
40 class DeferredObjAllocEvent;
41
42
43 // A helper class for caching a Method* when the user of the cache
44 // only cares about the latest version of the Method*. This cache safely
45 // interacts with the RedefineClasses API.
46
47 class LatestMethodCache : public CHeapObj<mtClass> {
48 // We save the Klass* and the idnum of the Method* (instead of a raw Method*)
49 // so a lookup always resolves to the current version of the method,
50 private:
51 Klass* _klass; // holder class of the cached method; NULL when nothing is cached
52 int _method_idnum; // idnum of the method within _klass; -1 when nothing is cached
53
54 public:
55 LatestMethodCache() { _klass = NULL; _method_idnum = -1; } // start empty
56 ~LatestMethodCache() { _klass = NULL; _method_idnum = -1; } // clear fields so stale use is easier to spot
57
|
19 * or visit www.oracle.com if you need additional information or have any
20 * questions.
21 *
22 */
23
24 #ifndef SHARE_VM_MEMORY_UNIVERSE_HPP
25 #define SHARE_VM_MEMORY_UNIVERSE_HPP
26
27 #include "oops/array.hpp"
28 #include "runtime/handles.hpp"
29 #include "utilities/growableArray.hpp"
30
31 // Universe is a name space holding known system classes and objects in the VM.
32 //
33 // Loaded classes are accessible through the SystemDictionary.
34 //
35 // The object heap is allocated and accessed through Universe, and various allocation
36 // support is provided. Allocation by the interpreter and compiled code is done inline
37 // and bails out to Scavenge::invoke_and_allocate.
38
39 class BehaviourProviderCollection;
40 class CollectedHeap;
41 class DeferredObjAllocEvent;
42
43
44 // A helper class for caching a Method* when the user of the cache
45 // only cares about the latest version of the Method*. This cache safely
46 // interacts with the RedefineClasses API.
47
48 class LatestMethodCache : public CHeapObj<mtClass> {
49 // We save the Klass* and the idnum of the Method* (instead of a raw Method*)
50 // so a lookup always resolves to the current version of the method,
51 private:
52 Klass* _klass; // holder class of the cached method; NULL when nothing is cached
53 int _method_idnum; // idnum of the method within _klass; -1 when nothing is cached
54
55 public:
56 LatestMethodCache() { _klass = NULL; _method_idnum = -1; } // start empty
57 ~LatestMethodCache() { _klass = NULL; _method_idnum = -1; } // clear fields so stale use is easier to spot
58
|
191 // For UseCompressedOops.
192 static struct NarrowPtrStruct _narrow_oop;
193 // For UseCompressedClassPointers.
194 static struct NarrowPtrStruct _narrow_klass;
195 static address _narrow_ptrs_base; // base address shared by the two narrow-pointer encodings above
196 // CompressedClassSpaceSize is set to 1GB, but may appear 3GB away from _narrow_ptrs_base during a CDS dump.
197 static uint64_t _narrow_klass_range;
198 // array of dummy objects used with +FullGCAlot
199 debug_only(static objArrayOop _fullgc_alot_dummy_array;)
200 // index of the next dummy-array entry to clear
201 debug_only(static int _fullgc_alot_dummy_next;)
202
203 // Compiler/dispatch support
204 static int _base_vtable_size; // Java vtbl size of klass Object (in words)
205
206 // Initialization state flags
207 static bool _bootstrapping; // true during genesis (creation of the initial world)
208 static bool _module_initialized; // true after call_initPhase2 called
209 static bool _fully_initialized; // true after universe_init and initialize_vtables called
210
211 // Returns the array of preallocated errors with backtraces.
212 static objArrayOop preallocated_out_of_memory_errors() { return _preallocated_out_of_memory_error_array; }
213
214 // Generate an out of memory error; if possible using an error with a preallocated backtrace,
215 // otherwise return the given default error.
216 static oop gen_out_of_memory_error(oop default_err);
217
218 // Historic gc information, recorded at the last gc
219 static size_t _heap_capacity_at_last_gc;
220 static size_t _heap_used_at_last_gc;
221
222 static CollectedHeap* create_heap();
223 static jint initialize_heap();
224 static void initialize_basic_type_mirrors(TRAPS);
225 static void fixup_mirrors(TRAPS);
226
227 static void reinitialize_vtable_of(Klass* k, TRAPS);
228 static void reinitialize_itables(TRAPS);
229 static void compute_base_vtable_size(); // compute vtable size of class Object
230
231 static void genesis(TRAPS); // Create the initial world
232
233 // Mirrors for primitive classes (created eagerly)
234 static oop check_mirror(oop m) { // checks that the mirror has been initialized before returning it
235 assert(m != NULL, "mirror not initialized");
236 return m;
237 }
238
239 static void set_narrow_oop_base(address base) {
240 assert(UseCompressedOops, "no compressed oops?");
|
192 // For UseCompressedOops.
193 static struct NarrowPtrStruct _narrow_oop;
194 // For UseCompressedClassPointers.
195 static struct NarrowPtrStruct _narrow_klass;
196 static address _narrow_ptrs_base; // base address shared by the two narrow-pointer encodings above
197 // CompressedClassSpaceSize is set to 1GB, but may appear 3GB away from _narrow_ptrs_base during a CDS dump.
198 static uint64_t _narrow_klass_range;
199 // array of dummy objects used with +FullGCAlot
200 debug_only(static objArrayOop _fullgc_alot_dummy_array;)
201 // index of the next dummy-array entry to clear
202 debug_only(static int _fullgc_alot_dummy_next;)
203
204 // Compiler/dispatch support
205 static int _base_vtable_size; // Java vtbl size of klass Object (in words)
206
207 // Initialization state flags
208 static bool _bootstrapping; // true during genesis (creation of the initial world)
209 static bool _module_initialized; // true after call_initPhase2 called
210 static bool _fully_initialized; // true after universe_init and initialize_vtables called
211
212 // Global behaviour provider collections (exposed via vm_behaviours()/gc_behaviours())
213 static BehaviourProviderCollection* _vm_behaviours;
214 static BehaviourProviderCollection* _gc_behaviours;
215
216 // Returns the array of preallocated errors with backtraces.
217 static objArrayOop preallocated_out_of_memory_errors() { return _preallocated_out_of_memory_error_array; }
218
219 // Generate an out of memory error; if possible using an error with a preallocated backtrace,
220 // otherwise return the given default error.
221 static oop gen_out_of_memory_error(oop default_err);
222
223 // Historic gc information, recorded at the last gc
224 static size_t _heap_capacity_at_last_gc;
225 static size_t _heap_used_at_last_gc;
226
227 static void initialize_global_behaviours(); // presumably installs _vm_behaviours/_gc_behaviours — confirm in universe.cpp
228
229 static CollectedHeap* create_heap();
230 static jint initialize_heap();
231 static void initialize_basic_type_mirrors(TRAPS);
232 static void fixup_mirrors(TRAPS);
233
234 static void reinitialize_vtable_of(Klass* k, TRAPS);
235 static void reinitialize_itables(TRAPS);
236 static void compute_base_vtable_size(); // compute vtable size of class Object
237
238 static void genesis(TRAPS); // Create the initial world
239
240 // Mirrors for primitive classes (created eagerly)
241 static oop check_mirror(oop m) { // checks that the mirror has been initialized before returning it
242 assert(m != NULL, "mirror not initialized");
243 return m;
244 }
245
246 static void set_narrow_oop_base(address base) {
247 assert(UseCompressedOops, "no compressed oops?");
|
368 static oop out_of_memory_error_metaspace() { return gen_out_of_memory_error(_out_of_memory_error_metaspace); }
369 static oop out_of_memory_error_class_metaspace() { return gen_out_of_memory_error(_out_of_memory_error_class_metaspace); }
370 static oop out_of_memory_error_array_size() { return gen_out_of_memory_error(_out_of_memory_error_array_size); }
371 static oop out_of_memory_error_gc_overhead_limit() { return gen_out_of_memory_error(_out_of_memory_error_gc_overhead_limit); }
372 static oop out_of_memory_error_realloc_objects() { return gen_out_of_memory_error(_out_of_memory_error_realloc_objects); }
373 static oop delayed_stack_overflow_error_message() { return _delayed_stack_overflow_error_message; }
374
375 // Accessors needed for fast allocation: raw addresses of the array klass roots
376 static Klass** boolArrayKlassObj_addr() { return &_boolArrayKlassObj; }
377 static Klass** byteArrayKlassObj_addr() { return &_byteArrayKlassObj; }
378 static Klass** charArrayKlassObj_addr() { return &_charArrayKlassObj; }
379 static Klass** intArrayKlassObj_addr() { return &_intArrayKlassObj; }
380 static Klass** shortArrayKlassObj_addr() { return &_shortArrayKlassObj; }
381 static Klass** longArrayKlassObj_addr() { return &_longArrayKlassObj; }
382 static Klass** singleArrayKlassObj_addr() { return &_singleArrayKlassObj; }
383 static Klass** doubleArrayKlassObj_addr() { return &_doubleArrayKlassObj; }
384 static Klass** objectArrayKlassObj_addr() { return &_objectArrayKlassObj; }
385
386 // The particular choice of collected heap.
387 static CollectedHeap* heap() { return _collectedHeap; }
388
389 // For UseCompressedOops
390 // Narrow Oop encoding mode:
391 // 0 - Use 32-bits oops without encoding when
392 // NarrowOopHeapBaseMin + heap_size < 4Gb
393 // 1 - Use zero based compressed oops with encoding when
394 // NarrowOopHeapBaseMin + heap_size < 32Gb
395 // 2 - Use compressed oops with disjoint heap base if
396 // base is 32G-aligned and base > 0. This allows certain
397 // optimizations in encoding/decoding.
398 // Disjoint: Bits used in base are disjoint from bits used
399 // for oops ==> oop = (cOop << 3) | base. The bits of an oop
400 // can be split into a base part and a compressed-oop part.
401 // 3 - Use compressed oops with heap base + encoding.
402 enum NARROW_OOP_MODE {
403 UnscaledNarrowOop = 0,
404 ZeroBasedNarrowOop = 1,
405 DisjointBaseNarrowOop = 2,
406 HeapBasedNarrowOop = 3,
|
375 static oop out_of_memory_error_metaspace() { return gen_out_of_memory_error(_out_of_memory_error_metaspace); }
376 static oop out_of_memory_error_class_metaspace() { return gen_out_of_memory_error(_out_of_memory_error_class_metaspace); }
377 static oop out_of_memory_error_array_size() { return gen_out_of_memory_error(_out_of_memory_error_array_size); }
378 static oop out_of_memory_error_gc_overhead_limit() { return gen_out_of_memory_error(_out_of_memory_error_gc_overhead_limit); }
379 static oop out_of_memory_error_realloc_objects() { return gen_out_of_memory_error(_out_of_memory_error_realloc_objects); }
380 static oop delayed_stack_overflow_error_message() { return _delayed_stack_overflow_error_message; }
381
382 // Accessors needed for fast allocation: raw addresses of the array klass roots
383 static Klass** boolArrayKlassObj_addr() { return &_boolArrayKlassObj; }
384 static Klass** byteArrayKlassObj_addr() { return &_byteArrayKlassObj; }
385 static Klass** charArrayKlassObj_addr() { return &_charArrayKlassObj; }
386 static Klass** intArrayKlassObj_addr() { return &_intArrayKlassObj; }
387 static Klass** shortArrayKlassObj_addr() { return &_shortArrayKlassObj; }
388 static Klass** longArrayKlassObj_addr() { return &_longArrayKlassObj; }
389 static Klass** singleArrayKlassObj_addr() { return &_singleArrayKlassObj; }
390 static Klass** doubleArrayKlassObj_addr() { return &_doubleArrayKlassObj; }
391 static Klass** objectArrayKlassObj_addr() { return &_objectArrayKlassObj; }
392
393 // The particular choice of collected heap.
394 static CollectedHeap* heap() { return _collectedHeap; }
395
396 static BehaviourProviderCollection* vm_behaviours() { return _vm_behaviours; } // global VM behaviour providers
397 static BehaviourProviderCollection* gc_behaviours() { return _gc_behaviours; } // global GC behaviour providers
398
399 // For UseCompressedOops
400 // Narrow Oop encoding mode:
401 // 0 - Use 32-bits oops without encoding when
402 // NarrowOopHeapBaseMin + heap_size < 4Gb
403 // 1 - Use zero based compressed oops with encoding when
404 // NarrowOopHeapBaseMin + heap_size < 32Gb
405 // 2 - Use compressed oops with disjoint heap base if
406 // base is 32G-aligned and base > 0. This allows certain
407 // optimizations in encoding/decoding.
408 // Disjoint: Bits used in base are disjoint from bits used
409 // for oops ==> oop = (cOop << 3) | base. The bits of an oop
410 // can be split into a base part and a compressed-oop part.
411 // 3 - Use compressed oops with heap base + encoding.
412 enum NARROW_OOP_MODE {
413 UnscaledNarrowOop = 0,
414 ZeroBasedNarrowOop = 1,
415 DisjointBaseNarrowOop = 2,
416 HeapBasedNarrowOop = 3,
|