34 #include "oops/instanceMirrorKlass.hpp"
35 #include "oops/instanceOop.hpp"
36 #include "oops/oop.inline.hpp"
37 #include "oops/symbol.hpp"
38 #include "runtime/handles.inline.hpp"
39 #include "utilities/macros.hpp"
40 #if INCLUDE_ALL_GCS
41 #include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp"
42 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
43 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
44 #include "gc_implementation/g1/g1RemSet.inline.hpp"
45 #include "gc_implementation/g1/heapRegionManager.inline.hpp"
46 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
47 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
48 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
49 #include "oops/oop.pcgc.inline.hpp"
50 #endif // INCLUDE_ALL_GCS
51
52 int InstanceMirrorKlass::_offset_of_static_fields = 0;
53
#ifdef ASSERT
// Debug-build helpers: verify that the (possibly narrow) oop stored at *p,
// when non-null, refers into the expected portion of the heap.  Each helper
// matches the signature expected by the assert_fn macro parameters below.
template <class T> void assert_is_in(T *p) {
  T referent = oopDesc::load_heap_oop(p);
  if (oopDesc::is_null(referent)) {
    return;  // null slots are always acceptable
  }
  oop obj = oopDesc::decode_heap_oop_not_null(referent);
  assert(Universe::heap()->is_in(obj), "should be in heap");
}
template <class T> void assert_is_in_closed_subset(T *p) {
  T referent = oopDesc::load_heap_oop(p);
  if (oopDesc::is_null(referent)) {
    return;
  }
  oop obj = oopDesc::decode_heap_oop_not_null(referent);
  assert(Universe::heap()->is_in_closed_subset(obj), "should be in closed");
}
template <class T> void assert_is_in_reserved(T *p) {
  T referent = oopDesc::load_heap_oop(p);
  if (oopDesc::is_null(referent)) {
    return;
  }
  oop obj = oopDesc::decode_heap_oop_not_null(referent);
  assert(Universe::heap()->is_in_reserved(obj), "should be in reserved");
}
// No-op variant used where a check is intentionally skipped.
template <class T> void assert_nothing(T *) {}

#else
// Product builds: all checks compile away to nothing.
template <class T> void assert_is_in(T *) {}
template <class T> void assert_is_in_closed_subset(T *) {}
template <class T> void assert_is_in_reserved(T *) {}
template <class T> void assert_nothing(T *) {}
#endif // ASSERT
84
// Iterates over (count) slots of type T beginning at (start_p), running
// (assert_fn) and then the statement fragment (do_oop) for each slot.
// NOTE(review): (do_oop) appears to be expected to reference the macro-local
// iteration variable `p` by name (HotSpot closure-macro convention — confirm
// at call sites); `p` and `end` are therefore part of this macro's contract
// and must not be renamed.  Comments are kept outside the #define because a
// `//` comment before a trailing backslash would splice into the next line.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE( \
T, start_p, count, do_oop, \
assert_fn) \
{ \
T* p = (T*)(start_p); \
T* const end = p + (count); \
while (p < end) { \
(assert_fn)(p); \
do_oop; \
++p; \
} \
}
97
98 #define InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
99 T, start_p, count, low, high, \
100 do_oop, assert_fn) \
101 { \
102 T* const l = (T*)(low); \
103 T* const h = (T*)(high); \
104 assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 && \
|
34 #include "oops/instanceMirrorKlass.hpp"
35 #include "oops/instanceOop.hpp"
36 #include "oops/oop.inline.hpp"
37 #include "oops/symbol.hpp"
38 #include "runtime/handles.inline.hpp"
39 #include "utilities/macros.hpp"
40 #if INCLUDE_ALL_GCS
41 #include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp"
42 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
43 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
44 #include "gc_implementation/g1/g1RemSet.inline.hpp"
45 #include "gc_implementation/g1/heapRegionManager.inline.hpp"
46 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
47 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
48 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
49 #include "oops/oop.pcgc.inline.hpp"
50 #endif // INCLUDE_ALL_GCS
51
52 int InstanceMirrorKlass::_offset_of_static_fields = 0;
53
54 #define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE( \
55 T, start_p, count, do_oop, \
56 assert_fn) \
57 { \
58 T* p = (T*)(start_p); \
59 T* const end = p + (count); \
60 while (p < end) { \
61 (assert_fn)(p); \
62 do_oop; \
63 ++p; \
64 } \
65 }
66
67 #define InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
68 T, start_p, count, low, high, \
69 do_oop, assert_fn) \
70 { \
71 T* const l = (T*)(low); \
72 T* const h = (T*)(high); \
73 assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 && \
|