23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "classfile/verifier.hpp"
29 #include "classfile/vmSymbols.hpp"
30 #include "compiler/compileBroker.hpp"
31 #include "gc_implementation/shared/markSweep.inline.hpp"
32 #include "gc_interface/collectedHeap.inline.hpp"
33 #include "interpreter/oopMapCache.hpp"
34 #include "interpreter/rewriter.hpp"
35 #include "jvmtifiles/jvmti.h"
36 #include "memory/genOopClosures.inline.hpp"
37 #include "memory/heapInspection.hpp"
38 #include "memory/iterator.inline.hpp"
39 #include "memory/metadataFactory.hpp"
40 #include "memory/oopFactory.hpp"
41 #include "oops/fieldStreams.hpp"
42 #include "oops/instanceClassLoaderKlass.hpp"
43 #include "oops/instanceKlass.hpp"
44 #include "oops/instanceMirrorKlass.hpp"
45 #include "oops/instanceOop.hpp"
46 #include "oops/klass.inline.hpp"
47 #include "oops/method.hpp"
48 #include "oops/oop.inline.hpp"
49 #include "oops/symbol.hpp"
50 #include "prims/jvmtiExport.hpp"
51 #include "prims/jvmtiRedefineClassesTrace.hpp"
52 #include "prims/jvmtiRedefineClasses.hpp"
53 #include "prims/jvmtiThreadState.hpp"
54 #include "prims/methodComparator.hpp"
55 #include "runtime/atomic.inline.hpp"
56 #include "runtime/fieldDescriptor.hpp"
57 #include "runtime/handles.inline.hpp"
58 #include "runtime/javaCalls.hpp"
59 #include "runtime/mutexLocker.hpp"
60 #include "runtime/orderAccess.inline.hpp"
61 #include "runtime/thread.inline.hpp"
62 #include "services/classLoadingService.hpp"
63 #include "services/threadService.hpp"
195 }
196 } else {
197 _method_ordering = Universe::the_empty_int_array();
198 }
199 }
200
201 // create a new array of vtable_indices for default methods
202 Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
203 Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
204 assert(default_vtable_indices() == NULL, "only create once");
205 set_default_vtable_indices(vtable_indices);
206 return vtable_indices;
207 }
208
// Raw constructor used during class file parsing: records the sizes and
// flags computed by the parser and nulls out all subsidiary metadata
// pointers so the partially-built klass is safe to inspect.
// NOTE(review): the body is truncated in this view; 'rt' (ReferenceType)
// is not used in the visible portion -- presumably consumed later in the
// (unseen) remainder of the constructor.
209 InstanceKlass::InstanceKlass(int vtable_len,
210 int itable_len,
211 int static_field_size,
212 int nonstatic_oop_map_size,
213 ReferenceType rt,
214 AccessFlags access_flags,
215 bool is_anonymous) {
216 No_Safepoint_Verifier no_safepoint; // until k becomes parsable
217
// Expected object size in words; cross-checked against size() below.
218 int iksize = InstanceKlass::size(vtable_len, itable_len, nonstatic_oop_map_size,
219 access_flags.is_interface(), is_anonymous);
220
221 set_vtable_length(vtable_len);
222 set_itable_length(itable_len);
223 set_static_field_size(static_field_size);
224 set_nonstatic_oop_map_size(nonstatic_oop_map_size);
225 set_access_flags(access_flags);
226 _misc_flags = 0; // initialize to zero
227 set_is_anonymous(is_anonymous);
228 assert(size() == iksize, "wrong size for object");
229
// Subsidiary metadata is filled in later by the parser; start everything NULL.
230 set_array_klasses(NULL);
231 set_methods(NULL);
232 set_method_ordering(NULL);
233 set_default_methods(NULL);
234 set_default_vtable_indices(NULL);
235 set_local_interfaces(NULL);
1947
1948 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
1949 nmethodBucket* b = _dependencies;
1950 while (b != NULL) {
1951 if (nm == b->get_nmethod()) {
1952 #ifdef ASSERT
1953 int count = b->count();
1954 assert(count >= 0, err_msg("count shouldn't be negative: %d", count));
1955 #endif
1956 return true;
1957 }
1958 b = b->next();
1959 }
1960 return false;
1961 }
1962 #endif //PRODUCT
1963
1964
1965 // Garbage collection
1966
// Debug-only helpers used by the oop-iteration macros below.  Each takes a
// pointer to an oop/narrowOop field and, when the field is non-null,
// asserts that the referenced object lies in the expected heap region.
// In non-ASSERT builds they are empty and compile away.
1967 #ifdef ASSERT
// Assert the referent is somewhere in the heap.
1968 template <class T> void assert_is_in(T *p) {
1969 T heap_oop = oopDesc::load_heap_oop(p);
1970 if (!oopDesc::is_null(heap_oop)) {
1971 oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
1972 assert(Universe::heap()->is_in(o), "should be in heap");
1973 }
1974 }
// Assert the referent is in the heap's closed subset (stricter region check).
1975 template <class T> void assert_is_in_closed_subset(T *p) {
1976 T heap_oop = oopDesc::load_heap_oop(p);
1977 if (!oopDesc::is_null(heap_oop)) {
1978 oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
1979 assert(Universe::heap()->is_in_closed_subset(o),
1980 err_msg("should be in closed *p " INTPTR_FORMAT " " INTPTR_FORMAT, (address)p, (address)o));
1981 }
1982 }
// Assert the referent is within the reserved (not necessarily committed) heap.
1983 template <class T> void assert_is_in_reserved(T *p) {
1984 T heap_oop = oopDesc::load_heap_oop(p);
1985 if (!oopDesc::is_null(heap_oop)) {
1986 oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
1987 assert(Universe::heap()->is_in_reserved(o), "should be in reserved");
1988 }
1989 }
// No-op variant for call sites where no check is wanted.
1990 template <class T> void assert_nothing(T *p) {}
1991
// Product builds: same names, empty bodies, zero cost.
1992 #else
1993 template <class T> void assert_is_in(T *p) {}
1994 template <class T> void assert_is_in_closed_subset(T *p) {}
1995 template <class T> void assert_is_in_reserved(T *p) {}
1996 template <class T> void assert_nothing(T *p) {}
1997 #endif // ASSERT
1998
1999 //
2000 // Macros that iterate over areas of oops which are specialized on type of
2001 // oop pointer either narrow or wide, depending on UseCompressedOops
2002 //
2003 // Parameters are:
2004 // T - type of oop to point to (either oop or narrowOop)
2005 // start_p - starting pointer for region to iterate over
2006 // count - number of oops or narrowOops to iterate over
2007 // do_oop - action to perform on each oop (it's arbitrary C code which
2008 // makes it more efficient to put in a macro rather than making
2009 // it a template function)
2010 // assert_fn - assert function which is template function because performance
2011 // doesn't matter when enabled.
// Forward iteration over 'count' fields of type T starting at 'start_p':
// for each field address p, runs assert_fn(p) (a debug check from above)
// and then 'do_oop' (arbitrary code that may reference the cursor 'p').
// NOTE: no comments may appear inside the macro -- the backslash
// continuations would be swallowed by a // comment.
2012 #define InstanceKlass_SPECIALIZED_OOP_ITERATE( \
2013 T, start_p, count, do_oop, \
2014 assert_fn) \
2015 { \
2016 T* p = (T*)(start_p); \
2017 T* const end = p + (count); \
2018 while (p < end) { \
2019 (assert_fn)(p); \
2020 do_oop; \
2021 ++p; \
2022 } \
2023 }
2024
// Reverse variant of the macro above: visits the same 'count' fields but
// from start_p + count - 1 down to start_p.
2025 #define InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE( \
2026 T, start_p, count, do_oop, \
2027 assert_fn) \
2028 { \
2029 T* const start = (T*)(start_p); \
2030 T* p = start + (count); \
2031 while (start < p) { \
2032 --p; \
2033 (assert_fn)(p); \
2034 do_oop; \
2035 } \
2036 }
2037
// Bounded variant: clamps the field range [start_p, start_p + count) to the
// memory region [low, high) before iterating.  Both bounds must be aligned
// to sizeof(T) (asserted), so the clamped cursor stays field-aligned.
2038 #define InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
2039 T, start_p, count, low, high, \
2040 do_oop, assert_fn) \
2041 { \
2042 T* const l = (T*)(low); \
2043 T* const h = (T*)(high); \
2044 assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 && \
2045 mask_bits((intptr_t)h, sizeof(T)-1) == 0, \
2046 "bounded region must be properly aligned"); \
2047 T* p = (T*)(start_p); \
2048 T* end = p + (count); \
2049 if (p < l) p = l; \
2050 if (end > h) end = h; \
2051 while (p < end) { \
2052 (assert_fn)(p); \
2053 do_oop; \
2054 ++p; \
2055 } \
2056 }
2057
2058
2059 // The following macros call specialized macros, passing either oop or
2060 // narrowOop as the specialization type. These test the UseCompressedOops
2061 // flag.
// Applies 'do_oop' to every nonstatic oop field of 'obj' by walking this
// klass' OopMapBlocks; each block gives an offset and a count of adjacent
// oop fields.  Dispatches to the narrowOop or oop specialization once,
// outside the loop, based on UseCompressedOops.
2062 #define InstanceKlass_OOP_MAP_ITERATE(obj, do_oop, assert_fn) \
2063 { \
2064 /* Compute oopmap block range. The common case \
2065 is nonstatic_oop_map_size == 1. */ \
2066 OopMapBlock* map = start_of_nonstatic_oop_maps(); \
2067 OopMapBlock* const end_map = map + nonstatic_oop_map_count(); \
2068 if (UseCompressedOops) { \
2069 while (map < end_map) { \
2070 InstanceKlass_SPECIALIZED_OOP_ITERATE(narrowOop, \
2071 obj->obj_field_addr<narrowOop>(map->offset()), map->count(), \
2072 do_oop, assert_fn) \
2073 ++map; \
2074 } \
2075 } else { \
2076 while (map < end_map) { \
2077 InstanceKlass_SPECIALIZED_OOP_ITERATE(oop, \
2078 obj->obj_field_addr<oop>(map->offset()), map->count(), \
2079 do_oop, assert_fn) \
2080 ++map; \
2081 } \
2082 } \
2083 }
2084
// Same as InstanceKlass_OOP_MAP_ITERATE but walks the OopMapBlocks, and
// the fields within each block, in reverse order.
2085 #define InstanceKlass_OOP_MAP_REVERSE_ITERATE(obj, do_oop, assert_fn) \
2086 { \
2087 OopMapBlock* const start_map = start_of_nonstatic_oop_maps(); \
2088 OopMapBlock* map = start_map + nonstatic_oop_map_count(); \
2089 if (UseCompressedOops) { \
2090 while (start_map < map) { \
2091 --map; \
2092 InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(narrowOop, \
2093 obj->obj_field_addr<narrowOop>(map->offset()), map->count(), \
2094 do_oop, assert_fn) \
2095 } \
2096 } else { \
2097 while (start_map < map) { \
2098 --map; \
2099 InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(oop, \
2100 obj->obj_field_addr<oop>(map->offset()), map->count(), \
2101 do_oop, assert_fn) \
2102 } \
2103 } \
2104 }
2105
// Bounded oop-map walk: like InstanceKlass_OOP_MAP_ITERATE but only fields
// whose addresses fall inside [low, high) are visited (used for MemRegion-
// restricted iteration).
2106 #define InstanceKlass_BOUNDED_OOP_MAP_ITERATE(obj, low, high, do_oop, \
2107 assert_fn) \
2108 { \
2109 /* Compute oopmap block range. The common case is \
2110 nonstatic_oop_map_size == 1, so we accept the \
2111 usually non-existent extra overhead of examining \
2112 all the maps. */ \
2113 OopMapBlock* map = start_of_nonstatic_oop_maps(); \
2114 OopMapBlock* const end_map = map + nonstatic_oop_map_count(); \
2115 if (UseCompressedOops) { \
2116 while (map < end_map) { \
2117 InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop, \
2118 obj->obj_field_addr<narrowOop>(map->offset()), map->count(), \
2119 low, high, \
2120 do_oop, assert_fn) \
2121 ++map; \
2122 } \
2123 } else { \
2124 while (map < end_map) { \
2125 InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop, \
2126 obj->obj_field_addr<oop>(map->offset()), map->count(), \
2127 low, high, \
2128 do_oop, assert_fn) \
2129 ++map; \
2130 } \
2131 } \
2132 }
2133
// Serial mark-sweep marking: follow the object's klass, then mark-and-push
// every object referenced from obj's nonstatic oop fields.
2134 void InstanceKlass::oop_follow_contents(oop obj) {
2135 assert(obj != NULL, "can't follow the content of NULL object");
2136 MarkSweep::follow_klass(obj->klass());
// 'p' below is the cursor variable introduced by the iteration macro.
2137 InstanceKlass_OOP_MAP_ITERATE( \
2138 obj, \
2139 MarkSweep::mark_and_push(p), \
2140 assert_is_in_closed_subset)
2141 }
2142
2143 #if INCLUDE_ALL_GCS
// Parallel-compact marking: follow the klass through the compaction
// manager, then push each referenced field oop onto cm's marking stack.
2144 void InstanceKlass::oop_follow_contents(ParCompactionManager* cm,
2145 oop obj) {
2146 assert(obj != NULL, "can't follow the content of NULL object");
2147 PSParallelCompact::follow_klass(cm, obj->klass());
2148 // Only mark the header and let the scan of the meta-data mark
2149 // everything else.
2150 InstanceKlass_OOP_MAP_ITERATE( \
2151 obj, \
2152 PSParallelCompact::mark_and_push(cm, p), \
2153 assert_is_in)
2154 }
2155 #endif // INCLUDE_ALL_GCS
2156
2157 // closure's do_metadata() method dictates whether the given closure should be
2158 // applied to the klass ptr in the object header.
2159
// Stamps out InstanceKlass::oop_oop_iterate<nv_suffix> for one closure
// type: applies the closure to the header klass when the closure requests
// metadata (if_do_metadata_checked), then to every nonstatic oop field.
// Returns the instance size in words.
2160 #define InstanceKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix) \
2161 \
2162 int InstanceKlass::oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) { \
2163 SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik);\
2164 /* header */ \
2165 if_do_metadata_checked(closure, nv_suffix) { \
2166 closure->do_klass##nv_suffix(obj->klass()); \
2167 } \
2168 InstanceKlass_OOP_MAP_ITERATE( \
2169 obj, \
2170 SpecializationStats:: \
2171 record_do_oop_call##nv_suffix(SpecializationStats::ik); \
2172 (closure)->do_oop##nv_suffix(p), \
2173 assert_is_in_closed_subset) \
2174 return size_helper(); \
2175 }
2176
2177 #if INCLUDE_ALL_GCS
// Backwards variant (ALL_GCS builds only): walks the oop maps in reverse
// and must not be handed a closure that wants header metadata (asserted).
2178 #define InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
2179 \
2180 int InstanceKlass::oop_oop_iterate_backwards##nv_suffix(oop obj, \
2181 OopClosureType* closure) { \
2182 SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik); \
2183 \
2184 assert_should_ignore_metadata(closure, nv_suffix); \
2185 \
2186 /* instance variables */ \
2187 InstanceKlass_OOP_MAP_REVERSE_ITERATE( \
2188 obj, \
2189 SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::ik);\
2190 (closure)->do_oop##nv_suffix(p), \
2191 assert_is_in_closed_subset) \
2192 return size_helper(); \
2193 }
2194 #endif // INCLUDE_ALL_GCS
2195
// MemRegion-bounded variant: same as the forward iterator but only touches
// the header klass when obj itself lies in 'mr', and only visits fields
// whose addresses fall within [mr.start(), mr.end()).
2196 #define InstanceKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix) \
2197 \
2198 int InstanceKlass::oop_oop_iterate##nv_suffix##_m(oop obj, \
2199 OopClosureType* closure, \
2200 MemRegion mr) { \
2201 SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik);\
2202 if_do_metadata_checked(closure, nv_suffix) { \
2203 if (mr.contains(obj)) { \
2204 closure->do_klass##nv_suffix(obj->klass()); \
2205 } \
2206 } \
2207 InstanceKlass_BOUNDED_OOP_MAP_ITERATE( \
2208 obj, mr.start(), mr.end(), \
2209 (closure)->do_oop##nv_suffix(p), \
2210 assert_is_in_closed_subset) \
2211 return size_helper(); \
2212 }
2213
// Instantiate the iterator bodies above for every closure type in the two
// ALL_OOP_OOP_ITERATE_CLOSURES_* families -- presumably declared in the
// genOopClosures/iterator headers included at the top of this file
// (TODO confirm).  Backwards variants exist only in ALL_GCS builds.
2214 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN)
2215 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN)
2216 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
2217 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
2218 #if INCLUDE_ALL_GCS
2219 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
2220 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
2221 #endif // INCLUDE_ALL_GCS
2222
// Mark-sweep adjust phase: rewrite each nonstatic oop field of obj via
// MarkSweep::adjust_pointer.  Returns the instance size in words so the
// caller can step to the next object.
2223 int InstanceKlass::oop_adjust_pointers(oop obj) {
2224 int size = size_helper();
2225 InstanceKlass_OOP_MAP_ITERATE( \
2226 obj, \
2227 MarkSweep::adjust_pointer(p), \
2228 assert_is_in)
2229 return size;
2230 }
2231
2232 #if INCLUDE_ALL_GCS
// Parallel scavenge: for each nonstatic oop field that should be
// scavenged, hand it to the promotion manager's depth-first queue.
// Iterates in reverse -- NOTE(review): presumably so entries pop off the
// depth-first stack in forward field order; confirm against PS design.
2233 void InstanceKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
2234 InstanceKlass_OOP_MAP_REVERSE_ITERATE( \
2235 obj, \
2236 if (PSScavenge::should_scavenge(p)) { \
2237 pm->claim_or_forward_depth(p); \
2238 }, \
2239 assert_nothing )
2240 }
|
23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "classfile/verifier.hpp"
29 #include "classfile/vmSymbols.hpp"
30 #include "compiler/compileBroker.hpp"
31 #include "gc_implementation/shared/markSweep.inline.hpp"
32 #include "gc_interface/collectedHeap.inline.hpp"
33 #include "interpreter/oopMapCache.hpp"
34 #include "interpreter/rewriter.hpp"
35 #include "jvmtifiles/jvmti.h"
36 #include "memory/genOopClosures.inline.hpp"
37 #include "memory/heapInspection.hpp"
38 #include "memory/iterator.inline.hpp"
39 #include "memory/metadataFactory.hpp"
40 #include "memory/oopFactory.hpp"
41 #include "oops/fieldStreams.hpp"
42 #include "oops/instanceClassLoaderKlass.hpp"
43 #include "oops/instanceKlass.inline.hpp"
44 #include "oops/instanceMirrorKlass.hpp"
45 #include "oops/instanceOop.hpp"
46 #include "oops/klass.inline.hpp"
47 #include "oops/method.hpp"
48 #include "oops/oop.inline.hpp"
49 #include "oops/symbol.hpp"
50 #include "prims/jvmtiExport.hpp"
51 #include "prims/jvmtiRedefineClassesTrace.hpp"
52 #include "prims/jvmtiRedefineClasses.hpp"
53 #include "prims/jvmtiThreadState.hpp"
54 #include "prims/methodComparator.hpp"
55 #include "runtime/atomic.inline.hpp"
56 #include "runtime/fieldDescriptor.hpp"
57 #include "runtime/handles.inline.hpp"
58 #include "runtime/javaCalls.hpp"
59 #include "runtime/mutexLocker.hpp"
60 #include "runtime/orderAccess.inline.hpp"
61 #include "runtime/thread.inline.hpp"
62 #include "services/classLoadingService.hpp"
63 #include "services/threadService.hpp"
195 }
196 } else {
197 _method_ordering = Universe::the_empty_int_array();
198 }
199 }
200
201 // create a new array of vtable_indices for default methods
202 Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
203 Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
204 assert(default_vtable_indices() == NULL, "only create once");
205 set_default_vtable_indices(vtable_indices);
206 return vtable_indices;
207 }
208
// Raw constructor used during class file parsing: forwards the dispatch
// tag to the Klass base class, records the sizes/flags computed by the
// parser, and nulls out all subsidiary metadata pointers.
// NOTE(review): the body is truncated in this view; 'rt' (ReferenceType)
// is not used in the visible portion -- presumably consumed later in the
// (unseen) remainder of the constructor.
209 InstanceKlass::InstanceKlass(int vtable_len,
210 int itable_len,
211 int static_field_size,
212 int nonstatic_oop_map_size,
213 ReferenceType rt,
214 AccessFlags access_flags,
215 bool is_anonymous,
216 DispatchTag tag)
217 : Klass(tag) {
218 No_Safepoint_Verifier no_safepoint; // until k becomes parsable
219
// Expected object size in words; cross-checked against size() below.
220 int iksize = InstanceKlass::size(vtable_len, itable_len, nonstatic_oop_map_size,
221 access_flags.is_interface(), is_anonymous);
222
223 set_vtable_length(vtable_len);
224 set_itable_length(itable_len);
225 set_static_field_size(static_field_size);
226 set_nonstatic_oop_map_size(nonstatic_oop_map_size);
227 set_access_flags(access_flags);
228 _misc_flags = 0; // initialize to zero
229 set_is_anonymous(is_anonymous);
230 assert(size() == iksize, "wrong size for object");
231
// Subsidiary metadata is filled in later by the parser; start everything NULL.
232 set_array_klasses(NULL);
233 set_methods(NULL);
234 set_method_ordering(NULL);
235 set_default_methods(NULL);
236 set_default_vtable_indices(NULL);
237 set_local_interfaces(NULL);
1949
1950 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
1951 nmethodBucket* b = _dependencies;
1952 while (b != NULL) {
1953 if (nm == b->get_nmethod()) {
1954 #ifdef ASSERT
1955 int count = b->count();
1956 assert(count >= 0, err_msg("count shouldn't be negative: %d", count));
1957 #endif
1958 return true;
1959 }
1960 b = b->next();
1961 }
1962 return false;
1963 }
1964 #endif //PRODUCT
1965
1966
1967 // Garbage collection
1968
// Serial mark-sweep marking: follow the object's klass, then mark-and-push
// every object referenced from obj's nonstatic oop fields.
1969 void InstanceKlass::oop_follow_contents(oop obj) {
1970 assert(obj != NULL, "can't follow the content of NULL object");
1971 MarkSweep::follow_klass(obj->klass());
// 'p' below is the cursor variable introduced by the iteration macro.
1972 InstanceKlass_OOP_MAP_ITERATE( \
1973 obj, \
1974 MarkSweep::mark_and_push(p), \
1975 assert_is_in_closed_subset)
1976 }
1977
1978 #if INCLUDE_ALL_GCS
// Parallel-compact marking: follow the klass through the compaction
// manager, then push each referenced field oop onto cm's marking stack.
1979 void InstanceKlass::oop_follow_contents(ParCompactionManager* cm,
1980 oop obj) {
1981 assert(obj != NULL, "can't follow the content of NULL object");
1982 PSParallelCompact::follow_klass(cm, obj->klass());
1983 // Only mark the header and let the scan of the meta-data mark
1984 // everything else.
1985 InstanceKlass_OOP_MAP_ITERATE( \
1986 obj, \
1987 PSParallelCompact::mark_and_push(cm, p), \
1988 assert_is_in)
1989 }
1990 #endif // INCLUDE_ALL_GCS
1991
// Mark-sweep adjust phase: rewrite each nonstatic oop field of obj via
// MarkSweep::adjust_pointer.  Returns the instance size in words so the
// caller can step to the next object.
1992 int InstanceKlass::oop_adjust_pointers(oop obj) {
1993 int size = size_helper();
1994 InstanceKlass_OOP_MAP_ITERATE( \
1995 obj, \
1996 MarkSweep::adjust_pointer(p), \
1997 assert_is_in)
1998 return size;
1999 }
2000
2001 #if INCLUDE_ALL_GCS
// Parallel scavenge: for each nonstatic oop field that should be
// scavenged, hand it to the promotion manager's depth-first queue.
// Iterates in reverse -- NOTE(review): presumably so entries pop off the
// depth-first stack in forward field order; confirm against PS design.
2002 void InstanceKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
2003 InstanceKlass_OOP_MAP_REVERSE_ITERATE( \
2004 obj, \
2005 if (PSScavenge::should_scavenge(p)) { \
2006 pm->claim_or_forward_depth(p); \
2007 }, \
2008 assert_nothing )
2009 }
|