/*
 * Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
22 * 23 */ 24 25 #include "precompiled.hpp" 26 #include "classfile/javaClasses.hpp" 27 #include "classfile/systemDictionary.hpp" 28 #include "gc_implementation/shared/markSweep.inline.hpp" 29 #include "gc_interface/collectedHeap.inline.hpp" 30 #include "memory/genOopClosures.inline.hpp" 31 #include "memory/iterator.inline.hpp" 32 #include "memory/oopFactory.hpp" 33 #include "oops/instanceKlass.hpp" 34 #include "oops/instanceMirrorKlass.hpp" 35 #include "oops/instanceOop.hpp" 36 #include "oops/oop.inline.hpp" 37 #include "oops/symbol.hpp" 38 #include "runtime/handles.inline.hpp" 39 #include "utilities/macros.hpp" 40 #if INCLUDE_ALL_GCS 41 #include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp" 42 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp" 43 #include "gc_implementation/g1/g1OopClosures.inline.hpp" 44 #include "gc_implementation/g1/g1RemSet.inline.hpp" 45 #include "gc_implementation/g1/heapRegionManager.inline.hpp" 46 #include "gc_implementation/parNew/parOopClosures.inline.hpp" 47 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp" 48 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp" 49 #include "oops/oop.pcgc.inline.hpp" 50 #endif // INCLUDE_ALL_GCS 51 52 int InstanceMirrorKlass::_offset_of_static_fields = 0; 53 54 #define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE( \ 55 T, start_p, count, do_oop, \ 56 assert_fn) \ 57 { \ 58 T* p = (T*)(start_p); \ 59 T* const end = p + (count); \ 60 while (p < end) { \ 61 (assert_fn)(p); \ 62 do_oop; \ 63 ++p; \ 64 } \ 65 } 66 67 #define InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \ 68 T, start_p, count, low, high, \ 69 do_oop, assert_fn) \ 70 { \ 71 T* const l = (T*)(low); \ 72 T* const h = (T*)(high); \ 73 assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 && \ 74 mask_bits((intptr_t)h, sizeof(T)-1) == 0, \ 75 "bounded region must be properly aligned"); \ 76 T* p = (T*)(start_p); \ 77 T* end = p + (count); \ 78 if (p < l) p = l; \ 79 if (end > h) 
end = h; \ 80 while (p < end) { \ 81 (assert_fn)(p); \ 82 do_oop; \ 83 ++p; \ 84 } \ 85 } 86 87 88 #define InstanceMirrorKlass_OOP_ITERATE(start_p, count, \ 89 do_oop, assert_fn) \ 90 { \ 91 if (UseCompressedOops) { \ 92 InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(narrowOop, \ 93 start_p, count, \ 94 do_oop, assert_fn) \ 95 } else { \ 96 InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(oop, \ 97 start_p, count, \ 98 do_oop, assert_fn) \ 99 } \ 100 } 101 102 // The following macros call specialized macros, passing either oop or 103 // narrowOop as the specialization type. These test the UseCompressedOops 104 // flag. 105 #define InstanceMirrorKlass_BOUNDED_OOP_ITERATE(start_p, count, low, high, \ 106 do_oop, assert_fn) \ 107 { \ 108 if (UseCompressedOops) { \ 109 InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop, \ 110 start_p, count, \ 111 low, high, \ 112 do_oop, assert_fn) \ 113 } else { \ 114 InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop, \ 115 start_p, count, \ 116 low, high, \ 117 do_oop, assert_fn) \ 118 } \ 119 } 120 121 122 void InstanceMirrorKlass::oop_follow_contents(oop obj) { 123 InstanceKlass::oop_follow_contents(obj); 124 125 // Follow the klass field in the mirror. 126 Klass* klass = java_lang_Class::as_Klass(obj); 127 if (klass != NULL) { 128 // An anonymous class doesn't have its own class loader, so the call 129 // to follow_klass will mark and push its java mirror instead of the 130 // class loader. When handling the java mirror for an anonymous class 131 // we need to make sure its class loader data is claimed, this is done 132 // by calling follow_class_loader explicitly. For non-anonymous classes 133 // the call to follow_class_loader is made when the class loader itself 134 // is handled. 
135 if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) { 136 MarkSweep::follow_class_loader(klass->class_loader_data()); 137 } else { 138 MarkSweep::follow_klass(klass); 139 } 140 } else { 141 // If klass is NULL then this a mirror for a primitive type. 142 // We don't have to follow them, since they are handled as strong 143 // roots in Universe::oops_do. 144 assert(java_lang_Class::is_primitive(obj), "Sanity check"); 145 } 146 147 InstanceMirrorKlass_OOP_ITERATE( \ 148 start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \ 149 MarkSweep::mark_and_push(p), \ 150 assert_is_in_closed_subset) 151 } 152 153 #if INCLUDE_ALL_GCS 154 void InstanceMirrorKlass::oop_follow_contents(ParCompactionManager* cm, 155 oop obj) { 156 InstanceKlass::oop_follow_contents(cm, obj); 157 158 // Follow the klass field in the mirror. 159 Klass* klass = java_lang_Class::as_Klass(obj); 160 if (klass != NULL) { 161 // An anonymous class doesn't have its own class loader, so the call 162 // to follow_klass will mark and push its java mirror instead of the 163 // class loader. When handling the java mirror for an anonymous class 164 // we need to make sure its class loader data is claimed, this is done 165 // by calling follow_class_loader explicitly. For non-anonymous classes 166 // the call to follow_class_loader is made when the class loader itself 167 // is handled. 168 if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) { 169 PSParallelCompact::follow_class_loader(cm, klass->class_loader_data()); 170 } else { 171 PSParallelCompact::follow_klass(cm, klass); 172 } 173 } else { 174 // If klass is NULL then this a mirror for a primitive type. 175 // We don't have to follow them, since they are handled as strong 176 // roots in Universe::oops_do. 
177 assert(java_lang_Class::is_primitive(obj), "Sanity check"); 178 } 179 180 InstanceMirrorKlass_OOP_ITERATE( \ 181 start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \ 182 PSParallelCompact::mark_and_push(cm, p), \ 183 assert_is_in) 184 } 185 #endif // INCLUDE_ALL_GCS 186 187 int InstanceMirrorKlass::oop_adjust_pointers(oop obj) { 188 int size = oop_size(obj); 189 InstanceKlass::oop_adjust_pointers(obj); 190 191 InstanceMirrorKlass_OOP_ITERATE( \ 192 start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \ 193 MarkSweep::adjust_pointer(p), \ 194 assert_nothing) 195 return size; 196 } 197 198 #define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(T, nv_suffix) \ 199 InstanceMirrorKlass_OOP_ITERATE( \ 200 start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \ 201 (closure)->do_oop##nv_suffix(p), \ 202 assert_is_in_closed_subset) \ 203 return oop_size(obj); \ 204 205 #define InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(T, nv_suffix, mr) \ 206 InstanceMirrorKlass_BOUNDED_OOP_ITERATE( \ 207 start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \ 208 mr.start(), mr.end(), \ 209 (closure)->do_oop##nv_suffix(p), \ 210 assert_is_in_closed_subset) \ 211 return oop_size(obj); \ 212 213 214 // Macro to define InstanceMirrorKlass::oop_oop_iterate for virtual/nonvirtual for 215 // all closures. Macros calling macros above for each oop size. 216 217 #define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix) \ 218 \ 219 int InstanceMirrorKlass:: \ 220 oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) { \ 221 /* Get size before changing pointers */ \ 222 SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \ 223 \ 224 InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure); \ 225 \ 226 if_do_metadata_checked(closure, nv_suffix) { \ 227 Klass* klass = java_lang_Class::as_Klass(obj); \ 228 /* We'll get NULL for primitive mirrors. 
*/ \ 229 if (klass != NULL) { \ 230 closure->do_klass##nv_suffix(klass); \ 231 } \ 232 } \ 233 \ 234 if (UseCompressedOops) { \ 235 InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix); \ 236 } else { \ 237 InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix); \ 238 } \ 239 } 240 241 #if INCLUDE_ALL_GCS 242 #define InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \ 243 \ 244 int InstanceMirrorKlass:: \ 245 oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) { \ 246 /* Get size before changing pointers */ \ 247 SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \ 248 \ 249 InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \ 250 \ 251 if (UseCompressedOops) { \ 252 InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix); \ 253 } else { \ 254 InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix); \ 255 } \ 256 } 257 #endif // INCLUDE_ALL_GCS 258 259 260 #define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix) \ 261 \ 262 int InstanceMirrorKlass:: \ 263 oop_oop_iterate##nv_suffix##_m(oop obj, \ 264 OopClosureType* closure, \ 265 MemRegion mr) { \ 266 SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \ 267 \ 268 InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr); \ 269 \ 270 if_do_metadata_checked(closure, nv_suffix) { \ 271 if (mr.contains(obj)) { \ 272 Klass* klass = java_lang_Class::as_Klass(obj); \ 273 /* We'll get NULL for primitive mirrors. 
*/ \ 274 if (klass != NULL) { \ 275 closure->do_klass##nv_suffix(klass); \ 276 } \ 277 } \ 278 } \ 279 \ 280 if (UseCompressedOops) { \ 281 InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr); \ 282 } else { \ 283 InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr); \ 284 } \ 285 } 286 287 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN) 288 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN) 289 #if INCLUDE_ALL_GCS 290 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN) 291 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN) 292 #endif // INCLUDE_ALL_GCS 293 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m) 294 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m) 295 296 #if INCLUDE_ALL_GCS 297 void InstanceMirrorKlass::oop_push_contents(PSPromotionManager* pm, oop obj) { 298 // Note that we don't have to follow the mirror -> klass pointer, since all 299 // klasses that are dirty will be scavenged when we iterate over the 300 // ClassLoaderData objects. 
301 302 InstanceKlass::oop_push_contents(pm, obj); 303 InstanceMirrorKlass_OOP_ITERATE( \ 304 start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\ 305 if (PSScavenge::should_scavenge(p)) { \ 306 pm->claim_or_forward_depth(p); \ 307 }, \ 308 assert_nothing ) 309 } 310 311 int InstanceMirrorKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) { 312 int size = oop_size(obj); 313 InstanceKlass::oop_update_pointers(cm, obj); 314 315 InstanceMirrorKlass_OOP_ITERATE( \ 316 start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\ 317 PSParallelCompact::adjust_pointer(p), \ 318 assert_nothing) 319 return size; 320 } 321 #endif // INCLUDE_ALL_GCS 322 323 int InstanceMirrorKlass::instance_size(KlassHandle k) { 324 if (k() != NULL && k->oop_is_instance()) { 325 return align_object_size(size_helper() + InstanceKlass::cast(k())->static_field_size()); 326 } 327 return size_helper(); 328 } 329 330 instanceOop InstanceMirrorKlass::allocate_instance(KlassHandle k, TRAPS) { 331 // Query before forming handle. 332 int size = instance_size(k); 333 KlassHandle h_k(THREAD, this); 334 instanceOop i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL); 335 336 // Since mirrors can be variable sized because of the static fields, store 337 // the size in the mirror itself. 338 java_lang_Class::set_oop_size(i, size); 339 340 return i; 341 } 342 343 int InstanceMirrorKlass::oop_size(oop obj) const { 344 return java_lang_Class::oop_size(obj); 345 } 346 347 int InstanceMirrorKlass::compute_static_oop_field_count(oop obj) { 348 Klass* k = java_lang_Class::as_Klass(obj); 349 if (k != NULL && k->oop_is_instance()) { 350 return InstanceKlass::cast(k)->static_oop_field_count(); 351 } 352 return 0; 353 }