1 /*
   2  * Copyright (c) 2011, 2012, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/javaClasses.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "gc_implementation/shared/markSweep.inline.hpp"
  29 #include "gc_interface/collectedHeap.inline.hpp"
  30 #include "memory/genOopClosures.inline.hpp"
  31 #include "memory/oopFactory.hpp"
  32 #include "oops/instanceKlass.hpp"
  33 #include "oops/instanceMirrorKlass.hpp"
  34 #include "oops/instanceOop.hpp"
  35 #include "oops/oop.inline.hpp"
  36 #include "oops/symbol.hpp"
  37 #include "runtime/handles.inline.hpp"
  38 #include "utilities/macros.hpp"
  39 #if INCLUDE_ALL_GCS
  40 #include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp"
  41 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
  42 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
  43 #include "gc_implementation/g1/g1RemSet.inline.hpp"
  44 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
  45 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
  46 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
  47 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
  48 #include "oops/oop.pcgc.inline.hpp"
  49 #endif // INCLUDE_ALL_GCS
  50 
// Byte offset within a mirror (java.lang.Class instance) at which the
// mirrored class's static fields begin; 0 until initialized elsewhere.
int InstanceMirrorKlass::_offset_of_static_fields = 0;
  52 
  53 #ifdef ASSERT
  54 template <class T> void assert_is_in(T *p) {
  55   T heap_oop = oopDesc::load_heap_oop(p);
  56   if (!oopDesc::is_null(heap_oop)) {
  57     oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
  58     assert(Universe::heap()->is_in(o), "should be in heap");
  59   }
  60 }
  61 template <class T> void assert_is_in_closed_subset(T *p) {
  62   T heap_oop = oopDesc::load_heap_oop(p);
  63   if (!oopDesc::is_null(heap_oop)) {
  64     oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
  65     assert(Universe::heap()->is_in_closed_subset(o), "should be in closed");
  66   }
  67 }
  68 template <class T> void assert_is_in_reserved(T *p) {
  69   T heap_oop = oopDesc::load_heap_oop(p);
  70   if (!oopDesc::is_null(heap_oop)) {
  71     oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
  72     assert(Universe::heap()->is_in_reserved(o), "should be in reserved");
  73   }
  74 }
  75 template <class T> void assert_nothing(T *p) {}
  76 
  77 #else
  78 template <class T> void assert_is_in(T *p) {}
  79 template <class T> void assert_is_in_closed_subset(T *p) {}
  80 template <class T> void assert_is_in_reserved(T *p) {}
  81 template <class T> void assert_nothing(T *p) {}
  82 #endif // ASSERT
  83 
// Applies the statement 'do_oop' to 'count' consecutive fields of type T
// starting at 'start_p', running 'assert_fn' on each address first.
// NOTE: 'do_oop' may (and typically does) refer to the current element
// pointer by the name 'p' bound inside this macro.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE( \
  T, start_p, count, do_oop,                         \
  assert_fn)                                         \
{                                                    \
  T* p         = (T*)(start_p);                      \
  T* const end = p + (count);                        \
  while (p < end) {                                  \
    (assert_fn)(p);                                  \
    do_oop;                                          \
    ++p;                                             \
  }                                                  \
}
  96 
// Same as InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE, but clips the
// iteration to the half-open range [low, high); both bounds must be
// aligned to sizeof(T). 'do_oop' may refer to the element pointer as 'p'.
#define InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
  T, start_p, count, low, high,                              \
  do_oop, assert_fn)                                         \
{                                                            \
  T* const l = (T*)(low);                                    \
  T* const h = (T*)(high);                                   \
  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 &&         \
         mask_bits((intptr_t)h, sizeof(T)-1) == 0,           \
         "bounded region must be properly aligned");         \
  T* p       = (T*)(start_p);                                \
  T* end     = p + (count);                                  \
  if (p < l) p = l;                                          \
  if (end > h) end = h;                                      \
  while (p < end) {                                          \
    (assert_fn)(p);                                          \
    do_oop;                                                  \
    ++p;                                                     \
  }                                                          \
}
 116 
 117 
// Dispatches to the specialized iterator with T = narrowOop or T = oop
// depending on the UseCompressedOops flag. 'do_oop' may refer to the
// current element pointer as 'p'.
#define InstanceMirrorKlass_OOP_ITERATE(start_p, count,    \
                                  do_oop, assert_fn)       \
{                                                          \
  if (UseCompressedOops) {                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(narrowOop, \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  } else {                                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(oop,       \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  }                                                        \
}
 131 
 132 // The following macros call specialized macros, passing either oop or
 133 // narrowOop as the specialization type.  These test the UseCompressedOops
 134 // flag.
// Bounded counterpart of InstanceMirrorKlass_OOP_ITERATE: restricts the
// iteration to [low, high). 'do_oop' may refer to the element pointer as 'p'.
#define InstanceMirrorKlass_BOUNDED_OOP_ITERATE(start_p, count, low, high, \
                                          do_oop, assert_fn)               \
{                                                                          \
  if (UseCompressedOops) {                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop,         \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  } else {                                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,               \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  }                                                                        \
}
 150 
 151 
// MarkSweep marking: follow everything reachable from the mirror 'obj' --
// its instance fields (via InstanceKlass), the mirrored klass (or, for
// anonymous classes, that klass's ClassLoaderData), and the static oop
// fields stored in the mirror itself.
void InstanceMirrorKlass::oop_follow_contents(oop obj) {
  InstanceKlass::oop_follow_contents(obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    // For anonymous classes we need to handle the class loader data,
    // otherwise it won't be claimed and can be unloaded.
    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
      MarkSweep::follow_class_loader(klass->class_loader_data());
    } else {
      MarkSweep::follow_klass(klass);
    }
  } else {
    // If klass is NULL then this a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push each static oop field embedded in the mirror.
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    MarkSweep::mark_and_push(p),                                                      \
    assert_is_in_closed_subset)
}
 177 
 178 #if INCLUDE_ALL_GCS
// Parallel-compaction marking variant of oop_follow_contents: identical
// structure, but routes all work through the given ParCompactionManager.
void InstanceMirrorKlass::oop_follow_contents(ParCompactionManager* cm,
                                              oop obj) {
  InstanceKlass::oop_follow_contents(cm, obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    // For anonymous classes we need to handle the class loader data,
    // otherwise it won't be claimed and can be unloaded.
    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
      PSParallelCompact::follow_class_loader(cm, klass->class_loader_data());
    } else {
      PSParallelCompact::follow_klass(cm, klass);
    }
  } else {
    // If klass is NULL then this a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push each static oop field embedded in the mirror.
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    PSParallelCompact::mark_and_push(cm, p),                                          \
    assert_is_in)
}
 205 #endif // INCLUDE_ALL_GCS
 206 
// MarkSweep adjust phase: update the mirror's instance fields and static
// oop fields to refer to objects' new locations. Returns the object size,
// captured before any pointers are changed.
int InstanceMirrorKlass::oop_adjust_pointers(oop obj) {
  int size = oop_size(obj);
  InstanceKlass::oop_adjust_pointers(obj);

  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    MarkSweep::adjust_pointer(p),                                                     \
    assert_nothing)
  return size;
}
 217 
// Shared tail for the oop_oop_iterate definitions below: applies the
// closure to the mirror's static oop fields, then returns the object size.
// Expects 'obj' and 'closure' to be in scope at the expansion site.
// NOTE(review): the 'T' parameter appears unused here -- the inner macro
// re-tests UseCompressedOops itself; presumably kept for symmetry with the
// bounded variant.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(T, nv_suffix)                \
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
      (closure)->do_oop##nv_suffix(p),                                                \
    assert_is_in_closed_subset)                                                       \
  return oop_size(obj);                                                               \

// Bounded variant of the shared tail above: only applies the closure to
// static oop fields falling within MemRegion 'mr'. Expects 'obj',
// 'closure' and 'mr' in scope at the expansion site.
#define InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(T, nv_suffix, mr)         \
  InstanceMirrorKlass_BOUNDED_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    mr.start(), mr.end(),                                                             \
      (closure)->do_oop##nv_suffix(p),                                                \
    assert_is_in_closed_subset)                                                       \
  return oop_size(obj);                                                               \

 233 
// Guard macro: asserts the virtual and non-virtual do_metadata answers
// agree, then expands to an 'if' guarding the statement/block that
// immediately follows the expansion site.
#define if_do_metadata_checked(closure, nv_suffix)                    \
  /* Make sure the non-virtual and the virtual versions match. */     \
  assert(closure->do_metadata##nv_suffix() == closure->do_metadata(), \
      "Inconsistency in do_metadata");                                \
  if (closure->do_metadata##nv_suffix())
 239 
 240 // Macro to define InstanceMirrorKlass::oop_oop_iterate for virtual/nonvirtual for
 241 // all closures.  Macros calling macros above for each oop size.
 242 
// Defines InstanceMirrorKlass::oop_oop_iterate{nv_suffix}(obj, closure):
// iterates the instance fields, visits the mirrored klass when the closure
// processes metadata (NULL for primitive mirrors), then applies the
// closure to the static oop fields and returns the object size.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)           \
                                                                                      \
int InstanceMirrorKlass::                                                             \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                        \
  /* Get size before changing pointers */                                             \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);      \
                                                                                      \
  InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);                            \
                                                                                      \
  if_do_metadata_checked(closure, nv_suffix) {                                        \
    Klass* klass = java_lang_Class::as_Klass(obj);                                    \
    /* We'll get NULL for primitive mirrors. */                                       \
    if (klass != NULL) {                                                              \
      closure->do_klass##nv_suffix(klass);                                            \
    }                                                                                 \
  }                                                                                   \
                                                                                      \
  if (UseCompressedOops) {                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);           \
  } else {                                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);                 \
  }                                                                                   \
}
 266 
 267 #if INCLUDE_ALL_GCS
// Backwards variant: delegates to the backwards InstanceKlass iterator and,
// unlike the forward definition above, does not visit the mirrored klass
// metadata before iterating the static oop fields.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                      \
int InstanceMirrorKlass::                                                             \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {              \
  /* Get size before changing pointers */                                             \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);      \
                                                                                      \
  InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure);                  \
                                                                                      \
  if (UseCompressedOops) {                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);           \
  } else {                                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);                 \
  }                                                                                   \
}
 283 #endif // INCLUDE_ALL_GCS
 284 
 285 
// Bounded (MemRegion) variant: only the part of the mirror intersecting
// 'mr' is processed, and the mirrored klass is visited only when the
// mirror object itself lies within 'mr'.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)         \
                                                                                      \
int InstanceMirrorKlass::                                                             \
oop_oop_iterate##nv_suffix##_m(oop obj,                                               \
                               OopClosureType* closure,                               \
                               MemRegion mr) {                                        \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);      \
                                                                                      \
  InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);                    \
                                                                                      \
  if_do_metadata_checked(closure, nv_suffix) {                                        \
    if (mr.contains(obj)) {                                                           \
      Klass* klass = java_lang_Class::as_Klass(obj);                                  \
      /* We'll get NULL for primitive mirrors. */                                     \
      if (klass != NULL) {                                                            \
        closure->do_klass##nv_suffix(klass);                                          \
      }                                                                               \
    }                                                                                 \
  }                                                                                   \
                                                                                      \
  if (UseCompressedOops) {                                                            \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr);    \
  } else {                                                                            \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr);          \
  }                                                                                   \
}
 312 
// Instantiate the oop_oop_iterate / _backwards / _m definitions for every
// closure type enumerated by the ALL_OOP_OOP_ITERATE_CLOSURES_[12] macros.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
#if INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
 321 
 322 #if INCLUDE_ALL_GCS
// ParallelScavenge: push each static oop field of the mirror for which
// PSScavenge::should_scavenge(p) holds onto the promotion manager's
// depth-first work stack.
void InstanceMirrorKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  // Note that we don't have to follow the mirror -> klass pointer, since all
  // klasses that are dirty will be scavenged when we iterate over the
  // ClassLoaderData objects.

  InstanceKlass::oop_push_contents(pm, obj);
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    if (PSScavenge::should_scavenge(p)) {                                     \
      pm->claim_or_forward_depth(p);                                          \
    },                                                                        \
    assert_nothing )
}
 336 
// Parallel compaction: update the mirror's instance fields and static oop
// fields to the objects' new locations. Returns the object size, captured
// before any pointers are changed.
int InstanceMirrorKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  int size = oop_size(obj);
  InstanceKlass::oop_update_pointers(cm, obj);

  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    PSParallelCompact::adjust_pointer(p),                                     \
    assert_nothing)
  return size;
}
 347 #endif // INCLUDE_ALL_GCS
 348 
 349 int InstanceMirrorKlass::instance_size(KlassHandle k) {
 350   if (k() != NULL && k->oop_is_instance()) {
 351     return align_object_size(size_helper() + InstanceKlass::cast(k())->static_field_size());
 352   }
 353   return size_helper();
 354 }
 355 
 356 instanceOop InstanceMirrorKlass::allocate_instance(KlassHandle k, TRAPS) {
 357   // Query before forming handle.
 358   int size = instance_size(k);
 359   KlassHandle h_k(THREAD, this);
 360   instanceOop i = (instanceOop) CollectedHeap::Class_obj_allocate(h_k, size, k, CHECK_NULL);
 361   return i;
 362 }
 363 
// Mirror sizes vary per instance; delegate to the size recorded in the
// java.lang.Class object itself.
int InstanceMirrorKlass::oop_size(oop obj) const {
  return java_lang_Class::oop_size(obj);
}
 367 
 368 int InstanceMirrorKlass::compute_static_oop_field_count(oop obj) {
 369   Klass* k = java_lang_Class::as_Klass(obj);
 370   if (k != NULL && k->oop_is_instance()) {
 371     return InstanceKlass::cast(k)->static_oop_field_count();
 372   }
 373   return 0;
 374 }