1 /*
   2  * Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/javaClasses.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "gc_implementation/shared/markSweep.inline.hpp"
  29 #include "gc_interface/collectedHeap.inline.hpp"
  30 #include "memory/genOopClosures.inline.hpp"
  31 #include "memory/iterator.inline.hpp"
  32 #include "memory/oopFactory.hpp"
  33 #include "oops/instanceKlass.hpp"
  34 #include "oops/instanceMirrorKlass.hpp"
  35 #include "oops/instanceOop.hpp"
  36 #include "oops/oop.inline.hpp"
  37 #include "oops/symbol.hpp"
  38 #include "runtime/handles.inline.hpp"
  39 #include "utilities/macros.hpp"
  40 #if INCLUDE_ALL_GCS
  41 #include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp"
  42 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
  43 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
  44 #include "gc_implementation/g1/g1RemSet.inline.hpp"
  45 #include "gc_implementation/g1/heapRegionManager.inline.hpp"
  46 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
  47 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
  48 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
  49 #include "oops/oop.pcgc.inline.hpp"
  50 #endif // INCLUDE_ALL_GCS
  51 
// Byte offset from the start of a java.lang.Class mirror instance to the
// storage for the represented class's static fields. Zero until set up at
// bootstrap (the initialization site is not visible in this file).
int InstanceMirrorKlass::_offset_of_static_fields = 0;
  53 
#ifdef ASSERT
// Debug-only verification helpers, passed as the assert_fn argument of the
// iteration macros below. Each one loads the (possibly compressed) oop at
// *p and, when it is non-NULL, asserts that it lies in the expected part of
// the heap. In product builds (#else branch) they are all empty.
template <class T> void assert_is_in(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in(o), "should be in heap");
  }
}
template <class T> void assert_is_in_closed_subset(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_closed_subset(o), "should be in closed");
  }
}
template <class T> void assert_is_in_reserved(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_reserved(o), "should be in reserved");
  }
}
template <class T> void assert_nothing(T *p) {}

#else
// Product builds: no verification work at all.
template <class T> void assert_is_in(T *p) {}
template <class T> void assert_is_in_closed_subset(T *p) {}
template <class T> void assert_is_in_reserved(T *p) {}
template <class T> void assert_nothing(T *p) {}
#endif // ASSERT
  84 
// Core iteration: walks 'count' consecutive oop slots of type T (oop or
// narrowOop) starting at start_p, running assert_fn and then the do_oop
// statement on each slot pointer p. (Comments must stay outside the macro
// body because of the line continuations.)
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE( \
  T, start_p, count, do_oop,                         \
  assert_fn)                                         \
{                                                    \
  T* p         = (T*)(start_p);                      \
  T* const end = p + (count);                        \
  while (p < end) {                                  \
    (assert_fn)(p);                                  \
    do_oop;                                          \
    ++p;                                             \
  }                                                  \
}
  97 
// Bounded variant of the macro above: the visited slots are clipped to the
// [low, high) region, which must be aligned on a T boundary (checked by the
// assert). Slots outside the region are skipped entirely.
#define InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
  T, start_p, count, low, high,                              \
  do_oop, assert_fn)                                         \
{                                                            \
  T* const l = (T*)(low);                                    \
  T* const h = (T*)(high);                                   \
  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 &&         \
         mask_bits((intptr_t)h, sizeof(T)-1) == 0,           \
         "bounded region must be properly aligned");         \
  T* p       = (T*)(start_p);                                \
  T* end     = p + (count);                                  \
  if (p < l) p = l;                                          \
  if (end > h) end = h;                                      \
  while (p < end) {                                          \
    (assert_fn)(p);                                          \
    do_oop;                                                  \
    ++p;                                                     \
  }                                                          \
}
 117 
 118 
// Dispatches to the specialized iteration macro above with T = narrowOop or
// T = oop, depending on the UseCompressedOops flag.
#define InstanceMirrorKlass_OOP_ITERATE(start_p, count,    \
                                  do_oop, assert_fn)       \
{                                                          \
  if (UseCompressedOops) {                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(narrowOop, \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  } else {                                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(oop,       \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  }                                                        \
}
 132 
// These dispatch macros (InstanceMirrorKlass_OOP_ITERATE above and the
// bounded variant below) call the specialized macros, passing either oop or
// narrowOop as the specialization type.  They test the UseCompressedOops
// flag to choose.
// Bounded dispatch: as InstanceMirrorKlass_OOP_ITERATE, but only slots
// falling inside [low, high) are visited.
#define InstanceMirrorKlass_BOUNDED_OOP_ITERATE(start_p, count, low, high, \
                                          do_oop, assert_fn)               \
{                                                                          \
  if (UseCompressedOops) {                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop,         \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  } else {                                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,               \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  }                                                                        \
}
 151 
 152 
// Serial mark-sweep marking of a mirror (an instance of java.lang.Class).
// Beyond the ordinary instance fields (handled by the InstanceKlass call),
// a mirror also references the Klass it represents and embeds that class's
// static oop fields; both are handled here.
void InstanceMirrorKlass::oop_follow_contents(oop obj) {
  InstanceKlass::oop_follow_contents(obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    // An anonymous class doesn't have its own class loader, so the call
    // to follow_klass will mark and push its java mirror instead of the
    // class loader. When handling the java mirror for an anonymous class
    // we need to make sure its class loader data is claimed, this is done
    // by calling follow_class_loader explicitly. For non-anonymous classes
    // the call to follow_class_loader is made when the class loader itself
    // is handled.
    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
      MarkSweep::follow_class_loader(klass->class_loader_data());
    } else {
      MarkSweep::follow_klass(klass);
    }
  } else {
    // If klass is NULL then this is a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push every static oop field embedded in the mirror.
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    MarkSweep::mark_and_push(p),                                                      \
    assert_is_in_closed_subset)
}
 183 
#if INCLUDE_ALL_GCS
// Parallel-compact marking of a mirror: same structure as the serial
// oop_follow_contents above, but marking goes through the
// ParCompactionManager.
void InstanceMirrorKlass::oop_follow_contents(ParCompactionManager* cm,
                                              oop obj) {
  InstanceKlass::oop_follow_contents(cm, obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    // An anonymous class doesn't have its own class loader, so the call
    // to follow_klass will mark and push its java mirror instead of the
    // class loader. When handling the java mirror for an anonymous class
    // we need to make sure its class loader data is claimed, this is done
    // by calling follow_class_loader explicitly. For non-anonymous classes
    // the call to follow_class_loader is made when the class loader itself
    // is handled.
    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
      PSParallelCompact::follow_class_loader(cm, klass->class_loader_data());
    } else {
      PSParallelCompact::follow_klass(cm, klass);
    }
  } else {
    // If klass is NULL then this is a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push every static oop field embedded in the mirror.
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    PSParallelCompact::mark_and_push(cm, p),                                          \
    assert_is_in)
}
#endif // INCLUDE_ALL_GCS
 217 
// Serial mark-sweep compaction: adjusts all oop pointers in the mirror to
// their post-compaction locations. Returns the mirror's size in words.
int InstanceMirrorKlass::oop_adjust_pointers(oop obj) {
  int size = oop_size(obj);   // capture the size before any pointers change
  InstanceKlass::oop_adjust_pointers(obj);

  // Adjust each static oop field embedded in the mirror.
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    MarkSweep::adjust_pointer(p),                                                     \
    assert_nothing)
  return size;
}
 228 
// Visits the mirror's static oop fields with the closure and returns the
// object size in words. Note: the T parameter is never referenced in the
// expansion — InstanceMirrorKlass_OOP_ITERATE re-tests UseCompressedOops
// itself — so callers that branch on UseCompressedOops before invoking this
// expand to identical code in both arms.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(T, nv_suffix)                \
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
      (closure)->do_oop##nv_suffix(p),                                                \
    assert_is_in_closed_subset)                                                       \
  return oop_size(obj);                                                               \

// As the macro above, but only static-field slots inside the MemRegion mr
// are visited. T is likewise unused here; the bounded dispatch macro
// performs its own UseCompressedOops test.
#define InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(T, nv_suffix, mr)         \
  InstanceMirrorKlass_BOUNDED_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    mr.start(), mr.end(),                                                             \
      (closure)->do_oop##nv_suffix(p),                                                \
    assert_is_in_closed_subset)                                                       \
  return oop_size(obj);                                                               \

 244 
 245 // Macro to define InstanceMirrorKlass::oop_oop_iterate for virtual/nonvirtual for
 246 // all closures.  Macros calling macros above for each oop size.
 247 
// Expands to InstanceMirrorKlass::oop_oop_iterate##nv_suffix for one closure
// type: iterates the ordinary instance fields (via InstanceKlass), passes
// the represented Klass to closures that want metadata (primitive mirrors
// have a NULL klass and are skipped), then visits the embedded static oop
// fields. Returns the object size in words.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)           \
                                                                                      \
int InstanceMirrorKlass::                                                             \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                        \
  /* Get size before changing pointers */                                             \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);      \
                                                                                      \
  InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);                            \
                                                                                      \
  if_do_metadata_checked(closure, nv_suffix) {                                        \
    Klass* klass = java_lang_Class::as_Klass(obj);                                    \
    /* We'll get NULL for primitive mirrors. */                                       \
    if (klass != NULL) {                                                              \
      closure->do_klass##nv_suffix(klass);                                            \
    }                                                                                 \
  }                                                                                   \
                                                                                      \
  if (UseCompressedOops) {                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);           \
  } else {                                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);                 \
  }                                                                                   \
}
 271 
#if INCLUDE_ALL_GCS
// Backwards-iteration variant (built only with all GCs included). Unlike
// the forward definition above, this one contains no if_do_metadata_checked
// block, so the represented Klass is not passed to the closure here.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                      \
int InstanceMirrorKlass::                                                             \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {              \
  /* Get size before changing pointers */                                             \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);      \
                                                                                      \
  InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure);                  \
                                                                                      \
  if (UseCompressedOops) {                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);           \
  } else {                                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);                 \
  }                                                                                   \
}
#endif // INCLUDE_ALL_GCS
 289 
 290 
// Expands to InstanceMirrorKlass::oop_oop_iterate##nv_suffix##_m: like the
// unbounded definition above, but only oop slots within the MemRegion mr
// are visited, and the represented Klass is passed to the closure only when
// the mirror object itself lies inside mr.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)         \
                                                                                      \
int InstanceMirrorKlass::                                                             \
oop_oop_iterate##nv_suffix##_m(oop obj,                                               \
                               OopClosureType* closure,                               \
                               MemRegion mr) {                                        \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);      \
                                                                                      \
  InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);                    \
                                                                                      \
  if_do_metadata_checked(closure, nv_suffix) {                                        \
    if (mr.contains(obj)) {                                                           \
      Klass* klass = java_lang_Class::as_Klass(obj);                                  \
      /* We'll get NULL for primitive mirrors. */                                     \
      if (klass != NULL) {                                                            \
        closure->do_klass##nv_suffix(klass);                                          \
      }                                                                               \
    }                                                                                 \
  }                                                                                   \
                                                                                      \
  if (UseCompressedOops) {                                                            \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr);    \
  } else {                                                                            \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr);          \
  }                                                                                   \
}
 317 
// Instantiate the definitions above for every closure type in the two
// specialized-closure lists: forward iteration, backwards iteration (only
// when all GCs are built in), and the MemRegion-bounded _m variants.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
#if INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
 326 
#if INCLUDE_ALL_GCS
// Parallel scavenge: pushes the mirror's scavengeable contents to the
// promotion manager. The mirror -> klass edge is deliberately not followed
// (see comment below).
void InstanceMirrorKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  // Note that we don't have to follow the mirror -> klass pointer, since all
  // klasses that are dirty will be scavenged when we iterate over the
  // ClassLoaderData objects.

  InstanceKlass::oop_push_contents(pm, obj);
  // Claim or forward each static oop field that is subject to scavenging.
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    if (PSScavenge::should_scavenge(p)) {                                     \
      pm->claim_or_forward_depth(p);                                          \
    },                                                                        \
    assert_nothing )
}
 341 
// Parallel compact: updates all oop pointers in the mirror to their
// post-compaction locations. Returns the mirror's size in words.
int InstanceMirrorKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  int size = oop_size(obj);   // capture the size before any pointers change
  InstanceKlass::oop_update_pointers(cm, obj);

  // Adjust each static oop field embedded in the mirror.
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    PSParallelCompact::adjust_pointer(p),                                     \
    assert_nothing)
  return size;
}
#endif // INCLUDE_ALL_GCS
 353 
 354 int InstanceMirrorKlass::instance_size(KlassHandle k) {
 355   if (k() != NULL && k->oop_is_instance()) {
 356     return align_object_size(size_helper() + InstanceKlass::cast(k())->static_field_size());
 357   }
 358   return size_helper();
 359 }
 360 
// Allocates a new mirror instance on the heap. The size is computed from k
// before any handle is formed (see comment below), and is recorded in the
// mirror itself because mirrors are variable-sized. CHECK_NULL makes this
// function return NULL if the allocation raises a pending exception.
instanceOop InstanceMirrorKlass::allocate_instance(KlassHandle k, TRAPS) {
  // Query before forming handle.
  int size = instance_size(k);
  KlassHandle h_k(THREAD, this);
  instanceOop i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL);

  // Since mirrors can be variable sized because of the static fields, store
  // the size in the mirror itself.
  java_lang_Class::set_oop_size(i, size);

  return i;
}
 373 
 374 int InstanceMirrorKlass::oop_size(oop obj) const {
 375   return java_lang_Class::oop_size(obj);
 376 }
 377 
 378 int InstanceMirrorKlass::compute_static_oop_field_count(oop obj) {
 379   Klass* k = java_lang_Class::as_Klass(obj);
 380   if (k != NULL && k->oop_is_instance()) {
 381     return InstanceKlass::cast(k)->static_oop_field_count();
 382   }
 383   return 0;
 384 }