1 /*
   2  * Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/javaClasses.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "gc_implementation/shared/markSweep.inline.hpp"
  29 #include "gc_interface/collectedHeap.inline.hpp"
  30 #include "memory/genOopClosures.inline.hpp"
  31 #include "memory/oopFactory.hpp"
  32 #include "oops/instanceKlass.hpp"
  33 #include "oops/instanceMirrorKlass.hpp"
  34 #include "oops/instanceOop.hpp"
  35 #include "oops/oop.inline.hpp"
  36 #include "oops/symbol.hpp"
  37 #include "runtime/handles.inline.hpp"
  38 #include "utilities/macros.hpp"
  39 #if INCLUDE_ALL_GCS
  40 #include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp"
  41 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
  42 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
  43 #include "gc_implementation/g1/g1RemSet.inline.hpp"
  44 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
  45 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
  46 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
  47 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
  48 #include "oops/oop.pcgc.inline.hpp"
  49 #endif // INCLUDE_ALL_GCS
  50 
// Byte offset within a java.lang.Class instance at which the embedded static
// fields begin. Zero until initialized elsewhere (presumably during VM
// bootstrap -- the initializer is not visible in this file).
int InstanceMirrorKlass::_offset_of_static_fields = 0;
  52 
#ifdef ASSERT
// Debug-only helpers used as the assert_fn argument of the iteration macros
// below. Each loads the (possibly compressed) oop at p and, when non-null,
// asserts that the decoded oop lies in the expected heap region.
template <class T> void assert_is_in(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in(o), "should be in heap");
  }
}
template <class T> void assert_is_in_closed_subset(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_closed_subset(o), "should be in closed");
  }
}
template <class T> void assert_is_in_reserved(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_reserved(o), "should be in reserved");
  }
}
// No-op variant for call sites where no containment check is wanted.
template <class T> void assert_nothing(T *p) {}

#else
// Product build: all checks compile away to empty inline functions.
template <class T> void assert_is_in(T *p) {}
template <class T> void assert_is_in_closed_subset(T *p) {}
template <class T> void assert_is_in_reserved(T *p) {}
template <class T> void assert_nothing(T *p) {}
#endif // ASSERT
  83 
// Iterate over "count" oop slots of type T (oop or narrowOop) starting at
// start_p. For each slot, first run assert_fn on the cursor "p", then execute
// the do_oop statement, which may reference "p".
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE( \
  T, start_p, count, do_oop,                         \
  assert_fn)                                         \
{                                                    \
  T* p         = (T*)(start_p);                      \
  T* const end = p + (count);                        \
  while (p < end) {                                  \
    (assert_fn)(p);                                  \
    do_oop;                                          \
    ++p;                                             \
  }                                                  \
}
  96 
// Same as InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE, but clips the
// iteration to the intersection of [start_p, start_p + count) and the
// [low, high) bounds. The bounds must be T-aligned (asserted below).
#define InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
  T, start_p, count, low, high,                              \
  do_oop, assert_fn)                                         \
{                                                            \
  T* const l = (T*)(low);                                    \
  T* const h = (T*)(high);                                   \
  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 &&         \
         mask_bits((intptr_t)h, sizeof(T)-1) == 0,           \
         "bounded region must be properly aligned");         \
  T* p       = (T*)(start_p);                                \
  T* end     = p + (count);                                  \
  if (p < l) p = l;                                          \
  if (end > h) end = h;                                      \
  while (p < end) {                                          \
    (assert_fn)(p);                                          \
    do_oop;                                                  \
    ++p;                                                     \
  }                                                          \
}
 116 
 117 
// Dispatch to the narrowOop or oop specialization of the unbounded iteration
// macro, depending on the UseCompressedOops flag.
#define InstanceMirrorKlass_OOP_ITERATE(start_p, count,    \
                                  do_oop, assert_fn)       \
{                                                          \
  if (UseCompressedOops) {                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(narrowOop, \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  } else {                                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(oop,       \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  }                                                        \
}
 131 
// The following macros call specialized macros, passing either oop or
// narrowOop as the specialization type.  These test the UseCompressedOops
// flag.
// Bounded variant: only slots inside [low, high) are visited.
#define InstanceMirrorKlass_BOUNDED_OOP_ITERATE(start_p, count, low, high, \
                                          do_oop, assert_fn)               \
{                                                                          \
  if (UseCompressedOops) {                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop,         \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  } else {                                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,               \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  }                                                                        \
}
 150 
 151 
// Serial full GC (MarkSweep): follow everything reachable from a
// java.lang.Class mirror -- the normal instance fields (via the superclass),
// the Klass the mirror reflects, and the static oop fields embedded in the
// mirror past the instance fields.
void InstanceMirrorKlass::oop_follow_contents(oop obj) {
  InstanceKlass::oop_follow_contents(obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    // An anonymous class doesn't have its own class loader, so the call
    // to follow_klass will mark and push its java mirror instead of the
    // class loader. When handling the java mirror for an anonymous class
    // we need to make sure its class loader data is claimed, this is done
    // by calling follow_class_loader explicitly. For non-anonymous classes
    // the call to follow_class_loader is made when the class loader itself
    // is handled.
    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
      MarkSweep::follow_class_loader(klass->class_loader_data());
    } else {
      MarkSweep::follow_klass(klass);
    }
  } else {
    // If klass is NULL then this a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push the static oop fields embedded in the mirror.
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    MarkSweep::mark_and_push(p),                                                      \
    assert_is_in_closed_subset)
}
 182 
#if INCLUDE_ALL_GCS
// Parallel compacting GC variant of oop_follow_contents: same structure as
// the serial version above, but marking goes through PSParallelCompact with
// the per-worker ParCompactionManager.
void InstanceMirrorKlass::oop_follow_contents(ParCompactionManager* cm,
                                              oop obj) {
  InstanceKlass::oop_follow_contents(cm, obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    // An anonymous class doesn't have its own class loader, so the call
    // to follow_klass will mark and push its java mirror instead of the
    // class loader. When handling the java mirror for an anonymous class
    // we need to make sure its class loader data is claimed, this is done
    // by calling follow_class_loader explicitly. For non-anonymous classes
    // the call to follow_class_loader is made when the class loader itself
    // is handled.
    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
      PSParallelCompact::follow_class_loader(cm, klass->class_loader_data());
    } else {
      PSParallelCompact::follow_klass(cm, klass);
    }
  } else {
    // If klass is NULL then this a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push the static oop fields embedded in the mirror.
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    PSParallelCompact::mark_and_push(cm, p),                                          \
    assert_is_in)
}
#endif // INCLUDE_ALL_GCS
 216 
// Serial full GC pointer adjustment: relocate the instance fields (via the
// superclass) and the embedded static oop fields. Returns the mirror's size
// in words, captured before any pointers are changed.
int InstanceMirrorKlass::oop_adjust_pointers(oop obj) {
  int size = oop_size(obj);
  InstanceKlass::oop_adjust_pointers(obj);

  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    MarkSweep::adjust_pointer(p),                                                     \
    assert_nothing)
  return size;
}
 227 
// Body fragment used by the oop_oop_iterate definitions below: apply the
// (captured) closure to every embedded static oop field, then return the
// mirror's size in words. Expects "obj" and "closure" to be in scope.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(T, nv_suffix)                \
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
      (closure)->do_oop##nv_suffix(p),                                                \
    assert_is_in_closed_subset)                                                       \
  return oop_size(obj);                                                               \

// Bounded counterpart: only static oop fields inside MemRegion "mr" are
// passed to the closure.
#define InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(T, nv_suffix, mr)         \
  InstanceMirrorKlass_BOUNDED_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    mr.start(), mr.end(),                                                             \
      (closure)->do_oop##nv_suffix(p),                                                \
    assert_is_in_closed_subset)                                                       \
  return oop_size(obj);                                                               \

 243 
// Macro to define InstanceMirrorKlass::oop_oop_iterate for virtual/nonvirtual for
// all closures.  Macros calling macros above for each oop size.
// Each generated method visits the instance fields (via the superclass), the
// reflected Klass (when the closure wants metadata), and finally the embedded
// static oop fields; it returns the mirror's size in words.

#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)           \
                                                                                      \
int InstanceMirrorKlass::                                                             \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                        \
  /* Get size before changing pointers */                                             \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);      \
                                                                                      \
  InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);                            \
                                                                                      \
  if_do_metadata_checked(closure, nv_suffix) {                                        \
    Klass* klass = java_lang_Class::as_Klass(obj);                                    \
    /* We'll get NULL for primitive mirrors. */                                       \
    if (klass != NULL) {                                                              \
      closure->do_klass##nv_suffix(klass);                                            \
    }                                                                                 \
  }                                                                                   \
                                                                                      \
  if (UseCompressedOops) {                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);           \
  } else {                                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);                 \
  }                                                                                   \
}
 270 
#if INCLUDE_ALL_GCS
// Backwards-iteration variant. Note: unlike the forward variant above, no
// metadata (do_klass) step is performed here.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                      \
int InstanceMirrorKlass::                                                             \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {              \
  /* Get size before changing pointers */                                             \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);      \
                                                                                      \
  InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure);                  \
                                                                                      \
  if (UseCompressedOops) {                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);           \
  } else {                                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);                 \
  }                                                                                   \
}
#endif // INCLUDE_ALL_GCS
 288 
 289 
// Bounded ("_m") variant: only oop locations inside MemRegion "mr" are passed
// to the closure, and the do_klass step runs only if the mirror itself lies
// within mr.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)         \
                                                                                      \
int InstanceMirrorKlass::                                                             \
oop_oop_iterate##nv_suffix##_m(oop obj,                                               \
                               OopClosureType* closure,                               \
                               MemRegion mr) {                                        \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);      \
                                                                                      \
  InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);                    \
                                                                                      \
  if_do_metadata_checked(closure, nv_suffix) {                                        \
    if (mr.contains(obj)) {                                                           \
      Klass* klass = java_lang_Class::as_Klass(obj);                                  \
      /* We'll get NULL for primitive mirrors. */                                     \
      if (klass != NULL) {                                                            \
        closure->do_klass##nv_suffix(klass);                                          \
      }                                                                               \
    }                                                                                 \
  }                                                                                   \
                                                                                      \
  if (UseCompressedOops) {                                                            \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr);    \
  } else {                                                                            \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr);          \
  }                                                                                   \
}
 316 
// Instantiate the oop_oop_iterate definitions above for every closure type
// in the two ALL_OOP_OOP_ITERATE_CLOSURES_* lists.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
#if INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
 325 
#if INCLUDE_ALL_GCS
// Parallel scavenge: push any scavengeable embedded static oop fields onto
// the promotion manager's depth-first work queue.
void InstanceMirrorKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  // Note that we don't have to follow the mirror -> klass pointer, since all
  // klasses that are dirty will be scavenged when we iterate over the
  // ClassLoaderData objects.

  InstanceKlass::oop_push_contents(pm, obj);
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    if (PSScavenge::should_scavenge(p)) {                                     \
      pm->claim_or_forward_depth(p);                                          \
    },                                                                        \
    assert_nothing )
}

// Parallel compact pointer update: adjust the instance fields (via the
// superclass) and the embedded static oop fields. Returns the mirror's size
// in words, captured before any pointers are changed.
int InstanceMirrorKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  int size = oop_size(obj);
  InstanceKlass::oop_update_pointers(cm, obj);

  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    PSParallelCompact::adjust_pointer(p),                                     \
    assert_nothing)
  return size;
}
#endif // INCLUDE_ALL_GCS
 352 
 353 int InstanceMirrorKlass::instance_size(KlassHandle k) {
 354   if (k() != NULL && k->oop_is_instance()) {
 355     return align_object_size(size_helper() + InstanceKlass::cast(k())->static_field_size());
 356   }
 357   return size_helper();
 358 }
 359 
// Allocate a java.lang.Class mirror for klass k. TRAPS: allocation may fail
// and pend an exception, in which case CHECK_NULL returns NULL to the caller.
instanceOop InstanceMirrorKlass::allocate_instance(KlassHandle k, TRAPS) {
  // Query before forming handle.
  int size = instance_size(k);
  KlassHandle h_k(THREAD, this);
  instanceOop i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL);

  // Since mirrors can be variable sized because of the static fields, store
  // the size in the mirror itself.
  java_lang_Class::set_oop_size(i, size);

  return i;
}
 372 
 373 int InstanceMirrorKlass::oop_size(oop obj) const {
 374   return java_lang_Class::oop_size(obj);
 375 }
 376 
 377 int InstanceMirrorKlass::compute_static_oop_field_count(oop obj) {
 378   Klass* k = java_lang_Class::as_Klass(obj);
 379   if (k != NULL && k->oop_is_instance()) {
 380     return InstanceKlass::cast(k)->static_oop_field_count();
 381   }
 382   return 0;
 383 }