src/share/vm/oops/instanceRefKlass.cpp

Print this page
rev 6796 : [mq]: templateOopIterate


  30 #include "gc_interface/collectedHeap.inline.hpp"
  31 #include "memory/genCollectedHeap.hpp"
  32 #include "memory/genOopClosures.inline.hpp"
  33 #include "oops/instanceRefKlass.hpp"
  34 #include "oops/oop.inline.hpp"
  35 #include "utilities/preserveException.hpp"
  36 #include "utilities/macros.hpp"
  37 #if INCLUDE_ALL_GCS
  38 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
  39 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
  40 #include "gc_implementation/g1/g1RemSet.inline.hpp"
  41 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
  42 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
  43 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
  44 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
  45 #include "oops/oop.pcgc.inline.hpp"
  46 #endif // INCLUDE_ALL_GCS
  47 
  48 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  49 



// Serial MarkSweep marking of a java.lang.ref.Reference instance,
// specialized on the in-heap oop representation T (oop or narrowOop).
// If the referent is unmarked and the reference processor accepts the
// reference for discovery, only the regular instance fields are followed
// and the referent is traversed later by reference processing.
// NOTE(review): this fragment is truncated in this dump -- the remainder of
// the function (the non-discovered path and the trailing fields) is not
// visible here.
template <class T>
void specialized_oop_follow_contents(InstanceRefKlass* ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  // Debug-build-only tracing; compiled out of product builds.
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("InstanceRefKlass::oop_follow_contents " INTPTR_FORMAT, (void *)obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    // Only an unmarked referent may be handed to the reference processor.
    if (!referent->is_gc_marked() &&
        MarkSweep::ref_processor()->discover_reference(obj, ref->reference_type())) {
      // reference was discovered, referent will be traversed later
      ref->InstanceKlass::oop_follow_contents(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, (void *)obj);
        }
      )


  // Tail of specialized_oop_adjust_pointers (the function header is
  // truncated in this dump): forward the three Reference-specific slots
  // (referent, next, discovered) to their post-compaction locations.
  MarkSweep::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  MarkSweep::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  MarkSweep::adjust_pointer(discovered_addr);
  // Debug-only tracing of the three adjusted slots.
  debug_only(trace_reference_gc("InstanceRefKlass::oop_adjust_pointers", obj,
                                referent_addr, next_addr, discovered_addr);)
}
 234 
 235 int InstanceRefKlass::oop_adjust_pointers(oop obj) {
 236   int size = size_helper();
 237   InstanceKlass::oop_adjust_pointers(obj);
 238 
 239   if (UseCompressedOops) {
 240     specialized_oop_adjust_pointers<narrowOop>(this, obj);
 241   } else {
 242     specialized_oop_adjust_pointers<oop>(this, obj);
 243   }
 244   return size;
 245 }
 246 
 247 #define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)        \
 248   T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);             \
 249   if (closure->apply_to_weak_ref_discovered_field()) {                          \
 250     closure->do_oop##nv_suffix(disc_addr);                                      \
 251   }                                                                             \
 252                                                                                 \
 253   T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);           \
 254   T heap_oop = oopDesc::load_heap_oop(referent_addr);                           \
 255   ReferenceProcessor* rp = closure->_ref_processor;                             \
 256   if (!oopDesc::is_null(heap_oop)) {                                            \
 257     oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);                 \
 258     if (!referent->is_gc_marked() && (rp != NULL) &&                            \
 259         rp->discover_reference(obj, reference_type())) {                        \
 260       return size;                                                              \
 261     } else if (contains(referent_addr)) {                                       \
 262       /* treat referent as normal oop */                                        \
 263       SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
 264       closure->do_oop##nv_suffix(referent_addr);                                \
 265     }                                                                           \
 266   }                                                                             \
 267   T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                   \
 268   if (ReferenceProcessor::pending_list_uses_discovered_field()) {               \
 269     T next_oop  = oopDesc::load_heap_oop(next_addr);                            \
 270     /* Treat discovered as normal oop, if ref is not "active" (next non-NULL) */\
 271     if (!oopDesc::is_null(next_oop) && contains(disc_addr)) {                   \
 272         /* i.e. ref is not "active" */                                          \
 273       debug_only(                                                               \
 274         if(TraceReferenceGC && PrintGCDetails) {                                \
 275           gclog_or_tty->print_cr("   Process discovered as normal "             \
 276                                  INTPTR_FORMAT, disc_addr);                     \
 277         }                                                                       \
 278       )                                                                         \
 279       SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
 280       closure->do_oop##nv_suffix(disc_addr);                                    \
 281     }                                                                           \
 282   } else {                                                                      \
 283     /* In the case of older JDKs which do not use the discovered field for  */  \
 284     /* the pending list, an inactive ref (next != NULL) must always have a  */  \
 285     /* NULL discovered field. */                                                \
 286     debug_only(                                                                 \
 287       T next_oop = oopDesc::load_heap_oop(next_addr);                           \
 288       T disc_oop = oopDesc::load_heap_oop(disc_addr);                           \
 289       assert(oopDesc::is_null(next_oop) || oopDesc::is_null(disc_oop),          \
 290            err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL" \
 291                    "discovered field", (oopDesc*)obj));                                   \
 292     )                                                                           \
 293   }                                                                             \
 294   /* treat next as normal oop */                                                \
 295   if (contains(next_addr)) {                                                    \
 296     SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
 297     closure->do_oop##nv_suffix(next_addr);                                      \
 298   }                                                                             \
 299   return size;                                                                  \
 300 
 301 
 302 template <class T> bool contains(T *t) { return true; }
 303 
 304 // Macro to define InstanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
 305 // all closures.  Macros calling macros above for each oop size.
 306 
// Defines InstanceRefKlass::oop_oop_iterate[_nv] for one closure type.
// The InstanceKlass call scans the regular instance fields and yields the
// instance size; the SPECIALIZED macro then visits the Reference-specific
// fields and returns that size.  The global always-true `contains`
// predicate is passed, i.e. no bounds check is applied to field addresses.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                                \
int InstanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                  \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);           \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
 322 
#if INCLUDE_ALL_GCS
// Defines InstanceRefKlass::oop_oop_iterate_backwards[_nv] for one closure
// type.  Same shape as the forward variant except that it delegates to the
// base class's *_backwards iteration (presumably reverse field order --
// confirm against InstanceKlass, which is not visible in this dump); the
// Reference-specific fields are still handled, unbounded, by the
// SPECIALIZED macro.
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int InstanceRefKlass::                                                          \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {        \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
#endif // INCLUDE_ALL_GCS
 340 
 341 
// Defines the MemRegion-bounded variant oop_oop_iterate[_nv]_m: identical
// to the unbounded variant except that mr.contains is substituted for the
// always-true `contains`, so only field addresses inside `mr` are passed
// to the closure.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)      \
                                                                                \
int InstanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix##_m(oop obj,                                         \
                               OopClosureType* closure,                         \
                               MemRegion mr) {                                  \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);   \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains); \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);      \
  }                                                                             \
}
 357 
// Instantiate the oop_oop_iterate definitions for every closure type in the
// two ALL_OOP_OOP_ITERATE_CLOSURES_* lists; the backwards variants are only
// needed (and only compiled) when all GCs are included in the build.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
#if INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
 366 
 367 #if INCLUDE_ALL_GCS
// Parallel Scavenge marking of a Reference instance: try to hand the
// referent to the reference processor; on success only the regular instance
// fields are pushed, otherwise the referent is claimed like a normal oop.
// NOTE(review): this fragment is truncated in this dump -- the handling of
// the next/discovered fields is not visible here.
template <class T>
void specialized_oop_push_contents(InstanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->InstanceKlass::oop_push_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_depth(referent_addr);
    }
  }
  // Treat discovered as normal oop, if ref is not "active",
  // i.e. if next is non-NULL.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);




  30 #include "gc_interface/collectedHeap.inline.hpp"
  31 #include "memory/genCollectedHeap.hpp"
  32 #include "memory/genOopClosures.inline.hpp"
  33 #include "oops/instanceRefKlass.hpp"
  34 #include "oops/oop.inline.hpp"
  35 #include "utilities/preserveException.hpp"
  36 #include "utilities/macros.hpp"
  37 #if INCLUDE_ALL_GCS
  38 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
  39 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
  40 #include "gc_implementation/g1/g1RemSet.inline.hpp"
  41 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
  42 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
  43 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
  44 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
  45 #include "oops/oop.pcgc.inline.hpp"
  46 #endif // INCLUDE_ALL_GCS
  47 
  48 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  49 
// Constructor: all sizing/layout arguments are forwarded unchanged to the
// InstanceKlass base constructor, with _instance_ref appended as the final
// argument (presumably a klass-kind tag so generic code can identify
// reference klasses -- confirm against the InstanceKlass constructor, which
// is not visible in this dump).
InstanceRefKlass::InstanceRefKlass(int vtable_len, int itable_len, int static_field_size, int nonstatic_oop_map_size, ReferenceType rt, AccessFlags access_flags, bool is_anonymous)
    : InstanceKlass(vtable_len, itable_len, static_field_size, nonstatic_oop_map_size, rt, access_flags, is_anonymous, _instance_ref) {}
  52 
// Second-revision copy of specialized_oop_follow_contents (see the earlier
// copy in this dump): serial MarkSweep marking of a Reference instance.
// NOTE(review): truncated in this dump -- the tail of the function is not
// visible here.
template <class T>
void specialized_oop_follow_contents(InstanceRefKlass* ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  // Debug-build-only tracing.
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("InstanceRefKlass::oop_follow_contents " INTPTR_FORMAT, (void *)obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    // Only an unmarked referent may be handed to the reference processor.
    if (!referent->is_gc_marked() &&
        MarkSweep::ref_processor()->discover_reference(obj, ref->reference_type())) {
      // reference was discovered, referent will be traversed later
      ref->InstanceKlass::oop_follow_contents(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, (void *)obj);
        }
      )


  // Tail of specialized_oop_adjust_pointers (header truncated in this dump):
  // forward the referent/next/discovered slots to their new locations.
  MarkSweep::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  MarkSweep::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  MarkSweep::adjust_pointer(discovered_addr);
  // Debug-only tracing of the three adjusted slots.
  debug_only(trace_reference_gc("InstanceRefKlass::oop_adjust_pointers", obj,
                                referent_addr, next_addr, discovered_addr);)
}
 237 
// Adjusts all oop fields of a Reference instance during pointer adjustment
// and returns the instance size in words.
int InstanceRefKlass::oop_adjust_pointers(oop obj) {
  int size = size_helper();  // capture size before pointers are updated
  InstanceKlass::oop_adjust_pointers(obj);  // ordinary instance fields first

  // Reference-specific fields, specialized on compressed vs. full oops.
  if (UseCompressedOops) {
    specialized_oop_adjust_pointers<narrowOop>(this, obj);
  } else {
    specialized_oop_adjust_pointers<oop>(this, obj);
  }
  return size;
}
























































































































 249 
 250 #if INCLUDE_ALL_GCS
// Second-revision copy of specialized_oop_push_contents (see the earlier
// copy in this dump): Parallel Scavenge marking of a Reference instance.
// NOTE(review): truncated in this dump -- the handling of the
// next/discovered fields is not visible here.
template <class T>
void specialized_oop_push_contents(InstanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->InstanceKlass::oop_push_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_depth(referent_addr);
    }
  }
  // Treat discovered as normal oop, if ref is not "active",
  // i.e. if next is non-NULL.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);