/*
 * Copyright (c) 1997, 2009, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

# include "incls/_precompiled.incl"
# include "incls/_instanceRefKlass.cpp.incl"

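// instanceRefKlass is the klass used for java.lang.ref.Reference instances.
// The methods below override the generic instanceKlass GC iteration so that
// the Reference-specific fields (referent, next and discovered) are not
// traversed like ordinary instance fields: the referent is handed to the
// collector's ReferenceProcessor, which decides whether to discover the
// Reference or to treat the referent as strongly reachable.
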
template <class T>
static void specialized_oop_follow_contents(instanceRefKlass* ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (!referent->is_gc_marked() &&
        MarkSweep::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->instanceKlass::oop_follow_contents(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      MarkSweep::mark_and_push(referent_addr);
    }
  }
  // treat next as normal oop.  next is a link in the pending list.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("   Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  MarkSweep::mark_and_push(next_addr);
  ref->instanceKlass::oop_follow_contents(obj);
}
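
// The test above is the heart of reference discovery during a serial
// mark-sweep: if the referent is not yet marked and the ReferenceProcessor
// agrees to discover this Reference (discover_reference() returns true), the
// referent is deliberately left unmarked here and is handled later during
// reference processing; otherwise the referent is marked and pushed like any
// other field.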

void instanceRefKlass::oop_follow_contents(oop obj) {
  if (UseCompressedOops) {
    specialized_oop_follow_contents<narrowOop>(this, obj);
  } else {
    specialized_oop_follow_contents<oop>(this, obj);
  }
}
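
// The UseCompressedOops dispatch above is the idiom used throughout this
// file: each GC operation is written once as a template over the width of an
// oop field and instantiated for narrowOop (compressed oops) or oop
// (full-width) depending on how the heap encodes references.  A minimal
// sketch of the idiom, with a hypothetical do_fields()/field_addr() pair
// standing in for the real operations:
//
//   template <class T> static void do_fields(oop obj) {
//     T* p = (T*)field_addr(obj);                 // T is narrowOop or oop
//     oop o = oopDesc::load_decode_heap_oop(p);   // decode to a full oop
//     // ... operate on o ...
//   }
//   ...
//   if (UseCompressedOops) do_fields<narrowOop>(obj);
//   else                   do_fields<oop>(obj);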

#ifndef SERIALGC
template <class T>
void specialized_oop_follow_contents(instanceRefKlass* ref,
                                     ParCompactionManager* cm,
                                     oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
        PSParallelCompact::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->instanceKlass::oop_follow_contents(cm, obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      PSParallelCompact::mark_and_push(cm, referent_addr);
    }
  }
  // treat next as normal oop.  next is a link in the pending list.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("   Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  PSParallelCompact::mark_and_push(cm, next_addr);
  ref->instanceKlass::oop_follow_contents(cm, obj);
}

void instanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
                                           oop obj) {
  if (UseCompressedOops) {
    specialized_oop_follow_contents<narrowOop>(this, cm, obj);
  } else {
    specialized_oop_follow_contents<oop>(this, cm, obj);
  }
}
#endif // SERIALGC
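
// The ParCompactionManager variant above mirrors the serial mark-sweep code:
// the only differences are that mark state is read from the parallel compact
// mark bitmap and that referent/next are pushed through the per-thread
// compaction manager's marking stack instead of MarkSweep's.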

#ifdef ASSERT
template <class T> void trace_reference_gc(const char *s, oop obj,
                                           T* referent_addr,
                                           T* next_addr,
                                           T* discovered_addr) {
  if(TraceReferenceGC && PrintGCDetails) {
    gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
    gclog_or_tty->print_cr("     referent_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, referent_addr,
         referent_addr ?
           (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
    gclog_or_tty->print_cr("     next_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, next_addr,
         next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
    gclog_or_tty->print_cr("     discovered_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, discovered_addr,
         discovered_addr ?
           (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
  }
}
#endif

template <class T> void specialized_oop_adjust_pointers(instanceRefKlass *ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  MarkSweep::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  MarkSweep::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  MarkSweep::adjust_pointer(discovered_addr);
  debug_only(trace_reference_gc("instanceRefKlass::oop_adjust_pointers", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int instanceRefKlass::oop_adjust_pointers(oop obj) {
  int size = size_helper();
  instanceKlass::oop_adjust_pointers(obj);

  if (UseCompressedOops) {
    specialized_oop_adjust_pointers<narrowOop>(this, obj);
  } else {
    specialized_oop_adjust_pointers<oop>(this, obj);
  }
  return size;
}
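
// Note that referent, next and discovered are adjusted explicitly above:
// update_nonstatic_oop_maps() (later in this file) removes them from the
// instance oop map, so instanceKlass::oop_adjust_pointers() never visits
// them.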

#define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)        \
  if (closure->apply_to_weak_ref_discovered_field()) {                          \
    T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);           \
    closure->do_oop##nv_suffix(disc_addr);                                      \
  }                                                                             \
                                                                                \
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);           \
  T heap_oop = oopDesc::load_heap_oop(referent_addr);                           \
  if (!oopDesc::is_null(heap_oop) && contains(referent_addr)) {                 \
    ReferenceProcessor* rp = closure->_ref_processor;                           \
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);                 \
    if (!referent->is_gc_marked() && (rp != NULL) &&                            \
        rp->discover_reference(obj, reference_type())) {                        \
      return size;                                                              \
    } else {                                                                    \
      /* treat referent as normal oop */                                        \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(referent_addr);                                \
    }                                                                           \
  }                                                                             \
  /* treat next as normal oop */                                                \
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                   \
  if (contains(next_addr)) {                                                    \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
    closure->do_oop##nv_suffix(next_addr);                                      \
  }                                                                             \
  return size;                                                                  \


template <class T> bool contains(T *t) { return true; }
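
// The always-true contains() above is the predicate that the unbounded
// iterate variants hand to InstanceRefKlass_SPECIALIZED_OOP_ITERATE, so every
// Reference field is visited.  The _m (MemRegion-bounded) variants below pass
// mr.contains instead, so the closure only sees fields whose addresses fall
// inside the given MemRegion.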

// Macro to define instanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
// all closures.  Macros calling macros above for each oop size.

#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                  \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix(obj, closure);           \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}

#ifndef SERIALGC
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {        \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
#endif // !SERIALGC


#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)      \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix##_m(oop obj,                                         \
                               OopClosureType* closure,                         \
                               MemRegion mr) {                                  \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);   \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains); \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);      \
  }                                                                             \
}

ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
#ifndef SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
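
// Each ALL_OOP_OOP_ITERATE_CLOSURES_n() invocation above expands the given
// *_DEFN macro once per closure type in that list (the lists live with the
// specialized oop-closure declarations, specialized_oop_closures.hpp in this
// source tree), generating one specialized oop_oop_iterate body per
// (closure, nv_suffix) combination.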

#ifndef SERIALGC
template <class T>
void specialized_oop_push_contents(instanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->instanceKlass::oop_push_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_depth(referent_addr);
    }
  }
  // treat next as normal oop
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_depth(next_addr);
  }
  ref->instanceKlass::oop_push_contents(pm, obj);
}

void instanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  if (UseCompressedOops) {
    specialized_oop_push_contents<narrowOop>(this, pm, obj);
  } else {
    specialized_oop_push_contents<oop>(this, pm, obj);
  }
}
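
// PSScavenge::should_scavenge(p) acts as the filter during a scavenge: it is
// true only when the field's current value points into the space being
// scavenged (the young generation), so the referent is offered to the
// ReferenceProcessor, and next is claimed or forwarded, only in that case;
// everything else is left untouched for this pass.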

template <class T>
void specialized_oop_update_pointers(instanceRefKlass *ref,
                                    ParCompactionManager* cm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  PSParallelCompact::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  PSParallelCompact::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  PSParallelCompact::adjust_pointer(discovered_addr);
  debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  instanceKlass::oop_update_pointers(cm, obj);
  if (UseCompressedOops) {
    specialized_oop_update_pointers<narrowOop>(this, cm, obj);
  } else {
    specialized_oop_update_pointers<oop>(this, cm, obj);
  }
  return size_helper();
}


template <class T> void
specialized_oop_update_pointers(ParCompactionManager* cm, oop obj,
                                HeapWord* beg_addr, HeapWord* end_addr) {
  T* p;
  T* referent_addr = p = (T*)java_lang_ref_Reference::referent_addr(obj);
  PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
  T* next_addr = p = (T*)java_lang_ref_Reference::next_addr(obj);
  PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
  T* discovered_addr = p = (T*)java_lang_ref_Reference::discovered_addr(obj);
  PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
  debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
                                referent_addr, next_addr, discovered_addr);)
}
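
// This bounded variant is presumably used when only a slice of an object is
// being processed (an object straddling region boundaries during parallel
// compaction): the three-argument adjust_pointer() updates a slot only if
// its address lies in [beg_addr, end_addr), so fields outside the range are
// left for the pass that covers their part of the object.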

int
instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj,
                                      HeapWord* beg_addr, HeapWord* end_addr) {
  instanceKlass::oop_update_pointers(cm, obj, beg_addr, end_addr);
  if (UseCompressedOops) {
    specialized_oop_update_pointers<narrowOop>(cm, obj, beg_addr, end_addr);
  } else {
    specialized_oop_update_pointers<oop>(cm, obj, beg_addr, end_addr);
  }
  return size_helper();
}
#endif // SERIALGC

void instanceRefKlass::update_nonstatic_oop_maps(klassOop k) {
  // Clear the nonstatic oop-map entries corresponding to referent
  // and nextPending field.  They are treated specially by the
  // garbage collector.
  // The discovered field is used only by the garbage collector
  // and is also treated specially.
  instanceKlass* ik = instanceKlass::cast(k);

  // Check that we have the right class
  debug_only(static bool first_time = true);
  assert(k == SystemDictionary::Reference_klass() && first_time,
         "Invalid update of maps");
  debug_only(first_time = false);
  assert(ik->nonstatic_oop_map_count() == 1, "just checking");

  OopMapBlock* map = ik->start_of_nonstatic_oop_maps();

  // Check that the current map is (2,4) - currently points at field with
  // offset 2 (words) and has 4 map entries.
  debug_only(int offset = java_lang_ref_Reference::referent_offset);
  debug_only(unsigned int count = ((java_lang_ref_Reference::discovered_offset -
    java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);

  if (UseSharedSpaces) {
    assert(map->offset() == java_lang_ref_Reference::queue_offset &&
           map->count() == 1, "just checking");
  } else {
    assert(map->offset() == offset && map->count() == count,
           "just checking");

    // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
    map->set_offset(java_lang_ref_Reference::queue_offset);
    map->set_count(1);
  }
}
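
// After the update above the only Reference field left in the regular
// nonstatic oop map is queue.  Reading the layout off the asserts: the
// default map (2,4) covered the four oop fields referent, queue, next and
// discovered at word offsets 2 through 5; the new map (3,1) keeps just
// queue, so referent, next and discovered are visited only by the
// Reference-aware code earlier in this file.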


// Verification

void instanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
  instanceKlass::oop_verify_on(obj, st);
  // Verify referent field
  oop referent = java_lang_ref_Reference::referent(obj);

  // We should make this general to all heaps
  GenCollectedHeap* gch = NULL;
  if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap)
    gch = GenCollectedHeap::heap();

  if (referent != NULL) {
    guarantee(referent->is_oop(), "referent field heap failed");
    if (gch != NULL && !gch->is_in_youngest(obj)) {
      // We do a specific remembered set check here since the referent
      // field is not part of the oop mask and therefore skipped by the
      // regular verify code.
      if (UseCompressedOops) {
        narrowOop* referent_addr = (narrowOop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      } else {
        oop* referent_addr = (oop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      }
    }
  }
  // Verify next field
  oop next = java_lang_ref_Reference::next(obj);
  if (next != NULL) {
    guarantee(next->is_oop(), "next field verify failed");
    guarantee(next->is_instanceRef(), "next field verify failed");
    if (gch != NULL && !gch->is_in_youngest(obj)) {
      // We do a specific remembered set check here since the next field is
      // not part of the oop mask and therefore skipped by the regular
      // verify code.
      if (UseCompressedOops) {
        narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      } else {
        oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      }
    }
  }
}

void instanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Locking should have succeeded");
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}

void instanceRefKlass::release_and_notify_pending_list_lock(
  BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
  //
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Lock should be held");
  // Notify waiters on pending lists lock if there is any reference.
  if (java_lang_ref_Reference::pending_list() != NULL) {
    ObjectSynchronizer::notifyall(h_lock, THREAD);
  }
  ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}