/*
 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/systemDictionary.hpp"
#include "gc_implementation/shared/markSweep.inline.hpp"
#include "gc_interface/collectedHeap.hpp"
#include "gc_interface/collectedHeap.inline.hpp"
#include "memory/genCollectedHeap.hpp"
#include "memory/genOopClosures.inline.hpp"
#include "oops/instanceRefKlass.hpp"
#include "oops/oop.inline.hpp"
#include "utilities/preserveException.hpp"
#include "utilities/macros.hpp"
#if INCLUDE_ALL_GCS
#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
#include "gc_implementation/g1/g1OopClosures.inline.hpp"
#include "gc_implementation/g1/g1RemSet.inline.hpp"
#include "gc_implementation/g1/heapRegionSeq.inline.hpp"
#include "gc_implementation/parNew/parOopClosures.inline.hpp"
#include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
#include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
#include "oops/oop.pcgc.inline.hpp"
#endif // INCLUDE_ALL_GCS

PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC

InstanceRefKlass::InstanceRefKlass(int vtable_len, int itable_len, int static_field_size,
                                   int nonstatic_oop_map_size, ReferenceType rt,
                                   AccessFlags access_flags, bool is_anonymous)
    : InstanceKlass(vtable_len, itable_len, static_field_size, nonstatic_oop_map_size,
                    rt, access_flags, is_anonymous, _instance_ref) {}

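// java.lang.ref.Reference declares the referent, queue, next and discovered
// fields next to each other (see the oop-map count computation in
// update_nonstatic_oop_maps below).  The closures in this file handle
// referent, next and discovered themselves rather than letting the generic
// InstanceKlass code follow them: the referent is only traversed if the
// reference could not be handed to the ReferenceProcessor, and the
// discovered field is reserved for the collector's bookkeeping.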
template <class T>
void specialized_oop_follow_contents(InstanceRefKlass* ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("InstanceRefKlass::oop_follow_contents " INTPTR_FORMAT, (void *)obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (!referent->is_gc_marked() &&
        MarkSweep::ref_processor()->discover_reference(obj, ref->reference_type())) {
      // reference was discovered, referent will be traversed later
      ref->InstanceKlass::oop_follow_contents(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, (void *)obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, (void *)obj);
        }
      )
      MarkSweep::mark_and_push(referent_addr);
    }
  }
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    // Treat discovered as normal oop, if ref is not "active",
    // i.e. if next is non-NULL.
    T  next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("   Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      MarkSweep::mark_and_push(discovered_addr);
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    oop next = oopDesc::load_decode_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   (oopDesc*)obj));
#endif
  }
  // treat next as normal oop.  next is a link in the reference queue.
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("   Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  MarkSweep::mark_and_push(next_addr);
  ref->InstanceKlass::oop_follow_contents(obj);
}

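// Serial mark-sweep entry point.  The template above is instantiated for
// both compressed (narrowOop) and uncompressed (oop) heap pointers,
// selected at runtime via UseCompressedOops.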
void InstanceRefKlass::oop_follow_contents(oop obj) {
  if (UseCompressedOops) {
    specialized_oop_follow_contents<narrowOop>(this, obj);
  } else {
    specialized_oop_follow_contents<oop>(this, obj);
  }
}

#if INCLUDE_ALL_GCS
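// Marking variant used by the parallel old collector (PSParallelCompact).
// It mirrors the serial version above but funnels marking work through the
// ParCompactionManager supplied by the caller.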
template <class T>
void specialized_oop_follow_contents(InstanceRefKlass* ref,
                                     ParCompactionManager* cm,
                                     oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("InstanceRefKlass::oop_follow_contents " INTPTR_FORMAT, (void *)obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
        PSParallelCompact::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->InstanceKlass::oop_follow_contents(cm, obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, (void *)obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, (void *)obj);
        }
      )
      PSParallelCompact::mark_and_push(cm, referent_addr);
    }
  }
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    // Treat discovered as normal oop, if ref is not "active",
    // i.e. if next is non-NULL.
    T  next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("   Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      PSParallelCompact::mark_and_push(cm, discovered_addr);
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    T next = oopDesc::load_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   (oopDesc*)obj));
#endif
  }
  PSParallelCompact::mark_and_push(cm, next_addr);
  ref->InstanceKlass::oop_follow_contents(cm, obj);
}

void InstanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
                                           oop obj) {
  if (UseCompressedOops) {
    specialized_oop_follow_contents<narrowOop>(this, cm, obj);
  } else {
    specialized_oop_follow_contents<oop>(this, cm, obj);
  }
}
#endif // INCLUDE_ALL_GCS

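// Debug-only helper that prints the address and current value of the
// referent, next and discovered fields when both TraceReferenceGC and
// PrintGCDetails are enabled.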
#ifdef ASSERT
template <class T> void trace_reference_gc(const char *s, oop obj,
                                           T* referent_addr,
                                           T* next_addr,
                                           T* discovered_addr) {
  if(TraceReferenceGC && PrintGCDetails) {
    gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
    gclog_or_tty->print_cr("     referent_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, referent_addr,
         referent_addr ?
           (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
    gclog_or_tty->print_cr("     next_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, next_addr,
         next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
    gclog_or_tty->print_cr("     discovered_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, discovered_addr,
         discovered_addr ?
           (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
  }
}
#endif

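// Pointer adjustment for the serial collector's compaction phase: the three
// special fields are adjusted explicitly here, the remaining instance fields
// by InstanceKlass::oop_adjust_pointers below.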
template <class T> void specialized_oop_adjust_pointers(InstanceRefKlass *ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  MarkSweep::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  MarkSweep::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  MarkSweep::adjust_pointer(discovered_addr);
  debug_only(trace_reference_gc("InstanceRefKlass::oop_adjust_pointers", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int InstanceRefKlass::oop_adjust_pointers(oop obj) {
  int size = size_helper();
  InstanceKlass::oop_adjust_pointers(obj);

  if (UseCompressedOops) {
    specialized_oop_adjust_pointers<narrowOop>(this, obj);
  } else {
    specialized_oop_adjust_pointers<oop>(this, obj);
  }
  return size;
}

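// Young generation support for the parallel scavenger.  If the referent
// still lives in the scavengeable area it is either handed to the
// ReferenceProcessor for discovery or, failing that, claimed like an
// ordinary field via the PSPromotionManager.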
#if INCLUDE_ALL_GCS
template <class T>
void specialized_oop_push_contents(InstanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->InstanceKlass::oop_push_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_depth(referent_addr);
    }
  }
  // Treat discovered as normal oop, if ref is not "active",
  // i.e. if next is non-NULL.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    T  next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("   Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      if (PSScavenge::should_scavenge(discovered_addr)) {
        pm->claim_or_forward_depth(discovered_addr);
      }
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    oop next = oopDesc::load_decode_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   (oopDesc*)obj));
#endif
  }

  // Treat next as normal oop;  next is a link in the reference queue.
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_depth(next_addr);
  }
  ref->InstanceKlass::oop_push_contents(pm, obj);
}

void InstanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  if (UseCompressedOops) {
    specialized_oop_push_contents<narrowOop>(this, pm, obj);
  } else {
    specialized_oop_push_contents<oop>(this, pm, obj);
  }
}

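// Pointer-update support for the parallel old collector; as in the serial
// case the special fields are adjusted explicitly and the rest is left to
// InstanceKlass::oop_update_pointers.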
template <class T>
void specialized_oop_update_pointers(InstanceRefKlass *ref,
                                     ParCompactionManager* cm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  PSParallelCompact::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  PSParallelCompact::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  PSParallelCompact::adjust_pointer(discovered_addr);
  debug_only(trace_reference_gc("InstanceRefKlass::oop_update_ptrs", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int InstanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  InstanceKlass::oop_update_pointers(cm, obj);
  if (UseCompressedOops) {
    specialized_oop_update_pointers<narrowOop>(this, cm, obj);
  } else {
    specialized_oop_update_pointers<oop>(this, cm, obj);
  }
  return size_helper();
}
#endif // INCLUDE_ALL_GCS

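// A rough sketch of the oop-map rewrite performed below (offsets are in heap
// words and depend on the actual layout of java.lang.ref.Reference):
//
//   before: one map (offset 2, count 4) covering referent .. discovered
//   after:  one map (offset 3, count 1) covering only queue
//
// so that generic oop iteration no longer visits the fields handled by the
// reference-processing code above.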
void InstanceRefKlass::update_nonstatic_oop_maps(Klass* k) {
  // Clear the nonstatic oop-map entries corresponding to the referent
  // and next fields.  They are treated specially by the
  // garbage collector.
  // The discovered field is used only by the garbage collector
  // and is also treated specially.
  InstanceKlass* ik = InstanceKlass::cast(k);

  // Check that we have the right class
  debug_only(static bool first_time = true);
  assert(k == SystemDictionary::Reference_klass() && first_time,
         "Invalid update of maps");
  debug_only(first_time = false);
  assert(ik->nonstatic_oop_map_count() == 1, "just checking");

  OopMapBlock* map = ik->start_of_nonstatic_oop_maps();

  // Check that the current map is (2,4) - currently points at field with
  // offset 2 (words) and has 4 map entries.
  debug_only(int offset = java_lang_ref_Reference::referent_offset);
  debug_only(unsigned int count = ((java_lang_ref_Reference::discovered_offset -
    java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);

  if (UseSharedSpaces) {
    assert(map->offset() == java_lang_ref_Reference::queue_offset &&
           map->count() == 1, "just checking");
  } else {
    assert(map->offset() == offset && map->count() == count,
           "just checking");

    // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
    map->set_offset(java_lang_ref_Reference::queue_offset);
    map->set_count(1);
  }
}


// Verification

void InstanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
  InstanceKlass::oop_verify_on(obj, st);
  // Verify referent field
  oop referent = java_lang_ref_Reference::referent(obj);

  // We should make this general to all heaps
  GenCollectedHeap* gch = NULL;
  if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap)
    gch = GenCollectedHeap::heap();

  if (referent != NULL) {
    guarantee(referent->is_oop(), "referent field heap failed");
  }
  // Verify next field
  oop next = java_lang_ref_Reference::next(obj);
  if (next != NULL) {
    guarantee(next->is_oop(), "next field verify failed");
    guarantee(next->is_instanceRef(), "next field verify failed");
  }
}

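// The pending list lock is the static lock object on java.lang.ref.Reference.
// The VM takes it around reference processing so that updates to the pending
// list are not observed half-done on the Java side; the helpers below test,
// acquire and release that lock.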
bool InstanceRefKlass::owns_pending_list_lock(JavaThread* thread) {
  if (java_lang_ref_Reference::pending_list_lock() == NULL) return false;
  Handle h_lock(thread, java_lang_ref_Reference::pending_list_lock());
  return ObjectSynchronizer::current_thread_holds_lock(thread, h_lock);
}

void InstanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument

  // Create a HandleMark in case we retry a GC multiple times.
  // Each time we attempt the GC, we allocate the handle below
  // to hold the pending list lock. We want to free this handle.
  HandleMark hm;

  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Locking should have succeeded");
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}

void InstanceRefKlass::release_and_notify_pending_list_lock(
  BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument

  // Create a HandleMark in case we retry a GC multiple times.
  // Each time we attempt the GC, we allocate the handle below
  // to hold the pending list lock. We want to free this handle.
  HandleMark hm;

  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Lock should be held");
  // Notify waiters on the pending list lock if there is any reference.
  if (java_lang_ref_Reference::pending_list() != NULL) {
    ObjectSynchronizer::notifyall(h_lock, THREAD);
  }
  ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}