/*
 * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/systemDictionary.hpp"
#include "gc_implementation/shared/markSweep.inline.hpp"
#include "gc_interface/collectedHeap.hpp"
#include "gc_interface/collectedHeap.inline.hpp"
#include "memory/genCollectedHeap.hpp"
#include "memory/genOopClosures.inline.hpp"
#include "oops/instanceRefKlass.hpp"
#include "oops/oop.inline.hpp"
#include "utilities/preserveException.hpp"
#ifndef SERIALGC
#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
#include "gc_implementation/g1/g1OopClosures.inline.hpp"
#include "gc_implementation/g1/g1RemSet.inline.hpp"
#include "gc_implementation/g1/heapRegionSeq.inline.hpp"
#include "gc_implementation/parNew/parOopClosures.inline.hpp"
#include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
#include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
#include "oops/oop.pcgc.inline.hpp"
#endif

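// Serial mark-sweep: follow the fields of a java.lang.ref.Reference.
// The referent is offered to the MarkSweep reference processor for
// discovery when it is not yet marked; if discovery declines (or the
// referent is already marked) it is pushed like a normal oop.  The
// template parameter T (oop or narrowOop) selects the field width.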
template <class T>
static void specialized_oop_follow_contents(instanceRefKlass* ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if (TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (!referent->is_gc_marked() &&
        MarkSweep::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->instanceKlass::oop_follow_contents(obj);
      debug_only(
        if (TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if (TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      MarkSweep::mark_and_push(referent_addr);
    }
  }
  // treat next as normal oop.  next is a link in the pending list.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  debug_only(
    if (TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("   Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  MarkSweep::mark_and_push(next_addr);
  ref->instanceKlass::oop_follow_contents(obj);
}

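// Dispatch to the right instantiation of the template above based on
// whether the heap uses compressed oops.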
void instanceRefKlass::oop_follow_contents(oop obj) {
  if (UseCompressedOops) {
    specialized_oop_follow_contents<narrowOop>(this, obj);
  } else {
    specialized_oop_follow_contents<oop>(this, obj);
  }
}

#ifndef SERIALGC
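// Parallel-compact variant of the follow logic above: the same discovery
// protocol, but using the PSParallelCompact mark bitmap and reference
// processor, and threading a ParCompactionManager through the calls.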
template <class T>
void specialized_oop_follow_contents(instanceRefKlass* ref,
                                     ParCompactionManager* cm,
                                     oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if (TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
        PSParallelCompact::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->instanceKlass::oop_follow_contents(cm, obj);
      debug_only(
        if (TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if (TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      PSParallelCompact::mark_and_push(cm, referent_addr);
    }
  }
  // treat next as normal oop.  next is a link in the pending list.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  debug_only(
    if (TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("   Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  PSParallelCompact::mark_and_push(cm, next_addr);
  ref->instanceKlass::oop_follow_contents(cm, obj);
}

void instanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
                                           oop obj) {
  if (UseCompressedOops) {
    specialized_oop_follow_contents<narrowOop>(this, cm, obj);
  } else {
    specialized_oop_follow_contents<oop>(this, cm, obj);
  }
}
#endif // SERIALGC

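// Debug-only tracing helper: prints the address and current value of the
// referent, next and discovered fields when both TraceReferenceGC and
// PrintGCDetails are enabled.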
#ifdef ASSERT
template <class T> void trace_reference_gc(const char *s, oop obj,
                                           T* referent_addr,
                                           T* next_addr,
                                           T* discovered_addr) {
  if (TraceReferenceGC && PrintGCDetails) {
    gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
    gclog_or_tty->print_cr("     referent_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, referent_addr,
         referent_addr ?
           (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
    gclog_or_tty->print_cr("     next_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, next_addr,
         next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
    gclog_or_tty->print_cr("     discovered_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, discovered_addr,
         discovered_addr ?
           (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
  }
}
#endif

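// Serial mark-sweep pointer adjustment.  The referent, next and discovered
// fields are excluded from the regular nonstatic oop maps (see
// update_nonstatic_oop_maps below), so they are adjusted here explicitly.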
template <class T> void specialized_oop_adjust_pointers(instanceRefKlass *ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  MarkSweep::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  MarkSweep::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  MarkSweep::adjust_pointer(discovered_addr);
  debug_only(trace_reference_gc("instanceRefKlass::oop_adjust_pointers", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int instanceRefKlass::oop_adjust_pointers(oop obj) {
  int size = size_helper();
  instanceKlass::oop_adjust_pointers(obj);

  if (UseCompressedOops) {
    specialized_oop_adjust_pointers<narrowOop>(this, obj);
  } else {
    specialized_oop_adjust_pointers<oop>(this, obj);
  }
  return size;
}

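// Shared body of the oop_oop_iterate variants defined below.  The
// discovered field is visited only when the closure asks for it; the
// referent is first offered to the closure's reference processor and is
// visited directly only if discovery declines; the next field is visited
// whenever 'contains' accepts its address.  For the _m (MemRegion)
// variants 'contains' is mr.contains; otherwise it is the trivial
// always-true predicate defined after the macro.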
#define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)        \
  if (closure->apply_to_weak_ref_discovered_field()) {                          \
    T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);           \
    closure->do_oop##nv_suffix(disc_addr);                                      \
  }                                                                             \
                                                                                \
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);           \
  T heap_oop = oopDesc::load_heap_oop(referent_addr);                           \
  if (!oopDesc::is_null(heap_oop) && contains(referent_addr)) {                 \
    ReferenceProcessor* rp = closure->_ref_processor;                           \
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);                 \
    if (!referent->is_gc_marked() && (rp != NULL) &&                            \
        rp->discover_reference(obj, reference_type())) {                        \
      return size;                                                              \
    } else {                                                                    \
      /* treat referent as normal oop */                                        \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(referent_addr);                                \
    }                                                                           \
  }                                                                             \
  /* treat next as normal oop */                                                \
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                   \
  if (contains(next_addr)) {                                                    \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
    closure->do_oop##nv_suffix(next_addr);                                      \
  }                                                                             \
  return size;                                                                  \


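// Trivial 'contains' predicate used by the unbounded iterate variants.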
template <class T> bool contains(T *t) { return true; }

// Macro to define instanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
// all closures.  Macros calling macros above for each oop size.

#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                  \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix(obj, closure);           \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}

#ifndef SERIALGC
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {        \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
#endif // !SERIALGC


#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)      \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix##_m(oop obj,                                         \
                               OopClosureType* closure,                         \
                               MemRegion mr) {                                  \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);   \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains); \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);      \
  }                                                                             \
}

ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
#ifndef SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)

#ifndef SERIALGC
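// Parallel scavenge: push the Reference's fields for copying.  The
// referent is first offered to the scavenger's reference processor; if
// discovery declines, it is claimed or forwarded like a normal oop.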
template <class T>
void specialized_oop_push_contents(instanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->instanceKlass::oop_push_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_depth(referent_addr);
    }
  }
  // treat next as normal oop
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_depth(next_addr);
  }
  ref->instanceKlass::oop_push_contents(pm, obj);
}

void instanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  if (UseCompressedOops) {
    specialized_oop_push_contents<narrowOop>(this, pm, obj);
  } else {
    specialized_oop_push_contents<oop>(this, pm, obj);
  }
}

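// Unbounded parallel-compact pointer adjustment for the three special
// Reference fields, mirroring the serial version above.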
template <class T>
void specialized_oop_update_pointers(instanceRefKlass *ref,
                                    ParCompactionManager* cm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  PSParallelCompact::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  PSParallelCompact::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  PSParallelCompact::adjust_pointer(discovered_addr);
  debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  instanceKlass::oop_update_pointers(cm, obj);
  if (UseCompressedOops) {
    specialized_oop_update_pointers<narrowOop>(this, cm, obj);
  } else {
    specialized_oop_update_pointers<oop>(this, cm, obj);
  }
  return size_helper();
}

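// Bounded variant used by parallel compaction: the bounded
// PSParallelCompact::adjust_pointer overload adjusts only pointers whose
// addresses fall within [beg_addr, end_addr).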
template <class T> void
specialized_oop_update_pointers(ParCompactionManager* cm, oop obj,
                                HeapWord* beg_addr, HeapWord* end_addr) {
  T* p;
  T* referent_addr = p = (T*)java_lang_ref_Reference::referent_addr(obj);
  PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
  T* next_addr = p = (T*)java_lang_ref_Reference::next_addr(obj);
  PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
  T* discovered_addr = p = (T*)java_lang_ref_Reference::discovered_addr(obj);
  PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
  debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int
instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj,
                                      HeapWord* beg_addr, HeapWord* end_addr) {
  instanceKlass::oop_update_pointers(cm, obj, beg_addr, end_addr);
  if (UseCompressedOops) {
    specialized_oop_update_pointers<narrowOop>(cm, obj, beg_addr, end_addr);
  } else {
    specialized_oop_update_pointers<oop>(cm, obj, beg_addr, end_addr);
  }
  return size_helper();
}
#endif // SERIALGC

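// Shrink java.lang.ref.Reference's nonstatic oop map so that the regular
// scanning paths visit only the queue field; the asserts below check that
// this runs exactly once, for the Reference class itself.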
void instanceRefKlass::update_nonstatic_oop_maps(klassOop k) {
  // Clear the nonstatic oop-map entries corresponding to the referent
  // and next fields.  They are treated specially by the garbage collector.
  // The discovered field is used only by the garbage collector and is
  // also treated specially.
  instanceKlass* ik = instanceKlass::cast(k);

  // Check that we have the right class
  debug_only(static bool first_time = true);
  assert(k == SystemDictionary::Reference_klass() && first_time,
         "Invalid update of maps");
  debug_only(first_time = false);
  assert(ik->nonstatic_oop_map_count() == 1, "just checking");

  OopMapBlock* map = ik->start_of_nonstatic_oop_maps();

  // Check that the current map is (2,4) - currently points at field with
  // offset 2 (words) and has 4 map entries.
  debug_only(int offset = java_lang_ref_Reference::referent_offset);
  debug_only(unsigned int count = ((java_lang_ref_Reference::discovered_offset -
    java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);

  if (UseSharedSpaces) {
    assert(map->offset() == java_lang_ref_Reference::queue_offset &&
           map->count() == 1, "just checking");
  } else {
    assert(map->offset() == offset && map->count() == count,
           "just checking");

    // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
    map->set_offset(java_lang_ref_Reference::queue_offset);
    map->set_count(1);
  }
}


// Verification

void instanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
  instanceKlass::oop_verify_on(obj, st);
  // Verify referent field
  oop referent = java_lang_ref_Reference::referent(obj);

  // We should make this general to all heaps
  GenCollectedHeap* gch = NULL;
  if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap) {
    gch = GenCollectedHeap::heap();
  }

  if (referent != NULL) {
    guarantee(referent->is_oop(), "referent field verify failed");
    if (gch != NULL && !gch->is_in_youngest(obj)) {
      // We do a specific remembered set check here since the referent
      // field is not part of the oop mask and therefore skipped by the
      // regular verify code.
      if (UseCompressedOops) {
        narrowOop* referent_addr = (narrowOop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      } else {
        oop* referent_addr = (oop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      }
    }
  }
  // Verify next field
  oop next = java_lang_ref_Reference::next(obj);
  if (next != NULL) {
    guarantee(next->is_oop(), "next field verify failed");
    guarantee(next->is_instanceRef(), "next field verify failed");
    if (gch != NULL && !gch->is_in_youngest(obj)) {
      // We do a specific remembered set check here since the next field is
      // not part of the oop mask and therefore skipped by the regular
      // verify code.
      if (UseCompressedOops) {
        narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      } else {
        oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      }
    }
  }
}

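// Acquire the java.lang.ref.Reference pending-list lock.  May be entered
// with a pending exception, which is preserved across the locking.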
void instanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Locking should have succeeded");
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}

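// Counterpart to acquire_pending_list_lock: notify any waiters if the
// pending list is non-empty, then release the lock.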
void instanceRefKlass::release_and_notify_pending_list_lock(
  BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Lock should be held");
  // Notify waiters on the pending list's lock if there is any reference.
  if (java_lang_ref_Reference::pending_list() != NULL) {
    ObjectSynchronizer::notifyall(h_lock, THREAD);
  }
  ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}