1 /*
   2  * Copyright (c) 1998, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "gc/shared/oopStorage.inline.hpp"
  27 #include "logging/log.hpp"
  28 #include "memory/iterator.hpp"
  29 #include "oops/access.inline.hpp"
  30 #include "oops/oop.inline.hpp"
  31 #include "runtime/handles.inline.hpp"
  32 #include "runtime/jniHandles.inline.hpp"
  33 #include "runtime/mutexLocker.hpp"
  34 #include "runtime/thread.inline.hpp"
  35 #include "trace/traceMacros.hpp"
  36 #include "utilities/align.hpp"
  37 #include "utilities/debug.hpp"
  38 
// Backing OopStorage for strong and weak global JNI handles.
// Both are created lazily by JNIHandles::initialize().
OopStorage* JNIHandles::_global_handles = NULL;
OopStorage* JNIHandles::_weak_global_handles = NULL;
  41 
  42 
  43 jobject JNIHandles::make_local(oop obj) {
  44   if (obj == NULL) {
  45     return NULL;                // ignore null handles
  46   } else {
  47     Thread* thread = Thread::current();
  48     assert(oopDesc::is_oop(obj), "not an oop");
  49     assert(!current_thread_in_native(), "must not be in native");
  50     return thread->active_handles()->allocate_handle(obj);
  51   }
  52 }
  53 
  54 
  55 // optimized versions
  56 
  57 jobject JNIHandles::make_local(Thread* thread, oop obj) {
  58   if (obj == NULL) {
  59     return NULL;                // ignore null handles
  60   } else {
  61     assert(oopDesc::is_oop(obj), "not an oop");
  62     assert(thread->is_Java_thread(), "not a Java thread");
  63     assert(!current_thread_in_native(), "must not be in native");
  64     return thread->active_handles()->allocate_handle(obj);
  65   }
  66 }
  67 
  68 
  69 jobject JNIHandles::make_local(JNIEnv* env, oop obj) {
  70   if (obj == NULL) {
  71     return NULL;                // ignore null handles
  72   } else {
  73     JavaThread* thread = JavaThread::thread_from_jni_environment(env);
  74     assert(oopDesc::is_oop(obj), "not an oop");
  75     assert(!current_thread_in_native(), "must not be in native");
  76     return thread->active_handles()->allocate_handle(obj);
  77   }
  78 }
  79 
  80 
  81 static void report_handle_allocation_failure(AllocFailType alloc_failmode,
  82                                              const char* handle_kind) {
  83   if (alloc_failmode == AllocFailStrategy::EXIT_OOM) {
  84     // Fake size value, since we don't know the min allocation size here.
  85     vm_exit_out_of_memory(sizeof(oop), OOM_MALLOC_ERROR,
  86                           "Cannot create %s JNI handle", handle_kind);
  87   } else {
  88     assert(alloc_failmode == AllocFailStrategy::RETURN_NULL, "invariant");
  89   }
  90 }
  91 
// Create a strong global JNI handle for obj, backed by the _global_handles
// OopStorage (a GC root set).  Returns NULL for a null obj, or on
// allocation failure when alloc_failmode is RETURN_NULL; with EXIT_OOM the
// VM exits instead of returning.
jobject JNIHandles::make_global(Handle obj, AllocFailType alloc_failmode) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  assert(!current_thread_in_native(), "must not be in native");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    assert(oopDesc::is_oop(obj()), "not an oop");
    oop* ptr = _global_handles->allocate();
    // Return NULL on allocation failure.
    if (ptr != NULL) {
      // Store the referent, then publish the storage slot address as the
      // opaque jobject value.
      *ptr = obj();
      res = reinterpret_cast<jobject>(ptr);
    } else {
      report_handle_allocation_failure(alloc_failmode, "global");
    }
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }

  return res;
}
 113 
 114 
// Create a weak global JNI handle for obj, backed by the
// _weak_global_handles OopStorage.  The returned pointer is tagged by
// adding weak_tag_value so weak handles are distinguishable from strong
// ones (see is_jweak).  Returns NULL for a null obj, or on allocation
// failure when alloc_failmode is RETURN_NULL.
jobject JNIHandles::make_weak_global(Handle obj, AllocFailType alloc_failmode) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  assert(!current_thread_in_native(), "must not be in native");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    assert(oopDesc::is_oop(obj()), "not an oop");
    oop* ptr = _weak_global_handles->allocate();
    // Return NULL on allocation failure.
    if (ptr != NULL) {
      *ptr = obj();
      // Tag the entry address before handing it out as a jweak.
      char* tptr = reinterpret_cast<char*>(ptr) + weak_tag_value;
      res = reinterpret_cast<jobject>(tptr);
    } else {
      report_handle_allocation_failure(alloc_failmode, "weak global");
    }
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return res;
}
 136 
 137 // Resolve some erroneous cases to NULL, rather than treating them as
 138 // possibly unchecked errors.  In particular, deleted handles are
 139 // treated as NULL (though a deleted and later reallocated handle
 140 // isn't detected).
 141 oop JNIHandles::resolve_external_guard(jobject handle) {
 142   oop result = NULL;
 143   if (handle != NULL) {
 144     result = resolve_impl<true /* external_guard */ >(handle);
 145   }
 146   return result;
 147 }
 148 
// Resolve a weak handle: strip the weak tag to recover the OopStorage
// entry address, then load through a phantom-reference access barrier so
// the GC observes this read of a weak root correctly.  May return NULL if
// the referent has been cleared.
oop JNIHandles::resolve_jweak(jweak handle) {
  assert(handle != NULL, "precondition");
  assert(is_jweak(handle), "precondition");
  char* ptr = reinterpret_cast<char*>(handle) - weak_tag_value;
  oop* oopptr = reinterpret_cast<oop*>(ptr);
  return RootAccess<ON_PHANTOM_OOP_REF>::oop_load(oopptr);
}
 156 
// Returns true if the weak global handle's referent has been cleared
// (e.g. collected by the GC).  The handle itself must still be live.
bool JNIHandles::is_global_weak_cleared(jweak handle) {
  assert(handle != NULL, "precondition");
  assert(is_jweak(handle), "not a weak handle");
  return jweak_ref(handle) == NULL;
}
 162 
 163 void JNIHandles::destroy_global(jobject handle) {
 164   if (handle != NULL) {
 165     assert(!is_jweak(handle), "wrong method for detroying jweak");
 166     jobject_ref(handle) = NULL;
 167     _global_handles->release(&jobject_ref(handle));
 168   }
 169 }
 170 
 171 
// Destroy a weak global JNI handle; NULL handles are ignored.  Clears the
// referent, then returns the (untagged) entry to the weak OopStorage.
void JNIHandles::destroy_weak_global(jobject handle) {
  if (handle != NULL) {
    assert(is_jweak(handle), "JNI handle not jweak");
    jweak_ref(handle) = NULL;
    _weak_global_handles->release(&jweak_ref(handle));
  }
}
 179 
 180 
// Apply closure f to every strong global JNI handle (GC root iteration).
void JNIHandles::oops_do(OopClosure* f) {
  _global_handles->oops_do(f);
}
 184 
 185 
// Apply closure f to weak global handles whose referents pass is_alive;
// entries that fail the liveness test are handled by the storage.
void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  _weak_global_handles->weak_oops_do(is_alive, f);
}
 189 
 190 
// Apply closure f to every weak global JNI handle.
void JNIHandles::weak_oops_do(OopClosure* f) {
  _weak_global_handles->weak_oops_do(f);
}
 194 
 195 
// Create the OopStorage instances backing global and weak global handles.
// Called once during VM startup (see jni_handles_init).
void JNIHandles::initialize() {
  _global_handles = new OopStorage("JNI Global",
                                   JNIGlobalAlloc_lock,
                                   JNIGlobalActive_lock);
  _weak_global_handles = new OopStorage("JNI Weak",
                                        JNIWeakAlloc_lock,
                                        JNIWeakActive_lock);
}
 204 
 205 
// Returns true if ptr is a currently-allocated entry of storage.
inline bool is_storage_handle(const OopStorage* storage, const oop* ptr) {
  return storage->allocation_status(ptr) == OopStorage::ALLOCATED_ENTRY;
}
 209 
 210 
// Classify a handle as weak-global, global, local, or invalid, by probing
// (in order) the weak tag bit, the global OopStorage, and the thread's
// local handle blocks / stack frames.
jobjectRefType JNIHandles::handle_type(Thread* thread, jobject handle) {
  assert(handle != NULL, "precondition");
  jobjectRefType result = JNIInvalidRefType;
  if (is_jweak(handle)) {
    // Weak-tagged pointer: valid only if it is a live weak-storage entry.
    if (is_storage_handle(_weak_global_handles, &jweak_ref(handle))) {
      result = JNIWeakGlobalRefType;
    }
  } else {
    switch (_global_handles->allocation_status(&jobject_ref(handle))) {
    case OopStorage::ALLOCATED_ENTRY:
      result = JNIGlobalRefType;
      break;

    case OopStorage::UNALLOCATED_ENTRY:
      break;                    // Invalid global handle

    case OopStorage::INVALID_ENTRY:
      // Not in global storage.  Might be a local handle.
      if (is_local_handle(thread, handle) ||
          (thread->is_Java_thread() &&
           is_frame_handle((JavaThread*)thread, handle))) {
        result = JNILocalRefType;
      }
      break;

    default:
      ShouldNotReachHere();
    }
  }
  return result;
}
 242 
 243 
 244 bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
 245   assert(handle != NULL, "precondition");
 246   JNIHandleBlock* block = thread->active_handles();
 247 
 248   // Look back past possible native calls to jni_PushLocalFrame.
 249   while (block != NULL) {
 250     if (block->chain_contains(handle)) {
 251       return true;
 252     }
 253     block = block->pop_frame_link();
 254   }
 255   return false;
 256 }
 257 
 258 
// Determine if the handle is somewhere in the current thread's stack.
// We can't easily isolate the particular stack frame the handle might
// come from, so we'll check the whole stack.
 262 
// Returns true if handle points into the thread's stack between the last
// Java SP and the stack base (i.e. could be a stack-allocated handle).
bool JNIHandles::is_frame_handle(JavaThread* thr, jobject handle) {
  assert(handle != NULL, "precondition");
  // If there is no java frame, then this must be top level code, such
  // as the java command executable, in which case, this type of handle
  // is not permitted.
  return (thr->has_last_Java_frame() &&
         (void*)handle < (void*)thr->stack_base() &&
         (void*)handle >= (void*)thr->last_Java_sp());
}
 272 
 273 
// Returns true if handle is an untagged, live entry of the strong global
// handle storage.
bool JNIHandles::is_global_handle(jobject handle) {
  assert(handle != NULL, "precondition");
  return !is_jweak(handle) && is_storage_handle(_global_handles, &jobject_ref(handle));
}
 278 
 279 
// Returns true if handle is a weak-tagged, live entry of the weak global
// handle storage.
bool JNIHandles::is_weak_global_handle(jobject handle) {
  assert(handle != NULL, "precondition");
  return is_jweak(handle) && is_storage_handle(_weak_global_handles, &jweak_ref(handle));
}
 284 
// Total memory (bytes) used by the strong global handle storage.
size_t JNIHandles::global_handle_memory_usage() {
  return _global_handles->total_memory_usage();
}
 288 
// Total memory (bytes) used by the weak global handle storage.
size_t JNIHandles::weak_global_handle_memory_usage() {
  return _weak_global_handles->total_memory_usage();
}
 292 
 293 
 294 // We assume this is called at a safepoint: no lock is needed.
// Print global/weak handle counts to st.  Must run at a safepoint so the
// allocation counts are stable; no lock is needed.
void JNIHandles::print_on(outputStream* st) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
  assert(_global_handles != NULL && _weak_global_handles != NULL,
         "JNIHandles not initialized");

  st->print_cr("JNI global refs: " SIZE_FORMAT ", weak refs: " SIZE_FORMAT,
               _global_handles->allocation_count(),
               _weak_global_handles->allocation_count());
  st->cr();
  st->flush();
}
 306 
// Closure that verifies the referent of each handle is a well-formed oop.
class VerifyJNIHandles: public OopClosure {
public:
  virtual void do_oop(oop* root) {
    (*root)->verify();
  }
  // JNI handle storage holds full-width oops; narrow roots are unexpected.
  virtual void do_oop(narrowOop* root) { ShouldNotReachHere(); }
};
 314 
// Verify all strong and weak global handles by applying VerifyJNIHandles
// to each root.
void JNIHandles::verify() {
  VerifyJNIHandles verify_handle;

  oops_do(&verify_handle);
  weak_oops_do(&verify_handle);
}
 321 
 322 // This method is implemented here to avoid circular includes between
 323 // jniHandles.hpp and thread.hpp.
 324 bool JNIHandles::current_thread_in_native() {
 325   Thread* thread = Thread::current();
 326   return (thread->is_Java_thread() &&
 327           JavaThread::current()->thread_state() == _thread_in_native);
 328 }
 329 
 330 
// VM startup hook: create the JNI handle storages.
void jni_handles_init() {
  JNIHandles::initialize();
}
 334 
 335 
// JNIHandleBlock static state: count of blocks ever allocated, the global
// free list of recycled blocks, and (non-product builds only) a list of
// all allocated blocks used for debugging/statistics.
int             JNIHandleBlock::_blocks_allocated     = 0;
JNIHandleBlock* JNIHandleBlock::_block_free_list      = NULL;
#ifndef PRODUCT
JNIHandleBlock* JNIHandleBlock::_block_list           = NULL;
#endif
 341 
 342 
 343 #ifdef ASSERT
// Debug-only: reset the block to empty and NULL out every handle slot so
// stale oops cannot be mistaken for live handles.
void JNIHandleBlock::zap() {
  // Zap block values
  _top = 0;
  for (int index = 0; index < block_size_in_oops; index++) {
    _handles[index] = NULL;
  }
}
 351 #endif // ASSERT
 352 
// Obtain a fresh, empty JNIHandleBlock: first from the thread-local free
// list (lock-free), then from the global free list, allocating a new
// block if both are empty.  thread may be NULL (no thread-local cache).
JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread)  {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      block = new JNIHandleBlock();
      _blocks_allocated++;
      block->zap();
      #ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
      #endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  // Reset per-block state regardless of where the block came from.
  block->_top = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  block->_planned_capacity = block_size_in_oops;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}
 395 
 396 
// Return block (and its _next chain) for reuse: onto the thread-local
// free list when thread != NULL, otherwise onto the global free list
// under JNIHandleBlockFreeList_lock.  Any chain hanging off the
// pop_frame_link is released recursively as a sanity measure.
void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put returned block at the beginning of the thread-local free list.
  // Note that if thread == NULL, we use it as an implicit argument that
  // we _don't_ want the block to be kept on the free_handle_block.
  // See for instance JavaThread::exit().
  if (thread != NULL ) {
    block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if ( freelist != NULL ) {
      while ( block->_next != NULL ) block = block->_next;
      block->_next = freelist;
    }
    // Mark as consumed so the global-free-list branch below is skipped.
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we release blocks pointed to by the pop_frame_link.
    // This should never happen (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}
 440 
 441 
// Apply f to every live handle in this chain of blocks, then to the
// chains reachable through pop_frame_link (saved by jni_PushLocalFrame).
void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
        "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        oop* root = &(current->_handles)[index];
        oop value = *root;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != NULL && Universe::heap()->is_in_reserved(value)) {
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}
 468 
 469 
// Allocate a handle slot for obj in this block chain.  Tries, in order:
// the last partially-filled block, the free list of deleted slots, an
// unused trailing block, and finally either a free-list rebuild or
// appending a new block (per the _allocate_before_rebuild heuristic),
// then retries.  Called only on the first block of a chain.
jobject JNIHandleBlock::allocate_handle(oop obj) {
  assert(Universe::heap()->is_in_reserved(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation or the initial block got zapped when
    // entering a native function. If we have any following blocks they are
    // not valid anymore.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
             "only first block should have _free_list set");
      if (current->_top == 0) {
        // All blocks after the first clear trailing block are already cleared.
#ifdef ASSERT
        for (current = current->_next; current != NULL; current = current->_next) {
          assert(current->_top == 0, "trailing blocks must already be cleared");
        }
#endif
        break;
      }
      current->_top = 0;
      current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = &(_last->_handles)[_last->_top++];
    *handle = obj;
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = _free_list;
    // Free-list links are stored in the (NULL-referent) slots themselves.
    _free_list = (oop*) *_free_list;
    *handle = obj;
    return (jobject) handle;
  }
  // Check if unused block follow last
  if (_last->_next != NULL) {
    // update last and retry
    _last = _last->_next;
    return allocate_handle(obj);
  }

  // No space available, we have to rebuild free list or expand
  if (_allocate_before_rebuild == 0) {
      rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across call.
    _last->_next = JNIHandleBlock::allocate_block(thread);
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj);  // retry
}
 536 
// Scan all full blocks for deleted (NULL) slots and thread them into
// _free_list; also decide, via _allocate_before_rebuild, how many new
// blocks to append before the next rebuild attempt.
void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      oop* handle = &(current->_handles)[index];
      if (*handle == NULL) {
        // this handle was cleared out by a delete call, reuse it
        *handle = (oop) _free_list;
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like - compute number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
}
 565 
 566 
 567 bool JNIHandleBlock::contains(jobject handle) const {
 568   return ((jobject)&_handles[0] <= handle && handle<(jobject)&_handles[_top]);
 569 }
 570 
 571 
 572 bool JNIHandleBlock::chain_contains(jobject handle) const {
 573   for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) {
 574     if (current->contains(handle)) {
 575       return true;
 576     }
 577   }
 578   return false;
 579 }
 580 
 581 
 582 size_t JNIHandleBlock::length() const {
 583   size_t result = 1;
 584   for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) {
 585     result++;
 586   }
 587   return result;
 588 }
 589 
// Closure that counts how many oop roots it is applied to.
class CountJNIHandleClosure: public OopClosure {
private:
  int _count;   // number of roots visited so far
public:
  CountJNIHandleClosure(): _count(0) {}
  virtual void do_oop(oop* ooph) { _count++; }
  // Handle blocks never hold narrow oops.
  virtual void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
  int count() { return _count; }
};
 599 
// Count the live (non-NULL, in-heap) handles in this chain by running a
// counting closure through oops_do.
const size_t JNIHandleBlock::get_number_of_live_handles() {
  CountJNIHandleClosure counter;
  oops_do(&counter);
  return counter.count();
}
 605 
 606 // This method is not thread-safe, i.e., must be called while holding a lock on the
 607 // structure.
// Total bytes occupied by all blocks in this chain (block count times
// block size; per-handle payload is part of the block).
size_t JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}
 611 
 612 
 613 #ifndef PRODUCT
 614 
// Non-product: returns true if handle lives in ANY allocated handle
// block, regardless of owning thread.
bool JNIHandles::is_local_handle(jobject handle) {
  return JNIHandleBlock::any_contains(handle);
}
 618 
 619 bool JNIHandleBlock::any_contains(jobject handle) {
 620   assert(handle != NULL, "precondition");
 621   for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) {
 622     if (current->contains(handle)) {
 623       return true;
 624     }
 625   }
 626   return false;
 627 }
 628 
// Non-product: walk the global block list and print block/handle usage
// statistics to tty.  A block is "in use" if it holds at least one handle.
void JNIHandleBlock::print_statistics() {
  int used_blocks = 0;
  int free_blocks = 0;
  int used_handles = 0;
  int free_handles = 0;
  JNIHandleBlock* block = _block_list;
  while (block != NULL) {
    if (block->_top > 0) {
      used_blocks++;
    } else {
      free_blocks++;
    }
    used_handles += block->_top;
    free_handles += (block_size_in_oops - block->_top);
    block = block->_block_list_link;
  }
  tty->print_cr("JNIHandleBlocks statistics");
  tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
  tty->print_cr("- blocks in use:    %d", used_blocks);
  tty->print_cr("- blocks free:      %d", free_blocks);
  tty->print_cr("- handles in use:   %d", used_handles);
  tty->print_cr("- handles free:     %d", free_handles);
}
 652 
 653 #endif