/*
 * Copyright (c) 1998, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/thread.inline.hpp"

PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC

JNIHandleBlock* JNIHandles::_global_handles       = NULL;
JNIHandleBlock* JNIHandles::_weak_global_handles  = NULL;
oop             JNIHandles::_deleted_handle       = NULL;


jobject JNIHandles::make_local(oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    Thread* thread = Thread::current();
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


// optimized versions

jobject JNIHandles::make_local(Thread* thread, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


jobject JNIHandles::make_local(JNIEnv* env, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    JavaThread* thread = JavaThread::thread_from_jni_environment(env);
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


jobject JNIHandles::make_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }

  return res;
}


jobject JNIHandles::make_weak_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _weak_global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return res;
}


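// Deleted handles are not unlinked from their block immediately; the slot is
// overwritten with the deleted_handle() sentinel and reclaimed later by
// JNIHandleBlock::rebuild_free_list().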
void JNIHandles::destroy_global(jobject handle) {
  if (handle != NULL) {
    assert(is_global_handle(handle), "Invalid delete of global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted; allocate_handle will reuse it
  }
}


void JNIHandles::destroy_weak_global(jobject handle) {
  if (handle != NULL) {
    assert(!CheckJNICalls || is_weak_global_handle(handle), "Invalid delete of weak global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted; allocate_handle will reuse it
  }
}


void JNIHandles::oops_do(OopClosure* f) {
  f->do_oop(&_deleted_handle);
  _global_handles->oops_do(f);
}


void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  _weak_global_handles->weak_oops_do(is_alive, f);
}


void JNIHandles::initialize() {
  _global_handles      = JNIHandleBlock::allocate_block();
  _weak_global_handles = JNIHandleBlock::allocate_block();
  EXCEPTION_MARK;
  // We will never reach the CATCH below since Exceptions::_throw will cause
  // the VM to exit if an exception is thrown during initialization
  Klass* k      = SystemDictionary::Object_klass();
  _deleted_handle = InstanceKlass::cast(k)->allocate_instance(CATCH);
}


bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
  JNIHandleBlock* block = thread->active_handles();

  // Look back past possible native calls to jni_PushLocalFrame.
  while (block != NULL) {
    if (block->chain_contains(handle)) {
      return true;
    }
    block = block->pop_frame_link();
  }
  return false;
}


// Determine if the handle is somewhere in the current thread's stack.
// We can't easily isolate any particular stack frame the handle might
// come from, so we'll check the whole stack.

bool JNIHandles::is_frame_handle(JavaThread* thr, jobject obj) {
  // If there is no java frame, then this must be top level code, such
  // as the java command executable, in which case this type of handle
  // is not permitted.
  return (thr->has_last_Java_frame() &&
         (void*)obj < (void*)thr->stack_base() &&
         (void*)obj >= (void*)thr->last_Java_sp());
}


bool JNIHandles::is_global_handle(jobject handle) {
  return _global_handles->chain_contains(handle);
}


bool JNIHandles::is_weak_global_handle(jobject handle) {
  return _weak_global_handles->chain_contains(handle);
}

long JNIHandles::global_handle_memory_usage() {
  return _global_handles->memory_usage();
}

long JNIHandles::weak_global_handle_memory_usage() {
  return _weak_global_handles->memory_usage();
}


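// Closure that reports every object as live; used below so that weak global
// handles are included when counting and verifying handles.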
class AlwaysAliveClosure: public BoolObjectClosure {
public:
  bool do_object_b(oop obj) { return true; }
};

class CountHandleClosure: public OopClosure {
private:
  int _count;
public:
  CountHandleClosure(): _count(0) {}
  virtual void do_oop(oop* ooph) {
    if (*ooph != JNIHandles::deleted_handle()) {
      _count++;
    }
  }
  virtual void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
  int count() { return _count; }
};

// We assume this is called at a safepoint: no lock is needed.
void JNIHandles::print_on(outputStream* st) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
  assert(_global_handles != NULL && _weak_global_handles != NULL,
         "JNIHandles not initialized");

  CountHandleClosure global_handle_count;
  AlwaysAliveClosure always_alive;
  oops_do(&global_handle_count);
  weak_oops_do(&always_alive, &global_handle_count);

  st->print_cr("JNI global references: %d", global_handle_count.count());
  st->cr();
  st->flush();
}

class VerifyHandleClosure: public OopClosure {
public:
  virtual void do_oop(oop* root) {
    (*root)->verify();
  }
  virtual void do_oop(narrowOop* root) { ShouldNotReachHere(); }
};

void JNIHandles::verify() {
  VerifyHandleClosure verify_handle;
  AlwaysAliveClosure always_alive;

  oops_do(&verify_handle);
  weak_oops_do(&always_alive, &verify_handle);
}



void jni_handles_init() {
  JNIHandles::initialize();
}


int             JNIHandleBlock::_blocks_allocated     = 0;
JNIHandleBlock* JNIHandleBlock::_block_free_list      = NULL;
#ifndef PRODUCT
JNIHandleBlock* JNIHandleBlock::_block_list           = NULL;
#endif


void JNIHandleBlock::zap() {
  // Zap block values
  _top  = 0;
  for (int index = 0; index < block_size_in_oops; index++) {
    _handles[index] = badJNIHandle;
  }
}

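// Blocks are recycled through two free lists: a per-thread list reachable via
// Thread::free_handle_block() (no locking required) and a global list guarded
// by JNIHandleBlockFreeList_lock.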
JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // Locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another thread would hold Threads_lock (jni_AttachCurrentThread) and
    //   then JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      block = new JNIHandleBlock();
      _blocks_allocated++;
      if (TraceJNIHandleAllocation) {
        tty->print_cr("JNIHandleBlock " INTPTR_FORMAT " allocated (%d total blocks)",
                      block, _blocks_allocated);
      }
      if (ZapJNIHandleArea) block->zap();
      #ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
      #endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  block->_top  = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}


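// Returns a chain of blocks either to the thread-local free list (when a
// thread is supplied) or to the global free list, and recursively releases
// any chain still hanging off the pop_frame_link.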
void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put returned block at the beginning of the thread-local free list.
  // Note that a NULL thread serves as an implicit argument that the block
  // should _not_ be kept on the thread's free_handle_block list.
  // See for instance JavaThread::exit().
  if (thread != NULL) {
    if (ZapJNIHandleArea) block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if (freelist != NULL) {
      while (block->_next != NULL) block = block->_next;
      block->_next = freelist;
    }
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // Locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another thread would hold Threads_lock (jni_AttachCurrentThread) and
    //   then JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      if (ZapJNIHandleArea) block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we release blocks pointed to by the pop_frame_link.
    // This should never happen (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}


void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
        "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        oop* root = &(current->_handles)[index];
        oop value = *root;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != NULL && Universe::heap()->is_in_reserved(value)) {
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}


void JNIHandleBlock::weak_oops_do(BoolObjectClosure* is_alive,
                                  OopClosure* f) {
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    assert(current->pop_frame_link() == NULL,
      "blocks holding weak global JNI handles should not have pop frame link set");
    for (int index = 0; index < current->_top; index++) {
      oop* root = &(current->_handles)[index];
      oop value = *root;
      // traverse heap pointers only, not deleted handles or free list pointers
      if (value != NULL && Universe::heap()->is_in_reserved(value)) {
        if (is_alive->do_object_b(value)) {
          // The weakly referenced object is alive, update pointer
          f->do_oop(root);
        } else {
          // The weakly referenced object is not alive, clear the reference by storing NULL
          if (TraceReferenceGC) {
            tty->print_cr("Clearing JNI weak reference (" INTPTR_FORMAT ")", root);
          }
          *root = NULL;
        }
      }
    }
    // the next handle block is valid only if current block is full
    if (current->_top < block_size_in_oops) {
      break;
    }
  }

  /*
   * JVMTI data structures may also contain weak oops.  The iteration of them
   * is placed here so that we don't need to add it to each of the collectors.
   */
  JvmtiExport::weak_oops_do(is_alive, f);
}


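// Handle allocation strategy: try the current last block, then the free list
// of deleted slots, then any unused block already linked after _last; only
// when all of these fail do we rebuild the free list or append a new block.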
jobject JNIHandleBlock::allocate_handle(oop obj) {
  assert(Universe::heap()->is_in_reserved(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation or the initial block got zapped when
    // entering a native function. If we have any following blocks they are
    // no longer valid.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
             "only first block should have _free_list set");
      current->_top = 0;
      if (ZapJNIHandleArea) current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    if (ZapJNIHandleArea) zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = &(_last->_handles)[_last->_top++];
    *handle = obj;
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = _free_list;
    _free_list = (oop*) *_free_list;
    *handle = obj;
    return (jobject) handle;
  }
  // Check if an unused block follows the last block
  if (_last->_next != NULL) {
    // update last and retry
    _last = _last->_next;
    return allocate_handle(obj);
  }

  // No space available, we have to rebuild the free list or expand
  if (_allocate_before_rebuild == 0) {
    rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across call.
    _last->_next = JNIHandleBlock::allocate_block(thread);
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj);  // retry
}


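// Walks the full chain and threads every deleted-handle slot onto _free_list;
// also decides (via _allocate_before_rebuild) how many fresh blocks to append
// before the next rebuild is attempted.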
void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      oop* handle = &(current->_handles)[index];
      if (*handle == JNIHandles::deleted_handle()) {
        // this handle was cleared out by a delete call, reuse it
        *handle = (oop) _free_list;
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild the free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like - compute number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
  if (TraceJNIHandleAllocation) {
    tty->print_cr("Rebuild free list JNIHandleBlock " INTPTR_FORMAT " blocks=%d used=%d free=%d add=%d",
      this, blocks, total-free, free, _allocate_before_rebuild);
  }
}


bool JNIHandleBlock::contains(jobject handle) const {
  return ((jobject)&_handles[0] <= handle && handle < (jobject)&_handles[_top]);
}


bool JNIHandleBlock::chain_contains(jobject handle) const {
  for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}


int JNIHandleBlock::length() const {
  int result = 1;
  for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) {
    result++;
  }
  return result;
}

// This method is not thread-safe, i.e., it must be called while holding a lock
// on the structure.
long JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}


#ifndef PRODUCT

bool JNIHandleBlock::any_contains(jobject handle) {
  for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}

void JNIHandleBlock::print_statistics() {
  int used_blocks = 0;
  int free_blocks = 0;
  int used_handles = 0;
  int free_handles = 0;
  JNIHandleBlock* block = _block_list;
  while (block != NULL) {
    if (block->_top > 0) {
      used_blocks++;
    } else {
      free_blocks++;
    }
    used_handles += block->_top;
    free_handles += (block_size_in_oops - block->_top);
    block = block->_block_list_link;
  }
  tty->print_cr("JNIHandleBlocks statistics");
  tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
  tty->print_cr("- blocks in use:    %d", used_blocks);
  tty->print_cr("- blocks free:      %d", free_blocks);
  tty->print_cr("- handles in use:   %d", used_handles);
  tty->print_cr("- handles free:     %d", free_handles);
}

#endif