/*
 * Copyright (c) 1998, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "logging/log.hpp"
#include "memory/iterator.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/thread.inline.hpp"
#include "utilities/align.hpp"

JNIHandleBlock* JNIHandles::_global_handles       = NULL;
JNIHandleBlock* JNIHandles::_weak_global_handles  = NULL;
oop             JNIHandles::_deleted_handle       = NULL;


jobject JNIHandles::make_local(oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    Thread* thread = Thread::current();
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


// Optimized versions that avoid the Thread::current() lookup when the caller
// already has the thread or the JNIEnv at hand.

jobject JNIHandles::make_local(Thread* thread, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


jobject JNIHandles::make_local(JNIEnv* env, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    JavaThread* thread = JavaThread::thread_from_jni_environment(env);
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


jobject JNIHandles::make_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }

  return res;
}


jobject JNIHandles::make_weak_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    {
      MutexLocker ml(JNIGlobalHandle_lock);
      assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
      res = _weak_global_handles->allocate_handle(obj());
    }
    // Add weak tag.
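    // The low-order tag bit (weak_tag_value) lets is_jweak() distinguish
    // weak handles from strong ones when a handle is later resolved.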
    assert(is_aligned(res, weak_tag_alignment), "invariant");
    char* tptr = reinterpret_cast<char*>(res) + weak_tag_value;
    res = reinterpret_cast<jobject>(tptr);
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return res;
}

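// Resolve a jweak handle to its referent. The template parameter selects the
// external (checked) or internal flavour of guard_value(); when all GCs are
// included in the build, a keep-alive barrier is applied so the collector
// treats the resolved referent as reachable.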
template<bool external_guard>
oop JNIHandles::resolve_jweak(jweak handle) {
  assert(is_jweak(handle), "precondition");
  oop result = jweak_ref(handle);
  result = guard_value<external_guard>(result);
#if INCLUDE_ALL_GCS
  oopDesc::bs()->keep_alive_barrier(result);
#endif
  return result;
}

template oop JNIHandles::resolve_jweak<true>(jweak);
template oop JNIHandles::resolve_jweak<false>(jweak);

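// Destroying a handle does not free its slot immediately: the slot is
// overwritten with the deleted_handle() sentinel and only reclaimed when the
// owning block rebuilds its free list.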
void JNIHandles::destroy_global(jobject handle) {
  if (handle != NULL) {
    assert(is_global_handle(handle), "Invalid delete of global JNI handle");
    jobject_ref(handle) = deleted_handle();
  }
}


void JNIHandles::destroy_weak_global(jobject handle) {
  if (handle != NULL) {
    jweak_ref(handle) = deleted_handle();
  }
}


void JNIHandles::oops_do(OopClosure* f) {
  f->do_oop(&_deleted_handle);
  _global_handles->oops_do(f);
}


void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  _weak_global_handles->weak_oops_do(is_alive, f);
}


void JNIHandles::weak_oops_do(OopClosure* f) {
  AlwaysTrueClosure always_true;
  weak_oops_do(&always_true, f);
}


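// Allocate the global and weak-global handle blocks and the sentinel object
// used to mark deleted handles.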
void JNIHandles::initialize() {
  _global_handles      = JNIHandleBlock::allocate_block();
  _weak_global_handles = JNIHandleBlock::allocate_block();
  EXCEPTION_MARK;
  // We will never reach the CATCH below since Exceptions::_throw will cause
  // the VM to exit if an exception is thrown during initialization
  Klass* k      = SystemDictionary::Object_klass();
  _deleted_handle = InstanceKlass::cast(k)->allocate_instance(CATCH);
}


bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
  JNIHandleBlock* block = thread->active_handles();

  // Look back past possible native calls to jni_PushLocalFrame.
  while (block != NULL) {
    if (block->chain_contains(handle)) {
      return true;
    }
    block = block->pop_frame_link();
  }
  return false;
}


// Determine if the handle is somewhere in the current thread's stack.
// We can't easily isolate the particular stack frame the handle might
// come from, so we check the whole stack.

bool JNIHandles::is_frame_handle(JavaThread* thr, jobject obj) {
  // If there is no java frame, then this must be top level code, such
  // as the java command executable, in which case this type of handle
  // is not permitted.
  return (thr->has_last_Java_frame() &&
         (void*)obj < (void*)thr->stack_base() &&
         (void*)obj >= (void*)thr->last_Java_sp());
}


bool JNIHandles::is_global_handle(jobject handle) {
  return _global_handles->chain_contains(handle);
}


bool JNIHandles::is_weak_global_handle(jobject handle) {
  return _weak_global_handles->chain_contains(handle);
}

long JNIHandles::global_handle_memory_usage() {
  return _global_handles->memory_usage();
}

long JNIHandles::weak_global_handle_memory_usage() {
  return _weak_global_handles->memory_usage();
}


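// Counts handles that are still live, i.e. not pointing at the
// deleted_handle() sentinel.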
class CountHandleClosure: public OopClosure {
private:
  int _count;
public:
  CountHandleClosure(): _count(0) {}
  virtual void do_oop(oop* ooph) {
    if (! oopDesc::equals(*ooph, JNIHandles::deleted_handle())) {
      _count++;
    }
  }
  virtual void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
  int count() { return _count; }
};

// We assume this is called at a safepoint: no lock is needed.
void JNIHandles::print_on(outputStream* st) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
  assert(_global_handles != NULL && _weak_global_handles != NULL,
         "JNIHandles not initialized");

  CountHandleClosure global_handle_count;
  oops_do(&global_handle_count);
  weak_oops_do(&global_handle_count);

  st->print_cr("JNI global references: %d", global_handle_count.count());
  st->cr();
  st->flush();
}

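// Runs the standard oop verifier on every handle's referent.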
class VerifyHandleClosure: public OopClosure {
public:
  virtual void do_oop(oop* root) {
    (*root)->verify();
  }
  virtual void do_oop(narrowOop* root) { ShouldNotReachHere(); }
};

void JNIHandles::verify() {
  VerifyHandleClosure verify_handle;

  oops_do(&verify_handle);
  weak_oops_do(&verify_handle);
}



void jni_handles_init() {
  JNIHandles::initialize();
}


int             JNIHandleBlock::_blocks_allocated     = 0;
JNIHandleBlock* JNIHandleBlock::_block_free_list      = NULL;
#ifndef PRODUCT
JNIHandleBlock* JNIHandleBlock::_block_list           = NULL;
#endif


void JNIHandleBlock::zap() {
  // Zap block values
  _top = 0;
  for (int index = 0; index < block_size_in_oops; index++) {
    _handles[index] = badJNIHandle;
  }
}

JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread)  {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      block = new JNIHandleBlock();
      _blocks_allocated++;
      if (ZapJNIHandleArea) block->zap();
      #ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
      #endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  block->_top = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  block->_planned_capacity = block_size_in_oops;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}


void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put returned block at the beginning of the thread-local free list.
  // Note that if thread == NULL, we use it as an implicit argument that
  // we _don't_ want the block to be kept on the free_handle_block.
  // See for instance JavaThread::exit().
  if (thread != NULL) {
    if (ZapJNIHandleArea) block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if (freelist != NULL) {
      while (block->_next != NULL) block = block->_next;
      block->_next = freelist;
    }
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      if (ZapJNIHandleArea) block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we release blocks pointed to by the pop_frame_link.
    // This should never happen (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}


void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
        "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        oop* root = &(current->_handles)[index];
        oop value = *root;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != NULL && Universe::heap()->is_in_reserved(value)) {
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}


void JNIHandleBlock::weak_oops_do(BoolObjectClosure* is_alive,
                                  OopClosure* f) {
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    assert(current->pop_frame_link() == NULL,
      "blocks holding weak global JNI handles should not have pop frame link set");
    for (int index = 0; index < current->_top; index++) {
      oop* root = &(current->_handles)[index];
      oop value = *root;
      // traverse heap pointers only, not deleted handles or free list pointers
      if (value != NULL && Universe::heap()->is_in_reserved(value)) {
        if (is_alive->do_object_b(value)) {
          // The weakly referenced object is alive, update pointer
          f->do_oop(root);
        } else {
          // The weakly referenced object is not alive, clear the reference by storing NULL
          log_develop_trace(gc, ref)("Clearing JNI weak reference (" INTPTR_FORMAT ")", p2i(root));
          *root = NULL;
        }
      }
    }
    // the next handle block is valid only if current block is full
    if (current->_top < block_size_in_oops) {
      break;
    }
  }

  /*
   * JVMTI data structures may also contain weak oops.  The iteration of them
   * is placed here so that we don't need to add it to each of the collectors.
   */
  JvmtiExport::weak_oops_do(is_alive, f);
}


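// Allocate a handle slot for obj. The strategy is: try the next free slot in
// the last block of the chain, then the free list of deleted handles, then an
// unused block already linked after the last one; if all of these fail,
// either rebuild the free list or append a new block and retry.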
jobject JNIHandleBlock::allocate_handle(oop obj) {
  assert(Universe::heap()->is_in_reserved(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation or the initial block got zapped when
    // entering a native function. If we have any following blocks they are
    // not valid anymore.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
             "only first block should have _free_list set");
      if (current->_top == 0) {
        // Once we hit a cleared block, all trailing blocks are already cleared.
#ifdef ASSERT
        for (current = current->_next; current != NULL; current = current->_next) {
          assert(current->_top == 0, "trailing blocks must already be cleared");
        }
#endif
        break;
      }
      current->_top = 0;
      if (ZapJNIHandleArea) current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    if (ZapJNIHandleArea) zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = &(_last->_handles)[_last->_top++];
    *handle = obj;
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = _free_list;
    _free_list = (oop*) *_free_list;
    *handle = obj;
    return (jobject) handle;
  }
  // Check if an unused block follows the last block
  if (_last->_next != NULL) {
    // update _last and retry
    _last = _last->_next;
    return allocate_handle(obj);
  }

  // No space available, we have to rebuild free list or expand
  if (_allocate_before_rebuild == 0) {
    rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across the call.
    _last->_next = JNIHandleBlock::allocate_block(thread);
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj);  // retry
}

void JNIHandleBlock::release_handle(jobject h) {
  if (h != NULL) {
    assert(chain_contains(h), "does not contain the JNI handle");
    // Mark the handle as deleted, allocate will reuse it
    *((oop*)h) = JNIHandles::deleted_handle();
  }
}


void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      oop* handle = &(current->_handles)[index];
      if (oopDesc::equals(*handle, JNIHandles::deleted_handle())) {
        // this handle was cleared out by a delete call, reuse it
        *handle = (oop) _free_list;
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
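  // For example, with a (hypothetical) block_size_in_oops of 32, four full
  // blocks and 40 free slots give total = 128 and extra = 128 - 80 = 48, so
  // two more blocks are appended before the next rebuild; with 70 free slots
  // extra is negative and the free list is simply rebuilt again next time.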
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like - compute number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
}


bool JNIHandleBlock::contains(jobject handle) const {
  return ((jobject)&_handles[0] <= handle && handle < (jobject)&_handles[_top]);
}


bool JNIHandleBlock::chain_contains(jobject handle) const {
  for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}


int JNIHandleBlock::length() const {
  int result = 1;
  for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) {
    result++;
  }
  return result;
}

const size_t JNIHandleBlock::get_number_of_live_handles() {
  CountHandleClosure counter;
  oops_do(&counter);
  return counter.count();
}

// This method is not thread-safe; it must be called while holding a lock on
// the structure.
long JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}


#ifndef PRODUCT

bool JNIHandleBlock::any_contains(jobject handle) {
  for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}

void JNIHandleBlock::print_statistics() {
  int used_blocks = 0;
  int free_blocks = 0;
  int used_handles = 0;
  int free_handles = 0;
  JNIHandleBlock* block = _block_list;
  while (block != NULL) {
    if (block->_top > 0) {
      used_blocks++;
    } else {
      free_blocks++;
    }
    used_handles += block->_top;
    free_handles += (block_size_in_oops - block->_top);
    block = block->_block_list_link;
  }
  tty->print_cr("JNIHandleBlocks statistics");
  tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
  tty->print_cr("- blocks in use:    %d", used_blocks);
  tty->print_cr("- blocks free:      %d", free_blocks);
  tty->print_cr("- handles in use:   %d", used_handles);
  tty->print_cr("- handles free:     %d", free_handles);
}

#endif