/*
 * Copyright (c) 1998, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "logging/log.hpp"
#include "memory/iterator.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/thread.inline.hpp"
#include "trace/traceMacros.hpp"
#include "utilities/align.hpp"
#if INCLUDE_ALL_GCS
#include "gc/g1/g1SATBCardTableModRefBS.hpp"
#endif

JNIHandleBlock* JNIHandles::_global_handles       = NULL;
JNIHandleBlock* JNIHandles::_weak_global_handles  = NULL;
oop             JNIHandles::_deleted_handle       = NULL;


jobject JNIHandles::make_local(oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    Thread* thread = Thread::current();
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    assert(JavaThread::current()->thread_state() != _thread_in_native, "must not be in native");
    return thread->active_handles()->allocate_handle(obj);
  }
}


// optimized versions

jobject JNIHandles::make_local(Thread* thread, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    assert(thread->is_Java_thread(), "not a Java thread");
    assert(((JavaThread *)thread)->thread_state() != _thread_in_native, "must not be in native");
    return thread->active_handles()->allocate_handle(obj);
  }
}


jobject JNIHandles::make_local(JNIEnv* env, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    JavaThread* thread = JavaThread::thread_from_jni_environment(env);
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    assert(thread->thread_state() != _thread_in_native, "must not be in native");
    return thread->active_handles()->allocate_handle(obj);
  }
}
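
// Illustrative sketch (not part of this file): a typical jni_* entry point
// resolves its incoming handles to oops, does its VM work on oops, and wraps
// any oop result with make_local() before returning to native code, roughly:
//
//   oop obj   = JNIHandles::resolve(recv);         // incoming handle -> oop
//   oop value = ...;                               // VM work on oops
//   return JNIHandles::make_local(env, value);     // oop -> outgoing handle
//
// The Thread* and JNIEnv* variants above merely avoid a Thread::current()
// lookup when the caller already has the thread or env pointer at hand.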


jobject JNIHandles::make_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  assert(JavaThread::current()->thread_state() != _thread_in_native, "must not be in native");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }

  return res;
}


jobject JNIHandles::make_weak_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  assert(JavaThread::current()->thread_state() != _thread_in_native, "must not be in native");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    {
      MutexLocker ml(JNIGlobalHandle_lock);
      assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
      res = _weak_global_handles->allocate_handle(obj());
    }
    // Add weak tag.
    assert(is_aligned(res, weak_tag_alignment), "invariant");
    char* tptr = reinterpret_cast<char*>(res) + weak_tag_value;
    res = reinterpret_cast<jobject>(tptr);
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return res;
}
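
// Weak handles are distinguished from strong ones solely by the low-order tag
// bit added above: handle slots are at least weak_tag_alignment-aligned (see
// the assert), so the bit is always available, and is_jweak() / jweak_ref()
// in jniHandles.hpp simply test and strip it when resolving.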

template<bool external_guard>
oop JNIHandles::resolve_jweak(jweak handle) {
  assert(is_jweak(handle), "precondition");
  oop result = jweak_ref(handle);
  result = guard_value<external_guard>(result);
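  // While G1's concurrent (SATB) marking is running, the referent we just read
  // could otherwise become strongly reachable without the marker ever having
  // seen it. Enqueueing it in the SATB buffer keeps it alive, much like the
  // barrier applied when java.lang.ref.Reference.get() is intrinsified.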
#if INCLUDE_ALL_GCS
  if (result != NULL && UseG1GC) {
    G1SATBCardTableModRefBS::enqueue(result);
  }
#endif // INCLUDE_ALL_GCS
  return result;
}

template oop JNIHandles::resolve_jweak<true>(jweak);
template oop JNIHandles::resolve_jweak<false>(jweak);

bool JNIHandles::is_global_weak_cleared(jweak handle) {
  assert(is_jweak(handle), "not a weak handle");
  return guard_value<false>(jweak_ref(handle)) == NULL;
}

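// Destroyed handles are overwritten with the deleted_handle() sentinel (a
// dummy heap object) rather than NULL; rebuild_free_list() below recognizes
// the sentinel and links such slots onto a block's free list for reuse.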
void JNIHandles::destroy_global(jobject handle) {
  if (handle != NULL) {
    assert(is_global_handle(handle), "Invalid delete of global JNI handle");
    jobject_ref(handle) = deleted_handle();
  }
}


void JNIHandles::destroy_weak_global(jobject handle) {
  if (handle != NULL) {
    jweak_ref(handle) = deleted_handle();
  }
}


void JNIHandles::oops_do(OopClosure* f) {
  f->do_oop(&_deleted_handle);
  _global_handles->oops_do(f);
}


void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  _weak_global_handles->weak_oops_do(is_alive, f);
}


void JNIHandles::weak_oops_do(OopClosure* f) {
  AlwaysTrueClosure always_true;
  weak_oops_do(&always_true, f);
}


void JNIHandles::initialize() {
  _global_handles      = JNIHandleBlock::allocate_block();
  _weak_global_handles = JNIHandleBlock::allocate_block();
  EXCEPTION_MARK;
  // We will never reach the CATCH below since Exceptions::_throw will cause
  // the VM to exit if an exception is thrown during initialization
  Klass* k      = SystemDictionary::Object_klass();
  _deleted_handle = InstanceKlass::cast(k)->allocate_instance(CATCH);
}


bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
  JNIHandleBlock* block = thread->active_handles();

  // Look back past possible native calls to jni_PushLocalFrame.
  while (block != NULL) {
    if (block->chain_contains(handle)) {
      return true;
    }
    block = block->pop_frame_link();
  }
  return false;
}

// Determine if the handle is somewhere in the current thread's stack.
// We can't easily isolate any particular stack frame the handle might
// come from, so we'll check the whole stack.

bool JNIHandles::is_frame_handle(JavaThread* thr, jobject obj) {
  // If there is no Java frame, then this must be top-level code, such
  // as the java command executable, in which case this type of handle
  // is not permitted.
  return (thr->has_last_Java_frame() &&
         (void*)obj < (void*)thr->stack_base() &&
         (void*)obj >= (void*)thr->last_Java_sp());
}


bool JNIHandles::is_global_handle(jobject handle) {
  return _global_handles->chain_contains(handle);
}


bool JNIHandles::is_weak_global_handle(jobject handle) {
  return _weak_global_handles->chain_contains(handle);
}

long JNIHandles::global_handle_memory_usage() {
  return _global_handles->memory_usage();
}

long JNIHandles::weak_global_handle_memory_usage() {
  return _weak_global_handles->memory_usage();
}


class CountHandleClosure: public OopClosure {
private:
  int _count;
public:
  CountHandleClosure(): _count(0) {}
  virtual void do_oop(oop* ooph) {
    if (*ooph != JNIHandles::deleted_handle()) {
      _count++;
    }
  }
  virtual void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
  int count() { return _count; }
};

// We assume this is called at a safepoint: no lock is needed.
void JNIHandles::print_on(outputStream* st) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
  assert(_global_handles != NULL && _weak_global_handles != NULL,
         "JNIHandles not initialized");

  CountHandleClosure global_handle_count;
  oops_do(&global_handle_count);
  weak_oops_do(&global_handle_count);

  st->print_cr("JNI global references: %d", global_handle_count.count());
  st->cr();
  st->flush();
}

class VerifyHandleClosure: public OopClosure {
public:
  virtual void do_oop(oop* root) {
    (*root)->verify();
  }
  virtual void do_oop(narrowOop* root) { ShouldNotReachHere(); }
};

void JNIHandles::verify() {
  VerifyHandleClosure verify_handle;

  oops_do(&verify_handle);
  weak_oops_do(&verify_handle);
}



void jni_handles_init() {
  JNIHandles::initialize();
}


int             JNIHandleBlock::_blocks_allocated     = 0;
JNIHandleBlock* JNIHandleBlock::_block_free_list      = NULL;
#ifndef PRODUCT
JNIHandleBlock* JNIHandleBlock::_block_list           = NULL;
#endif


void JNIHandleBlock::zap() {
  // Zap block values
  _top = 0;
  for (int index = 0; index < block_size_in_oops; index++) {
    _handles[index] = badJNIHandle;
  }
}

JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread)  {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      block = new JNIHandleBlock();
      _blocks_allocated++;
      if (ZapJNIHandleArea) block->zap();
      #ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
      #endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  block->_top = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  block->_planned_capacity = block_size_in_oops;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}


void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put the returned block at the beginning of the thread-local free list.
  // Note that if thread == NULL, it is used as an implicit argument that
  // the block should _not_ be kept on the thread's free_handle_block list.
  // See for instance JavaThread::exit().
  if (thread != NULL) {
    if (ZapJNIHandleArea) block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if (freelist != NULL) {
      while (block->_next != NULL) block = block->_next;
      block->_next = freelist;
    }
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      if (ZapJNIHandleArea) block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we release blocks pointed to by the pop_frame_link.
    // This should never happen (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}


void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
        "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        oop* root = &(current->_handles)[index];
        oop value = *root;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != NULL && Universe::heap()->is_in_reserved(value)) {
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}


void JNIHandleBlock::weak_oops_do(BoolObjectClosure* is_alive,
                                  OopClosure* f) {
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    assert(current->pop_frame_link() == NULL,
      "blocks holding weak global JNI handles should not have pop frame link set");
    for (int index = 0; index < current->_top; index++) {
      oop* root = &(current->_handles)[index];
      oop value = *root;
      // traverse heap pointers only, not deleted handles or free list pointers
      if (value != NULL && Universe::heap()->is_in_reserved(value)) {
        if (is_alive->do_object_b(value)) {
          // The weakly referenced object is alive, update pointer
          f->do_oop(root);
        } else {
          // The weakly referenced object is not alive, clear the reference by storing NULL
          log_develop_trace(gc, ref)("Clearing JNI weak reference (" INTPTR_FORMAT ")", p2i(root));
          *root = NULL;
        }
      }
    }
    // the next handle block is valid only if current block is full
    if (current->_top < block_size_in_oops) {
      break;
    }
  }
}


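// Allocation strategy (summary of the code below): first try the bump pointer
// in the current last block, then the free list of slots reclaimed from
// deleted handles, then an already linked but unused trailing block, and only
// then either rebuild the free list or append a fresh block, retrying after
// each structural change.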
jobject JNIHandleBlock::allocate_handle(oop obj) {
  assert(Universe::heap()->is_in_reserved(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation or the initial block got zapped when
    // entering a native function. If we have any following blocks they are
    // not valid anymore.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
             "only first block should have _free_list set");
      if (current->_top == 0) {
        // All blocks after the first empty block are already cleared.
#ifdef ASSERT
        for (current = current->_next; current != NULL; current = current->_next) {
          assert(current->_top == 0, "trailing blocks must already be cleared");
        }
#endif
        break;
      }
      current->_top = 0;
      if (ZapJNIHandleArea) current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    if (ZapJNIHandleArea) zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = &(_last->_handles)[_last->_top++];
    *handle = obj;
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = _free_list;
    _free_list = (oop*) *_free_list;
    *handle = obj;
    return (jobject) handle;
  }
  // Check if an unused block follows the last block
  if (_last->_next != NULL) {
    // update last and retry
    _last = _last->_next;
    return allocate_handle(obj);
  }

  // No space available, we have to rebuild the free list or expand
  if (_allocate_before_rebuild == 0) {
    rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across the call.
    _last->_next = JNIHandleBlock::allocate_block(thread);
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj);  // retry
}

void JNIHandleBlock::release_handle(jobject h) {
  if (h != NULL) {
    assert(chain_contains(h), "does not contain the JNI handle");
    // Mark the handle as deleted; allocate_handle() will reuse it
    *((oop*)h) = JNIHandles::deleted_handle();
  }
}


void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      oop* handle = &(current->_handles)[index];
      if (*handle == JNIHandles::deleted_handle()) {
        // this handle was cleared out by a delete call, reuse it
        *handle = (oop) _free_list;
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild the free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
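  // Worked example (illustrative numbers, not taken from this file): with 4
  // full blocks of block_size_in_oops == 32 handles, total == 128. If only 40
  // slots were reclaimed, extra == 128 - 2*40 == 48, so _allocate_before_rebuild
  // becomes (48 + 31) / 32 == 2 and two more blocks are appended before the
  // next rebuild. With 70 or more free slots, extra <= 0 and the counter stays
  // 0, so the next exhaustion rebuilds the free list again right away.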
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like; compute the number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
}


bool JNIHandleBlock::contains(jobject handle) const {
  return ((jobject)&_handles[0] <= handle && handle < (jobject)&_handles[_top]);
}


bool JNIHandleBlock::chain_contains(jobject handle) const {
  for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}


int JNIHandleBlock::length() const {
  int result = 1;
  for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) {
    result++;
  }
  return result;
}

const size_t JNIHandleBlock::get_number_of_live_handles() {
  CountHandleClosure counter;
  oops_do(&counter);
  return counter.count();
}

// This method is not thread-safe; it must be called while holding a lock on
// the structure.
long JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}


#ifndef PRODUCT

bool JNIHandleBlock::any_contains(jobject handle) {
  for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}

void JNIHandleBlock::print_statistics() {
  int used_blocks = 0;
  int free_blocks = 0;
  int used_handles = 0;
  int free_handles = 0;
  JNIHandleBlock* block = _block_list;
  while (block != NULL) {
    if (block->_top > 0) {
      used_blocks++;
    } else {
      free_blocks++;
    }
    used_handles += block->_top;
    free_handles += (block_size_in_oops - block->_top);
    block = block->_block_list_link;
  }
  tty->print_cr("JNIHandleBlocks statistics");
  tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
  tty->print_cr("- blocks in use:    %d", used_blocks);
  tty->print_cr("- blocks free:      %d", free_blocks);
  tty->print_cr("- handles in use:   %d", used_handles);
  tty->print_cr("- handles free:     %d", free_handles);
}

#endif