/*
 * Copyright (c) 1998, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/mutexLocker.hpp"
#ifdef TARGET_OS_FAMILY_linux
# include "thread_linux.inline.hpp"
#endif
#ifdef TARGET_OS_FAMILY_solaris
# include "thread_solaris.inline.hpp"
#endif
#ifdef TARGET_OS_FAMILY_windows
# include "thread_windows.inline.hpp"
#endif
#ifdef TARGET_OS_FAMILY_bsd
# include "thread_bsd.inline.hpp"
#endif


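// Storage roots for JNI handles: one block chain for global handles, one for
// weak global handles, plus a sentinel object stored into deleted handle slots
// so that allocate_handle can later reuse them.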
JNIHandleBlock* JNIHandles::_global_handles       = NULL;
JNIHandleBlock* JNIHandles::_weak_global_handles  = NULL;
oop             JNIHandles::_deleted_handle       = NULL;


jobject JNIHandles::make_local(oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    Thread* thread = Thread::current();
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


// optimized versions

jobject JNIHandles::make_local(Thread* thread, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


jobject JNIHandles::make_local(JNIEnv* env, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    JavaThread* thread = JavaThread::thread_from_jni_environment(env);
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


jobject JNIHandles::make_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }

  return res;
}


jobject JNIHandles::make_weak_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _weak_global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return res;
}

jmethodID JNIHandles::make_jmethod_id(methodHandle mh) {
  return (jmethodID) make_weak_global(mh);
}



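// Re-points an existing jmethodID at a different method oop so that callers
// holding the jmethodID resolve to the replacement method (used, for example,
// when methods are replaced during class redefinition).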
void JNIHandles::change_method_associated_with_jmethod_id(jmethodID jmid, methodHandle mh) {
  MutexLocker ml(JNIGlobalHandle_lock); // Is this necessary?
  Handle obj = (Handle)mh;
  oop* jobj = (oop*)jmid;
  *jobj = obj();
}


void JNIHandles::destroy_global(jobject handle) {
  if (handle != NULL) {
    assert(is_global_handle(handle), "Invalid delete of global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it
  }
}


void JNIHandles::destroy_weak_global(jobject handle) {
  if (handle != NULL) {
    assert(!CheckJNICalls || is_weak_global_handle(handle), "Invalid delete of weak global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it
  }
}

void JNIHandles::destroy_jmethod_id(jmethodID mid) {
  destroy_weak_global((jobject)mid);
}


void JNIHandles::oops_do(OopClosure* f) {
  f->do_oop(&_deleted_handle);
  _global_handles->oops_do(f);
}


void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  _weak_global_handles->weak_oops_do(is_alive, f);
}


void JNIHandles::initialize() {
  _global_handles      = JNIHandleBlock::allocate_block();
  _weak_global_handles = JNIHandleBlock::allocate_block();
  EXCEPTION_MARK;
  // We will never reach the CATCH below since Exceptions::_throw will cause
  // the VM to exit if an exception is thrown during initialization
  klassOop k      = SystemDictionary::Object_klass();
  _deleted_handle = instanceKlass::cast(k)->allocate_permanent_instance(CATCH);
}


bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
  JNIHandleBlock* block = thread->active_handles();

  // Look back past possible native calls to jni_PushLocalFrame.
  while (block != NULL) {
    if (block->chain_contains(handle)) {
      return true;
    }
    block = block->pop_frame_link();
  }
  return false;
}


// Determine if the handle is somewhere in the current thread's stack.
// We can't easily isolate any particular stack frame the handle might
// come from, so we'll check the whole stack.

bool JNIHandles::is_frame_handle(JavaThread* thr, jobject obj) {
  // If there is no Java frame, then this must be top level code, such
  // as the java command executable, in which case this type of handle
  // is not permitted.
  return (thr->has_last_Java_frame() &&
         (void*)obj < (void*)thr->stack_base() &&
         (void*)obj >= (void*)thr->last_Java_sp());
}


bool JNIHandles::is_global_handle(jobject handle) {
  return _global_handles->chain_contains(handle);
}


bool JNIHandles::is_weak_global_handle(jobject handle) {
  return _weak_global_handles->chain_contains(handle);
}

long JNIHandles::global_handle_memory_usage() {
  return _global_handles->memory_usage();
}

long JNIHandles::weak_global_handle_memory_usage() {
  return _weak_global_handles->memory_usage();
}


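// Treats every object as live. Passing it to weak_oops_do makes the weak
// handle walk behave like a strong traversal, which is what the counting and
// verification code below wants (no handles should be cleared there).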
class AlwaysAliveClosure: public BoolObjectClosure {
public:
  bool do_object_b(oop obj) { return true; }
  void do_object(oop obj) { assert(false, "Don't call"); }
};

class CountHandleClosure: public OopClosure {
private:
  int _count;
public:
  CountHandleClosure(): _count(0) {}
  virtual void do_oop(oop* unused) {
    _count++;
  }
  virtual void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
  int count() { return _count; }
};

// We assume this is called at a safepoint: no lock is needed.
void JNIHandles::print_on(outputStream* st) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
  assert(_global_handles != NULL && _weak_global_handles != NULL,
         "JNIHandles not initialized");

  CountHandleClosure global_handle_count;
  AlwaysAliveClosure always_alive;
  oops_do(&global_handle_count);
  weak_oops_do(&always_alive, &global_handle_count);

  st->print_cr("JNI global references: %d", global_handle_count.count());
  st->cr();
  st->flush();
}

class VerifyHandleClosure: public OopClosure {
public:
  virtual void do_oop(oop* root) {
    (*root)->verify();
  }
  virtual void do_oop(narrowOop* root) { ShouldNotReachHere(); }
};

void JNIHandles::verify() {
  VerifyHandleClosure verify_handle;
  AlwaysAliveClosure always_alive;

  oops_do(&verify_handle);
  weak_oops_do(&always_alive, &verify_handle);
}



void jni_handles_init() {
  JNIHandles::initialize();
}


int             JNIHandleBlock::_blocks_allocated     = 0;
JNIHandleBlock* JNIHandleBlock::_block_free_list      = NULL;
#ifndef PRODUCT
JNIHandleBlock* JNIHandleBlock::_block_list           = NULL;
#endif


void JNIHandleBlock::zap() {
  // Zap block values
  _top  = 0;
  for (int index = 0; index < block_size_in_oops; index++) {
    _handles[index] = badJNIHandle;
  }
}

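// Hands out a block: preferably from the calling thread's cached free block,
// otherwise from the global block free list (taken without a safepoint check,
// see below), otherwise freshly allocated.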
JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread)  {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      block = new JNIHandleBlock();
      _blocks_allocated++;
      if (TraceJNIHandleAllocation) {
        tty->print_cr("JNIHandleBlock " INTPTR_FORMAT " allocated (%d total blocks)",
                      block, _blocks_allocated);
      }
      if (ZapJNIHandleArea) block->zap();
      #ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
      #endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  block->_top  = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}


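// Returns a block (plus anything chained through its _next field) either to
// the owning thread's local cache or, when thread is NULL, to the global
// block free list.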
void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put returned block at the beginning of the thread-local free list.
  // Note that if thread == NULL, we use it as an implicit argument that
  // we _don't_ want the block to be kept on the thread's free_handle_block list.
  // See for instance JavaThread::exit().
  if (thread != NULL ) {
    if (ZapJNIHandleArea) block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if ( freelist != NULL ) {
      while ( block->_next != NULL ) block = block->_next;
      block->_next = freelist;
    }
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      if (ZapJNIHandleArea) block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we also release blocks pointed to by the pop_frame_link.
    // This should never be needed (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}


void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
        "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        oop* root = &(current->_handles)[index];
        oop value = *root;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != NULL && Universe::heap()->is_in_reserved(value)) {
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if the current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}


void JNIHandleBlock::weak_oops_do(BoolObjectClosure* is_alive,
                                  OopClosure* f) {
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    assert(current->pop_frame_link() == NULL,
      "blocks holding weak global JNI handles should not have pop frame link set");
    for (int index = 0; index < current->_top; index++) {
      oop* root = &(current->_handles)[index];
      oop value = *root;
      // traverse heap pointers only, not deleted handles or free list pointers
      if (value != NULL && Universe::heap()->is_in_reserved(value)) {
        if (is_alive->do_object_b(value)) {
          // The weakly referenced object is alive, update pointer
          f->do_oop(root);
        } else {
          // The weakly referenced object is not alive, clear the reference by storing NULL
          if (TraceReferenceGC) {
            tty->print_cr("Clearing JNI weak reference (" INTPTR_FORMAT ")", root);
          }
          *root = NULL;
        }
      }
    }
    // the next handle block is valid only if the current block is full
    if (current->_top < block_size_in_oops) {
      break;
    }
  }

  /*
   * JVMTI data structures may also contain weak oops.  The iteration of them
   * is placed here so that we don't need to add it to each of the collectors.
   */
  JvmtiExport::weak_oops_do(is_alive, f);
}


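// Allocates one handle in this block chain. Allocation order: the slot after
// the last used one in _last, then the free list of deleted slots, then any
// unused trailing block, and finally either a free list rebuild or a newly
// appended block (the _allocate_before_rebuild counter decides which).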
jobject JNIHandleBlock::allocate_handle(oop obj) {
  assert(Universe::heap()->is_in_reserved(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation or the initial block got zapped when
    // entering a native function. If we have any following blocks they are
    // not valid anymore.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
             "only first block should have _free_list set");
      current->_top = 0;
      if (ZapJNIHandleArea) current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    if (ZapJNIHandleArea) zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = &(_last->_handles)[_last->_top++];
    *handle = obj;
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = _free_list;
    _free_list = (oop*) *_free_list;
    *handle = obj;
    return (jobject) handle;
  }
  // Check if an unused block follows the last block
  if (_last->_next != NULL) {
    // update last and retry
    _last = _last->_next;
    return allocate_handle(obj);
  }

  // No space available, we have to rebuild the free list or expand
  if (_allocate_before_rebuild == 0) {
    rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across the call.
    _last->_next = JNIHandleBlock::allocate_block(thread);
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj);  // retry
}


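// Scans every (full) block in the chain for slots holding the deleted-handle
// sentinel and links them into _free_list. If fewer than half of the slots
// were reclaimable, sets _allocate_before_rebuild so that new blocks are
// appended before the next rebuild is attempted.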
void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      oop* handle = &(current->_handles)[index];
      if (*handle == JNIHandles::deleted_handle()) {
        // this handle was cleared out by a delete call, reuse it
        *handle = (oop) _free_list;
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild the free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like - compute number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
  if (TraceJNIHandleAllocation) {
    tty->print_cr("Rebuild free list JNIHandleBlock " INTPTR_FORMAT " blocks=%d used=%d free=%d add=%d",
      this, blocks, total-free, free, _allocate_before_rebuild);
  }
}


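// True if the handle points into the used portion of this block's handle array.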
bool JNIHandleBlock::contains(jobject handle) const {
  return ((jobject)&_handles[0] <= handle && handle < (jobject)&_handles[_top]);
}


bool JNIHandleBlock::chain_contains(jobject handle) const {
  for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}


int JNIHandleBlock::length() const {
  int result = 1;
  for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) {
    result++;
  }
  return result;
}

// This method is not thread-safe, i.e., it must be called while holding a lock
// on the structure.
long JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}


#ifndef PRODUCT

bool JNIHandleBlock::any_contains(jobject handle) {
  for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}

void JNIHandleBlock::print_statistics() {
  int used_blocks = 0;
  int free_blocks = 0;
  int used_handles = 0;
  int free_handles = 0;
  JNIHandleBlock* block = _block_list;
  while (block != NULL) {
    if (block->_top > 0) {
      used_blocks++;
    } else {
      free_blocks++;
    }
    used_handles += block->_top;
    free_handles += (block_size_in_oops - block->_top);
    block = block->_block_list_link;
  }
  tty->print_cr("JNIHandleBlocks statistics");
  tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
  tty->print_cr("- blocks in use:    %d", used_blocks);
  tty->print_cr("- blocks free:      %d", free_blocks);
  tty->print_cr("- handles in use:   %d", used_handles);
  tty->print_cr("- handles free:     %d", free_handles);
}

#endif