1 /*
  2  * Copyright (c) 1998, 2018, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "gc/shared/oopStorage.inline.hpp"
 27 #include "logging/log.hpp"
 28 #include "memory/iterator.hpp"
 29 #include "oops/oop.inline.hpp"
 30 #include "runtime/jniHandles.hpp"
 31 #include "runtime/mutexLocker.hpp"
 32 #include "runtime/thread.inline.hpp"
 33 #include "trace/traceMacros.hpp"
 34 #include "utilities/align.hpp"
 35 #include "utilities/debug.hpp"
 36 #if INCLUDE_ALL_GCS
 37 #include "gc/g1/g1BarrierSet.hpp"
 38 #endif
 39 
// Backing storage for strong and weak global JNI handles; both are
// created in JNIHandles::initialize().
OopStorage* JNIHandles::_global_handles = NULL;
OopStorage* JNIHandles::_weak_global_handles = NULL;
 42 
 43 
 44 jobject JNIHandles::make_local(oop obj) {
 45   if (obj == NULL) {
 46     return NULL;                // ignore null handles
 47   } else {
 48     Thread* thread = Thread::current();
 49     assert(oopDesc::is_oop(obj), "not an oop");
 50     assert(!current_thread_in_native(), "must not be in native");
 51     return thread->active_handles()->allocate_handle(obj);
 52   }
 53 }
 54 
 55 
 56 // optimized versions
 57 
 58 jobject JNIHandles::make_local(Thread* thread, oop obj) {
 59   if (obj == NULL) {
 60     return NULL;                // ignore null handles
 61   } else {
 62     assert(oopDesc::is_oop(obj), "not an oop");
 63     assert(thread->is_Java_thread(), "not a Java thread");
 64     assert(!current_thread_in_native(), "must not be in native");
 65     return thread->active_handles()->allocate_handle(obj);
 66   }
 67 }
 68 
 69 
 70 jobject JNIHandles::make_local(JNIEnv* env, oop obj) {
 71   if (obj == NULL) {
 72     return NULL;                // ignore null handles
 73   } else {
 74     JavaThread* thread = JavaThread::thread_from_jni_environment(env);
 75     assert(oopDesc::is_oop(obj), "not an oop");
 76     assert(!current_thread_in_native(), "must not be in native");
 77     return thread->active_handles()->allocate_handle(obj);
 78   }
 79 }
 80 
 81 
 82 static void report_handle_allocation_failure(AllocFailType alloc_failmode,
 83                                              const char* handle_kind) {
 84   if (alloc_failmode == AllocFailStrategy::EXIT_OOM) {
 85     // Fake size value, since we don't know the min allocation size here.
 86     vm_exit_out_of_memory(sizeof(oop), OOM_MALLOC_ERROR,
 87                           "Cannot create %s JNI handle", handle_kind);
 88   } else {
 89     assert(alloc_failmode == AllocFailStrategy::RETURN_NULL, "invariant");
 90   }
 91 }
 92 
 93 jobject JNIHandles::make_global(Handle obj, AllocFailType alloc_failmode) {
 94   assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
 95   assert(!current_thread_in_native(), "must not be in native");
 96   jobject res = NULL;
 97   if (!obj.is_null()) {
 98     // ignore null handles
 99     assert(oopDesc::is_oop(obj()), "not an oop");
100     oop* ptr = _global_handles->allocate();
101     // Return NULL on allocation failure.
102     if (ptr != NULL) {
103       *ptr = obj();
104       res = reinterpret_cast<jobject>(ptr);
105     } else {
106       report_handle_allocation_failure(alloc_failmode, "global");
107     }
108   } else {
109     CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
110   }
111 
112   return res;
113 }
114 
115 
116 jobject JNIHandles::make_weak_global(Handle obj, AllocFailType alloc_failmode) {
117   assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
118   assert(!current_thread_in_native(), "must not be in native");
119   jobject res = NULL;
120   if (!obj.is_null()) {
121     // ignore null handles
122     assert(oopDesc::is_oop(obj()), "not an oop");
123     oop* ptr = _weak_global_handles->allocate();
124     // Return NULL on allocation failure.
125     if (ptr != NULL) {
126       *ptr = obj();
127       char* tptr = reinterpret_cast<char*>(ptr) + weak_tag_value;
128       res = reinterpret_cast<jobject>(tptr);
129     } else {
130       report_handle_allocation_failure(alloc_failmode, "weak global");
131     }
132   } else {
133     CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
134   }
135   return res;
136 }
137 
// Resolve a weak global handle to its referent; returns NULL if the
// referent has been cleared.
oop JNIHandles::resolve_jweak(jweak handle) {
  assert(handle != NULL, "precondition");
  assert(is_jweak(handle), "precondition");
  oop result = jweak_ref(handle);
#if INCLUDE_ALL_GCS
  if (result != NULL && UseG1GC) {
    // NOTE(review): under G1 the resolved referent is enqueued with the
    // barrier set — presumably so the concurrent marker sees an object
    // made strongly reachable through a weak handle; confirm against
    // G1BarrierSet::enqueue's contract.
    G1BarrierSet::enqueue(result);
  }
#endif // INCLUDE_ALL_GCS
  return result;
}
149 
150 bool JNIHandles::is_global_weak_cleared(jweak handle) {
151   assert(handle != NULL, "precondition");
152   assert(is_jweak(handle), "not a weak handle");
153   return jweak_ref(handle) == NULL;
154 }
155 
156 void JNIHandles::destroy_global(jobject handle) {
157   if (handle != NULL) {
158     assert(!is_jweak(handle), "wrong method for detroying jweak");
159     jobject_ref(handle) = NULL;
160     _global_handles->release(&jobject_ref(handle));
161   }
162 }
163 
164 
165 void JNIHandles::destroy_weak_global(jobject handle) {
166   if (handle != NULL) {
167     assert(is_jweak(handle), "JNI handle not jweak");
168     jweak_ref(handle) = NULL;
169     _weak_global_handles->release(&jweak_ref(handle));
170   }
171 }
172 
173 
// Apply f to every strong global JNI handle.
void JNIHandles::oops_do(OopClosure* f) {
  _global_handles->oops_do(f);
}
177 
178 
// Apply f to weak global JNI handles, using is_alive to decide liveness
// (delegates entirely to OopStorage::weak_oops_do).
void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  _weak_global_handles->weak_oops_do(is_alive, f);
}
182 
183 
// Apply f to every weak global JNI handle (no liveness filter supplied
// here; delegates to OopStorage::weak_oops_do).
void JNIHandles::weak_oops_do(OopClosure* f) {
  _weak_global_handles->weak_oops_do(f);
}
187 
188 
// Create the strong and weak global handle storages. Called once during
// VM startup via jni_handles_init().
void JNIHandles::initialize() {
  _global_handles = new OopStorage("JNI Global",
                                   JNIGlobalAlloc_lock,
                                   JNIGlobalActive_lock);
  _weak_global_handles = new OopStorage("JNI Weak",
                                        JNIWeakAlloc_lock,
                                        JNIWeakActive_lock);
}
197 
198 
// True iff ptr is a currently-allocated entry of the given storage.
inline bool is_storage_handle(const OopStorage* storage, const oop* ptr) {
  return storage->allocation_status(ptr) == OopStorage::ALLOCATED_ENTRY;
}
202 
203 
// Classify handle for the given thread as weak-global, global, local,
// or invalid (JNIInvalidRefType). Presumably backs the JNI
// GetObjectRefType entry point — confirm at callers.
jobjectRefType JNIHandles::handle_type(Thread* thread, jobject handle) {
  assert(handle != NULL, "precondition");
  jobjectRefType result = JNIInvalidRefType;
  if (is_jweak(handle)) {
    // Weak-tagged handle: valid only if its slot is allocated in the
    // weak storage.
    if (is_storage_handle(_weak_global_handles, &jweak_ref(handle))) {
      result = JNIWeakGlobalRefType;
    }
  } else {
    switch (_global_handles->allocation_status(&jobject_ref(handle))) {
    case OopStorage::ALLOCATED_ENTRY:
      result = JNIGlobalRefType;
      break;

    case OopStorage::UNALLOCATED_ENTRY:
      break;                    // Invalid global handle

    case OopStorage::INVALID_ENTRY:
      // Not in global storage.  Might be a local handle.
      if (is_local_handle(thread, handle) ||
          (thread->is_Java_thread() &&
           is_frame_handle((JavaThread*)thread, handle))) {
        result = JNILocalRefType;
      }
      break;

    default:
      ShouldNotReachHere();
    }
  }
  return result;
}
235 
236 
237 bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
238   assert(handle != NULL, "precondition");
239   JNIHandleBlock* block = thread->active_handles();
240 
241   // Look back past possible native calls to jni_PushLocalFrame.
242   while (block != NULL) {
243     if (block->chain_contains(handle)) {
244       return true;
245     }
246     block = block->pop_frame_link();
247   }
248   return false;
249 }
250 
251 
// Determine if the handle is somewhere in the current thread's stack.
// We can't easily isolate the particular stack frame the handle might
// come from, so we check the whole stack.
255 
256 bool JNIHandles::is_frame_handle(JavaThread* thr, jobject handle) {
257   assert(handle != NULL, "precondition");
258   // If there is no java frame, then this must be top level code, such
259   // as the java command executable, in which case, this type of handle
260   // is not permitted.
261   return (thr->has_last_Java_frame() &&
262          (void*)handle < (void*)thr->stack_base() &&
263          (void*)handle >= (void*)thr->last_Java_sp());
264 }
265 
266 
267 bool JNIHandles::is_global_handle(jobject handle) {
268   assert(handle != NULL, "precondition");
269   return !is_jweak(handle) && is_storage_handle(_global_handles, &jobject_ref(handle));
270 }
271 
272 
273 bool JNIHandles::is_weak_global_handle(jobject handle) {
274   assert(handle != NULL, "precondition");
275   return is_jweak(handle) && is_storage_handle(_weak_global_handles, &jweak_ref(handle));
276 }
277 
// Bytes used by the strong global handle storage.
size_t JNIHandles::global_handle_memory_usage() {
  return _global_handles->total_memory_usage();
}
281 
// Bytes used by the weak global handle storage.
size_t JNIHandles::weak_global_handle_memory_usage() {
  return _weak_global_handles->total_memory_usage();
}
285 
286 
287 // We assume this is called at a safepoint: no lock is needed.
288 void JNIHandles::print_on(outputStream* st) {
289   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
290   assert(_global_handles != NULL && _weak_global_handles != NULL,
291          "JNIHandles not initialized");
292 
293   st->print_cr("JNI global refs: " SIZE_FORMAT ", weak refs: " SIZE_FORMAT,
294                _global_handles->allocation_count(),
295                _weak_global_handles->allocation_count());
296   st->cr();
297   st->flush();
298 }
299 
300 class VerifyJNIHandles: public OopClosure {
301 public:
302   virtual void do_oop(oop* root) {
303     (*root)->verify();
304   }
305   virtual void do_oop(narrowOop* root) { ShouldNotReachHere(); }
306 };
307 
308 void JNIHandles::verify() {
309   VerifyJNIHandles verify_handle;
310 
311   oops_do(&verify_handle);
312   weak_oops_do(&verify_handle);
313 }
314 
315 // This method is implemented here to avoid circular includes between
316 // jniHandles.hpp and thread.hpp.
317 bool JNIHandles::current_thread_in_native() {
318   Thread* thread = Thread::current();
319   return (thread->is_Java_thread() &&
320           JavaThread::current()->thread_state() == _thread_in_native);
321 }
322 
323 
// VM-startup hook: create the JNI handle storages.
void jni_handles_init() {
  JNIHandles::initialize();
}
327 
328 
// Count of blocks ever heap-allocated (only incremented, in allocate_block).
int             JNIHandleBlock::_blocks_allocated     = 0;
// Global free list of recycled blocks, guarded by JNIHandleBlockFreeList_lock.
JNIHandleBlock* JNIHandleBlock::_block_free_list      = NULL;
#ifndef PRODUCT
// Debug-only list of every allocated block; used by any_contains() and
// print_statistics().
JNIHandleBlock* JNIHandleBlock::_block_list           = NULL;
#endif
334 
335 
336 #ifdef ASSERT
// Reset this block to empty: no in-use slots (_top = 0) and every slot
// NULLed. Compiled only under ASSERT (see the surrounding #ifdef).
void JNIHandleBlock::zap() {
  // Zap block values
  _top = 0;
  for (int index = 0; index < block_size_in_oops; index++) {
    _handles[index] = NULL;
  }
}
344 #endif // ASSERT
345 
// Obtain an empty block, trying in order:
//   1. the thread-local free list (no locking),
//   2. the global free list (under JNIHandleBlockFreeList_lock),
//   3. a fresh heap allocation.
// The returned block is reset to an empty, unlinked state.
JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread)  {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      block = new JNIHandleBlock();
      _blocks_allocated++;
      block->zap();
      #ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
      #endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  // Reset the block regardless of where it came from.
  block->_top = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  block->_planned_capacity = block_size_in_oops;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}
388 
389 
// Return a block (and the rest of its _next chain) for reuse. With a
// non-NULL thread the chain is prepended to that thread's local free
// list; with thread == NULL it goes to the global free list instead.
// Blocks hanging off the pop_frame_link are released recursively.
void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put returned block at the beginning of the thread-local free list.
  // Note that if thread == NULL, we use it as an implicit argument that
  // we _don't_ want the block to be kept on the free_handle_block.
  // See for instance JavaThread::exit().
  if (thread != NULL ) {
    block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if ( freelist != NULL ) {
      while ( block->_next != NULL ) block = block->_next;
      block->_next = freelist;
    }
    // Whole chain handed to the thread; skip the global-list path below.
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we release blocks pointed to by the pop_frame_link.
    // This should never happen (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}
433 
434 
// Apply f to every heap-pointer slot in this chain of blocks and in all
// chains reachable through pop-frame links. Slots holding NULL or
// non-heap values (free-list links) are skipped.
void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
        "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        oop* root = &(current->_handles)[index];
        oop value = *root;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != NULL && Universe::heap()->is_in_reserved(value)) {
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}
461 
462 
// Hand out a slot in this chain holding obj; called on the first block
// of a chain. Strategies, tried in order:
//   1. bump-allocate from the current last block,
//   2. pop a slot from the free list of deleted handles,
//   3. advance _last into an already-linked unused trailing block,
//   4. rebuild the free list or append a new block, then retry.
jobject JNIHandleBlock::allocate_handle(oop obj) {
  assert(Universe::heap()->is_in_reserved(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation or the initial block got zapped when
    // entering a native function. If we have any following blocks they are
    // not valid anymore.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
             "only first block should have _free_list set");
      if (current->_top == 0) {
        // All blocks after the first clear trailing block are already cleared.
#ifdef ASSERT
        for (current = current->_next; current != NULL; current = current->_next) {
          assert(current->_top == 0, "trailing blocks must already be cleared");
        }
#endif
        break;
      }
      current->_top = 0;
      current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = &(_last->_handles)[_last->_top++];
    *handle = obj;
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = _free_list;
    // The slot stores the next free-list entry as an oop-sized link.
    _free_list = (oop*) *_free_list;
    *handle = obj;
    return (jobject) handle;
  }
  // Check if unused block follow last
  if (_last->_next != NULL) {
    // update last and retry
    _last = _last->_next;
    return allocate_handle(obj);
  }

  // No space available, we have to rebuild free list or expand
  if (_allocate_before_rebuild == 0) {
      rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across call.
    _last->_next = JNIHandleBlock::allocate_block(thread);
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj);  // retry
}
529 
// Thread every cleared (NULL) slot of the (fully used) chain onto
// _free_list, storing the next link in the slot itself. Then set
// _allocate_before_rebuild: if fewer than half the slots were free, that
// many new blocks are appended before the next rebuild is attempted.
void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      oop* handle = &(current->_handles)[index];
      if (*handle == NULL) {
        // this handle was cleared out by a delete call, reuse it
        *handle = (oop) _free_list;
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like - compute number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
}
558 
559 
560 bool JNIHandleBlock::contains(jobject handle) const {
561   return ((jobject)&_handles[0] <= handle && handle<(jobject)&_handles[_top]);
562 }
563 
564 
565 bool JNIHandleBlock::chain_contains(jobject handle) const {
566   for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) {
567     if (current->contains(handle)) {
568       return true;
569     }
570   }
571   return false;
572 }
573 
574 
575 size_t JNIHandleBlock::length() const {
576   size_t result = 1;
577   for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) {
578     result++;
579   }
580   return result;
581 }
582 
583 class CountJNIHandleClosure: public OopClosure {
584 private:
585   int _count;
586 public:
587   CountJNIHandleClosure(): _count(0) {}
588   virtual void do_oop(oop* ooph) { _count++; }
589   virtual void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
590   int count() { return _count; }
591 };
592 
593 const size_t JNIHandleBlock::get_number_of_live_handles() {
594   CountJNIHandleClosure counter;
595   oops_do(&counter);
596   return counter.count();
597 }
598 
599 // This method is not thread-safe, i.e., must be called while holding a lock on the
600 // structure.
// Total footprint of this chain: number of blocks times block size.
size_t JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}
604 
605 
606 #ifndef PRODUCT
607 
// Debug-only (non-PRODUCT): true iff handle lies in ANY allocated handle
// block, regardless of owning thread.
bool JNIHandles::is_local_handle(jobject handle) {
  return JNIHandleBlock::any_contains(handle);
}
611 
612 bool JNIHandleBlock::any_contains(jobject handle) {
613   assert(handle != NULL, "precondition");
614   for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) {
615     if (current->contains(handle)) {
616       return true;
617     }
618   }
619   return false;
620 }
621 
622 void JNIHandleBlock::print_statistics() {
623   int used_blocks = 0;
624   int free_blocks = 0;
625   int used_handles = 0;
626   int free_handles = 0;
627   JNIHandleBlock* block = _block_list;
628   while (block != NULL) {
629     if (block->_top > 0) {
630       used_blocks++;
631     } else {
632       free_blocks++;
633     }
634     used_handles += block->_top;
635     free_handles += (block_size_in_oops - block->_top);
636     block = block->_block_list_link;
637   }
638   tty->print_cr("JNIHandleBlocks statistics");
639   tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
640   tty->print_cr("- blocks in use:    %d", used_blocks);
641   tty->print_cr("- blocks free:      %d", free_blocks);
642   tty->print_cr("- handles in use:   %d", used_handles);
643   tty->print_cr("- handles free:     %d", free_handles);
644 }
645 
646 #endif