// G1BarrierSet_merge

0 /*
1  * Copyright (c) 1998, 2018, Oracle and/or its affiliates. All rights reserved.
2  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
3  *
4  * This code is free software; you can redistribute it and/or modify it
5  * under the terms of the GNU General Public License version 2 only, as
6  * published by the Free Software Foundation.
7  *
8  * This code is distributed in the hope that it will be useful, but WITHOUT
9  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
10  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
11  * version 2 for more details (a copy is included in the LICENSE file that
12  * accompanied this code).
13  *
14  * You should have received a copy of the GNU General Public License version
15  * 2 along with this work; if not, write to the Free Software Foundation,
16  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
17  *
18  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
19  * or visit www.oracle.com if you need additional information or have any
20  * questions.
21  *
22  */
23 
24 #include "precompiled.hpp"
25 #include "gc/shared/oopStorage.inline.hpp"
26 #include "logging/log.hpp"
27 #include "memory/iterator.hpp"
28 #include "oops/oop.inline.hpp"
29 #include "runtime/jniHandles.hpp"
30 #include "runtime/mutexLocker.hpp"
31 #include "runtime/thread.inline.hpp"
32 #include "trace/traceMacros.hpp"
33 #include "utilities/align.hpp"
34 #include "utilities/debug.hpp"
35 #if INCLUDE_ALL_GCS
36 #include "gc/g1/g1BarrierSet.hpp"
37 #endif
38 
// Backing storage for JNI global and weak-global handles; created during
// VM startup by JNIHandles::initialize().
OopStorage* JNIHandles::_global_handles = NULL;
OopStorage* JNIHandles::_weak_global_handles = NULL;
41 
42 
43 jobject JNIHandles::make_local(oop obj) {
44   if (obj == NULL) {
45     return NULL;                // ignore null handles
46   } else {
47     Thread* thread = Thread::current();
48     assert(oopDesc::is_oop(obj), "not an oop");
49     assert(!current_thread_in_native(), "must not be in native");
50     return thread->active_handles()->allocate_handle(obj);
51   }
52 }
53 
54 
55 // optimized versions
56 
57 jobject JNIHandles::make_local(Thread* thread, oop obj) {
58   if (obj == NULL) {
59     return NULL;                // ignore null handles
60   } else {
61     assert(oopDesc::is_oop(obj), "not an oop");
62     assert(thread->is_Java_thread(), "not a Java thread");
63     assert(!current_thread_in_native(), "must not be in native");
64     return thread->active_handles()->allocate_handle(obj);
65   }
66 }
67 
68 
69 jobject JNIHandles::make_local(JNIEnv* env, oop obj) {
70   if (obj == NULL) {
71     return NULL;                // ignore null handles
72   } else {
73     JavaThread* thread = JavaThread::thread_from_jni_environment(env);
74     assert(oopDesc::is_oop(obj), "not an oop");
75     assert(!current_thread_in_native(), "must not be in native");
76     return thread->active_handles()->allocate_handle(obj);
77   }
78 }
79 
80 
81 static void report_handle_allocation_failure(AllocFailType alloc_failmode,
82                                              const char* handle_kind) {
83   if (alloc_failmode == AllocFailStrategy::EXIT_OOM) {
84     // Fake size value, since we don't know the min allocation size here.
85     vm_exit_out_of_memory(sizeof(oop), OOM_MALLOC_ERROR,
86                           "Cannot create %s JNI handle", handle_kind);
87   } else {
88     assert(alloc_failmode == AllocFailStrategy::RETURN_NULL, "invariant");
89   }
90 }
91 
// Create a JNI global handle referring to obj.  Returns NULL on storage
// exhaustion (or exits the VM, depending on alloc_failmode).  A null
// Handle yields a NULL result.
jobject JNIHandles::make_global(Handle obj, AllocFailType alloc_failmode) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  assert(!current_thread_in_native(), "must not be in native");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    assert(oopDesc::is_oop(obj()), "not an oop");
    oop* ptr = _global_handles->allocate();
    // Return NULL on allocation failure.
    if (ptr != NULL) {
      *ptr = obj();
      // The storage entry address itself is the (untagged) global handle.
      res = reinterpret_cast<jobject>(ptr);
    } else {
      report_handle_allocation_failure(alloc_failmode, "global");
    }
  } else {
    // Null obj is not an error; reset unhandled-oops bookkeeping in
    // CHECK_UNHANDLED_OOPS builds.
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }

  return res;
}
113 
114 
// Create a JNI weak global handle for obj.  The result is the storage
// entry address with weak_tag_value added, so weak handles can be
// distinguished from strong globals (see is_jweak / resolve_jweak).
jobject JNIHandles::make_weak_global(Handle obj, AllocFailType alloc_failmode) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  assert(!current_thread_in_native(), "must not be in native");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    assert(oopDesc::is_oop(obj()), "not an oop");
    oop* ptr = _weak_global_handles->allocate();
    // Return NULL on allocation failure.
    if (ptr != NULL) {
      *ptr = obj();
      // Tag the entry address so the handle is recognizable as weak.
      char* tptr = reinterpret_cast<char*>(ptr) + weak_tag_value;
      res = reinterpret_cast<jobject>(tptr);
    } else {
      report_handle_allocation_failure(alloc_failmode, "weak global");
    }
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return res;
}
136 
// Resolve a weak global handle to its referent (NULL if already cleared).
oop JNIHandles::resolve_jweak(jweak handle) {
  assert(handle != NULL, "precondition");
  assert(is_jweak(handle), "precondition");
  oop result = jweak_ref(handle);
#if INCLUDE_ALL_GCS
  if (result != NULL && UseG1GC) {
    // Resolving a jweak can resurrect an otherwise-unreachable object;
    // enqueue it for G1's SATB marking so concurrent marking sees it.
    G1BarrierSet::enqueue(result);
  }
#endif // INCLUDE_ALL_GCS
  return result;
}
148 
149 bool JNIHandles::is_global_weak_cleared(jweak handle) {
150   assert(handle != NULL, "precondition");
151   assert(is_jweak(handle), "not a weak handle");
152   return jweak_ref(handle) == NULL;
153 }
154 
155 void JNIHandles::destroy_global(jobject handle) {
156   if (handle != NULL) {
157     assert(!is_jweak(handle), "wrong method for detroying jweak");
158     jobject_ref(handle) = NULL;
159     _global_handles->release(&jobject_ref(handle));
160   }
161 }
162 
163 
164 void JNIHandles::destroy_weak_global(jobject handle) {
165   if (handle != NULL) {
166     assert(is_jweak(handle), "JNI handle not jweak");
167     jweak_ref(handle) = NULL;
168     _weak_global_handles->release(&jweak_ref(handle));
169   }
170 }
171 
172 
// Apply f to every strong (global) JNI handle root.
void JNIHandles::oops_do(OopClosure* f) {
  _global_handles->oops_do(f);
}
176 
177 
// Apply f to weak-global handles whose referents is_alive reports live.
void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  _weak_global_handles->weak_oops_do(is_alive, f);
}
181 
182 
// Apply f to all weak-global handle entries.
void JNIHandles::weak_oops_do(OopClosure* f) {
  _weak_global_handles->weak_oops_do(f);
}
186 
187 
// Create the OopStorage instances backing global and weak-global
// handles.  Called once at VM startup via jni_handles_init().
void JNIHandles::initialize() {
  _global_handles = new OopStorage("JNI Global",
                                   JNIGlobalAlloc_lock,
                                   JNIGlobalActive_lock);
  _weak_global_handles = new OopStorage("JNI Weak",
                                        JNIWeakAlloc_lock,
                                        JNIWeakActive_lock);
}
196 
197 
// True if ptr is a currently-allocated entry of storage (as opposed to a
// released entry or a pointer not belonging to that storage at all).
inline bool is_storage_handle(const OopStorage* storage, const oop* ptr) {
  return storage->allocation_status(ptr) == OopStorage::ALLOCATED_ENTRY;
}
201 
202 
// Classify a handle as global, weak-global, local, or invalid.
// Weak handles are identified by their tag; others are classified by
// asking the global OopStorage about the entry's allocation status.
jobjectRefType JNIHandles::handle_type(Thread* thread, jobject handle) {
  assert(handle != NULL, "precondition");
  jobjectRefType result = JNIInvalidRefType;
  if (is_jweak(handle)) {
    if (is_storage_handle(_weak_global_handles, &jweak_ref(handle))) {
      result = JNIWeakGlobalRefType;
    }
  } else {
    switch (_global_handles->allocation_status(&jobject_ref(handle))) {
    case OopStorage::ALLOCATED_ENTRY:
      result = JNIGlobalRefType;
      break;

    case OopStorage::UNALLOCATED_ENTRY:
      break;                    // Invalid global handle

    case OopStorage::INVALID_ENTRY:
      // Not in global storage.  Might be a local handle.
      if (is_local_handle(thread, handle) ||
          (thread->is_Java_thread() &&
           is_frame_handle((JavaThread*)thread, handle))) {
        result = JNILocalRefType;
      }
      break;

    default:
      ShouldNotReachHere();
    }
  }
  return result;
}
234 
235 
236 bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
237   assert(handle != NULL, "precondition");
238   JNIHandleBlock* block = thread->active_handles();
239 
240   // Look back past possible native calls to jni_PushLocalFrame.
241   while (block != NULL) {
242     if (block->chain_contains(handle)) {
243       return true;
244     }
245     block = block->pop_frame_link();
246   }
247   return false;
248 }
249 
250 
251 // Determine if the handle is somewhere in the current thread's stack.
252 // We easily can't isolate any particular stack frame the handle might
253 // come from, so we'll check the whole stack.
254 
255 bool JNIHandles::is_frame_handle(JavaThread* thr, jobject handle) {
256   assert(handle != NULL, "precondition");
257   // If there is no java frame, then this must be top level code, such
258   // as the java command executable, in which case, this type of handle
259   // is not permitted.
260   return (thr->has_last_Java_frame() &&
261          (void*)handle < (void*)thr->stack_base() &&
262          (void*)handle >= (void*)thr->last_Java_sp());
263 }
264 
265 
// True if handle is a live (strong) global JNI handle.
bool JNIHandles::is_global_handle(jobject handle) {
  assert(handle != NULL, "precondition");
  return !is_jweak(handle) && is_storage_handle(_global_handles, &jobject_ref(handle));
}
270 
271 
// True if handle is a live weak global JNI handle.
bool JNIHandles::is_weak_global_handle(jobject handle) {
  assert(handle != NULL, "precondition");
  return is_jweak(handle) && is_storage_handle(_weak_global_handles, &jweak_ref(handle));
}
276 
// Total memory footprint of the global handle storage, in bytes.
size_t JNIHandles::global_handle_memory_usage() {
  return _global_handles->total_memory_usage();
}
280 
// Total memory footprint of the weak-global handle storage, in bytes.
size_t JNIHandles::weak_global_handle_memory_usage() {
  return _weak_global_handles->total_memory_usage();
}
284 
285 
// We assume this is called at a safepoint: no lock is needed.
// Print global/weak handle counts to st (used by e.g. VM.info).
void JNIHandles::print_on(outputStream* st) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
  assert(_global_handles != NULL && _weak_global_handles != NULL,
         "JNIHandles not initialized");

  st->print_cr("JNI global refs: " SIZE_FORMAT ", weak refs: " SIZE_FORMAT,
               _global_handles->allocation_count(),
               _weak_global_handles->allocation_count());
  st->cr();
  st->flush();
}
298 
// Closure that runs oop verification on each root it visits.
class VerifyJNIHandles: public OopClosure {
public:
  virtual void do_oop(oop* root) {
    (*root)->verify();
  }
  // JNI handle storage holds full-width oops only; narrow oops are a bug.
  virtual void do_oop(narrowOop* root) { ShouldNotReachHere(); }
};
306 
307 void JNIHandles::verify() {
308   VerifyJNIHandles verify_handle;
309 
310   oops_do(&verify_handle);
311   weak_oops_do(&verify_handle);
312 }
313 
// This method is implemented here to avoid circular includes between
// jniHandles.hpp and thread.hpp.
// True iff the current thread is a JavaThread in state _thread_in_native;
// used by asserts guarding handle creation.
bool JNIHandles::current_thread_in_native() {
  Thread* thread = Thread::current();
  return (thread->is_Java_thread() &&
          JavaThread::current()->thread_state() == _thread_in_native);
}
321 
322 
// VM startup hook: create the JNI handle storages.
void jni_handles_init() {
  JNIHandles::initialize();
}
326 
327 
// Global JNIHandleBlock bookkeeping; mutated under
// JNIHandleBlockFreeList_lock in allocate_block/release_block.
int             JNIHandleBlock::_blocks_allocated     = 0;
JNIHandleBlock* JNIHandleBlock::_block_free_list      = NULL;
#ifndef PRODUCT
// List of all blocks ever allocated (debugging/statistics only).
JNIHandleBlock* JNIHandleBlock::_block_list           = NULL;
#endif
333 
334 
#ifdef ASSERT
// Debug-only reset: mark the block empty and null every handle slot so
// stale oops cannot be misread as live roots.
void JNIHandleBlock::zap() {
  // Zap block values
  _top = 0;
  for (int index = 0; index < block_size_in_oops; index++) {
    _handles[index] = NULL;
  }
}
#endif // ASSERT
344 
// Obtain an empty handle block, preferring (in order) the thread-local
// free list, the global free list, and finally a fresh C-heap allocation.
// thread may be NULL (then only the global paths are used).
JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread)  {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      block = new JNIHandleBlock();
      _blocks_allocated++;
      block->zap();
      #ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
      #endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  // (Re)initialize the block as empty, regardless of where it came from.
  block->_top = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  block->_planned_capacity = block_size_in_oops;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}
387 
388 
// Return a chain of blocks for reuse.  If thread is non-NULL the chain is
// prepended to the thread-local free list; otherwise it goes onto the
// global free list under the lock.  Blocks reachable via pop_frame_link
// are released recursively (should only happen on unbalanced
// Push/PopLocalFrame usage).
void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put returned block at the beginning of the thread-local free list.
  // Note that if thread == NULL, we use it as an implicit argument that
  // we _don't_ want the block to be kept on the free_handle_block.
  // See for instance JavaThread::exit().
  if (thread != NULL ) {
    block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if ( freelist != NULL ) {
      while ( block->_next != NULL ) block = block->_next;
      block->_next = freelist;
    }
    // Mark the chain as consumed so the global path below is skipped.
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we release blocks pointed to by the pop_frame_link.
    // This should never happen (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}
432 
433 
// Apply f to every live handle in this chain of blocks, then to any
// chains reachable through pop-frame links.
void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
        "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        oop* root = &(current->_handles)[index];
        oop value = *root;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != NULL && Universe::heap()->is_in_reserved(value)) {
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}
460 
461 
// Hand out a handle slot for obj, trying in order: the current last
// block, the free list of deleted slots, an unused trailing block, and
// finally either a free-list rebuild or a newly appended block, followed
// by a recursive retry.
jobject JNIHandleBlock::allocate_handle(oop obj) {
  assert(Universe::heap()->is_in_reserved(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation or the initial block got zapped when
    // entering a native function. If we have any following blocks they are
    // not valid anymore.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
             "only first block should have _free_list set");
      if (current->_top == 0) {
        // All blocks after the first clear trailing block are already cleared.
#ifdef ASSERT
        for (current = current->_next; current != NULL; current = current->_next) {
          assert(current->_top == 0, "trailing blocks must already be cleared");
        }
#endif
        break;
      }
      current->_top = 0;
      current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = &(_last->_handles)[_last->_top++];
    *handle = obj;
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = _free_list;
    // Each free slot stores the next free slot, cast through oop.
    _free_list = (oop*) *_free_list;
    *handle = obj;
    return (jobject) handle;
  }
  // Check if unused block follow last
  if (_last->_next != NULL) {
    // update last and retry
    _last = _last->_next;
    return allocate_handle(obj);
  }

  // No space available, we have to rebuild free list or expand
  if (_allocate_before_rebuild == 0) {
      rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across call.
    _last->_next = JNIHandleBlock::allocate_block(thread);
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj);  // retry
}
528 
// Link all deleted (NULL) handle slots in the full chain into _free_list
// for reuse, and decide via _allocate_before_rebuild whether to append
// new blocks before the next rebuild attempt.
void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      oop* handle = &(current->_handles)[index];
      if (*handle == NULL) {
        // this handle was cleared out by a delete call, reuse it
        *handle = (oop) _free_list;
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like - compute number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
}
557 
558 
559 bool JNIHandleBlock::contains(jobject handle) const {
560   return ((jobject)&_handles[0] <= handle && handle<(jobject)&_handles[_top]);
561 }
562 
563 
564 bool JNIHandleBlock::chain_contains(jobject handle) const {
565   for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) {
566     if (current->contains(handle)) {
567       return true;
568     }
569   }
570   return false;
571 }
572 
573 
574 size_t JNIHandleBlock::length() const {
575   size_t result = 1;
576   for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) {
577     result++;
578   }
579   return result;
580 }
581 
582 class CountJNIHandleClosure: public OopClosure {
583 private:
584   int _count;
585 public:
586   CountJNIHandleClosure(): _count(0) {}
587   virtual void do_oop(oop* ooph) { _count++; }
588   virtual void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
589   int count() { return _count; }
590 };
591 
592 const size_t JNIHandleBlock::get_number_of_live_handles() {
593   CountJNIHandleClosure counter;
594   oops_do(&counter);
595   return counter.count();
596 }
597 
// This method is not thread-safe, i.e., must be called while holding a lock on the
// structure.
// Footprint of this chain: one JNIHandleBlock-sized allocation per block.
size_t JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}
603 
604 
605 #ifndef PRODUCT
606 
// Non-product check: is handle inside ANY allocated handle block?
bool JNIHandles::is_local_handle(jobject handle) {
  return JNIHandleBlock::any_contains(handle);
}
610 
611 bool JNIHandleBlock::any_contains(jobject handle) {
612   assert(handle != NULL, "precondition");
613   for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) {
614     if (current->contains(handle)) {
615       return true;
616     }
617   }
618   return false;
619 }
620 
621 void JNIHandleBlock::print_statistics() {
622   int used_blocks = 0;
623   int free_blocks = 0;
624   int used_handles = 0;
625   int free_handles = 0;
626   JNIHandleBlock* block = _block_list;
627   while (block != NULL) {
628     if (block->_top > 0) {
629       used_blocks++;
630     } else {
631       free_blocks++;
632     }
633     used_handles += block->_top;
634     free_handles += (block_size_in_oops - block->_top);
635     block = block->_block_list_link;
636   }
637   tty->print_cr("JNIHandleBlocks statistics");
638   tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
639   tty->print_cr("- blocks in use:    %d", used_blocks);
640   tty->print_cr("- blocks free:      %d", free_blocks);
641   tty->print_cr("- handles in use:   %d", used_handles);
642   tty->print_cr("- handles free:     %d", free_handles);
643 }
644 
645 #endif
--- EOF ---