// ============================================================================
// NOTE(review): This section is NOT compilable C++. It is a whitespace-mangled
// paste of a SIDE-BY-SIDE DIFF — it appears to be of OpenJDK HotSpot's
// sharedRuntime.cpp (confirm against version control). Each physical line
// below concatenates many original source lines, each prefixed with its
// original file line number (2554, 2555, ...). The literal '|' character
// (mid-way through the third physical line, and again at the very end)
// separates the LEFT column (old revision) from the RIGHT column (new
// revision). Do not compile, lint, or hand-edit this text as code; restore
// the real file from version control. The comments interleaved below
// describe what each mangled line contains and the visible differences
// between the two revisions.
// ============================================================================
// [LEFT column, old revision, orig. lines 2554-2604]
// - Tail of a function that copies a signature into _saved_sig via
//   NEW_C_HEAP_ARRAY + memcpy (its start is not visible here).
// - AdapterHandlerEntry::compare_code(): returns false on any length or byte
//   mismatch against _saved_code; note the total_args_passed / sig_bt
//   parameters are unused in the visible body. Guarded by a trailing #endif.
// - Head of nmethod* AdapterHandlerLibrary::create_native_wrapper(
//   methodHandle method, int compile_id): takes the compile_id from the
//   CALLER and RETURNS the generated nmethod. Early-returns if
//   method->code() is already set (checked under AdapterHandlerLibrary_lock).
2554 _total_args_passed = total_args_passed; 2555 _saved_sig = NEW_C_HEAP_ARRAY(BasicType, _total_args_passed, mtCode); 2556 memcpy(_saved_sig, sig_bt, _total_args_passed * sizeof(BasicType)); 2557 } 2558 2559 2560 bool AdapterHandlerEntry::compare_code(unsigned char* buffer, int length, int total_args_passed, BasicType* sig_bt) { 2561 if (length != _code_length) { 2562 return false; 2563 } 2564 for (int i = 0; i < length; i++) { 2565 if (buffer[i] != _saved_code[i]) { 2566 return false; 2567 } 2568 } 2569 return true; 2570 } 2571 #endif 2572 2573 2574 // Create a native wrapper for this native method. The wrapper converts the 2575 // java compiled calling convention to the native convention, handlizes 2576 // arguments, and transitions to native. On return from the native we transition 2577 // back to java blocking if a safepoint is in progress. 2578 nmethod *AdapterHandlerLibrary::create_native_wrapper(methodHandle method, int compile_id) { 2579 ResourceMark rm; 2580 nmethod* nm = NULL; 2581 2582 assert(method->is_native(), "must be native"); 2583 assert(method->is_method_handle_intrinsic() || 2584 method->has_native_function(), "must have something valid to call!"); 2585 2586 { 2587 // perform the work while holding the lock, but perform any printing outside the lock 2588 MutexLocker mu(AdapterHandlerLibrary_lock); 2589 // See if somebody beat us to it 2590 nm = method->code(); 2591 if (nm) { 2592 return nm; 2593 } 2594 2595 ResourceMark rm; 2596 2597 BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache 2598 if (buf != NULL) { 2599 CodeBuffer buffer(buf); 2600 double locs_buf[20]; 2601 buffer.insts()->initialize_shared_locs((relocInfo*)locs_buf, sizeof(locs_buf) / sizeof(relocInfo)); 2602 MacroAssembler _masm(&buffer); 2603 2604 // Fill in the signature array, for the calling-convention call. 
// [LEFT column continued, orig. lines 2605-2643]
// Middle of the OLD create_native_wrapper: builds the sig_bt / regs arrays
// from the method signature (receiver first for non-static; a T_VOID filler
// slot follows each T_LONG / T_DOUBLE), computes the compiled-Java calling
// convention (outgoing registers for method-handle intrinsic trampolines),
// then generates the wrapper via SharedRuntime::generate_native_wrapper —
// here written as a multi-line call, one argument per line.
2605 const int total_args_passed = method->size_of_parameters(); 2606 2607 BasicType* sig_bt = NEW_RESOURCE_ARRAY(BasicType, total_args_passed); 2608 VMRegPair* regs = NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed); 2609 int i=0; 2610 if( !method->is_static() ) // Pass in receiver first 2611 sig_bt[i++] = T_OBJECT; 2612 SignatureStream ss(method->signature()); 2613 for( ; !ss.at_return_type(); ss.next()) { 2614 sig_bt[i++] = ss.type(); // Collect remaining bits of signature 2615 if( ss.type() == T_LONG || ss.type() == T_DOUBLE ) 2616 sig_bt[i++] = T_VOID; // Longs & doubles take 2 Java slots 2617 } 2618 assert(i == total_args_passed, ""); 2619 BasicType ret_type = ss.type(); 2620 2621 // Now get the compiled-Java layout as input (or output) arguments. 2622 // NOTE: Stubs for compiled entry points of method handle intrinsics 2623 // are just trampolines so the argument registers must be outgoing ones. 2624 const bool is_outgoing = method->is_method_handle_intrinsic(); 2625 int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed, is_outgoing); 2626 2627 // Generate the compiled-to-native wrapper code 2628 nm = SharedRuntime::generate_native_wrapper(&_masm, 2629 method, 2630 compile_id, 2631 sig_bt, 2632 regs, 2633 ret_type); 2634 } 2635 } 2636 2637 // Must unlock before calling set_code 2638 2639 // Install the generated code. 2640 if (nm != NULL) { 2641 if (PrintCompilation) { 2642 ttyLocker ttyl; 2643 CompileTask::print_compilation(tty, nm, method->is_static() ? 
// [LEFT column tail + '|' column break + RIGHT column head]
// Left tail (orig. 2643-2671): in the OLD revision, method->set_code() and
// nm->post_compiled_method_load_event() run AFTER AdapterHandlerLibrary_lock
// is released (see the "Must unlock before calling set_code" comment above);
// on a NULL nm, CompileBroker::handle_full_code_cache() is called; returns nm.
// Then SharedRuntime::block_for_jni_critical (a JRT_ENTRY_NO_ASYNC): no-op if
// the thread is already in a JNI critical region, otherwise lock/unlock a
// GC_locker critical section to let a pending GC proceed. Ends with the head
// of an '#ifdef HAVE_DTRACE_H' comment, cut off mid-sentence at the '|'.
// Right head (orig. 2554-2577 of the NEW revision): the _saved_sig tail and
// compare_code() are byte-identical to the left column; the
// create_native_wrapper block comment has been restyled from '//' lines to a
// '/** ... */' Javadoc-style block ("handlizes" reworded to "handles").
"(static)" : ""); 2644 } 2645 method->set_code(method, nm); 2646 nm->post_compiled_method_load_event(); 2647 } else { 2648 // CodeCache is full, disable compilation 2649 CompileBroker::handle_full_code_cache(); 2650 } 2651 return nm; 2652 } 2653 2654 JRT_ENTRY_NO_ASYNC(void, SharedRuntime::block_for_jni_critical(JavaThread* thread)) 2655 assert(thread == JavaThread::current(), "must be"); 2656 // The code is about to enter a JNI lazy critical native method and 2657 // _needs_gc is true, so if this thread is already in a critical 2658 // section then just return, otherwise this thread should block 2659 // until needs_gc has been cleared. 2660 if (thread->in_critical()) { 2661 return; 2662 } 2663 // Lock and unlock a critical section to give the system a chance to block 2664 GC_locker::lock_critical(thread); 2665 GC_locker::unlock_critical(thread); 2666 JRT_END 2667 2668 #ifdef HAVE_DTRACE_H 2669 // Create a dtrace nmethod for this method. The wrapper converts the 2670 // java compiled calling convention to the native convention, makes a dummy call 2671 // (actually nops for the size of the call instruction, which become a trap if | 2554 _total_args_passed = total_args_passed; 2555 _saved_sig = NEW_C_HEAP_ARRAY(BasicType, _total_args_passed, mtCode); 2556 memcpy(_saved_sig, sig_bt, _total_args_passed * sizeof(BasicType)); 2557 } 2558 2559 2560 bool AdapterHandlerEntry::compare_code(unsigned char* buffer, int length, int total_args_passed, BasicType* sig_bt) { 2561 if (length != _code_length) { 2562 return false; 2563 } 2564 for (int i = 0; i < length; i++) { 2565 if (buffer[i] != _saved_code[i]) { 2566 return false; 2567 } 2568 } 2569 return true; 2570 } 2571 #endif 2572 2573 2574 /** 2575 * Create a native wrapper for this native method. The wrapper converts the 2576 * Java-compiled calling convention to the native convention, handles 2577 * arguments, and transitions to native. 
// [RIGHT column, new revision, orig. lines 2577-2611]
// Head of the NEW create_native_wrapper. Key visible differences vs. the old
// revision:
//   * Signature changed: void AdapterHandlerLibrary::create_native_wrapper(
//     methodHandle method) — no compile_id parameter, no nmethod* return.
//   * compile_id is now obtained INSIDE, under the lock, via
//     CompileBroker::assign_compile_id(method, standard_entry_bci), and the
//     function bails out early when it returns 0.
//   * The "somebody beat us to it" check now returns void (nm != NULL
//     spelled explicitly instead of 'if (nm)').
// The buffer_blob / CodeBuffer / locs_buf / MacroAssembler setup is
// otherwise the same as the old revision.
On return from the native we transition 2578 * back to java blocking if a safepoint is in progress. 2579 */ 2580 void AdapterHandlerLibrary::create_native_wrapper(methodHandle method) { 2581 ResourceMark rm; 2582 nmethod* nm = NULL; 2583 2584 assert(method->is_native(), "must be native"); 2585 assert(method->is_method_handle_intrinsic() || 2586 method->has_native_function(), "must have something valid to call!"); 2587 2588 { 2589 // perform the work while holding the lock, but perform any printing outside the lock 2590 MutexLocker mu(AdapterHandlerLibrary_lock); 2591 // See if somebody beat us to it 2592 nm = method->code(); 2593 if (nm != NULL) { 2594 return; 2595 } 2596 2597 const int compile_id = CompileBroker::assign_compile_id(method, CompileBroker::standard_entry_bci); 2598 if (compile_id == 0) { 2599 return; 2600 } 2601 2602 2603 ResourceMark rm; 2604 BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache 2605 if (buf != NULL) { 2606 CodeBuffer buffer(buf); 2607 double locs_buf[20]; 2608 buffer.insts()->initialize_shared_locs((relocInfo*)locs_buf, sizeof(locs_buf) / sizeof(relocInfo)); 2609 MacroAssembler _masm(&buffer); 2610 2611 // Fill in the signature array, for the calling-convention call. 
// [RIGHT column continued, orig. lines 2612-2648]
// Middle of the NEW create_native_wrapper: the sig_bt / regs marshalling and
// java_calling_convention computation are textually identical to the old
// revision; the generate_native_wrapper call is collapsed onto one line.
// BEHAVIORAL DIFFERENCE vs. old revision: method->set_code(method, nm) is
// now executed while STILL HOLDING AdapterHandlerLibrary_lock (the old
// revision deliberately unlocked first — see its "Must unlock before calling
// set_code" comment), and the lock scope is closed with an explicit
// "// Unlock AdapterHandlerLibrary_lock" comment.
2612 const int total_args_passed = method->size_of_parameters(); 2613 2614 BasicType* sig_bt = NEW_RESOURCE_ARRAY(BasicType, total_args_passed); 2615 VMRegPair* regs = NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed); 2616 int i=0; 2617 if( !method->is_static() ) // Pass in receiver first 2618 sig_bt[i++] = T_OBJECT; 2619 SignatureStream ss(method->signature()); 2620 for( ; !ss.at_return_type(); ss.next()) { 2621 sig_bt[i++] = ss.type(); // Collect remaining bits of signature 2622 if( ss.type() == T_LONG || ss.type() == T_DOUBLE ) 2623 sig_bt[i++] = T_VOID; // Longs & doubles take 2 Java slots 2624 } 2625 assert(i == total_args_passed, ""); 2626 BasicType ret_type = ss.type(); 2627 2628 // Now get the compiled-Java layout as input (or output) arguments. 2629 // NOTE: Stubs for compiled entry points of method handle intrinsics 2630 // are just trampolines so the argument registers must be outgoing ones. 2631 const bool is_outgoing = method->is_method_handle_intrinsic(); 2632 int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed, is_outgoing); 2633 2634 // Generate the compiled-to-native wrapper code 2635 nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type); 2636 2637 if (nm != NULL) { 2638 method->set_code(method, nm); 2639 } 2640 } 2641 } // Unlock AdapterHandlerLibrary_lock 2642 2643 2644 // Install the generated code. 2645 if (nm != NULL) { 2646 if (PrintCompilation) { 2647 ttyLocker ttyl; 2648 CompileTask::print_compilation(tty, nm, method->is_static() ? 
// [RIGHT column tail, orig. lines 2648-2674]
// Tail of the NEW create_native_wrapper: only the PrintCompilation output and
// nm->post_compiled_method_load_event() happen outside the lock (set_code
// already done under it); NULL nm still triggers
// CompileBroker::handle_full_code_cache(); no return value.
// block_for_jni_critical is unchanged between the two revisions. The chunk
// ends, like the left column, mid-sentence in the HAVE_DTRACE_H comment at a
// trailing '|' — the continuation lies outside this visible span.
"(static)" : ""); 2649 } 2650 nm->post_compiled_method_load_event(); 2651 } else { 2652 // CodeCache is full, disable compilation 2653 CompileBroker::handle_full_code_cache(); 2654 } 2655 } 2656 2657 JRT_ENTRY_NO_ASYNC(void, SharedRuntime::block_for_jni_critical(JavaThread* thread)) 2658 assert(thread == JavaThread::current(), "must be"); 2659 // The code is about to enter a JNI lazy critical native method and 2660 // _needs_gc is true, so if this thread is already in a critical 2661 // section then just return, otherwise this thread should block 2662 // until needs_gc has been cleared. 2663 if (thread->in_critical()) { 2664 return; 2665 } 2666 // Lock and unlock a critical section to give the system a chance to block 2667 GC_locker::lock_critical(thread); 2668 GC_locker::unlock_critical(thread); 2669 JRT_END 2670 2671 #ifdef HAVE_DTRACE_H 2672 // Create a dtrace nmethod for this method. The wrapper converts the 2673 // java compiled calling convention to the native convention, makes a dummy call 2674 // (actually nops for the size of the call instruction, which become a trap if |