2768 __ movl( sco_temp, Address(r11_dst_klass, sco_offset)); 2769 assert_clean_int(sco_temp, rax); 2770 2771 // the checkcast_copy loop needs two extra arguments: 2772 assert(c_rarg3 == sco_temp, "#3 already in place"); 2773 // Set up arguments for checkcast_copy_entry. 2774 setup_arg_regs(4); 2775 __ movptr(r8, r11_dst_klass); // dst.klass.element_klass, r8 is c_rarg4 on Linux/Solaris 2776 __ jump(RuntimeAddress(checkcast_copy_entry)); 2777 } 2778 2779 __ BIND(L_failed); 2780 __ xorptr(rax, rax); 2781 __ notptr(rax); // return -1 2782 __ leave(); // required for proper stackwalking of RuntimeStub frame 2783 __ ret(0); 2784 2785 return start; 2786 } 2787 2788 void generate_arraycopy_stubs() { 2789 address entry; 2790 address entry_jbyte_arraycopy; 2791 address entry_jshort_arraycopy; 2792 address entry_jint_arraycopy; 2793 address entry_oop_arraycopy; 2794 address entry_jlong_arraycopy; 2795 address entry_checkcast_arraycopy; 2796 2797 StubRoutines::_jbyte_disjoint_arraycopy = generate_disjoint_byte_copy(false, &entry, 2798 "jbyte_disjoint_arraycopy"); 2799 StubRoutines::_jbyte_arraycopy = generate_conjoint_byte_copy(false, entry, &entry_jbyte_arraycopy, 2800 "jbyte_arraycopy"); 2801 2802 StubRoutines::_jshort_disjoint_arraycopy = generate_disjoint_short_copy(false, &entry, 2803 "jshort_disjoint_arraycopy"); 2804 StubRoutines::_jshort_arraycopy = generate_conjoint_short_copy(false, entry, &entry_jshort_arraycopy, 2805 "jshort_arraycopy"); 2806 2807 StubRoutines::_jint_disjoint_arraycopy = generate_disjoint_int_oop_copy(false, false, &entry, 5760 StubRoutines::_throw_NullPointerException_at_call_entry = 5761 generate_throw_exception("NullPointerException at call throw_exception", 5762 CAST_FROM_FN_PTR(address, 5763 SharedRuntime:: 5764 throw_NullPointerException_at_call)); 5765 5766 // entry points that are platform specific 5767 StubRoutines::x86::_f2i_fixup = generate_f2i_fixup(); 5768 StubRoutines::x86::_f2l_fixup = generate_f2l_fixup(); 5769 
StubRoutines::x86::_d2i_fixup = generate_d2i_fixup(); 5770 StubRoutines::x86::_d2l_fixup = generate_d2l_fixup(); 5771 5772 StubRoutines::x86::_float_sign_mask = generate_fp_mask("float_sign_mask", 0x7FFFFFFF7FFFFFFF); 5773 StubRoutines::x86::_float_sign_flip = generate_fp_mask("float_sign_flip", 0x8000000080000000); 5774 StubRoutines::x86::_double_sign_mask = generate_fp_mask("double_sign_mask", 0x7FFFFFFFFFFFFFFF); 5775 StubRoutines::x86::_double_sign_flip = generate_fp_mask("double_sign_flip", 0x8000000000000000); 5776 5777 // support for verify_oop (must happen after universe_init) 5778 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop(); 5779 5780 // arraycopy stubs used by compilers 5781 generate_arraycopy_stubs(); 5782 5783 // don't bother generating these AES intrinsic stubs unless global flag is set 5784 if (UseAESIntrinsics) { 5785 StubRoutines::x86::_key_shuffle_mask_addr = generate_key_shuffle_mask(); // needed by the others 5786 StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock(); 5787 StubRoutines::_aescrypt_decryptBlock = generate_aescrypt_decryptBlock(); 5788 StubRoutines::_cipherBlockChaining_encryptAESCrypt = generate_cipherBlockChaining_encryptAESCrypt(); 5789 if (VM_Version::supports_vaes() && VM_Version::supports_avx512vl() && VM_Version::supports_avx512dq() ) { 5790 StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptVectorAESCrypt(); 5791 } else { 5792 StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt_Parallel(); 5793 } 5794 } 5795 if (UseAESCTRIntrinsics){ 5796 StubRoutines::x86::_counter_shuffle_mask_addr = generate_counter_shuffle_mask(); 5797 StubRoutines::_counterMode_AESCrypt = generate_counterMode_AESCrypt_Parallel(); 5798 } 5799 | 2768 __ movl( sco_temp, Address(r11_dst_klass, sco_offset)); 2769 assert_clean_int(sco_temp, rax); 2770 2771 // the checkcast_copy loop needs two extra arguments: 2772 assert(c_rarg3 == 
/* NOTE(review): this span is a whitespace-collapsed dump; the embedded numbers (2773, 2774, ...) are the original file's line numbers fused into the text. */ /* Tail of a checkcast arraycopy stub (head not visible in this span): tail-calls checkcast_copy_entry with the extra element-klass argument in r8, and on the L_failed path returns -1 (xor/not of rax) after tearing down the frame. */ sco_temp, "#3 already in place"); 2773 // Set up arguments for checkcast_copy_entry. 2774 setup_arg_regs(4); 2775 __ movptr(r8, r11_dst_klass); // dst.klass.element_klass, r8 is c_rarg4 on Linux/Solaris 2776 __ jump(RuntimeAddress(checkcast_copy_entry)); 2777 } 2778 2779 __ BIND(L_failed); 2780 __ xorptr(rax, rax); 2781 __ notptr(rax); // return -1 2782 __ leave(); // required for proper stackwalking of RuntimeStub frame 2783 __ ret(0); 2784 2785 return start; 2786 } 2787 /* Emits the StubRoutines::_data_cache_writeback stub: enter frame, write back the cache line containing the address passed in c_rarg0 via MacroAssembler::cache_wb, leave, ret. Returns the stub's entry pc. NOTE(review): 'optimized' and 'no_evict' are computed from VM_Version but never read again in this function — presumably cache_wb() selects clwb/clflushopt/clflush internally; confirm before removing them. */ 2788 address generate_data_cache_writeback() { 2789 bool optimized = VM_Version::supports_clflushopt(); 2790 bool no_evict = VM_Version::supports_clwb(); 2791 2792 const Register src = c_rarg0; // source address 2793 2794 __ align(CodeEntryAlignment); 2795 2796 StubCodeMark mark(this, "StubRoutines", "_data_cache_writeback"); 2797 2798 address start = __ pc(); 2799 __ enter(); 2800 const Address line(src, 0); 2801 __ cache_wb(line); 2802 __ leave(); 2803 __ ret(0); 2804 2805 return start; 2806 } 2807 /* Emits the StubRoutines::_data_cache_writeback_sync stub: the 'kind' (pre/post sync) argument in c_rarg0 is accepted but currently ignored, because — per the in-line comments below — both pre and post wbsync are translated to the same fence via cache_wbsync(true); the commented-out jcc/bind code is the future per-kind dispatch. Returns the stub's entry pc. */ 2808 address generate_data_cache_writeback_sync() { 2809 const Register kind = c_rarg0; // pre or post sync (unused for now) 2810 2811 __ align(CodeEntryAlignment); 2812 2813 StubCodeMark mark(this, "StubRoutines", "_data_cache_writeback_sync"); 2814 2815 // pre and post wbsync are both currently translated to mfence 2816 // so comment out argument dispatch until we need it 2817 2818 // Label skip, done; 2819 address start = __ pc(); 2820 __ enter(); 2821 // __ jcc(Assembler::eq, skip); 2822 __ cache_wbsync(true); 2823 // __ jmp(done); 2824 // __ bind(skip); 2825 // __ cache_wbsync(false); 2826 // __ bind(done); 2827 __ leave(); 2828 __ ret(0); 2829 2830 return start; 2831 } 2832 /* generate_arraycopy_stubs: installs the arraycopy stub entry points; this function continues past this span (truncated in this view). */ 2833 void generate_arraycopy_stubs() { 2834 address entry; 2835 address entry_jbyte_arraycopy; 2836 address entry_jshort_arraycopy; 2837 address entry_jint_arraycopy; 2838 address entry_oop_arraycopy; 2839 address entry_jlong_arraycopy; 2840 address entry_checkcast_arraycopy; 2841 2842 StubRoutines::_jbyte_disjoint_arraycopy 
= generate_disjoint_byte_copy(false, &entry, 2843 "jbyte_disjoint_arraycopy"); 2844 StubRoutines::_jbyte_arraycopy = generate_conjoint_byte_copy(false, entry, &entry_jbyte_arraycopy, 2845 "jbyte_arraycopy"); 2846 2847 StubRoutines::_jshort_disjoint_arraycopy = generate_disjoint_short_copy(false, &entry, 2848 "jshort_disjoint_arraycopy"); 2849 StubRoutines::_jshort_arraycopy = generate_conjoint_short_copy(false, entry, &entry_jshort_arraycopy, 2850 "jshort_arraycopy"); 2851 2852 StubRoutines::_jint_disjoint_arraycopy = generate_disjoint_int_oop_copy(false, false, &entry, 5805 StubRoutines::_throw_NullPointerException_at_call_entry = 5806 generate_throw_exception("NullPointerException at call throw_exception", 5807 CAST_FROM_FN_PTR(address, 5808 SharedRuntime:: 5809 throw_NullPointerException_at_call)); 5810 5811 // entry points that are platform specific 5812 StubRoutines::x86::_f2i_fixup = generate_f2i_fixup(); 5813 StubRoutines::x86::_f2l_fixup = generate_f2l_fixup(); 5814 StubRoutines::x86::_d2i_fixup = generate_d2i_fixup(); 5815 StubRoutines::x86::_d2l_fixup = generate_d2l_fixup(); 5816 5817 StubRoutines::x86::_float_sign_mask = generate_fp_mask("float_sign_mask", 0x7FFFFFFF7FFFFFFF); 5818 StubRoutines::x86::_float_sign_flip = generate_fp_mask("float_sign_flip", 0x8000000080000000); 5819 StubRoutines::x86::_double_sign_mask = generate_fp_mask("double_sign_mask", 0x7FFFFFFFFFFFFFFF); 5820 StubRoutines::x86::_double_sign_flip = generate_fp_mask("double_sign_flip", 0x8000000000000000); 5821 5822 // support for verify_oop (must happen after universe_init) 5823 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop(); 5824 5825 // data cache line writeback 5826 StubRoutines::_data_cache_writeback = generate_data_cache_writeback(); 5827 StubRoutines::_data_cache_writeback_sync = generate_data_cache_writeback_sync(); 5828 5829 // arraycopy stubs used by compilers 5830 generate_arraycopy_stubs(); 5831 5832 // don't bother generating these AES intrinsic stubs 
unless global flag is set 5833 if (UseAESIntrinsics) { 5834 StubRoutines::x86::_key_shuffle_mask_addr = generate_key_shuffle_mask(); // needed by the others 5835 StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock(); 5836 StubRoutines::_aescrypt_decryptBlock = generate_aescrypt_decryptBlock(); 5837 StubRoutines::_cipherBlockChaining_encryptAESCrypt = generate_cipherBlockChaining_encryptAESCrypt(); 5838 if (VM_Version::supports_vaes() && VM_Version::supports_avx512vl() && VM_Version::supports_avx512dq() ) { 5839 StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptVectorAESCrypt(); 5840 } else { 5841 StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt_Parallel(); 5842 } 5843 } 5844 if (UseAESCTRIntrinsics){ 5845 StubRoutines::x86::_counter_shuffle_mask_addr = generate_counter_shuffle_mask(); 5846 StubRoutines::_counterMode_AESCrypt = generate_counterMode_AESCrypt_Parallel(); 5847 } 5848 |