2076     StubRoutines::_checkcast_arraycopy_uninit =
2077         generate_checkcast_copy("checkcast_arraycopy_uninit", NULL, /*dest_uninitialized*/true);
2078
2079     StubRoutines::_unsafe_arraycopy =
2080         generate_unsafe_copy("unsafe_arraycopy",
2081                              entry_jbyte_arraycopy,
2082                              entry_jshort_arraycopy,
2083                              entry_jint_arraycopy,
2084                              entry_jlong_arraycopy);
2085
2086     StubRoutines::_generic_arraycopy =
2087         generate_generic_copy("generic_arraycopy",
2088                               entry_jbyte_arraycopy,
2089                               entry_jshort_arraycopy,
2090                               entry_jint_arraycopy,
2091                               entry_oop_arraycopy,
2092                               entry_jlong_arraycopy,
2093                               entry_checkcast_arraycopy);
2094   }
2095
2096   void generate_math_stubs() {
2097     {
2098       StubCodeMark mark(this, "StubRoutines", "log10");
2099       StubRoutines::_intrinsic_log10 = (double (*)(double)) __ pc();
2100
2101       __ fld_d(Address(rsp, 4));
2102       __ flog10();
2103       __ ret(0);
2104     }
2105     {
2106       StubCodeMark mark(this, "StubRoutines", "tan");
2107       StubRoutines::_intrinsic_tan = (double (*)(double)) __ pc();
2108
2109       __ fld_d(Address(rsp, 4));
2110       __ trigfunc('t');
2111       __ ret(0);
2112     }
2113   }
2114
2115   // AES intrinsic stubs
2116   enum {AESBlockSize = 16};
2117
2118   address generate_key_shuffle_mask() {
2119     __ align(16);
2120     StubCodeMark mark(this, "StubRoutines", "key_shuffle_mask");
2121     address start = __ pc();
2122     __ emit_data(0x00010203, relocInfo::none, 0);
2123     __ emit_data(0x04050607, relocInfo::none, 0);
2124     __ emit_data(0x08090a0b, relocInfo::none, 0);
2125     __ emit_data(0x0c0d0e0f, relocInfo::none, 0);
2126     return start;
2127   }
2128
2129   address generate_counter_shuffle_mask() {
2130     __ align(16);
2131     StubCodeMark mark(this, "StubRoutines", "counter_shuffle_mask");
2132     address start = __ pc();
2133     __ emit_data(0x0c0d0e0f, relocInfo::none, 0);
2134     __ emit_data(0x08090a0b, relocInfo::none, 0);

3517     const XMMRegister x2 = xmm2;
3518     const XMMRegister x3 = xmm3;
3519
3520     const XMMRegister x4 = xmm4;
3521     const XMMRegister x5 = xmm5;
3522     const XMMRegister x6 = xmm6;
3523     const XMMRegister x7 = xmm7;
3524
3525     const Register tmp = rbx;
3526
3527     BLOCK_COMMENT("Entry:");
3528     __ enter(); // required for proper stackwalking of RuntimeStub frame
3529     __ fast_log(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
3530     __ leave(); // required for proper stackwalking of RuntimeStub frame
3531     __ ret(0);
3532
3533     return start;
3534
3535   }
3536
3537   address generate_libmPow() {
3538     address start = __ pc();
3539
3540     const XMMRegister x0 = xmm0;
3541     const XMMRegister x1 = xmm1;
3542     const XMMRegister x2 = xmm2;
3543     const XMMRegister x3 = xmm3;
3544
3545     const XMMRegister x4 = xmm4;
3546     const XMMRegister x5 = xmm5;
3547     const XMMRegister x6 = xmm6;
3548     const XMMRegister x7 = xmm7;
3549
3550     const Register tmp = rbx;
3551
3552     BLOCK_COMMENT("Entry:");
3553     __ enter(); // required for proper stackwalking of RuntimeStub frame
3554     __ fast_pow(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
3555     __ leave(); // required for proper stackwalking of RuntimeStub frame
3556     __ ret(0);

3613     const XMMRegister x2 = xmm2;
3614     const XMMRegister x3 = xmm3;
3615
3616     const XMMRegister x4 = xmm4;
3617     const XMMRegister x5 = xmm5;
3618     const XMMRegister x6 = xmm6;
3619     const XMMRegister x7 = xmm7;
3620
3621     const Register tmp = rbx;
3622
3623     BLOCK_COMMENT("Entry:");
3624     __ enter(); // required for proper stackwalking of RuntimeStub frame
3625     __ fast_cos(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
3626     __ leave(); // required for proper stackwalking of RuntimeStub frame
3627     __ ret(0);
3628
3629     return start;
3630
3631   }
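All of the generate_libm* stubs in this file share one shape: record the start pc, set up a frame with enter(), run a MacroAssembler fast_* body over xmm0-xmm7 plus rax/rcx/rdx and a temp register, then leave() and ret(0). A hedged sketch of that shape as a helper, for orientation only (emit_libm_stub and the member-pointer plumbing are hypothetical, not HotSpot API; the real file keeps each body inline as shown):

  // Sketch only, assuming the surrounding StubGenerator context where
  // '__' expands to '_masm->'; emit_libm_stub is a made-up helper.
  typedef void (MacroAssembler::*libm_body_fn)(
      XMMRegister, XMMRegister, XMMRegister, XMMRegister,
      XMMRegister, XMMRegister, XMMRegister, XMMRegister,
      Register, Register, Register, Register);

  address emit_libm_stub(libm_body_fn body) {
    address start = __ pc();
    __ enter();    // required for proper stackwalking of RuntimeStub frame
    (_masm->*body)(xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7,
                   rax, rcx, rdx, rbx /*tmp*/);
    __ leave();    // required for proper stackwalking of RuntimeStub frame
    __ ret(0);
    return start;
  }
  // Hypothetical usage: StubRoutines::_dcos = emit_libm_stub(&MacroAssembler::fast_cos);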
3632
3633   // Safefetch stubs.
3634   void generate_safefetch(const char* name, int size, address* entry,
3635                           address* fault_pc, address* continuation_pc) {
3636     // safefetch signatures:
3637     //   int      SafeFetch32(int*      adr, int      errValue);
3638     //   intptr_t SafeFetchN (intptr_t* adr, intptr_t errValue);
3639
3640     StubCodeMark mark(this, "StubRoutines", name);
3641
3642     // Entry point, pc or function descriptor.
3643     *entry = __ pc();
3644
3645     __ movl(rax, Address(rsp, 0x8));
3646     __ movl(rcx, Address(rsp, 0x4));
3647     // Load *adr into eax, may fault.
3648     *fault_pc = __ pc();
3649     switch (size) {
3650       case 4:
3651         // int32_t
3652         __ movl(rax, Address(rcx, 0));

3836                                                              CAST_FROM_FN_PTR(address, SharedRuntime::d2l));
3837
3838     // Build this early so it's available for the interpreter
3839     StubRoutines::_throw_StackOverflowError_entry         = generate_throw_exception("StackOverflowError throw_exception",
3840                                                              CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
3841     StubRoutines::_throw_delayed_StackOverflowError_entry = generate_throw_exception("delayed StackOverflowError throw_exception",
3842                                                              CAST_FROM_FN_PTR(address, SharedRuntime::throw_delayed_StackOverflowError));
3843
3844     if (UseCRC32Intrinsics) {
3845       // set table address before stub generation which uses it
3846       StubRoutines::_crc_table_adr = (address)StubRoutines::x86::_crc_table;
3847       StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
3848     }
3849
3850     if (UseCRC32CIntrinsics) {
3851       bool supports_clmul = VM_Version::supports_clmul();
3852       StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3853       StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3854       StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3855     }
3856     if (VM_Version::supports_sse2()) {
3857       StubRoutines::_dexp = generate_libmExp();
3858       StubRoutines::_dlog = generate_libmLog();
3859       StubRoutines::_dpow = generate_libmPow();
3860       if (UseLibmSinIntrinsic || UseLibmCosIntrinsic) {
3861         StubRoutines::_dlibm_reduce_pi04l = generate_libm_reduce_pi04l();
3862         StubRoutines::_dlibm_sin_cos_huge = generate_libm_sin_cos_huge();
3863       }
3864       if (UseLibmSinIntrinsic) {
3865         StubRoutines::_dsin = generate_libmSin();
3866       }
3867       if (UseLibmCosIntrinsic) {
3868         StubRoutines::_dcos = generate_libmCos();
3869       }
3870     }
3871   }
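Note the ordering discipline in the two CRC blocks: the table address fields are published before the stub generators run, because the generated code embeds those addresses as constants. A hedged illustration of why (the lea/ExternalAddress line is representative of how such a table is typically materialized, not copied from generate_updateBytesCRC32):

  // Illustrative only. Inside the CRC stub, the table is typically
  // loaded as an absolute address along these lines:
  //   __ lea(table, ExternalAddress(StubRoutines::crc_table_addr()));
  // so if _crc_table_adr were still NULL when the stub was generated,
  // the constant baked into the code would point nowhere.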
3872
3873
3874   void generate_all() {
3875     // Generates all stubs and initializes the entry points
3876
3877     // These entry points require SharedInfo::stack0 to be set up in non-core builds
3878     // and need to be relocatable, so they each fabricate a RuntimeStub internally.
3879     StubRoutines::_throw_AbstractMethodError_entry          = generate_throw_exception("AbstractMethodError throw_exception",          CAST_FROM_FN_PTR(address, SharedRuntime::throw_AbstractMethodError));
3880     StubRoutines::_throw_IncompatibleClassChangeError_entry = generate_throw_exception("IncompatibleClassChangeError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_IncompatibleClassChangeError));
3881     StubRoutines::_throw_NullPointerException_at_call_entry = generate_throw_exception("NullPointerException at call throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
3882
3883     //------------------------------------------------------------------------------------------------------------------------
3884     // entry points that are platform specific
3885
3886     // support for verify_oop (must happen after universe_init)
3887     StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
3888
3889     // arraycopy stubs used by compilers
3890     generate_arraycopy_stubs();
3891
3892     generate_math_stubs();
3893
3894     // don't bother generating these AES intrinsic stubs unless global flag is set
3895     if (UseAESIntrinsics) {
3896       StubRoutines::x86::_key_shuffle_mask_addr = generate_key_shuffle_mask();  // might be needed by the others
3897
3898       StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();
3899       StubRoutines::_aescrypt_decryptBlock = generate_aescrypt_decryptBlock();
3900       StubRoutines::_cipherBlockChaining_encryptAESCrypt = generate_cipherBlockChaining_encryptAESCrypt();
3901       StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt_Parallel();
3902     }
3903
3904     if (UseAESCTRIntrinsics) {
3905       StubRoutines::x86::_counter_shuffle_mask_addr = generate_counter_shuffle_mask();
3906       StubRoutines::_counterMode_AESCrypt = generate_counterMode_AESCrypt_Parallel();
3907     }
3908
3909     if (UseSHA1Intrinsics) {
3910       StubRoutines::x86::_upper_word_mask_addr = generate_upper_word_mask();
3911       StubRoutines::x86::_shuffle_byte_flip_mask_addr = generate_shuffle_byte_flip_mask();
3912       StubRoutines::_sha1_implCompress = generate_sha1_implCompress(false, "sha1_implCompress");

|

2076     StubRoutines::_checkcast_arraycopy_uninit =
2077         generate_checkcast_copy("checkcast_arraycopy_uninit", NULL, /*dest_uninitialized*/true);
2078
2079     StubRoutines::_unsafe_arraycopy =
2080         generate_unsafe_copy("unsafe_arraycopy",
2081                              entry_jbyte_arraycopy,
2082                              entry_jshort_arraycopy,
2083                              entry_jint_arraycopy,
2084                              entry_jlong_arraycopy);
2085
2086     StubRoutines::_generic_arraycopy =
2087         generate_generic_copy("generic_arraycopy",
2088                               entry_jbyte_arraycopy,
2089                               entry_jshort_arraycopy,
2090                               entry_jint_arraycopy,
2091                               entry_oop_arraycopy,
2092                               entry_jlong_arraycopy,
2093                               entry_checkcast_arraycopy);
2094   }
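For orientation: generate_generic_copy receives the type-specialized entry points and emits a stub that picks one at run time. A hedged C++ sketch of that dispatch (illustrative; the real stub inspects the array klass's layout helper in assembly rather than switching on a BasicType, and select_entry is a made-up name):

  // Hypothetical sketch of the generic stub's dispatch, not HotSpot code.
  address select_entry(BasicType t,
                       address jbyte_entry, address jshort_entry,
                       address jint_entry,  address oop_entry,
                       address jlong_entry, address checkcast_entry) {
    switch (t) {
      case T_BYTE:   return jbyte_entry;
      case T_SHORT:  return jshort_entry;
      case T_INT:    return jint_entry;
      case T_LONG:   return jlong_entry;
      case T_OBJECT: return oop_entry;  // or checkcast_entry when a subtype check is required
      default:       return NULL;       // unsupported: caller falls back to the runtime copy
    }
  }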
2095
2096   // AES intrinsic stubs
2097   enum {AESBlockSize = 16};
2098
2099   address generate_key_shuffle_mask() {
2100     __ align(16);
2101     StubCodeMark mark(this, "StubRoutines", "key_shuffle_mask");
2102     address start = __ pc();
2103     __ emit_data(0x00010203, relocInfo::none, 0);
2104     __ emit_data(0x04050607, relocInfo::none, 0);
2105     __ emit_data(0x08090a0b, relocInfo::none, 0);
2106     __ emit_data(0x0c0d0e0f, relocInfo::none, 0);
2107     return start;
2108   }
2109
2110   address generate_counter_shuffle_mask() {
2111     __ align(16);
2112     StubCodeMark mark(this, "StubRoutines", "counter_shuffle_mask");
2113     address start = __ pc();
2114     __ emit_data(0x0c0d0e0f, relocInfo::none, 0);
2115     __ emit_data(0x08090a0b, relocInfo::none, 0);

3498     const XMMRegister x2 = xmm2;
3499     const XMMRegister x3 = xmm3;
3500
3501     const XMMRegister x4 = xmm4;
3502     const XMMRegister x5 = xmm5;
3503     const XMMRegister x6 = xmm6;
3504     const XMMRegister x7 = xmm7;
3505
3506     const Register tmp = rbx;
3507
3508     BLOCK_COMMENT("Entry:");
3509     __ enter(); // required for proper stackwalking of RuntimeStub frame
3510     __ fast_log(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
3511     __ leave(); // required for proper stackwalking of RuntimeStub frame
3512     __ ret(0);
3513
3514     return start;
3515
3516   }
3517
3518   address generate_libmLog10() {
3519     address start = __ pc();
3520
3521     const XMMRegister x0 = xmm0;
3522     const XMMRegister x1 = xmm1;
3523     const XMMRegister x2 = xmm2;
3524     const XMMRegister x3 = xmm3;
3525
3526     const XMMRegister x4 = xmm4;
3527     const XMMRegister x5 = xmm5;
3528     const XMMRegister x6 = xmm6;
3529     const XMMRegister x7 = xmm7;
3530
3531     const Register tmp = rbx;
3532
3533     BLOCK_COMMENT("Entry:");
3534     __ enter(); // required for proper stackwalking of RuntimeStub frame
3535     __ fast_log10(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
3536     __ leave(); // required for proper stackwalking of RuntimeStub frame
3537     __ ret(0);
3538
3539     return start;
3540
3541   }
3542
3543   address generate_libmPow() {
3544     address start = __ pc();
3545
3546     const XMMRegister x0 = xmm0;
3547     const XMMRegister x1 = xmm1;
3548     const XMMRegister x2 = xmm2;
3549     const XMMRegister x3 = xmm3;
3550
3551     const XMMRegister x4 = xmm4;
3552     const XMMRegister x5 = xmm5;
3553     const XMMRegister x6 = xmm6;
3554     const XMMRegister x7 = xmm7;
3555
3556     const Register tmp = rbx;
3557
3558     BLOCK_COMMENT("Entry:");
3559     __ enter(); // required for proper stackwalking of RuntimeStub frame
3560     __ fast_pow(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
3561     __ leave(); // required for proper stackwalking of RuntimeStub frame
3562     __ ret(0);

3619     const XMMRegister x2 = xmm2;
3620     const XMMRegister x3 = xmm3;
3621
3622     const XMMRegister x4 = xmm4;
3623     const XMMRegister x5 = xmm5;
3624     const XMMRegister x6 = xmm6;
3625     const XMMRegister x7 = xmm7;
3626
3627     const Register tmp = rbx;
3628
3629     BLOCK_COMMENT("Entry:");
3630     __ enter(); // required for proper stackwalking of RuntimeStub frame
3631     __ fast_cos(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
3632     __ leave(); // required for proper stackwalking of RuntimeStub frame
3633     __ ret(0);
3634
3635     return start;
3636
3637   }
3638
3639   address generate_libm_tan_cot_huge() {
3640     address start = __ pc();
3641
3642     const XMMRegister x0 = xmm0;
3643     const XMMRegister x1 = xmm1;
3644
3645     BLOCK_COMMENT("Entry:");
3646     __ libm_tancot_huge(x0, x1, rax, rcx, rdx, rbx, rsi, rdi, rbp, rsp);
3647
3648     return start;
3649
3650   }
3651
3652   address generate_libmTan() {
3653     address start = __ pc();
3654
3655     const XMMRegister x0 = xmm0;
3656     const XMMRegister x1 = xmm1;
3657     const XMMRegister x2 = xmm2;
3658     const XMMRegister x3 = xmm3;
3659
3660     const XMMRegister x4 = xmm4;
3661     const XMMRegister x5 = xmm5;
3662     const XMMRegister x6 = xmm6;
3663     const XMMRegister x7 = xmm7;
3664
3665     const Register tmp = rbx;
3666
3667     BLOCK_COMMENT("Entry:");
3668     __ enter(); // required for proper stackwalking of RuntimeStub frame
3669     __ fast_tan(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
3670     __ leave(); // required for proper stackwalking of RuntimeStub frame
3671     __ ret(0);
3672
3673     return start;
3674
3675   }
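generate_libmTan and generate_libm_tan_cot_huge are the replacements for the x87 trigfunc('t') path in the now-deleted generate_math_stubs (likewise generate_libmLog10 replaces the flog10 stub). Callers use the generated stub when present and otherwise fall back to the shared runtime; a hedged sketch of that selection (the call site is illustrative; StubRoutines::dtan() and SharedRuntime::dtan are the conventional accessors, but treat the exact shape as an assumption):

  // Illustrative call-site selection, not code from this file.
  double call_dtan(double x) {
    if (StubRoutines::dtan() != NULL) {
      // generated fast_tan stub (tan_cot_huge handles large arguments)
      return ((double (*)(double))StubRoutines::dtan())(x);
    }
    return SharedRuntime::dtan(x);  // C library fallback
  }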
3676
3677   // Safefetch stubs.
3678   void generate_safefetch(const char* name, int size, address* entry,
3679                           address* fault_pc, address* continuation_pc) {
3680     // safefetch signatures:
3681     //   int      SafeFetch32(int*      adr, int      errValue);
3682     //   intptr_t SafeFetchN (intptr_t* adr, intptr_t errValue);
3683
3684     StubCodeMark mark(this, "StubRoutines", name);
3685
3686     // Entry point, pc or function descriptor.
3687     *entry = __ pc();
3688
3689     __ movl(rax, Address(rsp, 0x8));
3690     __ movl(rcx, Address(rsp, 0x4));
3691     // Load *adr into eax, may fault.
3692     *fault_pc = __ pc();
3693     switch (size) {
3694       case 4:
3695         // int32_t
3696         __ movl(rax, Address(rcx, 0));
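The two movl instructions pick up the cdecl stack arguments: adr from rsp+4 into rcx, errValue from rsp+8 into rax. Loading errValue into rax before the probing load is the point of the stub: if the load at fault_pc traps, the VM's signal handler resumes at continuation_pc, leaving the pre-loaded errValue in rax as the return value. A hedged caller-side sketch (looks_mapped and the -1 sentinel are made up):

  // Hypothetical caller, using the signature documented above.
  bool looks_mapped(int* probe) {
    int v = SafeFetch32(probe, -1);
    // v == -1 means either the word really contains -1 or the load
    // faulted; callers pick a sentinel a mapped word cannot hold.
    return v != -1;
  }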
3880                                                              CAST_FROM_FN_PTR(address, SharedRuntime::d2l));
3881
3882     // Build this early so it's available for the interpreter
3883     StubRoutines::_throw_StackOverflowError_entry         = generate_throw_exception("StackOverflowError throw_exception",
3884                                                              CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
3885     StubRoutines::_throw_delayed_StackOverflowError_entry = generate_throw_exception("delayed StackOverflowError throw_exception",
3886                                                              CAST_FROM_FN_PTR(address, SharedRuntime::throw_delayed_StackOverflowError));
3887
3888     if (UseCRC32Intrinsics) {
3889       // set table address before stub generation which uses it
3890       StubRoutines::_crc_table_adr = (address)StubRoutines::x86::_crc_table;
3891       StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
3892     }
3893
3894     if (UseCRC32CIntrinsics) {
3895       bool supports_clmul = VM_Version::supports_clmul();
3896       StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3897       StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3898       StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3899     }
3900     if (VM_Version::supports_sse2() && UseLibmIntrinsic) {
3901       StubRoutines::x86::_L_2il0floatpacket_0_adr = (address)StubRoutines::x86::_L_2il0floatpacket_0;
3902       StubRoutines::x86::_Pi4Inv_adr = (address)StubRoutines::x86::_Pi4Inv;
3903       StubRoutines::x86::_Pi4x3_adr = (address)StubRoutines::x86::_Pi4x3;
3904       StubRoutines::x86::_Pi4x4_adr = (address)StubRoutines::x86::_Pi4x4;
3905       StubRoutines::x86::_ones_adr = (address)StubRoutines::x86::_ones;
3906       StubRoutines::_dexp = generate_libmExp();
3907       StubRoutines::_dlog = generate_libmLog();
3908       StubRoutines::_dlog10 = generate_libmLog10();
3909       StubRoutines::_dpow = generate_libmPow();
3910       StubRoutines::_dlibm_reduce_pi04l = generate_libm_reduce_pi04l();
3911       StubRoutines::_dlibm_sin_cos_huge = generate_libm_sin_cos_huge();
3912       StubRoutines::_dsin = generate_libmSin();
3913       StubRoutines::_dcos = generate_libmCos();
3914       StubRoutines::_dlibm_tan_cot_huge = generate_libm_tan_cot_huge();
3915       StubRoutines::_dtan = generate_libmTan();
3916     }
3917   }
3918
3919   void generate_all() {
3920     // Generates all stubs and initializes the entry points
3921
3922     // These entry points require SharedInfo::stack0 to be set up in non-core builds
3923     // and need to be relocatable, so they each fabricate a RuntimeStub internally.
3924     StubRoutines::_throw_AbstractMethodError_entry          = generate_throw_exception("AbstractMethodError throw_exception",          CAST_FROM_FN_PTR(address, SharedRuntime::throw_AbstractMethodError));
3925     StubRoutines::_throw_IncompatibleClassChangeError_entry = generate_throw_exception("IncompatibleClassChangeError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_IncompatibleClassChangeError));
3926     StubRoutines::_throw_NullPointerException_at_call_entry = generate_throw_exception("NullPointerException at call throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
3927
3928     //------------------------------------------------------------------------------------------------------------------------
3929     // entry points that are platform specific
3930
3931     // support for verify_oop (must happen after universe_init)
3932     StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
3933
3934     // arraycopy stubs used by compilers
3935     generate_arraycopy_stubs();
3936
3937     // don't bother generating these AES intrinsic stubs unless global flag is set
3938     if (UseAESIntrinsics) {
3939       StubRoutines::x86::_key_shuffle_mask_addr = generate_key_shuffle_mask();  // might be needed by the others
3940
3941       StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();
3942       StubRoutines::_aescrypt_decryptBlock = generate_aescrypt_decryptBlock();
3943       StubRoutines::_cipherBlockChaining_encryptAESCrypt = generate_cipherBlockChaining_encryptAESCrypt();
3944       StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt_Parallel();
3945     }
3946
3947     if (UseAESCTRIntrinsics) {
3948       StubRoutines::x86::_counter_shuffle_mask_addr = generate_counter_shuffle_mask();
3949       StubRoutines::_counterMode_AESCrypt = generate_counterMode_AESCrypt_Parallel();
3950     }
3951
3952     if (UseSHA1Intrinsics) {
3953       StubRoutines::x86::_upper_word_mask_addr = generate_upper_word_mask();
3954       StubRoutines::x86::_shuffle_byte_flip_mask_addr = generate_shuffle_byte_flip_mask();
3955       StubRoutines::_sha1_implCompress = generate_sha1_implCompress(false, "sha1_implCompress");