2899 __ BIND(L_exit);
2900 __ movdqu(xmm_counter_shuf_mask, ExternalAddress(StubRoutines::x86::counter_shuffle_mask_addr()));
2901 __ pshufb(xmm_curr_counter, xmm_counter_shuf_mask); //counter is shuffled back.
2902 __ movdqu(Address(counter, 0), xmm_curr_counter); //save counter back so the caller sees the updated value
2903 handleSOERegisters(false /*restoring*/); // restore the callee-saved (SOE) registers saved on entry
2904 __ movptr(rax, len_param); // return length
2905 __ leave(); // required for proper stackwalking of RuntimeStub frame
2906 __ ret(0);
2907
2908 // Secondary entry for 192-bit keys: reset pos, then join the shared multi-block loop.
2908 __ BIND (L_key192_top);
2909 __ movptr(pos, 0); // init pos before L_multiBlock_loopTop
2910 __ jmp(L_multiBlock_loopTop[1]); //key192
2911
2912 // Secondary entry for 256-bit keys: reset pos, then join the shared multi-block loop.
2912 __ BIND (L_key256_top);
2913 __ movptr(pos, 0); // init pos before L_multiBlock_loopTop
2914 __ jmp(L_multiBlock_loopTop[2]); //key256 -- index 2 selects the 256-bit-key loop (comment previously said key192)
2915
2916 return start;
2917 }
2918
2919 // Emits a 16-byte constant: three zero words followed by 0xFFFFFFFF in the
2919 // highest 32-bit word. Its address is published as
2919 // StubRoutines::x86::_upper_word_mask_addr (registered under UseSHA1Intrinsics).
2919 // Returns the address of the emitted data.
2919 address generate_upper_word_mask() {
2920 __ align(64);
2921 StubCodeMark mark(this, "StubRoutines", "upper_word_mask");
2922 address start = __ pc();
2923 __ emit_data(0x00000000, relocInfo::none, 0);
2924 __ emit_data(0x00000000, relocInfo::none, 0);
2925 __ emit_data(0x00000000, relocInfo::none, 0);
2926 __ emit_data(0xFFFFFFFF, relocInfo::none, 0);
2927 return start;
2928 }
2929
2930 // Emits a 16-byte shuffle mask with byte indices in fully reversed order
2930 // (0x0f..0x00) — presumably a PSHUFB control for byte-order flipping; TODO
2930 // confirm against the SHA-1 stub that consumes it. Published as
2930 // StubRoutines::x86::_shuffle_byte_flip_mask_addr (registered under UseSHA1Intrinsics).
2930 address generate_shuffle_byte_flip_mask() {
2931 __ align(64);
2932 StubCodeMark mark(this, "StubRoutines", "shuffle_byte_flip_mask");
2933 address start = __ pc();
2934 __ emit_data(0x0c0d0e0f, relocInfo::none, 0);
2935 __ emit_data(0x08090a0b, relocInfo::none, 0);
2936 __ emit_data(0x04050607, relocInfo::none, 0);
2937 __ emit_data(0x00010203, relocInfo::none, 0);
2938 return start;
3904 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
3905
3906 // arraycopy stubs used by compilers
3907 generate_arraycopy_stubs();
3908
3909 // don't bother generating these AES intrinsic stubs unless global flag is set
3910 if (UseAESIntrinsics) {
3911 StubRoutines::x86::_key_shuffle_mask_addr = generate_key_shuffle_mask(); // might be needed by the others
3912
3913 StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();
3914 StubRoutines::_aescrypt_decryptBlock = generate_aescrypt_decryptBlock();
3915 StubRoutines::_cipherBlockChaining_encryptAESCrypt = generate_cipherBlockChaining_encryptAESCrypt();
3916 StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt_Parallel();
3917 }
3918
3919 // AES counter (CTR) mode: the shuffle-mask constant must be emitted before
3919 // the stub that references it.
3919 if (UseAESCTRIntrinsics) {
3920 StubRoutines::x86::_counter_shuffle_mask_addr = generate_counter_shuffle_mask();
3921 StubRoutines::_counterMode_AESCrypt = generate_counterMode_AESCrypt_Parallel();
3922 }
3923
3924 // SHA-1: constant masks first, then single-block and multi-block entry points.
3924 if (UseSHA1Intrinsics) {
3925 StubRoutines::x86::_upper_word_mask_addr = generate_upper_word_mask();
3926 StubRoutines::x86::_shuffle_byte_flip_mask_addr = generate_shuffle_byte_flip_mask();
3927 StubRoutines::_sha1_implCompress = generate_sha1_implCompress(false, "sha1_implCompress");
3928 StubRoutines::_sha1_implCompressMB = generate_sha1_implCompress(true, "sha1_implCompressMB");
3929 }
3930 // SHA-256: the k256 round-constant table already exists as static data; only
3930 // its address needs publishing.
3930 if (UseSHA256Intrinsics) {
3931 StubRoutines::x86::_k256_adr = (address)StubRoutines::x86::_k256;
3932 StubRoutines::x86::_pshuffle_byte_flip_mask_addr = generate_pshuffle_byte_flip_mask();
3933 StubRoutines::_sha256_implCompress = generate_sha256_implCompress(false, "sha256_implCompress");
3934 StubRoutines::_sha256_implCompressMB = generate_sha256_implCompress(true, "sha256_implCompressMB");
3935 }
3936
3937 // Generate GHASH intrinsics code
3938 if (UseGHASHIntrinsics) {
3939 StubRoutines::x86::_ghash_long_swap_mask_addr = generate_ghash_long_swap_mask();
3940 StubRoutines::x86::_ghash_byte_swap_mask_addr = generate_ghash_byte_swap_mask();
3941 StubRoutines::_ghash_processBlocks = generate_ghash_processBlocks();
3942 }
3943
|
2899 __ BIND(L_exit);
2900 __ movdqu(xmm_counter_shuf_mask, ExternalAddress(StubRoutines::x86::counter_shuffle_mask_addr()));
2901 __ pshufb(xmm_curr_counter, xmm_counter_shuf_mask); //counter is shuffled back.
2902 __ movdqu(Address(counter, 0), xmm_curr_counter); //save counter back so the caller sees the updated value
2903 handleSOERegisters(false /*restoring*/); // restore the callee-saved (SOE) registers saved on entry
2904 __ movptr(rax, len_param); // return length
2905 __ leave(); // required for proper stackwalking of RuntimeStub frame
2906 __ ret(0);
2907
2908 // Secondary entry for 192-bit keys: reset pos, then join the shared multi-block loop.
2908 __ BIND (L_key192_top);
2909 __ movptr(pos, 0); // init pos before L_multiBlock_loopTop
2910 __ jmp(L_multiBlock_loopTop[1]); //key192
2911
2912 // Secondary entry for 256-bit keys: reset pos, then join the shared multi-block loop.
2912 __ BIND (L_key256_top);
2913 __ movptr(pos, 0); // init pos before L_multiBlock_loopTop
2914 __ jmp(L_multiBlock_loopTop[2]); //key256 -- index 2 selects the 256-bit-key loop (comment previously said key192)
2915
2916 return start;
2917 }
2918
2919 // ofs and limit are used for multi-block byte array.
2920 // int com.sun.security.provider.MD5.implCompress(byte[] b, int ofs)
2920 //
2920 // Generates the MD5 compression stub. With multi_block == false only buf and
2920 // state are read from the incoming arguments; with multi_block == true the
2920 // ofs and limit arguments are read as well so fast_md5 can iterate over
2920 // multiple 64-byte blocks. Returns the stub's entry address.
2921 address generate_md5_implCompress(bool multi_block, const char *name) {
2922 __ align(CodeEntryAlignment);
2923 StubCodeMark mark(this, "StubRoutines", name);
2924 address start = __ pc();
2925
2926 // buf lives in rbp, so it must be loaded only after every rbp-relative
2926 // argument read below; the other three operands are passed to fast_md5 as
2926 // stack slots carved out under rsp.
2926 const Register buf_param = rbp;
2927 const Address state_param(rsp, 0 * wordSize);
2928 const Address ofs_param (rsp, 1 * wordSize);
2929 const Address limit_param(rsp, 2 * wordSize);
2930
2931 __ enter();
2932 __ push(rbx);
2933 __ push(rdi);
2934 __ push(rsi);
2935 __ push(rbp);
2936 __ subptr(rsp, 3 * wordSize); // scratch slots for state/ofs/limit
2937
2938 __ movptr(rsi, Address(rbp, 8 + 4)); // 2nd incoming arg: state
2939 __ movptr(state_param, rsi);
2940 if (multi_block) {
2941 __ movptr(rsi, Address(rbp, 8 + 8)); // 3rd incoming arg: ofs
2942 __ movptr(ofs_param, rsi);
2943 __ movptr(rsi, Address(rbp, 8 + 12)); // 4th incoming arg: limit
2944 __ movptr(limit_param, rsi);
2945 }
2946 __ movptr(buf_param, Address(rbp, 8 + 0)); // do it last because it overrides rbp
2947 __ fast_md5(buf_param, state_param, ofs_param, limit_param, multi_block);
2948
2949 // Tear down in exact reverse order of the prologue above.
2949 __ addptr(rsp, 3 * wordSize);
2950 __ pop(rbp);
2951 __ pop(rsi);
2952 __ pop(rdi);
2953 __ pop(rbx);
2954 __ leave();
2955 __ ret(0);
2956 return start;
2957 }
2958
2959 // Emits a 16-byte constant: three zero words followed by 0xFFFFFFFF in the
2959 // highest 32-bit word. Its address is published as
2959 // StubRoutines::x86::_upper_word_mask_addr (registered under UseSHA1Intrinsics).
2959 // Returns the address of the emitted data.
2959 address generate_upper_word_mask() {
2960 __ align(64);
2961 StubCodeMark mark(this, "StubRoutines", "upper_word_mask");
2962 address start = __ pc();
2963 __ emit_data(0x00000000, relocInfo::none, 0);
2964 __ emit_data(0x00000000, relocInfo::none, 0);
2965 __ emit_data(0x00000000, relocInfo::none, 0);
2966 __ emit_data(0xFFFFFFFF, relocInfo::none, 0);
2967 return start;
2968 }
2969
2970 // Emits a 16-byte shuffle mask with byte indices in fully reversed order
2970 // (0x0f..0x00) — presumably a PSHUFB control for byte-order flipping; TODO
2970 // confirm against the SHA-1 stub that consumes it. Published as
2970 // StubRoutines::x86::_shuffle_byte_flip_mask_addr (registered under UseSHA1Intrinsics).
2970 address generate_shuffle_byte_flip_mask() {
2971 __ align(64);
2972 StubCodeMark mark(this, "StubRoutines", "shuffle_byte_flip_mask");
2973 address start = __ pc();
2974 __ emit_data(0x0c0d0e0f, relocInfo::none, 0);
2975 __ emit_data(0x08090a0b, relocInfo::none, 0);
2976 __ emit_data(0x04050607, relocInfo::none, 0);
2977 __ emit_data(0x00010203, relocInfo::none, 0);
2978 return start;
3944 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
3945
3946 // arraycopy stubs used by compilers
3947 generate_arraycopy_stubs();
3948
3949 // don't bother generating these AES intrinsic stubs unless global flag is set
3950 if (UseAESIntrinsics) {
3951 StubRoutines::x86::_key_shuffle_mask_addr = generate_key_shuffle_mask(); // might be needed by the others
3952
3953 StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();
3954 StubRoutines::_aescrypt_decryptBlock = generate_aescrypt_decryptBlock();
3955 StubRoutines::_cipherBlockChaining_encryptAESCrypt = generate_cipherBlockChaining_encryptAESCrypt();
3956 StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt_Parallel();
3957 }
3958
3959 // AES counter (CTR) mode: the shuffle-mask constant must be emitted before
3959 // the stub that references it.
3959 if (UseAESCTRIntrinsics) {
3960 StubRoutines::x86::_counter_shuffle_mask_addr = generate_counter_shuffle_mask();
3961 StubRoutines::_counterMode_AESCrypt = generate_counterMode_AESCrypt_Parallel();
3962 }
3963
3964 // MD5: single-block and multi-block compression entry points.
3964 if (UseMD5Intrinsics) {
3965 StubRoutines::_md5_implCompress = generate_md5_implCompress(false, "md5_implCompress");
3966 StubRoutines::_md5_implCompressMB = generate_md5_implCompress(true, "md5_implCompressMB");
3967 }
3968 // SHA-1: constant masks first, then single-block and multi-block entry points.
3968 if (UseSHA1Intrinsics) {
3969 StubRoutines::x86::_upper_word_mask_addr = generate_upper_word_mask();
3970 StubRoutines::x86::_shuffle_byte_flip_mask_addr = generate_shuffle_byte_flip_mask();
3971 StubRoutines::_sha1_implCompress = generate_sha1_implCompress(false, "sha1_implCompress");
3972 StubRoutines::_sha1_implCompressMB = generate_sha1_implCompress(true, "sha1_implCompressMB");
3973 }
3974 // SHA-256: the k256 round-constant table already exists as static data; only
3974 // its address needs publishing.
3974 if (UseSHA256Intrinsics) {
3975 StubRoutines::x86::_k256_adr = (address)StubRoutines::x86::_k256;
3976 StubRoutines::x86::_pshuffle_byte_flip_mask_addr = generate_pshuffle_byte_flip_mask();
3977 StubRoutines::_sha256_implCompress = generate_sha256_implCompress(false, "sha256_implCompress");
3978 StubRoutines::_sha256_implCompressMB = generate_sha256_implCompress(true, "sha256_implCompressMB");
3979 }
3980
3981 // Generate GHASH intrinsics code
3982 if (UseGHASHIntrinsics) {
3983 StubRoutines::x86::_ghash_long_swap_mask_addr = generate_ghash_long_swap_mask();
3984 StubRoutines::x86::_ghash_byte_swap_mask_addr = generate_ghash_byte_swap_mask();
3985 StubRoutines::_ghash_processBlocks = generate_ghash_processBlocks();
3986 }
3987
|