925 __ align(CodeEntryAlignment);
926 StubCodeMark mark(this, "StubRoutines", stub_name);
927 address start = __ pc();
928
929 __ emit_data64( mask, relocInfo::none );
930 __ emit_data64( mask, relocInfo::none );
931
932 return start;
933 }
934
935 // The following routine generates a subroutine to throw an
936 // asynchronous UnknownError when an unsafe access gets a fault that
937 // could not be reasonably prevented by the programmer. (Example:
938 // SIGBUS/OBJERR.)
939 address generate_handler_for_unsafe_access() {
940 StubCodeMark mark(this, "StubRoutines", "handler_for_unsafe_access");
941 address start = __ pc();
942
943 __ push(0); // hole for return address-to-be
944 __ pusha(); // push registers
945 Address next_pc(rsp, RegisterImpl::number_of_registers * BytesPerWord);
946
947 // FIXME: this probably needs alignment logic
948
949 __ subptr(rsp, frame::arg_reg_save_area_bytes);
950 BLOCK_COMMENT("call handle_unsafe_access");
951 __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, handle_unsafe_access)));
952 __ addptr(rsp, frame::arg_reg_save_area_bytes);
953
954 __ movptr(next_pc, rax); // stuff next address
955 __ popa();
956 __ ret(0); // jump to next address
957
958 return start;
959 }
960
961 // Non-destructive plausibility checks for oops
962 //
963 // Arguments:
964 // all args on stack!
965 //
2993 __ movdbl(xmm0, Address(rsp, 0));
2994 __ addq(rsp, 8);
2995 __ ret(0);
2996 }
2997 }
2998
// AES intrinsic stubs
// AES processes data in fixed 128-bit (16-byte) blocks regardless of key size.
enum {AESBlockSize = 16};
3001
// Emits the 16-byte pshufb mask used by load_key: the index bytes
// (03 02 01 00 | 07 06 05 04 | ...) reverse the byte order within each
// 32-bit lane of an xmm register, re-ordering AES key words as the
// AES instructions expect.
address generate_key_shuffle_mask() {
  __ align(16);   // 16-byte alignment so the constant is a valid aligned xmm operand
  StubCodeMark mark(this, "StubRoutines", "key_shuffle_mask");
  address start = __ pc();
  __ emit_data64( 0x0405060700010203, relocInfo::none );   // low  8 bytes of the mask
  __ emit_data64( 0x0c0d0e0f08090a0b, relocInfo::none );   // high 8 bytes of the mask
  return start;
}
3010
// Utility routine for loading a 128-bit key word in little-endian format;
// callers can optionally indicate that the shuffle mask is already in an xmm register.
3013 void load_key(XMMRegister xmmdst, Register key, int offset, XMMRegister xmm_shuf_mask=NULL) {
3014 __ movdqu(xmmdst, Address(key, offset));
3015 if (xmm_shuf_mask != NULL) {
3016 __ pshufb(xmmdst, xmm_shuf_mask);
3017 } else {
3018 __ pshufb(xmmdst, ExternalAddress(StubRoutines::x86::key_shuffle_mask_addr()));
3019 }
3020 }
3021
3022 // Arguments:
3023 //
3024 // Inputs:
3025 // c_rarg0 - source byte array address
3026 // c_rarg1 - destination byte array address
3027 // c_rarg2 - K (key) in little endian int array
3028 //
3029 address generate_aescrypt_encryptBlock() {
3030 assert(UseAES, "need AES instructions and misaligned SSE support");
3031 __ align(CodeEntryAlignment);
3032 StubCodeMark mark(this, "StubRoutines", "aescrypt_encryptBlock");
3033 Label L_doLast;
3034 address start = __ pc();
3035
|
925 __ align(CodeEntryAlignment);
926 StubCodeMark mark(this, "StubRoutines", stub_name);
927 address start = __ pc();
928
929 __ emit_data64( mask, relocInfo::none );
930 __ emit_data64( mask, relocInfo::none );
931
932 return start;
933 }
934
935 // The following routine generates a subroutine to throw an
936 // asynchronous UnknownError when an unsafe access gets a fault that
937 // could not be reasonably prevented by the programmer. (Example:
938 // SIGBUS/OBJERR.)
939 address generate_handler_for_unsafe_access() {
940 StubCodeMark mark(this, "StubRoutines", "handler_for_unsafe_access");
941 address start = __ pc();
942
943 __ push(0); // hole for return address-to-be
944 __ pusha(); // push registers
945 Address next_pc(rsp, Register::number_of_registers * BytesPerWord);
946
947 // FIXME: this probably needs alignment logic
948
949 __ subptr(rsp, frame::arg_reg_save_area_bytes);
950 BLOCK_COMMENT("call handle_unsafe_access");
951 __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, handle_unsafe_access)));
952 __ addptr(rsp, frame::arg_reg_save_area_bytes);
953
954 __ movptr(next_pc, rax); // stuff next address
955 __ popa();
956 __ ret(0); // jump to next address
957
958 return start;
959 }
960
961 // Non-destructive plausibility checks for oops
962 //
963 // Arguments:
964 // all args on stack!
965 //
2993 __ movdbl(xmm0, Address(rsp, 0));
2994 __ addq(rsp, 8);
2995 __ ret(0);
2996 }
2997 }
2998
// AES intrinsic stubs
// AES processes data in fixed 128-bit (16-byte) blocks regardless of key size.
enum {AESBlockSize = 16};
3001
// Emits the 16-byte pshufb mask used by load_key: the index bytes
// (03 02 01 00 | 07 06 05 04 | ...) reverse the byte order within each
// 32-bit lane of an xmm register, re-ordering AES key words as the
// AES instructions expect.
address generate_key_shuffle_mask() {
  __ align(16);   // 16-byte alignment so the constant is a valid aligned xmm operand
  StubCodeMark mark(this, "StubRoutines", "key_shuffle_mask");
  address start = __ pc();
  __ emit_data64( 0x0405060700010203, relocInfo::none );   // low  8 bytes of the mask
  __ emit_data64( 0x0c0d0e0f08090a0b, relocInfo::none );   // high 8 bytes of the mask
  return start;
}
3010
// Utility routine for loading a 128-bit key word in little-endian format;
// callers can optionally indicate that the shuffle mask is already in an xmm register.
3013 void load_key(XMMRegister xmmdst, Register key, int offset, XMMRegister xmm_shuf_mask = xnoreg) {
3014 __ movdqu(xmmdst, Address(key, offset));
3015 if (xmm_shuf_mask != xnoreg) {
3016 __ pshufb(xmmdst, xmm_shuf_mask);
3017 } else {
3018 __ pshufb(xmmdst, ExternalAddress(StubRoutines::x86::key_shuffle_mask_addr()));
3019 }
3020 }
3021
3022 // Arguments:
3023 //
3024 // Inputs:
3025 // c_rarg0 - source byte array address
3026 // c_rarg1 - destination byte array address
3027 // c_rarg2 - K (key) in little endian int array
3028 //
3029 address generate_aescrypt_encryptBlock() {
3030 assert(UseAES, "need AES instructions and misaligned SSE support");
3031 __ align(CodeEntryAlignment);
3032 StubCodeMark mark(this, "StubRoutines", "aescrypt_encryptBlock");
3033 Label L_doLast;
3034 address start = __ pc();
3035
|