// NOTE(review): This physical line is diff-extraction residue — original source line
// numbers (2970-3035, then a jump to 4637-4639) are fused into the code text. Content,
// in order: (a) the tail of an arraycopy stub-initialization routine that aliases the
// uninitialized-oop "arrayof" copy entries to the plain oop copy entries; (b) all of
// generate_math_stubs(), which emits x87-based stubs for the log10/sin/cos/tan
// intrinsics — each stub spills xmm0 to the stack, loads it onto the FPU stack
// (fld_d), runs flog10() or trigfunc('s'/'c'/'t'), and moves the result back to xmm0;
// (c) the AESBlockSize enum and the opening of generate_key_shuffle_mask(); and
// (d) a fragment of a pow-like stub saving xmm6/xmm7 (presumably the Win64
// callee-saved xmm preservation — the matching #ifdef _WIN64 is outside this view).
// Tokens below are preserved byte-identical for later de-mangling.
2970 2971 StubRoutines::_arrayof_oop_disjoint_arraycopy_uninit = StubRoutines::_oop_disjoint_arraycopy_uninit; 2972 StubRoutines::_arrayof_oop_arraycopy_uninit = StubRoutines::_oop_arraycopy_uninit; 2973 } 2974 2975 void generate_math_stubs() { 2976 { 2977 StubCodeMark mark(this, "StubRoutines", "log10"); 2978 StubRoutines::_intrinsic_log10 = (double (*)(double)) __ pc(); 2979 2980 __ subq(rsp, 8); 2981 __ movdbl(Address(rsp, 0), xmm0); 2982 __ fld_d(Address(rsp, 0)); 2983 __ flog10(); 2984 __ fstp_d(Address(rsp, 0)); 2985 __ movdbl(xmm0, Address(rsp, 0)); 2986 __ addq(rsp, 8); 2987 __ ret(0); 2988 } 2989 { 2990 StubCodeMark mark(this, "StubRoutines", "sin"); 2991 StubRoutines::_intrinsic_sin = (double (*)(double)) __ pc(); 2992 2993 __ subq(rsp, 8); 2994 __ movdbl(Address(rsp, 0), xmm0); 2995 __ fld_d(Address(rsp, 0)); 2996 __ trigfunc('s'); 2997 __ fstp_d(Address(rsp, 0)); 2998 __ movdbl(xmm0, Address(rsp, 0)); 2999 __ addq(rsp, 8); 3000 __ ret(0); 3001 } 3002 { 3003 StubCodeMark mark(this, "StubRoutines", "cos"); 3004 StubRoutines::_intrinsic_cos = (double (*)(double)) __ pc(); 3005 3006 __ subq(rsp, 8); 3007 __ movdbl(Address(rsp, 0), xmm0); 3008 __ fld_d(Address(rsp, 0)); 3009 __ trigfunc('c'); 3010 __ fstp_d(Address(rsp, 0)); 3011 __ movdbl(xmm0, Address(rsp, 0)); 3012 __ addq(rsp, 8); 3013 __ ret(0); 3014 } 3015 { 3016 StubCodeMark mark(this, "StubRoutines", "tan"); 3017 StubRoutines::_intrinsic_tan = (double (*)(double)) __ pc(); 3018 3019 __ subq(rsp, 8); 3020 __ movdbl(Address(rsp, 0), xmm0); 3021 __ fld_d(Address(rsp, 0)); 3022 __ trigfunc('t'); 3023 __ fstp_d(Address(rsp, 0)); 3024 __ movdbl(xmm0, Address(rsp, 0)); 3025 __ addq(rsp, 8); 3026 __ ret(0); 3027 } 3028 } 3029 3030 // AES intrinsic stubs 3031 enum {AESBlockSize = 16}; 3032 3033 address generate_key_shuffle_mask() { 3034 __ align(16); 3035 StubCodeMark mark(this, "StubRoutines", "key_shuffle_mask"); 4637 __ subptr(rsp, 4 * wordSize); 4638 __ movdqu(Address(rsp, 0), xmm6); 4639 __ 
// NOTE(review): Continuation of the pow-like stub begun on the previous physical line
// (extraction residue; fused line numbers 4640-4674). Content: the fast_pow dispatch
// (x0-x7 xmm args plus rax/rcx/rdx and tmp1-tmp4 GPR scratch), the Win64-only restore
// of xmm6/xmm7 and stack cleanup, leave/ret/return start; then the #undef/#define that
// switches the __ macro from the stub generator's assembler to a plain masm-> receiver;
// then the original block comment explaining why implicit-exception continuation stubs
// must be RuntimeStubs (callee-saved oop map) rather than BufferBlobs. Because this is
// all one physical line, the first // (at residue marker 4644) syntactically comments
// out the remainder — preserved byte-identical regardless.
movdqu(Address(rsp, 2 * wordSize), xmm7); 4640 #endif 4641 __ fast_pow(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp1, tmp2, tmp3, tmp4); 4642 4643 #ifdef _WIN64 4644 // restore xmm regs belonging to calling function 4645 __ movdqu(xmm6, Address(rsp, 0)); 4646 __ movdqu(xmm7, Address(rsp, 2 * wordSize)); 4647 __ addptr(rsp, 4 * wordSize); 4648 #endif 4649 4650 __ leave(); // required for proper stackwalking of RuntimeStub frame 4651 __ ret(0); 4652 4653 return start; 4654 4655 } 4656 4657 #undef __ 4658 #define __ masm-> 4659 4660 // Continuation point for throwing of implicit exceptions that are 4661 // not handled in the current activation. Fabricates an exception 4662 // oop and initiates normal exception dispatching in this 4663 // frame. Since we need to preserve callee-saved values (currently 4664 // only for C2, but done for C1 as well) we need a callee-saved oop 4665 // map and therefore have to make these stubs into RuntimeStubs 4666 // rather than BufferBlobs. If the compiler needs all registers to 4667 // be preserved between the fault point and the exception handler 4668 // then it must assume responsibility for that in 4669 // AbstractCompiler::continuation_for_implicit_null_exception or 4670 // continuation_for_implicit_division_by_zero_exception. All other 4671 // implicit exceptions (e.g., NullPointerException or 4672 // AbstractMethodError on entry) are either at call sites or 4673 // otherwise assume that stack unwinding will be initiated, so 4674 // caller saved registers were assumed volatile in the compiler. 
// NOTE(review): Extraction residue with a discontinuity: the generate_throw_exception
// signature is cut off after its second parameter (fused numbers jump 4676 -> 4832).
// After the jump: wiring inside a stub-initialization routine — the delayed
// StackOverflowError throw entry; UseCRC32Intrinsics setup (table address is set
// before generate_updateBytesCRC32(), which uses it); UseCRC32CIntrinsics setup keyed
// on VM_Version::supports_clmul(); and, under supports_sse2(), the libm-derived
// _dexp/_dlog/_dpow stubs. The line ends with the opening of generate_all() and its
// comment about entry points fabricating RuntimeStubs. Tokens preserved byte-identical.
4675 address generate_throw_exception(const char* name, 4676 address runtime_entry, 4832 generate_throw_exception("delayed StackOverflowError throw_exception", 4833 CAST_FROM_FN_PTR(address, 4834 SharedRuntime:: 4835 throw_delayed_StackOverflowError)); 4836 if (UseCRC32Intrinsics) { 4837 // set table address before stub generation which use it 4838 StubRoutines::_crc_table_adr = (address)StubRoutines::x86::_crc_table; 4839 StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32(); 4840 } 4841 4842 if (UseCRC32CIntrinsics) { 4843 bool supports_clmul = VM_Version::supports_clmul(); 4844 StubRoutines::x86::generate_CRC32C_table(supports_clmul); 4845 StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table; 4846 StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul); 4847 } 4848 if (VM_Version::supports_sse2()) { 4849 StubRoutines::_dexp = generate_libmExp(); 4850 StubRoutines::_dlog = generate_libmLog(); 4851 StubRoutines::_dpow = generate_libmPow(); 4852 } 4853 } 4854 4855 void generate_all() { 4856 // Generates all stubs and initializes the entry points 4857 4858 // These entry points require SharedInfo::stack0 to be set up in 4859 // non-core builds and need to be relocatable, so they each 4860 // fabricate a RuntimeStub internally. 
// NOTE(review): This line contains the SOURCE's version separator. Left of the '|':
// the first variant's generate_all() wiring for the AbstractMethodError and
// IncompatibleClassChangeError throw entries (each a RuntimeStub via
// generate_throw_exception). Right of the '|': a SECOND rendering of the same region
// restarts at residue line 2970 — apparently a before/after diff pair. In this second
// variant generate_math_stubs() retains only the log10 and tan x87 stubs; the sin/cos
// x87 stubs present in the first variant are gone, consistent with the
// generate_libmSin/generate_libmCos stubs and UseLibmSinIntrinsic/UseLibmCosIntrinsic
// wiring that appear later in this rendering — TODO confirm against the upstream
// change. The line ends with a fragment of the second variant's pow stub (Win64
// xmm6/xmm7 save, fast_pow call, cut mid-restore). Tokens preserved byte-identical.
4861 StubRoutines::_throw_AbstractMethodError_entry = 4862 generate_throw_exception("AbstractMethodError throw_exception", 4863 CAST_FROM_FN_PTR(address, 4864 SharedRuntime:: 4865 throw_AbstractMethodError)); 4866 4867 StubRoutines::_throw_IncompatibleClassChangeError_entry = 4868 generate_throw_exception("IncompatibleClassChangeError throw_exception", 4869 CAST_FROM_FN_PTR(address, 4870 SharedRuntime:: 4871 throw_IncompatibleClassChangeError)); | 2970 2971 StubRoutines::_arrayof_oop_disjoint_arraycopy_uninit = StubRoutines::_oop_disjoint_arraycopy_uninit; 2972 StubRoutines::_arrayof_oop_arraycopy_uninit = StubRoutines::_oop_arraycopy_uninit; 2973 } 2974 2975 void generate_math_stubs() { 2976 { 2977 StubCodeMark mark(this, "StubRoutines", "log10"); 2978 StubRoutines::_intrinsic_log10 = (double (*)(double)) __ pc(); 2979 2980 __ subq(rsp, 8); 2981 __ movdbl(Address(rsp, 0), xmm0); 2982 __ fld_d(Address(rsp, 0)); 2983 __ flog10(); 2984 __ fstp_d(Address(rsp, 0)); 2985 __ movdbl(xmm0, Address(rsp, 0)); 2986 __ addq(rsp, 8); 2987 __ ret(0); 2988 } 2989 { 2990 StubCodeMark mark(this, "StubRoutines", "tan"); 2991 StubRoutines::_intrinsic_tan = (double (*)(double)) __ pc(); 2992 2993 __ subq(rsp, 8); 2994 __ movdbl(Address(rsp, 0), xmm0); 2995 __ fld_d(Address(rsp, 0)); 2996 __ trigfunc('t'); 2997 __ fstp_d(Address(rsp, 0)); 2998 __ movdbl(xmm0, Address(rsp, 0)); 2999 __ addq(rsp, 8); 3000 __ ret(0); 3001 } 3002 } 3003 3004 // AES intrinsic stubs 3005 enum {AESBlockSize = 16}; 3006 3007 address generate_key_shuffle_mask() { 3008 __ align(16); 3009 StubCodeMark mark(this, "StubRoutines", "key_shuffle_mask"); 4611 __ subptr(rsp, 4 * wordSize); 4612 __ movdqu(Address(rsp, 0), xmm6); 4613 __ movdqu(Address(rsp, 2 * wordSize), xmm7); 4614 #endif 4615 __ fast_pow(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp1, tmp2, tmp3, tmp4); 4616 4617 #ifdef _WIN64 4618 // restore xmm regs belonging to calling function 4619 __ movdqu(xmm6, Address(rsp, 0)); 4620 __ movdqu(xmm7, 
// NOTE(review): Second-variant continuation (residue lines 4621-4693). Content: the
// pow stub's epilogue (Win64 xmm7 restore completed from the previous physical line,
// leave/ret/return start); then all of generate_libmSin() — enter(), Win64-only save
// of callee-saved xmm6/xmm7, a fast_sin dispatch, restore, leave/ret. Observable
// detail: fast_sin is passed FOUR plain GPRs (rax, rbx, rcx, rdx) where fast_pow and
// fast_cos on adjacent lines take only three (rax, rcx, rdx) — the extra rbx scratch
// appears deliberate; confirm against the fast_sin signature in the macro assembler.
// The line ends with generate_libmCos()'s prologue cut mid-comment ("__ enter(); //"),
// so its trailing tokens must stay exactly as-is. Preserved byte-identical below.
Address(rsp, 2 * wordSize)); 4621 __ addptr(rsp, 4 * wordSize); 4622 #endif 4623 4624 __ leave(); // required for proper stackwalking of RuntimeStub frame 4625 __ ret(0); 4626 4627 return start; 4628 4629 } 4630 4631 address generate_libmSin() { 4632 address start = __ pc(); 4633 4634 const XMMRegister x0 = xmm0; 4635 const XMMRegister x1 = xmm1; 4636 const XMMRegister x2 = xmm2; 4637 const XMMRegister x3 = xmm3; 4638 4639 const XMMRegister x4 = xmm4; 4640 const XMMRegister x5 = xmm5; 4641 const XMMRegister x6 = xmm6; 4642 const XMMRegister x7 = xmm7; 4643 4644 const Register tmp1 = r8; 4645 const Register tmp2 = r9; 4646 const Register tmp3 = r10; 4647 const Register tmp4 = r11; 4648 4649 BLOCK_COMMENT("Entry:"); 4650 __ enter(); // required for proper stackwalking of RuntimeStub frame 4651 4652 #ifdef _WIN64 4653 // save the xmm registers which must be preserved 6-7 4654 __ subptr(rsp, 4 * wordSize); 4655 __ movdqu(Address(rsp, 0), xmm6); 4656 __ movdqu(Address(rsp, 2 * wordSize), xmm7); 4657 #endif 4658 __ fast_sin(x0, x1, x2, x3, x4, x5, x6, x7, rax, rbx, rcx, rdx, tmp1, tmp2, tmp3, tmp4); 4659 4660 #ifdef _WIN64 4661 // restore xmm regs belonging to calling function 4662 __ movdqu(xmm6, Address(rsp, 0)); 4663 __ movdqu(xmm7, Address(rsp, 2 * wordSize)); 4664 __ addptr(rsp, 4 * wordSize); 4665 #endif 4666 4667 __ leave(); // required for proper stackwalking of RuntimeStub frame 4668 __ ret(0); 4669 4670 return start; 4671 4672 } 4673 4674 address generate_libmCos() { 4675 address start = __ pc(); 4676 4677 const XMMRegister x0 = xmm0; 4678 const XMMRegister x1 = xmm1; 4679 const XMMRegister x2 = xmm2; 4680 const XMMRegister x3 = xmm3; 4681 4682 const XMMRegister x4 = xmm4; 4683 const XMMRegister x5 = xmm5; 4684 const XMMRegister x6 = xmm6; 4685 const XMMRegister x7 = xmm7; 4686 4687 const Register tmp1 = r8; 4688 const Register tmp2 = r9; 4689 const Register tmp3 = r10; 4690 const Register tmp4 = r11; 4691 4692 BLOCK_COMMENT("Entry:"); 4693 __ enter(); // 
// NOTE(review): Second-variant continuation (residue lines 4694-4734). The first words
// below complete a comment whose "//" sits at the end of the previous physical line —
// in this collapsed rendering they are therefore bare tokens, preserved exactly.
// Content: the body of generate_libmCos() — Win64-only save of callee-saved xmm6/xmm7,
// a fast_cos dispatch (rax/rcx/rdx plus tmp1-tmp4 GPR scratch; no rbx, unlike fast_sin
// on the previous line), restore, leave/ret/return start; then the #undef/#define
// switching __ to masm->; then the same implicit-exception RuntimeStub rationale
// comment block as in the first variant. Tokens preserved byte-identical.
required for proper stackwalking of RuntimeStub frame 4694 4695 #ifdef _WIN64 4696 // save the xmm registers which must be preserved 6-7 4697 __ subptr(rsp, 4 * wordSize); 4698 __ movdqu(Address(rsp, 0), xmm6); 4699 __ movdqu(Address(rsp, 2 * wordSize), xmm7); 4700 #endif 4701 __ fast_cos(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp1, tmp2, tmp3, tmp4); 4702 4703 #ifdef _WIN64 4704 // restore xmm regs belonging to calling function 4705 __ movdqu(xmm6, Address(rsp, 0)); 4706 __ movdqu(xmm7, Address(rsp, 2 * wordSize)); 4707 __ addptr(rsp, 4 * wordSize); 4708 #endif 4709 4710 __ leave(); // required for proper stackwalking of RuntimeStub frame 4711 __ ret(0); 4712 4713 return start; 4714 4715 } 4716 4717 #undef __ 4718 #define __ masm-> 4719 4720 // Continuation point for throwing of implicit exceptions that are 4721 // not handled in the current activation. Fabricates an exception 4722 // oop and initiates normal exception dispatching in this 4723 // frame. Since we need to preserve callee-saved values (currently 4724 // only for C2, but done for C1 as well) we need a callee-saved oop 4725 // map and therefore have to make these stubs into RuntimeStubs 4726 // rather than BufferBlobs. If the compiler needs all registers to 4727 // be preserved between the fault point and the exception handler 4728 // then it must assume responsibility for that in 4729 // AbstractCompiler::continuation_for_implicit_null_exception or 4730 // continuation_for_implicit_division_by_zero_exception. All other 4731 // implicit exceptions (e.g., NullPointerException or 4732 // AbstractMethodError on entry) are either at call sites or 4733 // otherwise assume that stack unwinding will be initiated, so 4734 // caller saved registers were assumed volatile in the compiler. 
// NOTE(review): Second-variant counterpart of the truncated throw-exception /
// stub-wiring region (residue jump 4736 -> 4892). Differences visible versus the
// first variant: under supports_sse2(), after _dexp/_dlog/_dpow this version also
// installs _dsin = generate_libmSin() when UseLibmSinIntrinsic and
// _dcos = generate_libmCos() when UseLibmCosIntrinsic — matching the libm stubs
// defined earlier in this rendering. The CRC32/CRC32C wiring is unchanged. The line
// then opens generate_all() with the AbstractMethodError and
// IncompatibleClassChangeError throw entries and is cut off at a trailing '|'
// (end-of-rendering marker, kept as-is). Tokens preserved byte-identical.
4735 address generate_throw_exception(const char* name, 4736 address runtime_entry, 4892 generate_throw_exception("delayed StackOverflowError throw_exception", 4893 CAST_FROM_FN_PTR(address, 4894 SharedRuntime:: 4895 throw_delayed_StackOverflowError)); 4896 if (UseCRC32Intrinsics) { 4897 // set table address before stub generation which use it 4898 StubRoutines::_crc_table_adr = (address)StubRoutines::x86::_crc_table; 4899 StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32(); 4900 } 4901 4902 if (UseCRC32CIntrinsics) { 4903 bool supports_clmul = VM_Version::supports_clmul(); 4904 StubRoutines::x86::generate_CRC32C_table(supports_clmul); 4905 StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table; 4906 StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul); 4907 } 4908 if (VM_Version::supports_sse2()) { 4909 StubRoutines::_dexp = generate_libmExp(); 4910 StubRoutines::_dlog = generate_libmLog(); 4911 StubRoutines::_dpow = generate_libmPow(); 4912 if (UseLibmSinIntrinsic) { 4913 StubRoutines::_dsin = generate_libmSin(); 4914 } 4915 if (UseLibmCosIntrinsic) { 4916 StubRoutines::_dcos = generate_libmCos(); 4917 } 4918 } 4919 } 4920 4921 void generate_all() { 4922 // Generates all stubs and initializes the entry points 4923 4924 // These entry points require SharedInfo::stack0 to be set up in 4925 // non-core builds and need to be relocatable, so they each 4926 // fabricate a RuntimeStub internally. 4927 StubRoutines::_throw_AbstractMethodError_entry = 4928 generate_throw_exception("AbstractMethodError throw_exception", 4929 CAST_FROM_FN_PTR(address, 4930 SharedRuntime:: 4931 throw_AbstractMethodError)); 4932 4933 StubRoutines::_throw_IncompatibleClassChangeError_entry = 4934 generate_throw_exception("IncompatibleClassChangeError throw_exception", 4935 CAST_FROM_FN_PTR(address, 4936 SharedRuntime:: 4937 throw_IncompatibleClassChangeError)); |