
src/cpu/x86/vm/stubGenerator_x86_32.cpp


        

*** 2091,2119 ****
                                                               entry_oop_arraycopy,
                                                               entry_jlong_arraycopy,
                                                               entry_checkcast_arraycopy);
    }
  
-   void generate_math_stubs() {
-     {
-       StubCodeMark mark(this, "StubRoutines", "log10");
-       StubRoutines::_intrinsic_log10 = (double (*)(double)) __ pc();
- 
-       __ fld_d(Address(rsp, 4));
-       __ flog10();
-       __ ret(0);
-     }
-     {
-       StubCodeMark mark(this, "StubRoutines", "tan");
-       StubRoutines::_intrinsic_tan = (double (*)(double)) __ pc();
- 
-       __ fld_d(Address(rsp, 4));
-       __ trigfunc('t');
-       __ ret(0);
-     }
-   }
- 
    // AES intrinsic stubs
    enum {AESBlockSize = 16};
  
    address generate_key_shuffle_mask() {
      __ align(16);
--- 2091,2100 ----
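Side note (not part of the webrev): the removed generate_math_stubs() above published its log10 and tan entry points directly as function pointers through the (double (*)(double)) __ pc() cast, so the generated code could be invoked like an ordinary double-to-double C function. The following is a minimal sketch of that calling pattern only; the names call_math_stub and unary_math_stub_t are hypothetical, and the address typedef is a stand-in for HotSpot's own.

    // Sketch only, not from the patch.
    typedef unsigned char* address;               // stand-in for HotSpot's 'address' typedef
    typedef double (*unary_math_stub_t)(double);  // same shape as the (double (*)(double)) cast above

    // Hypothetical helper: reinterpret a stub's entry point as a plain
    // double -> double function and call it, mirroring how the old
    // _intrinsic_log10 / _intrinsic_tan pointers were produced.
    static double call_math_stub(address entry, double x) {
      return reinterpret_cast<unary_math_stub_t>(entry)(x);
    }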
*** 3532,3541 ****
--- 3513,3547 ----
      return start;
  
    }
  
+   address generate_libmLog10() {
+     address start = __ pc();
+ 
+     const XMMRegister x0 = xmm0;
+     const XMMRegister x1 = xmm1;
+     const XMMRegister x2 = xmm2;
+     const XMMRegister x3 = xmm3;
+ 
+     const XMMRegister x4 = xmm4;
+     const XMMRegister x5 = xmm5;
+     const XMMRegister x6 = xmm6;
+     const XMMRegister x7 = xmm7;
+ 
+     const Register tmp = rbx;
+ 
+     BLOCK_COMMENT("Entry:");
+     __ enter(); // required for proper stackwalking of RuntimeStub frame
+     __ fast_log10(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
+     __ leave(); // required for proper stackwalking of RuntimeStub frame
+     __ ret(0);
+ 
+     return start;
+ 
+   }
+ 
    address generate_libmPow() {
      address start = __ pc();
  
      const XMMRegister x0 = xmm0;
      const XMMRegister x1 = xmm1;
*** 3628,3637 ****
--- 3634,3681 ----
      return start;
  
    }
  
+   address generate_libm_tan_cot_huge() {
+     address start = __ pc();
+ 
+     const XMMRegister x0 = xmm0;
+     const XMMRegister x1 = xmm1;
+ 
+     BLOCK_COMMENT("Entry:");
+     __ libm_tancot_huge(x0, x1, rax, rcx, rdx, rbx, rsi, rdi, rbp, rsp);
+ 
+     return start;
+ 
+   }
+ 
+   address generate_libmTan() {
+     address start = __ pc();
+ 
+     const XMMRegister x0 = xmm0;
+     const XMMRegister x1 = xmm1;
+     const XMMRegister x2 = xmm2;
+     const XMMRegister x3 = xmm3;
+ 
+     const XMMRegister x4 = xmm4;
+     const XMMRegister x5 = xmm5;
+     const XMMRegister x6 = xmm6;
+     const XMMRegister x7 = xmm7;
+ 
+     const Register tmp = rbx;
+ 
+     BLOCK_COMMENT("Entry:");
+     __ enter(); // required for proper stackwalking of RuntimeStub frame
+     __ fast_tan(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
+     __ leave(); // required for proper stackwalking of RuntimeStub frame
+     __ ret(0);
+ 
+     return start;
+ 
+   }
+ 
    // Safefetch stubs.
    void generate_safefetch(const char* name, int size, address* entry,
                            address* fault_pc, address* continuation_pc) {
      // safefetch signatures:
      //   int SafeFetch32(int* adr, int errValue);
*** 3851,3878 ****
        bool supports_clmul = VM_Version::supports_clmul();
        StubRoutines::x86::generate_CRC32C_table(supports_clmul);
        StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
        StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
      }
  
!     if (VM_Version::supports_sse2()) {
        StubRoutines::_dexp = generate_libmExp();
        StubRoutines::_dlog = generate_libmLog();
        StubRoutines::_dpow = generate_libmPow();
-       if (UseLibmSinIntrinsic || UseLibmCosIntrinsic) {
          StubRoutines::_dlibm_reduce_pi04l = generate_libm_reduce_pi04l();
          StubRoutines::_dlibm_sin_cos_huge = generate_libm_sin_cos_huge();
-       }
-       if (UseLibmSinIntrinsic) {
          StubRoutines::_dsin = generate_libmSin();
-       }
-       if (UseLibmCosIntrinsic) {
          StubRoutines::_dcos = generate_libmCos();
!       }
      }
    }
  
- 
    void generate_all() {
      // Generates all stubs and initializes the entry points
      // These entry points require SharedInfo::stack0 to be set up in non-core builds
      // and need to be relocatable, so they each fabricate a RuntimeStub internally.
--- 3895,3923 ----
        bool supports_clmul = VM_Version::supports_clmul();
        StubRoutines::x86::generate_CRC32C_table(supports_clmul);
        StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
        StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
      }
  
!     if (VM_Version::supports_sse2() && UseLibmIntrinsic) {
!       StubRoutines::x86::_L_2il0floatpacket_0_adr = (address)StubRoutines::x86::_L_2il0floatpacket_0;
!       StubRoutines::x86::_Pi4Inv_adr = (address)StubRoutines::x86::_Pi4Inv;
!       StubRoutines::x86::_Pi4x3_adr = (address)StubRoutines::x86::_Pi4x3;
!       StubRoutines::x86::_Pi4x4_adr = (address)StubRoutines::x86::_Pi4x4;
!       StubRoutines::x86::_ones_adr = (address)StubRoutines::x86::_ones;
        StubRoutines::_dexp = generate_libmExp();
        StubRoutines::_dlog = generate_libmLog();
+       StubRoutines::_dlog10 = generate_libmLog10();
        StubRoutines::_dpow = generate_libmPow();
        StubRoutines::_dlibm_reduce_pi04l = generate_libm_reduce_pi04l();
        StubRoutines::_dlibm_sin_cos_huge = generate_libm_sin_cos_huge();
        StubRoutines::_dsin = generate_libmSin();
        StubRoutines::_dcos = generate_libmCos();
!       StubRoutines::_dlibm_tan_cot_huge = generate_libm_tan_cot_huge();
!       StubRoutines::_dtan = generate_libmTan();
      }
    }
  
    void generate_all() {
      // Generates all stubs and initializes the entry points
      // These entry points require SharedInfo::stack0 to be set up in non-core builds
      // and need to be relocatable, so they each fabricate a RuntimeStub internally.
*** 3887,3898 ****
      StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
  
      // arraycopy stubs used by compilers
      generate_arraycopy_stubs();
  
-     generate_math_stubs();
- 
      // don't bother generating these AES intrinsic stubs unless global flag is set
      if (UseAESIntrinsics) {
        StubRoutines::x86::_key_shuffle_mask_addr = generate_key_shuffle_mask();  // might be needed by the others
        StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();
--- 3932,3941 ----