src/cpu/x86/vm/stubGenerator_x86_32.cpp

2086     StubRoutines::_generic_arraycopy =
2087         generate_generic_copy("generic_arraycopy",
2088                                entry_jbyte_arraycopy,
2089                                entry_jshort_arraycopy,
2090                                entry_jint_arraycopy,
2091                                entry_oop_arraycopy,
2092                                entry_jlong_arraycopy,
2093                                entry_checkcast_arraycopy);
2094   }
2095 
2096   void generate_math_stubs() {
2097     {
2098       StubCodeMark mark(this, "StubRoutines", "log10");
2099       StubRoutines::_intrinsic_log10 = (double (*)(double)) __ pc();
2100 
2101       __ fld_d(Address(rsp, 4));
2102       __ flog10();
2103       __ ret(0);
2104     }
2105     {
2106       StubCodeMark mark(this, "StubRoutines", "sin");
2107       StubRoutines::_intrinsic_sin = (double (*)(double))  __ pc();
2108 
2109       __ fld_d(Address(rsp, 4));
2110       __ trigfunc('s');
2111       __ ret(0);
2112     }
2113     {
2114       StubCodeMark mark(this, "StubRoutines", "cos");
2115       StubRoutines::_intrinsic_cos = (double (*)(double)) __ pc();
2116 
2117       __ fld_d(Address(rsp, 4));
2118       __ trigfunc('c');
2119       __ ret(0);
2120     }
2121     {
2122       StubCodeMark mark(this, "StubRoutines", "tan");
2123       StubRoutines::_intrinsic_tan = (double (*)(double)) __ pc();
2124 
2125       __ fld_d(Address(rsp, 4));
2126       __ trigfunc('t');
2127       __ ret(0);
2128     }
2129   }
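
Each block above publishes the current code-buffer position as a plain C function pointer: the cast of __ pc() to double (*)(double) is what makes the generated x87 sequence directly callable from C++ code. A minimal sketch of the calling convention these stubs rely on (the wrapper name is illustrative; only the pointer type comes from the code above):

    // Hedged sketch, not HotSpot code: only the pointer type double (*)(double)
    // is taken from the stub generator above.
    static double call_math_stub(double (*stub)(double), double x) {
      // Under the 32-bit cdecl convention the caller pushes x on the stack, so
      // inside the stub the argument sits at [esp+4] -- hence
      // fld_d(Address(rsp, 4)) -- and the result comes back in ST(0), which is
      // exactly where a C double return value is expected.
      return stub(x);
    }
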
2130 
2131   // AES intrinsic stubs
2132   enum {AESBlockSize = 16};
2133 
2134   address generate_key_shuffle_mask() {
2135     __ align(16);
2136     StubCodeMark mark(this, "StubRoutines", "key_shuffle_mask");
2137     address start = __ pc();
2138     __ emit_data(0x00010203, relocInfo::none, 0 );
2139     __ emit_data(0x04050607, relocInfo::none, 0 );
2140     __ emit_data(0x08090a0b, relocInfo::none, 0 );
2141     __ emit_data(0x0c0d0e0f, relocInfo::none, 0 );
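
emit_data writes each 32-bit constant little-endian, so the sixteen mask bytes land in memory as 03 02 01 00 07 06 05 04 0b 0a 09 08 0f 0e 0d 0c. Used as a PSHUFB control by the AES stubs, that reverses the byte order within each 32-bit word when key material is loaded. A small host-side sketch of the same permutation (the helper is illustrative; only the constants come from the emit_data calls above):

    #include <cstdint>

    // Byte layout of the key shuffle mask as emitted above (little-endian words).
    static const uint8_t key_shuffle_mask[16] = {
      0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04,
      0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c
    };

    // What PSHUFB does with this control: dst[i] = src[mask[i]], i.e. the bytes
    // inside each 4-byte lane come out reversed.
    static void shuffle_like_pshufb(const uint8_t src[16], uint8_t dst[16]) {
      for (int i = 0; i < 16; i++) {
        dst[i] = src[key_shuffle_mask[i]];
      }
    }
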


3424    const XMMRegister x2 = xmm2;
3425    const XMMRegister x3 = xmm3;
3426 
3427    const XMMRegister x4 = xmm4;
3428    const XMMRegister x5 = xmm5;
3429    const XMMRegister x6 = xmm6;
3430    const XMMRegister x7 = xmm7;
3431 
3432    const Register tmp = rbx;
3433 
3434    BLOCK_COMMENT("Entry:");
3435    __ enter(); // required for proper stackwalking of RuntimeStub frame
3436    __ fast_pow(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
3437    __ leave(); // required for proper stackwalking of RuntimeStub frame
3438    __ ret(0);
3439 
3440    return start;
3441 
3442  }
3443 

3444 
3445   // Safefetch stubs.
3446   void generate_safefetch(const char* name, int size, address* entry,
3447                           address* fault_pc, address* continuation_pc) {
3448     // safefetch signatures:
3449     //   int      SafeFetch32(int*      adr, int      errValue);
3450     //   intptr_t SafeFetchN (intptr_t* adr, intptr_t errValue);
3451 
3452     StubCodeMark mark(this, "StubRoutines", name);
3453 
3454     // Entry point, pc or function descriptor.
3455     *entry = __ pc();
3456 
3457     __ movl(rax, Address(rsp, 0x8));
3458     __ movl(rcx, Address(rsp, 0x4));
3459     // Load *adr into eax, may fault.
3460     *fault_pc = __ pc();
3461     switch (size) {
3462       case 4:
3463         // int32_t
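
The stub only makes the single load fault-safe: the address is picked up from [esp+4] into ecx, the error value from [esp+8] into eax, and if the load at fault_pc traps, execution resumes at continuation_pc with errValue still in eax as the return value. Deciding whether an address is actually readable is left to the caller; a hedged sketch of the usual probing pattern on top of SafeFetch32 (the helper name is illustrative, only the SafeFetch32 signature comes from the comment above):

    // Hedged sketch of a probing helper built on SafeFetch32; the helper name
    // is illustrative, only the SafeFetch32 signature comes from the comment
    // above.
    int SafeFetch32(int* adr, int errValue);   // implemented by the stub

    static bool probably_readable(int* adr) {
      // Probe twice with different error values: a real memory word cannot
      // match both sentinels, so matching both means the loads faulted and the
      // stub returned errValue each time.
      return !(SafeFetch32(adr, -1) == -1 && SafeFetch32(adr, -2) == -2);
    }
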


3652                                                                                       CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
3653     StubRoutines::_throw_delayed_StackOverflowError_entry  = generate_throw_exception("delayed StackOverflowError throw_exception",
3654                                                                                       CAST_FROM_FN_PTR(address, SharedRuntime::throw_delayed_StackOverflowError));
3655 
3656     if (UseCRC32Intrinsics) {
3657       // set table address before stub generation, which uses it
3658       StubRoutines::_crc_table_adr = (address)StubRoutines::x86::_crc_table;
3659       StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
3660     }
3661 
3662     if (UseCRC32CIntrinsics) {
3663       bool supports_clmul = VM_Version::supports_clmul();
3664       StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3665       StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3666       StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3667     }
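
generate_CRC32C_table() fills in the lookup tables used by the non-CLMUL fallback before the stub that reads them is generated. The core of any such table is the standard byte-indexed recurrence for the Castagnoli polynomial; a minimal, self-contained sketch (this is not the HotSpot helper, it only shows the underlying recurrence):

    #include <cstdint>

    // Minimal sketch: one 256-entry CRC32C (Castagnoli) table built with the
    // reflected polynomial 0x82F63B78. Shown only to illustrate the recurrence;
    // generate_CRC32C_table() produces the table layout the stub actually reads.
    static void build_crc32c_table(uint32_t table[256]) {
      for (uint32_t i = 0; i < 256; i++) {
        uint32_t crc = i;
        for (int bit = 0; bit < 8; bit++) {
          crc = (crc & 1) ? (crc >> 1) ^ 0x82F63B78u : (crc >> 1);
        }
        table[i] = crc;
      }
    }
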
3668     if (VM_Version::supports_sse2()) {
3669       StubRoutines::_dexp = generate_libmExp();
3670       StubRoutines::_dlog = generate_libmLog();
3671       StubRoutines::_dpow = generate_libmPow();
3672     }
3673   }
3674 
3675 
3676   void generate_all() {
3677     // Generates all stubs and initializes the entry points
3678 
3679     // These entry points require SharedInfo::stack0 to be set up in non-core builds
3680     // and need to be relocatable, so they each fabricate a RuntimeStub internally.
3681     StubRoutines::_throw_AbstractMethodError_entry         = generate_throw_exception("AbstractMethodError throw_exception",          CAST_FROM_FN_PTR(address, SharedRuntime::throw_AbstractMethodError));
3682     StubRoutines::_throw_IncompatibleClassChangeError_entry= generate_throw_exception("IncompatibleClassChangeError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_IncompatibleClassChangeError));
3683     StubRoutines::_throw_NullPointerException_at_call_entry= generate_throw_exception("NullPointerException at call throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
3684 
3685     //------------------------------------------------------------------------------------------------------------------------
3686     // entry points that are platform specific
3687 
3688     // support for verify_oop (must happen after universe_init)
3689     StubRoutines::_verify_oop_subroutine_entry     = generate_verify_oop();
3690 
3691     // arraycopy stubs used by compilers




2086     StubRoutines::_generic_arraycopy =
2087         generate_generic_copy("generic_arraycopy",
2088                                entry_jbyte_arraycopy,
2089                                entry_jshort_arraycopy,
2090                                entry_jint_arraycopy,
2091                                entry_oop_arraycopy,
2092                                entry_jlong_arraycopy,
2093                                entry_checkcast_arraycopy);
2094   }
2095 
2096   void generate_math_stubs() {
2097     {
2098       StubCodeMark mark(this, "StubRoutines", "log10");
2099       StubRoutines::_intrinsic_log10 = (double (*)(double)) __ pc();
2100 
2101       __ fld_d(Address(rsp, 4));
2102       __ flog10();
2103       __ ret(0);
2104     }
2105     {
2106       StubCodeMark mark(this, "StubRoutines", "tan");
2107       StubRoutines::_intrinsic_tan = (double (*)(double)) __ pc();
2108 
2109       __ fld_d(Address(rsp, 4));
2110       __ trigfunc('t');
2111       __ ret(0);
2112     }
2113   }
2114 
2115   // AES intrinsic stubs
2116   enum {AESBlockSize = 16};
2117 
2118   address generate_key_shuffle_mask() {
2119     __ align(16);
2120     StubCodeMark mark(this, "StubRoutines", "key_shuffle_mask");
2121     address start = __ pc();
2122     __ emit_data(0x00010203, relocInfo::none, 0 );
2123     __ emit_data(0x04050607, relocInfo::none, 0 );
2124     __ emit_data(0x08090a0b, relocInfo::none, 0 );
2125     __ emit_data(0x0c0d0e0f, relocInfo::none, 0 );


3408    const XMMRegister x2 = xmm2;
3409    const XMMRegister x3 = xmm3;
3410 
3411    const XMMRegister x4 = xmm4;
3412    const XMMRegister x5 = xmm5;
3413    const XMMRegister x6 = xmm6;
3414    const XMMRegister x7 = xmm7;
3415 
3416    const Register tmp = rbx;
3417 
3418    BLOCK_COMMENT("Entry:");
3419    __ enter(); // required for proper stackwalking of RuntimeStub frame
3420    __ fast_pow(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
3421    __ leave(); // required for proper stackwalking of RuntimeStub frame
3422    __ ret(0);
3423 
3424    return start;
3425 
3426  }
3427 
3428  address generate_libm_reduce_pi04l() {
3429    address start = __ pc();
3430 
3431    BLOCK_COMMENT("Entry:");
3432    __ libm_reduce_pi04l(rax, rcx, rdx, rbx, rsi, rdi, rbp, rsp);
3433 
3434    return start;
3435 
3436  }
3437 
3438  address generate_libm_sin_cos_huge() {
3439    address start = __ pc();
3440 
3441    const XMMRegister x0 = xmm0;
3442    const XMMRegister x1 = xmm1;
3443 
3444    BLOCK_COMMENT("Entry:");
3445    __ libm_sincos_huge(x0, x1, rax, rcx, rdx, rbx, rsi, rdi, rbp, rsp);
3446 
3447    return start;
3448 
3449  }
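
Both helpers are generated before the sin/cos stubs because the fast paths fall back to them when |x| is too large for the in-line argument reduction; note that neither is framed with enter()/leave() and both hand raw rsp/rbp to the macro assembler, consistent with being reached from inside the fast_sin/fast_cos code rather than called directly as C functions. A purely illustrative sketch of what argument reduction buys (not HotSpot's algorithm; the naive reduction below is exactly what loses precision for huge |x| and what the extended-precision helper avoids):

    #include <cmath>

    // Illustrative only: fold x into [-pi/4, pi/4] around the nearest multiple
    // of pi/2 and let the quadrant pick the core function. reduce_pi04l does
    // this kind of reduction in extended precision; the naive version below
    // drifts for huge |x|.
    static double sin_by_naive_reduction(double x) {
      const double pio2 = 1.57079632679489661923;
      double k = std::floor(x / pio2 + 0.5);        // nearest multiple of pi/2
      double r = x - k * pio2;                      // reduced argument
      switch ((long long)k & 3) {                   // quadrant selection
        case 0:  return  std::sin(r);
        case 1:  return  std::cos(r);
        case 2:  return -std::sin(r);
        default: return -std::cos(r);
      }
    }
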
3450 
3451  address generate_libmSin() {
3452    address start = __ pc();
3453 
3454    const XMMRegister x0 = xmm0;
3455    const XMMRegister x1 = xmm1;
3456    const XMMRegister x2 = xmm2;
3457    const XMMRegister x3 = xmm3;
3458 
3459    const XMMRegister x4 = xmm4;
3460    const XMMRegister x5 = xmm5;
3461    const XMMRegister x6 = xmm6;
3462    const XMMRegister x7 = xmm7;
3463 
3464    BLOCK_COMMENT("Entry:");
3465    __ enter(); // required for proper stackwalking of RuntimeStub frame
3466    __ fast_sin(x0, x1, x2, x3, x4, x5, x6, x7, rax, rbx, rdx);
3467    __ leave(); // required for proper stackwalking of RuntimeStub frame
3468    __ ret(0);
3469 
3470    return start;
3471 
3472  }
3473 
3474  address generate_libmCos() {
3475    address start = __ pc();
3476 
3477    const XMMRegister x0 = xmm0;
3478    const XMMRegister x1 = xmm1;
3479    const XMMRegister x2 = xmm2;
3480    const XMMRegister x3 = xmm3;
3481 
3482    const XMMRegister x4 = xmm4;
3483    const XMMRegister x5 = xmm5;
3484    const XMMRegister x6 = xmm6;
3485    const XMMRegister x7 = xmm7;
3486 
3487    const Register tmp = rbx;
3488 
3489    BLOCK_COMMENT("Entry:");
3490    __ enter(); // required for proper stackwalking of RuntimeStub frame
3491    __ fast_cos(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
3492    __ leave(); // required for proper stackwalking of RuntimeStub frame
3493    __ ret(0);
3494 
3495    return start;
3496 
3497  }
3498 
3499   // Safefetch stubs.
3500   void generate_safefetch(const char* name, int size, address* entry,
3501                           address* fault_pc, address* continuation_pc) {
3502     // safefetch signatures:
3503     //   int      SafeFetch32(int*      adr, int      errValue);
3504     //   intptr_t SafeFetchN (intptr_t* adr, intptr_t errValue);
3505 
3506     StubCodeMark mark(this, "StubRoutines", name);
3507 
3508     // Entry point, pc or function descriptor.
3509     *entry = __ pc();
3510 
3511     __ movl(rax, Address(rsp, 0x8));
3512     __ movl(rcx, Address(rsp, 0x4));
3513     // Load *adr into eax, may fault.
3514     *fault_pc = __ pc();
3515     switch (size) {
3516       case 4:
3517         // int32_t


3706                                                                                       CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
3707     StubRoutines::_throw_delayed_StackOverflowError_entry  = generate_throw_exception("delayed StackOverflowError throw_exception",
3708                                                                                       CAST_FROM_FN_PTR(address, SharedRuntime::throw_delayed_StackOverflowError));
3709 
3710     if (UseCRC32Intrinsics) {
3711       // set table address before stub generation, which uses it
3712       StubRoutines::_crc_table_adr = (address)StubRoutines::x86::_crc_table;
3713       StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
3714     }
3715 
3716     if (UseCRC32CIntrinsics) {
3717       bool supports_clmul = VM_Version::supports_clmul();
3718       StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3719       StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3720       StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3721     }
3722     if (VM_Version::supports_sse2()) {
3723       StubRoutines::_dexp = generate_libmExp();
3724       StubRoutines::_dlog = generate_libmLog();
3725       StubRoutines::_dpow = generate_libmPow();
3726       if (UseLibmSinIntrinsic || UseLibmCosIntrinsic) {
3727         StubRoutines::_dlibm_reduce_pi04l = generate_libm_reduce_pi04l();
3728         StubRoutines::_dlibm_sin_cos_huge = generate_libm_sin_cos_huge();
3729       }
3730       if (UseLibmSinIntrinsic) {
3731         StubRoutines::_dsin = generate_libmSin();
3732       }
3733       if (UseLibmCosIntrinsic) {
3734         StubRoutines::_dcos = generate_libmCos();
3735       }
3736     }
3737   }
3738 
3739 
3740   void generate_all() {
3741     // Generates all stubs and initializes the entry points
3742 
3743     // These entry points require SharedInfo::stack0 to be set up in non-core builds
3744     // and need to be relocatable, so they each fabricate a RuntimeStub internally.
3745     StubRoutines::_throw_AbstractMethodError_entry         = generate_throw_exception("AbstractMethodError throw_exception",          CAST_FROM_FN_PTR(address, SharedRuntime::throw_AbstractMethodError));
3746     StubRoutines::_throw_IncompatibleClassChangeError_entry= generate_throw_exception("IncompatibleClassChangeError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_IncompatibleClassChangeError));
3747     StubRoutines::_throw_NullPointerException_at_call_entry= generate_throw_exception("NullPointerException at call throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
3748 
3749     //------------------------------------------------------------------------------------------------------------------------
3750     // entry points that are platform specific
3751 
3752     // support for verify_oop (must happen after universe_init)
3753     StubRoutines::_verify_oop_subroutine_entry     = generate_verify_oop();
3754 
3755     // arraycopy stubs used by compilers

