3022 __ trigfunc('c');
3023 __ fstp_d(Address(rsp, 0));
3024 __ movdbl(xmm0, Address(rsp, 0));
3025 __ addq(rsp, 8);
3026 __ ret(0);
3027 }
    {
      // Stub for double tan(double): record the current code position as
      // the entry point of the tan intrinsic.
      StubCodeMark mark(this, "StubRoutines", "tan");
      StubRoutines::_intrinsic_tan = (double (*)(double)) __ pc();

      // Spill the SSE argument (xmm0) to a fresh stack slot and load it
      // onto the x87 FPU stack, since trigfunc operates on the x87 stack.
      __ subq(rsp, 8);
      __ movdbl(Address(rsp, 0), xmm0);
      __ fld_d(Address(rsp, 0));
      __ trigfunc('t');                 // 't' selects tangent
      // Pop the x87 result through the stack slot back into xmm0.
      __ fstp_d(Address(rsp, 0));
      __ movdbl(xmm0, Address(rsp, 0));
      __ addq(rsp, 8);
      __ ret(0);
    }
    {
      // Stub for double exp(double): record the current code position as
      // the entry point of the exp intrinsic.
      StubCodeMark mark(this, "StubRoutines", "exp");
      StubRoutines::_intrinsic_exp = (double (*)(double)) __ pc();

      // Spill the SSE argument (xmm0) to a fresh stack slot and load it
      // onto the x87 FPU stack for exp_with_fallback.
      __ subq(rsp, 8);
      __ movdbl(Address(rsp, 0), xmm0);
      __ fld_d(Address(rsp, 0));
      // NOTE(review): name suggests a runtime-call fallback for inputs the
      // inline sequence cannot handle — confirm in MacroAssembler.
      __ exp_with_fallback(0);
      // Pop the x87 result through the stack slot back into xmm0.
      __ fstp_d(Address(rsp, 0));
      __ movdbl(xmm0, Address(rsp, 0));
      __ addq(rsp, 8);
      __ ret(0);
    }
    {
      // Stub for double pow(double base, double exponent): record the
      // current code position as the entry point of the pow intrinsic.
      StubCodeMark mark(this, "StubRoutines", "pow");
      StubRoutines::_intrinsic_pow = (double (*)(double,double)) __ pc();

      // Push both SSE arguments onto the x87 stack through one stack slot:
      // exponent (xmm1) first, then base (xmm0), so the base is on top.
      __ subq(rsp, 8);
      __ movdbl(Address(rsp, 0), xmm1);
      __ fld_d(Address(rsp, 0));
      __ movdbl(Address(rsp, 0), xmm0);
      __ fld_d(Address(rsp, 0));
      // NOTE(review): name suggests a runtime-call fallback for inputs the
      // inline sequence cannot handle — confirm in MacroAssembler.
      __ pow_with_fallback(0);
      // Pop the x87 result through the stack slot back into xmm0.
      __ fstp_d(Address(rsp, 0));
      __ movdbl(xmm0, Address(rsp, 0));
      __ addq(rsp, 8);
      __ ret(0);
    }
3069 }
3070
  // AES intrinsic stubs
  enum {AESBlockSize = 16};  // AES processes data in 128-bit (16-byte) blocks
3073
3074 address generate_key_shuffle_mask() {
4105 __ enter(); // required for proper stackwalking of RuntimeStub frame
4106
4107 setup_arg_regs(4); // out => rdi, in => rsi, offset => rdx
4108 // len => rcx, k => r8
4109 // r9 and r10 may be used to save non-volatile registers
4110 #ifdef _WIN64
4111 // last argument is on stack on Win64
4112 __ movl(k, Address(rsp, 6 * wordSize));
4113 #endif
4114 __ movptr(r11, rdx); // move offset in rdx to offset(r11)
4115 __ mul_add(out, in, offset, len, k, tmp1, tmp2, tmp3, tmp4, tmp5, rdx, rax);
4116
4117 restore_arg_regs();
4118
4119 __ leave(); // required for proper stackwalking of RuntimeStub frame
4120 __ ret(0);
4121
4122 return start;
4123 }
4124
4125
4126 #undef __
4127 #define __ masm->
4128
4129 // Continuation point for throwing of implicit exceptions that are
4130 // not handled in the current activation. Fabricates an exception
4131 // oop and initiates normal exception dispatching in this
4132 // frame. Since we need to preserve callee-saved values (currently
4133 // only for C2, but done for C1 as well) we need a callee-saved oop
4134 // map and therefore have to make these stubs into RuntimeStubs
4135 // rather than BufferBlobs. If the compiler needs all registers to
4136 // be preserved between the fault point and the exception handler
4137 // then it must assume responsibility for that in
4138 // AbstractCompiler::continuation_for_implicit_null_exception or
4139 // continuation_for_implicit_division_by_zero_exception. All other
4140 // implicit exceptions (e.g., NullPointerException or
4141 // AbstractMethodError on entry) are either at call sites or
4142 // otherwise assume that stack unwinding will be initiated, so
4143 // caller saved registers were assumed volatile in the compiler.
4144 address generate_throw_exception(const char* name,
4285 StubRoutines::_handler_for_unsafe_access_entry =
4286 generate_handler_for_unsafe_access();
4287
4288 // platform dependent
4289 StubRoutines::x86::_get_previous_fp_entry = generate_get_previous_fp();
4290 StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
4291
4292 StubRoutines::x86::_verify_mxcsr_entry = generate_verify_mxcsr();
4293
4294 // Build this early so it's available for the interpreter.
4295 StubRoutines::_throw_StackOverflowError_entry =
4296 generate_throw_exception("StackOverflowError throw_exception",
4297 CAST_FROM_FN_PTR(address,
4298 SharedRuntime::
4299 throw_StackOverflowError));
4300 if (UseCRC32Intrinsics) {
4301       // set the table address before generating the stubs that use it
4302 StubRoutines::_crc_table_adr = (address)StubRoutines::x86::_crc_table;
4303 StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
4304 }
4305 }
4306
4307 void generate_all() {
4308 // Generates all stubs and initializes the entry points
4309
4310 // These entry points require SharedInfo::stack0 to be set up in
4311 // non-core builds and need to be relocatable, so they each
4312 // fabricate a RuntimeStub internally.
4313 StubRoutines::_throw_AbstractMethodError_entry =
4314 generate_throw_exception("AbstractMethodError throw_exception",
4315 CAST_FROM_FN_PTR(address,
4316 SharedRuntime::
4317 throw_AbstractMethodError));
4318
4319 StubRoutines::_throw_IncompatibleClassChangeError_entry =
4320 generate_throw_exception("IncompatibleClassChangeError throw_exception",
4321 CAST_FROM_FN_PTR(address,
4322 SharedRuntime::
4323 throw_IncompatibleClassChangeError));
4324
|
3022 __ trigfunc('c');
3023 __ fstp_d(Address(rsp, 0));
3024 __ movdbl(xmm0, Address(rsp, 0));
3025 __ addq(rsp, 8);
3026 __ ret(0);
3027 }
    {
      // Stub for double tan(double): record the current code position as
      // the entry point of the tan intrinsic.
      StubCodeMark mark(this, "StubRoutines", "tan");
      StubRoutines::_intrinsic_tan = (double (*)(double)) __ pc();

      // Spill the SSE argument (xmm0) to a fresh stack slot and load it
      // onto the x87 FPU stack, since trigfunc operates on the x87 stack.
      __ subq(rsp, 8);
      __ movdbl(Address(rsp, 0), xmm0);
      __ fld_d(Address(rsp, 0));
      __ trigfunc('t');                 // 't' selects tangent
      // Pop the x87 result through the stack slot back into xmm0.
      __ fstp_d(Address(rsp, 0));
      __ movdbl(xmm0, Address(rsp, 0));
      __ addq(rsp, 8);
      __ ret(0);
    }
    {
      // Stub for double pow(double base, double exponent): record the
      // current code position as the entry point of the pow intrinsic.
      StubCodeMark mark(this, "StubRoutines", "pow");
      StubRoutines::_intrinsic_pow = (double (*)(double,double)) __ pc();

      // Push both SSE arguments onto the x87 stack through one stack slot:
      // exponent (xmm1) first, then base (xmm0), so the base is on top.
      __ subq(rsp, 8);
      __ movdbl(Address(rsp, 0), xmm1);
      __ fld_d(Address(rsp, 0));
      __ movdbl(Address(rsp, 0), xmm0);
      __ fld_d(Address(rsp, 0));
      // NOTE(review): name suggests a runtime-call fallback for inputs the
      // inline sequence cannot handle — confirm in MacroAssembler.
      __ pow_with_fallback(0);
      // Pop the x87 result through the stack slot back into xmm0.
      __ fstp_d(Address(rsp, 0));
      __ movdbl(xmm0, Address(rsp, 0));
      __ addq(rsp, 8);
      __ ret(0);
    }
3056 }
3057
  // AES intrinsic stubs
  enum {AESBlockSize = 16};  // AES processes data in 128-bit (16-byte) blocks
3060
3061 address generate_key_shuffle_mask() {
4092 __ enter(); // required for proper stackwalking of RuntimeStub frame
4093
4094 setup_arg_regs(4); // out => rdi, in => rsi, offset => rdx
4095 // len => rcx, k => r8
4096 // r9 and r10 may be used to save non-volatile registers
4097 #ifdef _WIN64
4098 // last argument is on stack on Win64
4099 __ movl(k, Address(rsp, 6 * wordSize));
4100 #endif
4101 __ movptr(r11, rdx); // move offset in rdx to offset(r11)
4102 __ mul_add(out, in, offset, len, k, tmp1, tmp2, tmp3, tmp4, tmp5, rdx, rax);
4103
4104 restore_arg_regs();
4105
4106 __ leave(); // required for proper stackwalking of RuntimeStub frame
4107 __ ret(0);
4108
4109 return start;
4110 }
4111
4112 address generate_libmExp() {
4113 address start = __ pc();
4114
4115 const XMMRegister x0 = xmm0;
4116 const XMMRegister x1 = xmm1;
4117 const XMMRegister x2 = xmm2;
4118 const XMMRegister x3 = xmm3;
4119
4120 const XMMRegister x4 = xmm4;
4121 const XMMRegister x5 = xmm5;
4122 const XMMRegister x6 = xmm6;
4123 const XMMRegister x7 = xmm7;
4124
4125 const Register tmp = r11;
4126
4127 BLOCK_COMMENT("Entry:");
4128 __ enter(); // required for proper stackwalking of RuntimeStub frame
4129
4130 #ifdef _WIN64
4131 // save the xmm registers which must be preserved 6-7
4132 __ movdqu(xmm_save(6), as_XMMRegister(6));
4133 __ movdqu(xmm_save(7), as_XMMRegister(7));
4134 #endif
4135 __ fast_exp(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
4136
4137 #ifdef _WIN64
4138 // restore xmm regs belonging to calling function
4139 __ movdqu(as_XMMRegister(6), xmm_save(6));
4140 __ movdqu(as_XMMRegister(7), xmm_save(7));
4141 #endif
4142
4143 __ leave(); // required for proper stackwalking of RuntimeStub frame
4144 __ ret(0);
4145
4146 return start;
4147
4148 }
4149
4150
4151 #undef __
4152 #define __ masm->
4153
4154 // Continuation point for throwing of implicit exceptions that are
4155 // not handled in the current activation. Fabricates an exception
4156 // oop and initiates normal exception dispatching in this
4157 // frame. Since we need to preserve callee-saved values (currently
4158 // only for C2, but done for C1 as well) we need a callee-saved oop
4159 // map and therefore have to make these stubs into RuntimeStubs
4160 // rather than BufferBlobs. If the compiler needs all registers to
4161 // be preserved between the fault point and the exception handler
4162 // then it must assume responsibility for that in
4163 // AbstractCompiler::continuation_for_implicit_null_exception or
4164 // continuation_for_implicit_division_by_zero_exception. All other
4165 // implicit exceptions (e.g., NullPointerException or
4166 // AbstractMethodError on entry) are either at call sites or
4167 // otherwise assume that stack unwinding will be initiated, so
4168 // caller saved registers were assumed volatile in the compiler.
4169 address generate_throw_exception(const char* name,
4310 StubRoutines::_handler_for_unsafe_access_entry =
4311 generate_handler_for_unsafe_access();
4312
4313 // platform dependent
4314 StubRoutines::x86::_get_previous_fp_entry = generate_get_previous_fp();
4315 StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
4316
4317 StubRoutines::x86::_verify_mxcsr_entry = generate_verify_mxcsr();
4318
4319 // Build this early so it's available for the interpreter.
4320 StubRoutines::_throw_StackOverflowError_entry =
4321 generate_throw_exception("StackOverflowError throw_exception",
4322 CAST_FROM_FN_PTR(address,
4323 SharedRuntime::
4324 throw_StackOverflowError));
4325 if (UseCRC32Intrinsics) {
4326       // set the table address before generating the stubs that use it
4327 StubRoutines::_crc_table_adr = (address)StubRoutines::x86::_crc_table;
4328 StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
4329 }
4330 StubRoutines::_dexp = generate_libmExp();
4331 }
4332
4333 void generate_all() {
4334 // Generates all stubs and initializes the entry points
4335
4336 // These entry points require SharedInfo::stack0 to be set up in
4337 // non-core builds and need to be relocatable, so they each
4338 // fabricate a RuntimeStub internally.
4339 StubRoutines::_throw_AbstractMethodError_entry =
4340 generate_throw_exception("AbstractMethodError throw_exception",
4341 CAST_FROM_FN_PTR(address,
4342 SharedRuntime::
4343 throw_AbstractMethodError));
4344
4345 StubRoutines::_throw_IncompatibleClassChangeError_entry =
4346 generate_throw_exception("IncompatibleClassChangeError throw_exception",
4347 CAST_FROM_FN_PTR(address,
4348 SharedRuntime::
4349 throw_IncompatibleClassChangeError));
4350
|