6083
// --- Stub-entry wiring, copy 1 of 2 in this file ---
// Each line below carries an embedded original line number (6083+): this is a
// numbered excerpt, apparently of a HotSpot x86_64 StubGenerator routine that
// fills in StubRoutines entry points from freshly generated stubs -- TODO
// confirm provenance; the enclosing function header is not visible here.
6084 // is referenced by megamorphic call
6085 StubRoutines::_catch_exception_entry = generate_catch_exception();
6086
6087 // atomic calls
6088 StubRoutines::_atomic_xchg_entry = generate_atomic_xchg();
6089 StubRoutines::_atomic_xchg_long_entry = generate_atomic_xchg_long();
6090 StubRoutines::_atomic_cmpxchg_entry = generate_atomic_cmpxchg();
6091 StubRoutines::_atomic_cmpxchg_byte_entry = generate_atomic_cmpxchg_byte();
6092 StubRoutines::_atomic_cmpxchg_long_entry = generate_atomic_cmpxchg_long();
6093 StubRoutines::_atomic_add_entry = generate_atomic_add();
6094 StubRoutines::_atomic_add_long_entry = generate_atomic_add_long();
6095 StubRoutines::_fence_entry = generate_orderaccess_fence();
6096
6097 // platform dependent
6098 StubRoutines::x86::_get_previous_fp_entry = generate_get_previous_fp();
6099 StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
6100
6101 StubRoutines::x86::_verify_mxcsr_entry = generate_verify_mxcsr();
6102
// Ordering is significant: the stack-overflow throw stubs must exist before
// the interpreter is built (per the comment below), so do not move this group.
6103 // Build this early so it's available for the interpreter.
6104 StubRoutines::_throw_StackOverflowError_entry =
6105 generate_throw_exception("StackOverflowError throw_exception",
6106 CAST_FROM_FN_PTR(address,
6107 SharedRuntime::
6108 throw_StackOverflowError));
6109 StubRoutines::_throw_delayed_StackOverflowError_entry =
6110 generate_throw_exception("delayed StackOverflowError throw_exception",
6111 CAST_FROM_FN_PTR(address,
6112 SharedRuntime::
6113 throw_delayed_StackOverflowError));
6114 if (UseCRC32Intrinsics) {
6115 // set table address before stub generation which use it
6116 StubRoutines::_crc_table_adr = (address)StubRoutines::x86::_crc_table;
6117 StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
6118 }
6119
6120 if (UseCRC32CIntrinsics) {
6121 bool supports_clmul = VM_Version::supports_clmul();
6122 StubRoutines::x86::generate_CRC32C_table(supports_clmul);
6123 StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
6124 StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
6125 }
// NOTE(review): this copy guards the libm table setup on supports_sse2();
// the second copy of this sequence later in the file does not -- confirm
// which guard is intended.
6126 if (VM_Version::supports_sse2() && UseLibmIntrinsic && InlineIntrinsics) {
6127 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin) ||
6128 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos) ||
6129 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dtan)) {
// Publish the addresses of the constant tables used by the sin/cos/tan stubs.
6130 StubRoutines::x86::_ONEHALF_adr = (address)StubRoutines::x86::_ONEHALF;
6131 StubRoutines::x86::_P_2_adr = (address)StubRoutines::x86::_P_2;
6132 StubRoutines::x86::_SC_4_adr = (address)StubRoutines::x86::_SC_4;
6133 StubRoutines::x86::_Ctable_adr = (address)StubRoutines::x86::_Ctable;
6134 StubRoutines::x86::_SC_2_adr = (address)StubRoutines::x86::_SC_2;
6135 StubRoutines::x86::_SC_3_adr = (address)StubRoutines::x86::_SC_3;
6136 StubRoutines::x86::_SC_1_adr = (address)StubRoutines::x86::_SC_1;
6137 StubRoutines::x86::_PI_INV_TABLE_adr = (address)StubRoutines::x86::_PI_INV_TABLE;
6138 StubRoutines::x86::_PI_4_adr = (address)StubRoutines::x86::_PI_4;
6139 StubRoutines::x86::_PI32INV_adr = (address)StubRoutines::x86::_PI32INV;
6140 StubRoutines::x86::_SIGN_MASK_adr = (address)StubRoutines::x86::_SIGN_MASK;
6141 StubRoutines::x86::_P_1_adr = (address)StubRoutines::x86::_P_1;
6142 StubRoutines::x86::_P_3_adr = (address)StubRoutines::x86::_P_3;
6143 StubRoutines::x86::_NEG_ZERO_adr = (address)StubRoutines::x86::_NEG_ZERO;
6144 }
6145 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dexp)) {
6146 StubRoutines::_dexp = generate_libmExp();
// NOTE(review): the embedded numbering jumps from 6146 to 6174 here -- lines
// are missing from this excerpt (the _dexp if-block and its siblings are
// truncated); do not assume the code below follows immediately.
6174 // fabricate a RuntimeStub internally.
6175 StubRoutines::_throw_AbstractMethodError_entry =
6176 generate_throw_exception("AbstractMethodError throw_exception",
6177 CAST_FROM_FN_PTR(address,
6178 SharedRuntime::
6179 throw_AbstractMethodError));
6180
6181 StubRoutines::_throw_IncompatibleClassChangeError_entry =
6182 generate_throw_exception("IncompatibleClassChangeError throw_exception",
6183 CAST_FROM_FN_PTR(address,
6184 SharedRuntime::
6185 throw_IncompatibleClassChangeError));
6186
6187 StubRoutines::_throw_NullPointerException_at_call_entry =
6188 generate_throw_exception("NullPointerException at call throw_exception",
6189 CAST_FROM_FN_PTR(address,
6190 SharedRuntime::
6191 throw_NullPointerException_at_call));
6192
6193 // entry points that are platform specific
6194 StubRoutines::x86::_f2i_fixup = generate_f2i_fixup();
6195 StubRoutines::x86::_f2l_fixup = generate_f2l_fixup();
6196 StubRoutines::x86::_d2i_fixup = generate_d2i_fixup();
6197 StubRoutines::x86::_d2l_fixup = generate_d2l_fixup();
6198
// Scalar and vector sign-mask/flip constants used by FP negation/abs code.
6199 StubRoutines::x86::_float_sign_mask = generate_fp_mask("float_sign_mask", 0x7FFFFFFF7FFFFFFF);
6200 StubRoutines::x86::_float_sign_flip = generate_fp_mask("float_sign_flip", 0x8000000080000000);
6201 StubRoutines::x86::_double_sign_mask = generate_fp_mask("double_sign_mask", 0x7FFFFFFFFFFFFFFF);
6202 StubRoutines::x86::_double_sign_flip = generate_fp_mask("double_sign_flip", 0x8000000000000000);
6203 StubRoutines::x86::_vector_float_sign_mask = generate_vector_mask("vector_float_sign_mask", 0x7FFFFFFF7FFFFFFF);
6204 StubRoutines::x86::_vector_float_sign_flip = generate_vector_mask("vector_float_sign_flip", 0x8000000080000000);
6205 StubRoutines::x86::_vector_double_sign_mask = generate_vector_mask("vector_double_sign_mask", 0x7FFFFFFFFFFFFFFF);
6206 StubRoutines::x86::_vector_double_sign_flip = generate_vector_mask("vector_double_sign_flip", 0x8000000000000000);
6207 StubRoutines::x86::_vector_short_to_byte_mask = generate_vector_mask("vector_short_to_byte_mask", 0x00ff00ff00ff00ff);
6208 StubRoutines::x86::_vector_byte_perm_mask = generate_vector_byte_perm_mask("vector_byte_perm_mask");
6209 StubRoutines::x86::_vector_long_sign_mask = generate_vector_mask("vector_long_sign_mask", 0x8000000000000000);
6210
6211 // support for verify_oop (must happen after universe_init)
6212 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
6213
6214 // data cache line writeback
6215 StubRoutines::_data_cache_writeback = generate_data_cache_writeback();
6216 StubRoutines::_data_cache_writeback_sync = generate_data_cache_writeback_sync();
6217
6218 // arraycopy stubs used by compilers
6219 generate_arraycopy_stubs();
6220
6221 // don't bother generating these AES intrinsic stubs unless global flag is set
6222 if (UseAESIntrinsics) {
// NOTE(review): excerpt ends here mid-block -- the UseAESIntrinsics if is
// unclosed in this view; its body continues beyond the excerpt.
|
6083
// --- Stub-entry wiring, copy 2 of 2 in this file ---
// Second numbered excerpt of the same stub-wiring sequence. It differs from
// the first copy above in two visible ways: (1) the f2i/f2l/d2i/d2l fixup and
// scalar fp-mask stubs are generated earlier, before the StackOverflowError
// throw stubs; (2) the libm guard drops the VM_Version::supports_sse2()
// check. TODO confirm which copy is the intended/current version.
6084 // is referenced by megamorphic call
6085 StubRoutines::_catch_exception_entry = generate_catch_exception();
6086
6087 // atomic calls
6088 StubRoutines::_atomic_xchg_entry = generate_atomic_xchg();
6089 StubRoutines::_atomic_xchg_long_entry = generate_atomic_xchg_long();
6090 StubRoutines::_atomic_cmpxchg_entry = generate_atomic_cmpxchg();
6091 StubRoutines::_atomic_cmpxchg_byte_entry = generate_atomic_cmpxchg_byte();
6092 StubRoutines::_atomic_cmpxchg_long_entry = generate_atomic_cmpxchg_long();
6093 StubRoutines::_atomic_add_entry = generate_atomic_add();
6094 StubRoutines::_atomic_add_long_entry = generate_atomic_add_long();
6095 StubRoutines::_fence_entry = generate_orderaccess_fence();
6096
6097 // platform dependent
6098 StubRoutines::x86::_get_previous_fp_entry = generate_get_previous_fp();
6099 StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
6100
6101 StubRoutines::x86::_verify_mxcsr_entry = generate_verify_mxcsr();
6102
// In this copy the float/double conversion fixup stubs and scalar sign
// masks are generated here (earlier than in copy 1).
6103 StubRoutines::x86::_f2i_fixup = generate_f2i_fixup();
6104 StubRoutines::x86::_f2l_fixup = generate_f2l_fixup();
6105 StubRoutines::x86::_d2i_fixup = generate_d2i_fixup();
6106 StubRoutines::x86::_d2l_fixup = generate_d2l_fixup();
6107
6108 StubRoutines::x86::_float_sign_mask = generate_fp_mask("float_sign_mask", 0x7FFFFFFF7FFFFFFF);
6109 StubRoutines::x86::_float_sign_flip = generate_fp_mask("float_sign_flip", 0x8000000080000000);
6110 StubRoutines::x86::_double_sign_mask = generate_fp_mask("double_sign_mask", 0x7FFFFFFFFFFFFFFF);
6111 StubRoutines::x86::_double_sign_flip = generate_fp_mask("double_sign_flip", 0x8000000000000000);
6112
// Ordering is significant: these throw stubs must exist before the
// interpreter is built (per the comment below).
6113 // Build this early so it's available for the interpreter.
6114 StubRoutines::_throw_StackOverflowError_entry =
6115 generate_throw_exception("StackOverflowError throw_exception",
6116 CAST_FROM_FN_PTR(address,
6117 SharedRuntime::
6118 throw_StackOverflowError));
6119 StubRoutines::_throw_delayed_StackOverflowError_entry =
6120 generate_throw_exception("delayed StackOverflowError throw_exception",
6121 CAST_FROM_FN_PTR(address,
6122 SharedRuntime::
6123 throw_delayed_StackOverflowError));
6124 if (UseCRC32Intrinsics) {
6125 // set table address before stub generation which use it
6126 StubRoutines::_crc_table_adr = (address)StubRoutines::x86::_crc_table;
6127 StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
6128 }
6129
6130 if (UseCRC32CIntrinsics) {
6131 bool supports_clmul = VM_Version::supports_clmul();
6132 StubRoutines::x86::generate_CRC32C_table(supports_clmul);
6133 StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
6134 StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
6135 }
// NOTE(review): unlike copy 1, no VM_Version::supports_sse2() guard here.
6136 if (UseLibmIntrinsic && InlineIntrinsics) {
6137 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin) ||
6138 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos) ||
6139 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dtan)) {
// Publish the addresses of the constant tables used by the sin/cos/tan stubs.
6140 StubRoutines::x86::_ONEHALF_adr = (address)StubRoutines::x86::_ONEHALF;
6141 StubRoutines::x86::_P_2_adr = (address)StubRoutines::x86::_P_2;
6142 StubRoutines::x86::_SC_4_adr = (address)StubRoutines::x86::_SC_4;
6143 StubRoutines::x86::_Ctable_adr = (address)StubRoutines::x86::_Ctable;
6144 StubRoutines::x86::_SC_2_adr = (address)StubRoutines::x86::_SC_2;
6145 StubRoutines::x86::_SC_3_adr = (address)StubRoutines::x86::_SC_3;
6146 StubRoutines::x86::_SC_1_adr = (address)StubRoutines::x86::_SC_1;
6147 StubRoutines::x86::_PI_INV_TABLE_adr = (address)StubRoutines::x86::_PI_INV_TABLE;
6148 StubRoutines::x86::_PI_4_adr = (address)StubRoutines::x86::_PI_4;
6149 StubRoutines::x86::_PI32INV_adr = (address)StubRoutines::x86::_PI32INV;
6150 StubRoutines::x86::_SIGN_MASK_adr = (address)StubRoutines::x86::_SIGN_MASK;
6151 StubRoutines::x86::_P_1_adr = (address)StubRoutines::x86::_P_1;
6152 StubRoutines::x86::_P_3_adr = (address)StubRoutines::x86::_P_3;
6153 StubRoutines::x86::_NEG_ZERO_adr = (address)StubRoutines::x86::_NEG_ZERO;
6154 }
6155 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dexp)) {
6156 StubRoutines::_dexp = generate_libmExp();
// NOTE(review): the embedded numbering jumps from 6156 to 6184 here -- lines
// are missing from this excerpt (the _dexp if-block and its siblings are
// truncated); do not assume the code below follows immediately.
6184 // fabricate a RuntimeStub internally.
6185 StubRoutines::_throw_AbstractMethodError_entry =
6186 generate_throw_exception("AbstractMethodError throw_exception",
6187 CAST_FROM_FN_PTR(address,
6188 SharedRuntime::
6189 throw_AbstractMethodError));
6190
6191 StubRoutines::_throw_IncompatibleClassChangeError_entry =
6192 generate_throw_exception("IncompatibleClassChangeError throw_exception",
6193 CAST_FROM_FN_PTR(address,
6194 SharedRuntime::
6195 throw_IncompatibleClassChangeError));
6196
6197 StubRoutines::_throw_NullPointerException_at_call_entry =
6198 generate_throw_exception("NullPointerException at call throw_exception",
6199 CAST_FROM_FN_PTR(address,
6200 SharedRuntime::
6201 throw_NullPointerException_at_call));
6202
6203 // entry points that are platform specific
6204 StubRoutines::x86::_vector_float_sign_mask = generate_vector_mask("vector_float_sign_mask", 0x7FFFFFFF7FFFFFFF);
6205 StubRoutines::x86::_vector_float_sign_flip = generate_vector_mask("vector_float_sign_flip", 0x8000000080000000);
6206 StubRoutines::x86::_vector_double_sign_mask = generate_vector_mask("vector_double_sign_mask", 0x7FFFFFFFFFFFFFFF);
6207 StubRoutines::x86::_vector_double_sign_flip = generate_vector_mask("vector_double_sign_flip", 0x8000000000000000);
6208 StubRoutines::x86::_vector_short_to_byte_mask = generate_vector_mask("vector_short_to_byte_mask", 0x00ff00ff00ff00ff);
6209 StubRoutines::x86::_vector_byte_perm_mask = generate_vector_byte_perm_mask("vector_byte_perm_mask");
6210 StubRoutines::x86::_vector_long_sign_mask = generate_vector_mask("vector_long_sign_mask", 0x8000000000000000);
6211
6212 // support for verify_oop (must happen after universe_init)
6213 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
6214
6215 // data cache line writeback
6216 StubRoutines::_data_cache_writeback = generate_data_cache_writeback();
6217 StubRoutines::_data_cache_writeback_sync = generate_data_cache_writeback_sync();
6218
6219 // arraycopy stubs used by compilers
6220 generate_arraycopy_stubs();
6221
6222 // don't bother generating these AES intrinsic stubs unless global flag is set
6223 if (UseAESIntrinsics) {
// NOTE(review): excerpt ends here mid-block -- the UseAESIntrinsics if is
// unclosed in this view; its body continues beyond the excerpt.
|