1115 __ push(c_rarg3);
1116
1117 enum {
1118 // After previous pushes.
1119 oop_to_verify = 6 * wordSize,
1120 saved_rax = 7 * wordSize,
1121 saved_r10 = 8 * wordSize,
1122
1123 // Before the call to MacroAssembler::debug(), see below.
1124 return_addr = 16 * wordSize,
1125 error_msg = 17 * wordSize
1126 };
1127
1128 // get object
1129 __ movptr(rax, Address(rsp, oop_to_verify));
1130
1131 // make sure object is 'reasonable'
1132 __ testptr(rax, rax);
1133 __ jcc(Assembler::zero, exit); // if obj is NULL it is OK
1134
// Consistency fix: this ZGC-only guard was spelled UseLoadBarrier, while the
// identical guard elsewhere in this file (load barrier stub generation) uses
// UseZGC, and the guarded code is ZGC-specific (zaddress_bad_mask). Use the
// same flag name so the whole file tests one flag.
1135 if (UseZGC) {
1136 // Check if metadata bits indicate a bad oop
1137 __ testptr(rax, Address(r15_thread, JavaThread::zaddress_bad_mask_offset()));
1138 __ jcc(Assembler::notZero, error);
1139 }
1140
1141 // Check if the oop is in the right area of memory
1142 __ movptr(c_rarg2, rax);
1143 __ movptr(c_rarg3, (intptr_t) Universe::verify_oop_mask());
1144 __ andptr(c_rarg2, c_rarg3);
1145 __ movptr(c_rarg3, (intptr_t) Universe::verify_oop_bits());
1146 __ cmpptr(c_rarg2, c_rarg3);
// After cmpptr, notZero means "not equal": masked oop bits must match the
// expected verify_oop_bits pattern exactly.
1147 __ jcc(Assembler::notZero, error);
1148
1149 // set r12 to heapbase for load_klass()
1150 __ reinit_heapbase();
1151
1152 // make sure klass is 'reasonable', which is not zero.
1153 __ load_klass(rax, rax); // get klass
1154 __ testptr(rax, rax);
1155 __ jcc(Assembler::zero, error); // if klass is NULL it is broken
5291 throw_NullPointerException_at_call));
5292
5293 // entry points that are platform specific
5294 StubRoutines::x86::_f2i_fixup = generate_f2i_fixup();
5295 StubRoutines::x86::_f2l_fixup = generate_f2l_fixup();
5296 StubRoutines::x86::_d2i_fixup = generate_d2i_fixup();
5297 StubRoutines::x86::_d2l_fixup = generate_d2l_fixup();
5298
// Bit masks used by compiled code to flip/clear the sign of packed floats
// and doubles (two 32-bit lanes for float, one 64-bit lane for double).
5299 StubRoutines::x86::_float_sign_mask = generate_fp_mask("float_sign_mask", 0x7FFFFFFF7FFFFFFF);
5300 StubRoutines::x86::_float_sign_flip = generate_fp_mask("float_sign_flip", 0x8000000080000000);
5301 StubRoutines::x86::_double_sign_mask = generate_fp_mask("double_sign_mask", 0x7FFFFFFFFFFFFFFF);
5302 StubRoutines::x86::_double_sign_flip = generate_fp_mask("double_sign_flip", 0x8000000000000000);
5303
5304 // support for verify_oop (must happen after universe_init)
5305 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
5306
5307 // arraycopy stubs used by compilers
5308 generate_arraycopy_stubs();
5309
5310 // Load barrier stubs
// Consistency fix: this guard was spelled UseLoadBarrier, while the matching
// ZGC guard in generate_verify_oop() uses UseZGC and the stubs generated
// here call the ZGC runtime entry points (z_load_barrier_*). Use one flag
// name throughout the file.
5311 if (UseZGC) {
5312 address loadbarrier_address = CAST_FROM_FN_PTR(address, SharedRuntime::z_load_barrier_on_oop_field_preloaded);
5313 address loadbarrier_weak_address = CAST_FROM_FN_PTR(address, SharedRuntime::z_load_barrier_on_weak_oop_field_preloaded);
// Generate one strong and one weak slow-path stub per general-purpose
// register, indexed by register encoding, so compiled code can jump to the
// stub matching the register that holds the loaded oop.
5314 Register rr = as_Register(0);
5315 for (int i = 0; i < RegisterImpl::number_of_registers; i++) {
5316 StubRoutines::x86::_load_barrier_slow_stub[i] = generate_load_barrier_stub(rr, loadbarrier_address, false);
5317 StubRoutines::x86::_load_barrier_weak_slow_stub[i] = generate_load_barrier_stub(rr, loadbarrier_weak_address, true);
5318 rr = rr->successor();
5319 }
5320 }
5321
5322 // don't bother generating these AES intrinsic stubs unless global flag is set
5323 if (UseAESIntrinsics) {
5324 StubRoutines::x86::_key_shuffle_mask_addr = generate_key_shuffle_mask(); // needed by the others
5325 StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();
5326 StubRoutines::_aescrypt_decryptBlock = generate_aescrypt_decryptBlock();
5327 StubRoutines::_cipherBlockChaining_encryptAESCrypt = generate_cipherBlockChaining_encryptAESCrypt();
5328 StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt_Parallel();
5329 }
5330 if (UseAESCTRIntrinsics){
5331 StubRoutines::x86::_counter_shuffle_mask_addr = generate_counter_shuffle_mask();
|
// Middle of the verify_oop stub: earlier pushes (outside this view) saved
// caller registers; the offsets below address the saved slots and the
// caller's arguments relative to rsp after those pushes.
1115 __ push(c_rarg3);
1116
1117 enum {
1118 // After previous pushes.
1119 oop_to_verify = 6 * wordSize,
1120 saved_rax = 7 * wordSize,
1121 saved_r10 = 8 * wordSize,
1122
1123 // Before the call to MacroAssembler::debug(), see below.
1124 return_addr = 16 * wordSize,
1125 error_msg = 17 * wordSize
1126 };
1127
1128 // get object
1129 __ movptr(rax, Address(rsp, oop_to_verify));
1130
1131 // make sure object is 'reasonable'
1132 __ testptr(rax, rax);
1133 __ jcc(Assembler::zero, exit); // if obj is NULL it is OK
1134
// ZGC colored-pointer check: an oop with any bit of the thread-local bad
// mask set is stale for the current GC phase and therefore invalid here.
1135 if (UseZGC) {
1136 // Check if metadata bits indicate a bad oop
1137 __ testptr(rax, Address(r15_thread, JavaThread::zaddress_bad_mask_offset()));
1138 __ jcc(Assembler::notZero, error);
1139 }
1140
1141 // Check if the oop is in the right area of memory
1142 __ movptr(c_rarg2, rax);
1143 __ movptr(c_rarg3, (intptr_t) Universe::verify_oop_mask());
1144 __ andptr(c_rarg2, c_rarg3);
1145 __ movptr(c_rarg3, (intptr_t) Universe::verify_oop_bits());
1146 __ cmpptr(c_rarg2, c_rarg3);
// notZero after cmpptr means "not equal": the masked oop must match the
// expected bit pattern exactly.
1147 __ jcc(Assembler::notZero, error);
1148
1149 // set r12 to heapbase for load_klass()
1150 __ reinit_heapbase();
1151
1152 // make sure klass is 'reasonable', which is not zero.
1153 __ load_klass(rax, rax); // get klass
1154 __ testptr(rax, rax);
1155 __ jcc(Assembler::zero, error); // if klass is NULL it is broken
// Tail of a throw-stub generation call started above this view.
5291 throw_NullPointerException_at_call));
5292
5293 // entry points that are platform specific
5294 StubRoutines::x86::_f2i_fixup = generate_f2i_fixup();
5295 StubRoutines::x86::_f2l_fixup = generate_f2l_fixup();
5296 StubRoutines::x86::_d2i_fixup = generate_d2i_fixup();
5297 StubRoutines::x86::_d2l_fixup = generate_d2l_fixup();
5298
// Bit masks used by compiled code to flip/clear the sign of packed floats
// and doubles (two 32-bit lanes for float, one 64-bit lane for double).
5299 StubRoutines::x86::_float_sign_mask = generate_fp_mask("float_sign_mask", 0x7FFFFFFF7FFFFFFF);
5300 StubRoutines::x86::_float_sign_flip = generate_fp_mask("float_sign_flip", 0x8000000080000000);
5301 StubRoutines::x86::_double_sign_mask = generate_fp_mask("double_sign_mask", 0x7FFFFFFFFFFFFFFF);
5302 StubRoutines::x86::_double_sign_flip = generate_fp_mask("double_sign_flip", 0x8000000000000000);
5303
5304 // support for verify_oop (must happen after universe_init)
5305 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
5306
5307 // arraycopy stubs used by compilers
5308 generate_arraycopy_stubs();
5309
5310 // Load barrier stubs
5311 if (UseZGC) {
5312 address loadbarrier_address = CAST_FROM_FN_PTR(address, SharedRuntime::z_load_barrier_on_oop_field_preloaded);
5313 address loadbarrier_weak_address = CAST_FROM_FN_PTR(address, SharedRuntime::z_load_barrier_on_weak_oop_field_preloaded);
// One strong and one weak slow-path stub per general-purpose register,
// indexed by register encoding, so compiled code can jump to the stub
// matching the register that holds the loaded oop.
5314 Register rr = as_Register(0);
5315 for (int i = 0; i < RegisterImpl::number_of_registers; i++) {
5316 StubRoutines::x86::_load_barrier_slow_stub[i] = generate_load_barrier_stub(rr, loadbarrier_address, false);
5317 StubRoutines::x86::_load_barrier_weak_slow_stub[i] = generate_load_barrier_stub(rr, loadbarrier_weak_address, true);
5318 rr = rr->successor();
5319 }
5320 }
5321
5322 // don't bother generating these AES intrinsic stubs unless global flag is set
5323 if (UseAESIntrinsics) {
5324 StubRoutines::x86::_key_shuffle_mask_addr = generate_key_shuffle_mask(); // needed by the others
5325 StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();
5326 StubRoutines::_aescrypt_decryptBlock = generate_aescrypt_decryptBlock();
5327 StubRoutines::_cipherBlockChaining_encryptAESCrypt = generate_cipherBlockChaining_encryptAESCrypt();
5328 StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt_Parallel();
5329 }
// AES-CTR intrinsic stubs; this chunk is cut off inside this if-block.
5330 if (UseAESCTRIntrinsics){
5331 StubRoutines::x86::_counter_shuffle_mask_addr = generate_counter_shuffle_mask();