3249 }
3250
3251 void MacroAssembler::movdl(XMMRegister dst, AddressLiteral src) {
3252 if (reachable(src)) {
3253 movdl(dst, as_Address(src));
3254 } else {
3255 lea(rscratch1, src);
3256 movdl(dst, Address(rscratch1, 0));
3257 }
3258 }
3259
3260 void MacroAssembler::movq(XMMRegister dst, AddressLiteral src) {
3261 if (reachable(src)) {
3262 movq(dst, as_Address(src));
3263 } else {
3264 lea(rscratch1, src);
3265 movq(dst, Address(rscratch1, 0));
3266 }
3267 }
3268
3269 void MacroAssembler::setvectmask(Register dst, Register src) {
3270 guarantee(PostLoopMultiversioning == true, "must be");
3271 Assembler::movl(dst, 1);
3272 Assembler::shlxl(dst, dst, src);
3273 Assembler::decl(dst);
3274 Assembler::kmovdl(k1, dst);
3275 Assembler::movl(dst, src);
3276 }
3277
3278 void MacroAssembler::restorevectmask() {
3279 guarantee(PostLoopMultiversioning == true, "must be");
3280 Assembler::knotwl(k1, k0);
3281 }
3282
3283 void MacroAssembler::movdbl(XMMRegister dst, AddressLiteral src) {
3284 if (reachable(src)) {
3285 if (UseXmmLoadAndClearUpper) {
3286 movsd (dst, as_Address(src));
3287 } else {
3288 movlpd(dst, as_Address(src));
3289 }
3290 } else {
3291 lea(rscratch1, src);
3292 if (UseXmmLoadAndClearUpper) {
3293 movsd (dst, Address(rscratch1, 0));
3294 } else {
3295 movlpd(dst, Address(rscratch1, 0));
3296 }
3297 }
3298 }
3299
3300 void MacroAssembler::movflt(XMMRegister dst, AddressLiteral src) {
3301 if (reachable(src)) {
5011 jcc(Assembler::notZero, L);
5012 int3(); // break if error condition
5013 bind(L);
5014 }
5015 pop_CPU_state();
5016 }
5017
5018 void MacroAssembler::restore_cpu_control_state_after_jni() {
5019 // Either restore the MXCSR register after returning from the JNI Call
5020 // or verify that it wasn't changed (with -Xcheck:jni flag).
5021 if (VM_Version::supports_sse()) {
5022 if (RestoreMXCSROnJNICalls) {
5023 ldmxcsr(ExternalAddress(StubRoutines::addr_mxcsr_std()));
5024 } else if (CheckJNICalls) {
5025 call(RuntimeAddress(StubRoutines::x86::verify_mxcsr_entry()));
5026 }
5027 }
5028 // Clear upper bits of YMM registers to avoid SSE <-> AVX transition penalty.
5029 vzeroupper();
5030 // Reset k1 to 0xffff.
5031 if (PostLoopMultiversioning && VM_Version::supports_evex()) {
5032 push(rcx);
5033 movl(rcx, 0xffff);
5034 kmovwl(k1, rcx);
5035 pop(rcx);
5036 }
5037
5038 #ifndef _LP64
5039 // Either restore the x87 floating pointer control word after returning
5040 // from the JNI call or verify that it wasn't changed.
5041 if (CheckJNICalls) {
5042 call(RuntimeAddress(StubRoutines::x86::verify_fpu_cntrl_wrd_entry()));
5043 }
5044 #endif // _LP64
5045 }
5046
// ((OopHandle)result).resolve();
// Dereference the OopHandle in 'result', leaving the resolved oop in
// 'result'. 'tmp' is passed through to the GC access machinery.
void MacroAssembler::resolve_oop_handle(Register result, Register tmp) {
  assert_different_registers(result, tmp);

  // Only 64 bit platforms support GCs that require a tmp register
  // Only IN_HEAP loads require a thread_tmp register
  // OopHandle::resolve is an indirection like jobject.
  access_load_at(T_OBJECT, IN_NATIVE,
                 result, Address(result, 0), tmp, /*tmp_thread*/noreg);
}
|
3249 }
3250
3251 void MacroAssembler::movdl(XMMRegister dst, AddressLiteral src) {
3252 if (reachable(src)) {
3253 movdl(dst, as_Address(src));
3254 } else {
3255 lea(rscratch1, src);
3256 movdl(dst, Address(rscratch1, 0));
3257 }
3258 }
3259
3260 void MacroAssembler::movq(XMMRegister dst, AddressLiteral src) {
3261 if (reachable(src)) {
3262 movq(dst, as_Address(src));
3263 } else {
3264 lea(rscratch1, src);
3265 movq(dst, Address(rscratch1, 0));
3266 }
3267 }
3268
#ifdef COMPILER2
// Set opmask register k1 to ((1 << src) - 1), i.e. a mask with the low
// 'src' bits set, for C2's post-loop multiversioned vector code. On
// return dst holds the original value of src.
void MacroAssembler::setvectmask(Register dst, Register src) {
  guarantee(PostLoopMultiversioning, "must be");
  Assembler::movl(dst, 1);
  Assembler::shlxl(dst, dst, src);  // dst = 1 << src (shlx: no flag update)
  Assembler::decl(dst);             // dst = (1 << src) - 1
  Assembler::kmovdl(k1, dst);       // k1  = dst
  Assembler::movl(dst, src);        // leave src's value in dst
}

// Restore opmask register k1 after setvectmask(), via knot of k0.
// NOTE(review): presumably k0 holds zero here so k1 ends up all-ones,
// matching the 0xffff reset in restore_cpu_control_state_after_jni —
// confirm against the surrounding code's k0 usage.
void MacroAssembler::restorevectmask() {
  guarantee(PostLoopMultiversioning, "must be");
  Assembler::knotwl(k1, k0);
}
#endif // COMPILER2
3284
3285 void MacroAssembler::movdbl(XMMRegister dst, AddressLiteral src) {
3286 if (reachable(src)) {
3287 if (UseXmmLoadAndClearUpper) {
3288 movsd (dst, as_Address(src));
3289 } else {
3290 movlpd(dst, as_Address(src));
3291 }
3292 } else {
3293 lea(rscratch1, src);
3294 if (UseXmmLoadAndClearUpper) {
3295 movsd (dst, Address(rscratch1, 0));
3296 } else {
3297 movlpd(dst, Address(rscratch1, 0));
3298 }
3299 }
3300 }
3301
3302 void MacroAssembler::movflt(XMMRegister dst, AddressLiteral src) {
3303 if (reachable(src)) {
5013 jcc(Assembler::notZero, L);
5014 int3(); // break if error condition
5015 bind(L);
5016 }
5017 pop_CPU_state();
5018 }
5019
// Restore (or verify, under -Xcheck:jni) CPU control state that native
// code may have clobbered during a JNI call: MXCSR, the upper YMM
// halves, the k1 opmask (C2 builds only), and (32-bit only) the x87
// control word.
void MacroAssembler::restore_cpu_control_state_after_jni() {
  // Either restore the MXCSR register after returning from the JNI Call
  // or verify that it wasn't changed (with -Xcheck:jni flag).
  if (VM_Version::supports_sse()) {
    if (RestoreMXCSROnJNICalls) {
      ldmxcsr(ExternalAddress(StubRoutines::addr_mxcsr_std()));
    } else if (CheckJNICalls) {
      call(RuntimeAddress(StubRoutines::x86::verify_mxcsr_entry()));
    }
  }
  // Clear upper bits of YMM registers to avoid SSE <-> AVX transition penalty.
  vzeroupper();
  // Reset k1 to 0xffff.

#ifdef COMPILER2
  // PostLoopMultiversioning is C2-only; k1 is clobbered by setvectmask.
  if (PostLoopMultiversioning && VM_Version::supports_evex()) {
    push(rcx);
    movl(rcx, 0xffff);
    kmovwl(k1, rcx);
    pop(rcx);
  }
#endif // COMPILER2

#ifndef _LP64
  // Either restore the x87 floating pointer control word after returning
  // from the JNI call or verify that it wasn't changed.
  if (CheckJNICalls) {
    call(RuntimeAddress(StubRoutines::x86::verify_fpu_cntrl_wrd_entry()));
  }
#endif // _LP64
}
5051
// ((OopHandle)result).resolve();
// Dereference the OopHandle in 'result', leaving the resolved oop in
// 'result'. 'tmp' is passed through to the GC access machinery.
void MacroAssembler::resolve_oop_handle(Register result, Register tmp) {
  assert_different_registers(result, tmp);

  // Only 64 bit platforms support GCs that require a tmp register
  // Only IN_HEAP loads require a thread_tmp register
  // OopHandle::resolve is an indirection like jobject.
  access_load_at(T_OBJECT, IN_NATIVE,
                 result, Address(result, 0), tmp, /*tmp_thread*/noreg);
}
|