src/cpu/x86/vm/macroAssembler_x86.cpp

}

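// AddressLiteral variants: use the literal address directly when it is
// reachable from the code being emitted; otherwise materialize it in
// rscratch1 first and load through that register.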
void MacroAssembler::movdl(XMMRegister dst, AddressLiteral src) {
  if (reachable(src)) {
    movdl(dst, as_Address(src));
  } else {
    lea(rscratch1, src);
    movdl(dst, Address(rscratch1, 0));
  }
}

void MacroAssembler::movq(XMMRegister dst, AddressLiteral src) {
  if (reachable(src)) {
    movq(dst, as_Address(src));
  } else {
    lea(rscratch1, src);
    movq(dst, Address(rscratch1, 0));
  }
}

// AVX512 opmask values used for fixup (tail) loops: entry 0 is the
// all-ones mask, entry n (1 <= n <= 15) covers the low n vector lanes.
jushort evex_simd_mask_table[] =
{
  0xffff, 0x0001, 0x0003, 0x0007, 0x000f,
  0x001f, 0x003f, 0x007f, 0x00ff, 0x01ff,
  0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff,
  0x7fff
};

// Load k1 with the opmask selected by src: src == 0 selects the all-ones
// mask, src == n (1..15) selects a mask with the low n bits set.  dst is
// used as a scratch register for the table address and finally receives a
// copy of src.
void MacroAssembler::createmsk(Register dst, Register src) {
  ExternalAddress mask_table((address)evex_simd_mask_table);
  lea(dst, mask_table);
  Assembler::kmovwl(k1, Address(dst, src, Address::times_2, 0));
  Assembler::movl(dst, src);
}

// Reload k1 with the all-ones mask (table entry 0) so that subsequent
// full-width EVEX code is not restricted by a stale tail mask.
void MacroAssembler::restoremsk() {
  ExternalAddress mask_table((address)evex_simd_mask_table);
  if (reachable(mask_table)) {
    Assembler::kmovwl(k1, as_Address(mask_table));
  } else {
    lea(rscratch1, mask_table);
    Assembler::kmovwl(k1, Address(rscratch1, 0));
  }
}

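// Expected usage (a hedged sketch; the callers are not part of this
// excerpt): 512-bit vectorized code switches k1 to a partial mask for the
// final sub-16-element "fixup" iteration and restores it afterwards,
// roughly:
//
//   createmsk(rtmp, rcount);   // k1 = mask for 'rcount' remaining elements
//   ... EVEX loads/stores predicated on k1 process the tail ...
//   restoremsk();              // k1 back to the all-ones mask
//
// rtmp and rcount are placeholder register names, not identifiers from the
// patch.
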
void MacroAssembler::movdbl(XMMRegister dst, AddressLiteral src) {
  if (reachable(src)) {
    if (UseXmmLoadAndClearUpper) {
      movsd (dst, as_Address(src));
    } else {
      movlpd(dst, as_Address(src));
    }
  } else {
    lea(rscratch1, src);
    if (UseXmmLoadAndClearUpper) {
      movsd (dst, Address(rscratch1, 0));
    } else {
      movlpd(dst, Address(rscratch1, 0));
    }
  }
}

void MacroAssembler::movflt(XMMRegister dst, AddressLiteral src) {
  if (reachable(src)) {
    movss(dst, as_Address(src));
  } else {
    lea(rscratch1, src);
    movss(dst, Address(rscratch1, 0));
  }
}

