src/cpu/x86/vm/assembler_x86.cpp

2915   emit_byte(0x57);
2916   emit_operand(dst, src);
2917 }
2918 
2919 
2920 void Assembler::xorps(XMMRegister dst, XMMRegister src) {
2921   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2922   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE);
2923   emit_byte(0x57);
2924   emit_byte(0xC0 | encode);
2925 }
2926 
2927 void Assembler::xorps(XMMRegister dst, Address src) {
2928   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2929   InstructionMark im(this);
2930   simd_prefix(dst, dst, src, VEX_SIMD_NONE);
2931   emit_byte(0x57);
2932   emit_operand(dst, src);
2933 }
2934 
2935 #ifndef _LP64
2936 // 32bit only pieces of the assembler
2937 
2938 void Assembler::cmp_literal32(Register src1, int32_t imm32, RelocationHolder const& rspec) {
2939   // NO PREFIX AS NEVER 64BIT
2940   InstructionMark im(this);
2941   emit_byte(0x81);
2942   emit_byte(0xF8 | src1->encoding());
2943   emit_data(imm32, rspec, 0);
2944 }
2945 
2946 void Assembler::cmp_literal32(Address src1, int32_t imm32, RelocationHolder const& rspec) {
2947   // NO PREFIX AS NEVER 64BIT (not even 32bit versions of 64bit regs)
2948   InstructionMark im(this);
2949   emit_byte(0x81);
2950   emit_operand(rdi, src1);
2951   emit_data(imm32, rspec, 0);
2952 }
2953 
2954 // The 64-bit (32bit platform) cmpxchg compares the value at adr with the contents of rdx:rax,


7218 }
7219 
7220 void MacroAssembler::subsd(XMMRegister dst, AddressLiteral src) {
7221   if (reachable(src)) {
7222     Assembler::subsd(dst, as_Address(src));
7223   } else {
7224     lea(rscratch1, src);
7225     Assembler::subsd(dst, Address(rscratch1, 0));
7226   }
7227 }
7228 
7229 void MacroAssembler::subss(XMMRegister dst, AddressLiteral src) {
7230   if (reachable(src)) {
7231     Assembler::subss(dst, as_Address(src));
7232   } else {
7233     lea(rscratch1, src);
7234     Assembler::subss(dst, Address(rscratch1, 0));
7235   }
7236 }
7237 
7238 //////////////////////////////////////////////////////////////////////////////////
7239 #ifndef SERIALGC
7240 
7241 void MacroAssembler::g1_write_barrier_pre(Register obj,
7242                                           Register pre_val,
7243                                           Register thread,
7244                                           Register tmp,
7245                                           bool tosca_live,
7246                                           bool expand_call) {
7247 
7248   // If expand_call is true then we expand the call_VM_leaf macro
7249   // directly to skip generating the check by
7250   // InterpreterMacroAssembler::call_VM_leaf_base that checks _last_sp.
7251 
7252 #ifdef _LP64
7253   assert(thread == r15_thread, "must be");
7254 #endif // _LP64
7255 
7256   Label done;
7257   Label runtime;


8102     // Also, the condition codes are properly set Z/NZ on success/failure.
8103   }
8104 
8105   if (L_failure == &L_fallthrough)
8106         jccb(Assembler::notEqual, *L_failure);
8107   else  jcc(Assembler::notEqual, *L_failure);
8108 
8109   // Success.  Cache the super we found and proceed in triumph.
8110   movptr(super_cache_addr, super_klass);
8111 
8112   if (L_success != &L_fallthrough) {
8113     jmp(*L_success);
8114   }
8115 
8116 #undef IS_A_TEMP
8117 
8118   bind(L_fallthrough);
8119 }
8120 
8121 
8122 void MacroAssembler::ucomisd(XMMRegister dst, AddressLiteral src) {
8123   if (reachable(src)) {
8124     Assembler::ucomisd(dst, as_Address(src));
8125   } else {
8126     lea(rscratch1, src);
8127     Assembler::ucomisd(dst, Address(rscratch1, 0));
8128   }
8129 }
8130 
8131 void MacroAssembler::ucomiss(XMMRegister dst, AddressLiteral src) {
8132   if (reachable(src)) {
8133     Assembler::ucomiss(dst, as_Address(src));
8134   } else {
8135     lea(rscratch1, src);
8136     Assembler::ucomiss(dst, Address(rscratch1, 0));
8137   }
8138 }
8139 
8140 void MacroAssembler::xorpd(XMMRegister dst, AddressLiteral src) {
8141   // Used in sign-bit flipping with aligned address.
8142   assert((UseAVX > 0) || (((intptr_t)src.target() & 15) == 0), "SSE mode requires address alignment 16 bytes");
8143   if (reachable(src)) {
8144     Assembler::xorpd(dst, as_Address(src));
8145   } else {
8146     lea(rscratch1, src);
8147     Assembler::xorpd(dst, Address(rscratch1, 0));
8148   }
8149 }
8150 
8151 void MacroAssembler::xorps(XMMRegister dst, AddressLiteral src) {
8152   // Used in sign-bit flipping with aligned address.
8153   assert((UseAVX > 0) || (((intptr_t)src.target() & 15) == 0), "SSE mode requires address alignment 16 bytes");
8154   if (reachable(src)) {
8155     Assembler::xorps(dst, as_Address(src));
8156   } else {
8157     lea(rscratch1, src);
8158     Assembler::xorps(dst, Address(rscratch1, 0));
8159   }
8160 }
8161 
8162 void MacroAssembler::cmov32(Condition cc, Register dst, Address src) {
8163   if (VM_Version::supports_cmov()) {
8164     cmovl(cc, dst, src);
8165   } else {
8166     Label L;
8167     jccb(negate_condition(cc), L);
8168     movl(dst, src);
8169     bind(L);
8170   }
8171 }
8172 
8173 void MacroAssembler::cmov32(Condition cc, Register dst, Register src) {
8174   if (VM_Version::supports_cmov()) {
8175     cmovl(cc, dst, src);
8176   } else {
8177     Label L;
8178     jccb(negate_condition(cc), L);
8179     movl(dst, src);
8180     bind(L);
8181   }




2915   emit_byte(0x57);
2916   emit_operand(dst, src);
2917 }
2918 
2919 
2920 void Assembler::xorps(XMMRegister dst, XMMRegister src) {
2921   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2922   int encode = simd_prefix_and_encode(dst, dst, src, VEX_SIMD_NONE);
2923   emit_byte(0x57);
2924   emit_byte(0xC0 | encode);
2925 }
2926 
2927 void Assembler::xorps(XMMRegister dst, Address src) {
2928   NOT_LP64(assert(VM_Version::supports_sse(), ""));
2929   InstructionMark im(this);
2930   simd_prefix(dst, dst, src, VEX_SIMD_NONE);
2931   emit_byte(0x57);
2932   emit_operand(dst, src);
2933 }
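// Note: simd_prefix()/simd_prefix_and_encode() emit the required legacy SSE or VEX
// prefix bytes (including the 0x0F escape) depending on UseAVX; 0x57 is the xorps
// opcode. In the register form, 0xC0 | encode is the ModRM byte (mod=11, reg=dst,
// rm=src); in the Address form, emit_operand() emits the ModRM/SIB/displacement.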
2934 
2935 // AVX 3-operands non-destructive source instructions (encoded with VEX prefix)
2936 
2937 void Assembler::vaddsd(XMMRegister dst, XMMRegister nds, Address src) {
2938   assert(VM_Version::supports_avx(), "");
2939   InstructionMark im(this);
2940   vex_prefix(dst, nds, src, VEX_SIMD_F2);
2941   emit_byte(0x58);
2942   emit_operand(dst, src);
2943 }
2944 
2945 void Assembler::vaddsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
2946   assert(VM_Version::supports_avx(), "");
2947   int encode = vex_prefix_and_encode(dst, nds, src, VEX_SIMD_F2);
2948   emit_byte(0x58);
2949   emit_byte(0xC0 | encode);
2950 }
2951 
2952 void Assembler::vaddss(XMMRegister dst, XMMRegister nds, Address src) {
2953   assert(VM_Version::supports_avx(), "");
2954   InstructionMark im(this);
2955   vex_prefix(dst, nds, src, VEX_SIMD_F3);
2956   emit_byte(0x58);
2957   emit_operand(dst, src);
2958 }
2959 
2960 void Assembler::vaddss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
2961   assert(VM_Version::supports_avx(), "");
2962   int encode = vex_prefix_and_encode(dst, nds, src, VEX_SIMD_F3);
2963   emit_byte(0x58);
2964   emit_byte(0xC0 | encode);
2965 }
2966 
2967 void Assembler::vandpd(XMMRegister dst, XMMRegister nds, Address src) {
2968   assert(VM_Version::supports_avx(), "");
2969   InstructionMark im(this);
2970   vex_prefix(dst, nds, src, VEX_SIMD_66);
2971   emit_byte(0x54);
2972   emit_operand(dst, src);
2973 }
2974 
2975 void Assembler::vandps(XMMRegister dst, XMMRegister nds, Address src) {
2976   assert(VM_Version::supports_avx(), "");
2977   InstructionMark im(this);
2978   vex_prefix(dst, nds, src, VEX_SIMD_NONE);
2979   emit_byte(0x54);
2980   emit_operand(dst, src);
2981 }
2982 
2983 void Assembler::vdivsd(XMMRegister dst, XMMRegister nds, Address src) {
2984   assert(VM_Version::supports_avx(), "");
2985   InstructionMark im(this);
2986   vex_prefix(dst, nds, src, VEX_SIMD_F2);
2987   emit_byte(0x5E);
2988   emit_operand(dst, src);
2989 }
2990 
2991 void Assembler::vdivsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
2992   assert(VM_Version::supports_avx(), "");
2993   int encode = vex_prefix_and_encode(dst, nds, src, VEX_SIMD_F2);
2994   emit_byte(0x5E);
2995   emit_byte(0xC0 | encode);
2996 }
2997 
2998 void Assembler::vdivss(XMMRegister dst, XMMRegister nds, Address src) {
2999   assert(VM_Version::supports_avx(), "");
3000   InstructionMark im(this);
3001   vex_prefix(dst, nds, src, VEX_SIMD_F3);
3002   emit_byte(0x5E);
3003   emit_operand(dst, src);
3004 }
3005 
3006 void Assembler::vdivss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
3007   assert(VM_Version::supports_avx(), "");
3008   int encode = vex_prefix_and_encode(dst, nds, src, VEX_SIMD_F3);
3009   emit_byte(0x5E);
3010   emit_byte(0xC0 | encode);
3011 }
3012 
3013 void Assembler::vmulsd(XMMRegister dst, XMMRegister nds, Address src) {
3014   assert(VM_Version::supports_avx(), "");
3015   InstructionMark im(this);
3016   vex_prefix(dst, nds, src, VEX_SIMD_F2);
3017   emit_byte(0x59);
3018   emit_operand(dst, src);
3019 }
3020 
3021 void Assembler::vmulsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
3022   assert(VM_Version::supports_avx(), "");
3023   int encode = vex_prefix_and_encode(dst, nds, src, VEX_SIMD_F2);
3024   emit_byte(0x59);
3025   emit_byte(0xC0 | encode);
3026 }
3027 
3028 void Assembler::vmulss(XMMRegister dst, XMMRegister nds, Address src) {
       assert(VM_Version::supports_avx(), "");
3029   InstructionMark im(this);
3030   vex_prefix(dst, nds, src, VEX_SIMD_F3);
3031   emit_byte(0x59);
3032   emit_operand(dst, src);
3033 }
3034 
3035 void Assembler::vmulss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
3036   assert(VM_Version::supports_avx(), "");
3037   int encode = vex_prefix_and_encode(dst, nds, src, VEX_SIMD_F3);
3038   emit_byte(0x59);
3039   emit_byte(0xC0 | encode);
3040 }
3041 
3042 
3043 void Assembler::vsubsd(XMMRegister dst, XMMRegister nds, Address src) {
3044   assert(VM_Version::supports_avx(), "");
3045   InstructionMark im(this);
3046   vex_prefix(dst, nds, src, VEX_SIMD_F2);
3047   emit_byte(0x5C);
3048   emit_operand(dst, src);
3049 }
3050 
3051 void Assembler::vsubsd(XMMRegister dst, XMMRegister nds, XMMRegister src) {
3052   assert(VM_Version::supports_avx(), "");
3053   int encode = vex_prefix_and_encode(dst, nds, src, VEX_SIMD_F2);
3054   emit_byte(0x5C);
3055   emit_byte(0xC0 | encode);
3056 }
3057 
3058 void Assembler::vsubss(XMMRegister dst, XMMRegister nds, Address src) {
3059   assert(VM_Version::supports_avx(), "");
3060   InstructionMark im(this);
3061   vex_prefix(dst, nds, src, VEX_SIMD_F3);
3062   emit_byte(0x5C);
3063   emit_operand(dst, src);
3064 }
3065 
3066 void Assembler::vsubss(XMMRegister dst, XMMRegister nds, XMMRegister src) {
3067   assert(VM_Version::supports_avx(), "");
3068   int encode = vex_prefix_and_encode(dst, nds, src, VEX_SIMD_F3);
3069   emit_byte(0x5C);
3070   emit_byte(0xC0 | encode);
3071 }
3072 
3073 void Assembler::vxorpd(XMMRegister dst, XMMRegister nds, Address src) {
3074   assert(VM_Version::supports_avx(), "");
3075   InstructionMark im(this);
3076   vex_prefix(dst, nds, src, VEX_SIMD_66);
3077   emit_byte(0x57);
3078   emit_operand(dst, src);
3079 }
3080 
3081 void Assembler::vxorps(XMMRegister dst, XMMRegister nds, Address src) {
3082   assert(VM_Version::supports_avx(), "");
3083   InstructionMark im(this);
3084   vex_prefix(dst, nds, src, VEX_SIMD_NONE);
3085   emit_byte(0x57);
3086   emit_operand(dst, src);
3087 }
3088 
3089 
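// A minimal usage sketch (the '__' shorthand for an assembler pointer is assumed,
// as in the usual HotSpot stub/interpreter macros): the VEX-encoded 3-operand
// forms read nds, write dst, and leave nds intact, e.g.
//
//   __ vaddsd(xmm0, xmm1, xmm2);   // xmm0 = xmm1 + xmm2, xmm1 preserved
//   __ addsd(xmm1, xmm2);          // legacy SSE form: xmm1 = xmm1 + xmm2
//
// so generated code can avoid the extra register copy the destructive 2-operand
// SSE form needs when the first source must stay live.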
3090 #ifndef _LP64
3091 // 32bit only pieces of the assembler
3092 
3093 void Assembler::cmp_literal32(Register src1, int32_t imm32, RelocationHolder const& rspec) {
3094   // NO PREFIX AS NEVER 64BIT
3095   InstructionMark im(this);
3096   emit_byte(0x81);
3097   emit_byte(0xF8 | src1->encoding());
3098   emit_data(imm32, rspec, 0);
3099 }
3100 
3101 void Assembler::cmp_literal32(Address src1, int32_t imm32, RelocationHolder const& rspec) {
3102   // NO PREFIX AS NEVER 64BIT (not even 32bit versions of 64bit regs)
3103   InstructionMark im(this);
3104   emit_byte(0x81);
3105   emit_operand(rdi, src1);
3106   emit_data(imm32, rspec, 0);
3107 }
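// Encoding note: 0x81 is the group-1 "op r/m32, imm32" opcode and CMP is opcode
// extension /7, so the register form builds ModRM as 0xF8 | reg (mod=11, reg=7)
// and the Address form passes rdi (encoding 7) to emit_operand() solely to supply
// that /7 extension, not as a real register operand.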
3108 
3109 // The 64-bit (32bit platform) cmpxchg compares the value at adr with the contents of rdx:rax,


7373 }
7374 
7375 void MacroAssembler::subsd(XMMRegister dst, AddressLiteral src) {
7376   if (reachable(src)) {
7377     Assembler::subsd(dst, as_Address(src));
7378   } else {
7379     lea(rscratch1, src);
7380     Assembler::subsd(dst, Address(rscratch1, 0));
7381   }
7382 }
7383 
7384 void MacroAssembler::subss(XMMRegister dst, AddressLiteral src) {
7385   if (reachable(src)) {
7386     Assembler::subss(dst, as_Address(src));
7387   } else {
7388     lea(rscratch1, src);
7389     Assembler::subss(dst, Address(rscratch1, 0));
7390   }
7391 }
7392 
7393 void MacroAssembler::ucomisd(XMMRegister dst, AddressLiteral src) {
7394   if (reachable(src)) {
7395     Assembler::ucomisd(dst, as_Address(src));
7396   } else {
7397     lea(rscratch1, src);
7398     Assembler::ucomisd(dst, Address(rscratch1, 0));
7399   }
7400 }
7401 
7402 void MacroAssembler::ucomiss(XMMRegister dst, AddressLiteral src) {
7403   if (reachable(src)) {
7404     Assembler::ucomiss(dst, as_Address(src));
7405   } else {
7406     lea(rscratch1, src);
7407     Assembler::ucomiss(dst, Address(rscratch1, 0));
7408   }
7409 }
7410 
7411 void MacroAssembler::xorpd(XMMRegister dst, AddressLiteral src) {
7412   // Used in sign-bit flipping with aligned address.
7413   assert((UseAVX > 0) || (((intptr_t)src.target() & 15) == 0), "SSE mode requires address alignment 16 bytes");
7414   if (reachable(src)) {
7415     Assembler::xorpd(dst, as_Address(src));
7416   } else {
7417     lea(rscratch1, src);
7418     Assembler::xorpd(dst, Address(rscratch1, 0));
7419   }
7420 }
7421 
7422 void MacroAssembler::xorps(XMMRegister dst, AddressLiteral src) {
7423   // Used in sign-bit flipping with aligned address.
7424   assert((UseAVX > 0) || (((intptr_t)src.target() & 15) == 0), "SSE mode requires address alignment 16 bytes");
7425   if (reachable(src)) {
7426     Assembler::xorps(dst, as_Address(src));
7427   } else {
7428     lea(rscratch1, src);
7429     Assembler::xorps(dst, Address(rscratch1, 0));
7430   }
7431 }
7432 
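// A hedged usage sketch (the mask name below is hypothetical, not defined in this
// file): with a 16-byte aligned constant such as
//
//   static const juint dbl_sign_flip[4] = { 0, 0x80000000, 0, 0x80000000 };
//
// a caller can flip the sign of the double in xmm0 via
//
//   __ xorpd(xmm0, ExternalAddress((address)dbl_sign_flip));
//
// The alignment assert above matters because the non-AVX xorpd/xorps encodings
// fault on an unaligned 128-bit memory operand.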
7433 // AVX 3-operands instructions
7434 
7435 void MacroAssembler::vaddsd(XMMRegister dst, XMMRegister nds, AddressLiteral src) {
7436   if (reachable(src)) {
7437     vaddsd(dst, nds, as_Address(src));
7438   } else {
7439     lea(rscratch1, src);
7440     vaddsd(dst, nds, Address(rscratch1, 0));
7441   }
7442 }
7443 
7444 void MacroAssembler::vaddss(XMMRegister dst, XMMRegister nds, AddressLiteral src) {
7445   if (reachable(src)) {
7446     vaddss(dst, nds, as_Address(src));
7447   } else {
7448     lea(rscratch1, src);
7449     vaddss(dst, nds, Address(rscratch1, 0));
7450   }
7451 }
7452 
7453 void MacroAssembler::vandpd(XMMRegister dst, XMMRegister nds, AddressLiteral src) {
7454   if (reachable(src)) {
7455     vandpd(dst, nds, as_Address(src));
7456   } else {
7457     lea(rscratch1, src);
7458     vandpd(dst, nds, Address(rscratch1, 0));
7459   }
7460 }
7461 
7462 void MacroAssembler::vandps(XMMRegister dst, XMMRegister nds, AddressLiteral src) {
7463   if (reachable(src)) {
7464     vandps(dst, nds, as_Address(src));
7465   } else {
7466     lea(rscratch1, src);
7467     vandps(dst, nds, Address(rscratch1, 0));
7468   }
7469 }
7470 
7471 void MacroAssembler::vdivsd(XMMRegister dst, XMMRegister nds, AddressLiteral src) {
7472   if (reachable(src)) {
7473     vdivsd(dst, nds, as_Address(src));
7474   } else {
7475     lea(rscratch1, src);
7476     vdivsd(dst, nds, Address(rscratch1, 0));
7477   }
7478 }
7479 
7480 void MacroAssembler::vdivss(XMMRegister dst, XMMRegister nds, AddressLiteral src) {
7481   if (reachable(src)) {
7482     vdivss(dst, nds, as_Address(src));
7483   } else {
7484     lea(rscratch1, src);
7485     vdivss(dst, nds, Address(rscratch1, 0));
7486   }
7487 }
7488 
7489 void MacroAssembler::vmulsd(XMMRegister dst, XMMRegister nds, AddressLiteral src) {
7490   if (reachable(src)) {
7491     vmulsd(dst, nds, as_Address(src));
7492   } else {
7493     lea(rscratch1, src);
7494     vmulsd(dst, nds, Address(rscratch1, 0));
7495   }
7496 }
7497 
7498 void MacroAssembler::vmulss(XMMRegister dst, XMMRegister nds, AddressLiteral src) {
7499   if (reachable(src)) {
7500     vmulss(dst, nds, as_Address(src));
7501   } else {
7502     lea(rscratch1, src);
7503     vmulss(dst, nds, Address(rscratch1, 0));
7504   }
7505 }
7506 
7507 void MacroAssembler::vsubsd(XMMRegister dst, XMMRegister nds, AddressLiteral src) {
7508   if (reachable(src)) {
7509     vsubsd(dst, nds, as_Address(src));
7510   } else {
7511     lea(rscratch1, src);
7512     vsubsd(dst, nds, Address(rscratch1, 0));
7513   }
7514 }
7515 
7516 void MacroAssembler::vsubss(XMMRegister dst, XMMRegister nds, AddressLiteral src) {
7517   if (reachable(src)) {
7518     vsubss(dst, nds, as_Address(src));
7519   } else {
7520     lea(rscratch1, src);
7521     vsubss(dst, nds, Address(rscratch1, 0));
7522   }
7523 }
7524 
7525 void MacroAssembler::vxorpd(XMMRegister dst, XMMRegister nds, AddressLiteral src) {
7526   if (reachable(src)) {
7527     vxorpd(dst, nds, as_Address(src));
7528   } else {
7529     lea(rscratch1, src);
7530     vxorpd(dst, nds, Address(rscratch1, 0));
7531   }
7532 }
7533 
7534 void MacroAssembler::vxorps(XMMRegister dst, XMMRegister nds, AddressLiteral src) {
7535   if (reachable(src)) {
7536     vxorps(dst, nds, as_Address(src));
7537   } else {
7538     lea(rscratch1, src);
7539     vxorps(dst, nds, Address(rscratch1, 0));
7540   }
7541 }
7542 
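// All of the AddressLiteral wrappers above share one pattern: when the literal is
// reachable from the code being emitted (always on 32-bit, within the +/-2GB
// rip-relative range on 64-bit), it is used directly; otherwise its 64-bit address
// is first materialized into rscratch1 with lea(). A hedged usage sketch (the
// constant name is hypothetical):
//
//   static const jdouble kOne = 1.0;
//   __ vaddsd(xmm0, xmm1, ExternalAddress((address)&kOne));  // xmm0 = xmm1 + 1.0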
7543 
7544 //////////////////////////////////////////////////////////////////////////////////
7545 #ifndef SERIALGC
7546 
7547 void MacroAssembler::g1_write_barrier_pre(Register obj,
7548                                           Register pre_val,
7549                                           Register thread,
7550                                           Register tmp,
7551                                           bool tosca_live,
7552                                           bool expand_call) {
7553 
7554   // If expand_call is true then we expand the call_VM_leaf macro
7555   // directly to skip generating the check by
7556   // InterpreterMacroAssembler::call_VM_leaf_base that checks _last_sp.
7557 
7558 #ifdef _LP64
7559   assert(thread == r15_thread, "must be");
7560 #endif // _LP64
7561 
7562   Label done;
7563   Label runtime;


8408     // Also, the condition codes are properly set Z/NZ on success/failure.
8409   }
8410 
8411   if (L_failure == &L_fallthrough)
8412         jccb(Assembler::notEqual, *L_failure);
8413   else  jcc(Assembler::notEqual, *L_failure);
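// Note: jccb emits the short, one-byte-displacement form of the jump; it is only
// used when the target is the nearby L_fallthrough label, while a caller-supplied
// L_failure gets the full jcc form since it may lie outside short-jump range.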
8414 
8415   // Success.  Cache the super we found and proceed in triumph.
8416   movptr(super_cache_addr, super_klass);
8417 
8418   if (L_success != &L_fallthrough) {
8419     jmp(*L_success);
8420   }
8421 
8422 #undef IS_A_TEMP
8423 
8424   bind(L_fallthrough);
8425 }
8426 
8427 
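// Note on cmov32: when the CPU lacks CMOV support, the same effect is obtained by
// branching around the move; negate_condition(cc) makes the jccb skip the movl
// exactly when cc does not hold, so dst is written only when cc is true.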
8428 void MacroAssembler::cmov32(Condition cc, Register dst, Address src) {
8429   if (VM_Version::supports_cmov()) {
8430     cmovl(cc, dst, src);
8431   } else {
8432     Label L;
8433     jccb(negate_condition(cc), L);
8434     movl(dst, src);
8435     bind(L);
8436   }
8437 }
8438 
8439 void MacroAssembler::cmov32(Condition cc, Register dst, Register src) {
8440   if (VM_Version::supports_cmov()) {
8441     cmovl(cc, dst, src);
8442   } else {
8443     Label L;
8444     jccb(negate_condition(cc), L);
8445     movl(dst, src);
8446     bind(L);
8447   }