src/cpu/x86/vm/assembler_x86.hpp

  void cmppd(XMMRegister dst, XMMRegister nds, XMMRegister src, int cop, int vector_len);
  void vpblendd(XMMRegister dst, XMMRegister nds, XMMRegister src1, XMMRegister src2, int vector_len);


 protected:
  // The following instructions require 16-byte address alignment in SSE mode.
  // They should only be called from the corresponding MacroAssembler instructions.
  void andpd(XMMRegister dst, Address src);
  void andps(XMMRegister dst, Address src);
  void xorpd(XMMRegister dst, Address src);
  void xorps(XMMRegister dst, Address src);

};

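The four protected forms above take a raw Address operand and therefore inherit the 16-byte alignment requirement in SSE mode, which is why they are meant to be reached only through the MacroAssembler layer. As a hedged sketch of that wrapper pattern (the exact body is not part of this page, so the assertion text and the scratch-register choice are illustrative assumptions):

// Sketch of the MacroAssembler wrapper pattern (illustrative, not from this changeset).
void MacroAssembler::andpd(XMMRegister dst, AddressLiteral src) {
  // In SSE mode the memory operand must be 16-byte aligned; AVX lifts that restriction.
  assert((UseAVX > 0) || (((intptr_t)src.target() & 15) == 0),
         "SSE mode requires address alignment 16 bytes");
  if (reachable(src)) {
    // Operand is addressable directly: call the raw Assembler form.
    Assembler::andpd(dst, as_Address(src));
  } else {
    // Otherwise materialize the address in a scratch register first.
    lea(rscratch1, src);
    Assembler::andpd(dst, Address(rscratch1, 0));
  }
}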
// Intel x86/AMD64 assembler attributes: all of the fields enclosed here guide encoding-level decisions.
// The specific set functions are for specialized use; otherwise the defaults, or whatever was supplied
// at object construction, are applied.
class InstructionAttr {
public:
  InstructionAttr(
    int vector_len,     // The vector length to apply in encoding, for both AVX and EVEX
    bool rex_vex_w,     // Data width: false for 32 bits or less, true for 64 bits or when specially defined
    bool legacy_mode,   // If true, the instruction is conditionally encoded as AVX or earlier; if false, EVEX may be used
    bool no_reg_mask,   // If true, k0 is used when EVEX encoding is chosen; otherwise k1 is used
    bool uses_vl)       // The instruction may have legacy constraints based on vector length for EVEX
    :
      _avx_vector_len(vector_len),
      _rex_vex_w(rex_vex_w),
      _legacy_mode(legacy_mode),
      _no_reg_mask(no_reg_mask),
      _uses_vl(uses_vl),
      _tuple_type(Assembler::EVEX_ETUP),
      _input_size_in_bits(Assembler::EVEX_NObit),
      _is_evex_instruction(false),
      _evex_encoding(0),
      _is_clear_context(false),
      _is_extended_context(false),
      _current_assembler(NULL) {
    if (UseAVX < 3) _legacy_mode = true;
  }

  ~InstructionAttr() {
    if (_current_assembler != NULL) {
      _current_assembler->clear_attributes();
    }
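
For orientation, an emitter routine typically constructs an InstructionAttr on the stack, uses it while encoding, and lets the destructor above reset the assembler's attribute state. The sketch below illustrates that lifecycle; the instruction name and the flag values chosen are assumptions for illustration, not code from this page.

// Illustrative sketch only: a hypothetical emitter building an InstructionAttr.
void Assembler::example_packed_op(XMMRegister dst, XMMRegister src) {
  InstructionAttr attributes(AVX_128bit,              // vector_len: encode as a 128-bit operation
                             /* rex_vex_w   */ false, // operand data width is 32 bits or less
                             /* legacy_mode */ false, // allow EVEX encoding when UseAVX >= 3
                             /* no_reg_mask */ false, // k1 would be used if EVEX encoding is chosen
                             /* uses_vl     */ true); // vector-length constraints apply under EVEX
  // ... emit prefix bytes and the opcode using 'attributes' here ...
  // When 'attributes' goes out of scope, its destructor calls clear_attributes()
  // on the assembler it registered with (if any), as shown above.
}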

