899 , begin_op2
900 , lir_cmp
901 , lir_cmp_l2i
902 , lir_ucmp_fd2i
903 , lir_cmp_fd2i
904 , lir_cmove
905 , lir_add
906 , lir_sub
907 , lir_mul
908 , lir_mul_strictfp
909 , lir_div
910 , lir_div_strictfp
911 , lir_rem
912 , lir_sqrt
913 , lir_abs
914 , lir_sin
915 , lir_cos
916 , lir_tan
917 , lir_log
918 , lir_log10
919 , lir_logic_and
920 , lir_logic_or
921 , lir_logic_xor
922 , lir_shl
923 , lir_shr
924 , lir_ushr
925 , lir_alloc_array
926 , lir_throw
927 , lir_compare_to
928 , end_op2
929 , begin_op3
930 , lir_idiv
931 , lir_irem
932 , end_op3
933 , begin_opJavaCall
934 , lir_static_call
935 , lir_optvirtual_call
936 , lir_icvirtual_call
937 , lir_virtual_call
938 , lir_dynamic_call
// Profiling accessors for this type-check op. When should_profile() is set,
// type-profile data is associated with profiled_method() at profiled_bci()
// — presumably consumed when the check is emitted; confirm in LIR_Assembler.
1543 void set_should_profile(bool b) { _should_profile = b; }       // enable/disable profiling for this check
1544 ciMethod* profiled_method() const { return _profiled_method; } // method whose profile this check feeds
1545 int profiled_bci() const { return _profiled_bci; }             // bytecode index within that method
1546 bool should_profile() const { return _should_profile; }        // true if profile data should be recorded
1547
1548 virtual void emit_code(LIR_Assembler* masm);                   // generate machine code for this op
1549 virtual LIR_OpTypeCheck* as_OpTypeCheck() { return this; }     // downcast hook (used instead of dynamic_cast)
1550 void print_instr(outputStream* out) const PRODUCT_RETURN;      // debug printing; PRODUCT_RETURN elides it in product builds
1551 };
1552
1553 // LIR_Op2
1554 class LIR_Op2: public LIR_Op {
1555 friend class LIR_OpVisitState;
1556
1557 int _fpu_stack_size; // for sin/cos implementation on Intel
1558
1559 protected:
1560 LIR_Opr _opr1;
1561 LIR_Opr _opr2;
1562 BasicType _type;
1563 LIR_Opr _tmp;
1564 LIR_Condition _condition;
1565
1566 void verify() const;
1567
1568 public:
1569 LIR_Op2(LIR_Code code, LIR_Condition condition, LIR_Opr opr1, LIR_Opr opr2, CodeEmitInfo* info = NULL)
1570 : LIR_Op(code, LIR_OprFact::illegalOpr, info)
1571 , _opr1(opr1)
1572 , _opr2(opr2)
1573 , _type(T_ILLEGAL)
1574 , _condition(condition)
1575 , _fpu_stack_size(0)
1576 , _tmp(LIR_OprFact::illegalOpr) {
1577 assert(code == lir_cmp, "code check");
1578 }
1579
1580 LIR_Op2(LIR_Code code, LIR_Condition condition, LIR_Opr opr1, LIR_Opr opr2, LIR_Opr result, BasicType type)
1581 : LIR_Op(code, result, NULL)
1582 , _opr1(opr1)
1583 , _opr2(opr2)
1584 , _type(type)
1585 , _condition(condition)
1586 , _fpu_stack_size(0)
1587 , _tmp(LIR_OprFact::illegalOpr) {
1588 assert(code == lir_cmove, "code check");
1589 assert(type != T_ILLEGAL, "cmove should have type");
1590 }
1591
1592 LIR_Op2(LIR_Code code, LIR_Opr opr1, LIR_Opr opr2, LIR_Opr result = LIR_OprFact::illegalOpr,
1593 CodeEmitInfo* info = NULL, BasicType type = T_ILLEGAL)
1594 : LIR_Op(code, result, info)
1595 , _opr1(opr1)
1596 , _opr2(opr2)
1597 , _type(type)
1598 , _condition(lir_cond_unknown)
1599 , _fpu_stack_size(0)
1600 , _tmp(LIR_OprFact::illegalOpr) {
1601 assert(code != lir_cmp && is_in_range(code, begin_op2, end_op2), "code check");
1602 }
1603
1604 LIR_Op2(LIR_Code code, LIR_Opr opr1, LIR_Opr opr2, LIR_Opr result, LIR_Opr tmp)
1605 : LIR_Op(code, result, NULL)
1606 , _opr1(opr1)
1607 , _opr2(opr2)
1608 , _type(T_ILLEGAL)
1609 , _condition(lir_cond_unknown)
1610 , _fpu_stack_size(0)
1611 , _tmp(tmp) {
1612 assert(code != lir_cmp && is_in_range(code, begin_op2, end_op2), "code check");
1613 }
1614
1615 LIR_Opr in_opr1() const { return _opr1; }
1616 LIR_Opr in_opr2() const { return _opr2; }
1617 BasicType type() const { return _type; }
1618 LIR_Opr tmp_opr() const { return _tmp; }
1619 LIR_Condition condition() const {
1620 assert(code() == lir_cmp || code() == lir_cmove, "only valid for cmp and cmove"); return _condition;
1621 }
1622 void set_condition(LIR_Condition condition) {
1623 assert(code() == lir_cmp || code() == lir_cmove, "only valid for cmp and cmove"); _condition = condition;
1624 }
1625
1626 void set_fpu_stack_size(int size) { _fpu_stack_size = size; }
1627 int fpu_stack_size() const { return _fpu_stack_size; }
1628
1629 void set_in_opr1(LIR_Opr opr) { _opr1 = opr; }
1630 void set_in_opr2(LIR_Opr opr) { _opr2 = opr; }
1631
1632 virtual void emit_code(LIR_Assembler* masm);
1633 virtual LIR_Op2* as_Op2() { return this; }
1634 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1635 };
1636
1637 class LIR_OpAllocArray : public LIR_Op {
1638 friend class LIR_OpVisitState;
2008 void cmp_reg_mem(LIR_Condition condition, LIR_Opr reg, LIR_Address* addr, CodeEmitInfo* info);
2009
2010 void cmove(LIR_Condition condition, LIR_Opr src1, LIR_Opr src2, LIR_Opr dst, BasicType type) {
2011 append(new LIR_Op2(lir_cmove, condition, src1, src2, dst, type));
2012 }
2013
// Atomic compare-and-swap emitters for jlong / oop / jint values.
// NOTE(review): bodies are in the .cpp — presumably *addr is replaced by
// new_value iff it equals cmp_value, with t1/t2 as platform temps and an
// optional result operand; confirm against the implementation.
2014 void cas_long(LIR_Opr addr, LIR_Opr cmp_value, LIR_Opr new_value,
2015 LIR_Opr t1, LIR_Opr t2, LIR_Opr result = LIR_OprFact::illegalOpr);
2016 void cas_obj(LIR_Opr addr, LIR_Opr cmp_value, LIR_Opr new_value,
2017 LIR_Opr t1, LIR_Opr t2, LIR_Opr result = LIR_OprFact::illegalOpr);
2018 void cas_int(LIR_Opr addr, LIR_Opr cmp_value, LIR_Opr new_value,
2019 LIR_Opr t1, LIR_Opr t2, LIR_Opr result = LIR_OprFact::illegalOpr);
2020
// Math intrinsic emitters. Operand packing into LIR_Op2 is irregular:
// abs/sqrt pass their scratch register in the opr2 slot; log/log10 leave
// opr2 illegal and pass the scratch via the constructor's tmp slot;
// sin/cos/tan use both (tmp1 -> opr2, tmp2 -> tmp).
2021 void abs (LIR_Opr from, LIR_Opr to, LIR_Opr tmp) { append(new LIR_Op2(lir_abs , from, tmp, to)); }   // to = |from|
2022 void sqrt(LIR_Opr from, LIR_Opr to, LIR_Opr tmp) { append(new LIR_Op2(lir_sqrt, from, tmp, to)); }   // to = sqrt(from)
2023 void log (LIR_Opr from, LIR_Opr to, LIR_Opr tmp) { append(new LIR_Op2(lir_log, from, LIR_OprFact::illegalOpr, to, tmp)); }   // to = log(from)
2024 void log10 (LIR_Opr from, LIR_Opr to, LIR_Opr tmp) { append(new LIR_Op2(lir_log10, from, LIR_OprFact::illegalOpr, to, tmp)); }   // to = log10(from)
2025 void sin (LIR_Opr from, LIR_Opr to, LIR_Opr tmp1, LIR_Opr tmp2) { append(new LIR_Op2(lir_sin , from, tmp1, to, tmp2)); }   // to = sin(from)
2026 void cos (LIR_Opr from, LIR_Opr to, LIR_Opr tmp1, LIR_Opr tmp2) { append(new LIR_Op2(lir_cos , from, tmp1, to, tmp2)); }   // to = cos(from)
2027 void tan (LIR_Opr from, LIR_Opr to, LIR_Opr tmp1, LIR_Opr tmp2) { append(new LIR_Op2(lir_tan , from, tmp1, to, tmp2)); }   // to = tan(from)
2028
// Binary arithmetic emitters. The optional CodeEmitInfo on sub/div/rem is
// forwarded to the LIR_Op2 'info' slot — presumably debug/deopt state for
// operations that can trap (e.g. division by zero); confirm in the back end.
// The strictfp variants carry a scratch register via the tmp slot.
2029 void add (LIR_Opr left, LIR_Opr right, LIR_Opr res) { append(new LIR_Op2(lir_add, left, right, res)); }   // res = left + right
2030 void sub (LIR_Opr left, LIR_Opr right, LIR_Opr res, CodeEmitInfo* info = NULL) { append(new LIR_Op2(lir_sub, left, right, res, info)); }   // res = left - right
2031 void mul (LIR_Opr left, LIR_Opr right, LIR_Opr res) { append(new LIR_Op2(lir_mul, left, right, res)); }   // res = left * right
2032 void mul_strictfp (LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_mul_strictfp, left, right, res, tmp)); }   // strictfp multiply
2033 void div (LIR_Opr left, LIR_Opr right, LIR_Opr res, CodeEmitInfo* info = NULL) { append(new LIR_Op2(lir_div, left, right, res, info)); }   // res = left / right
2034 void div_strictfp (LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_div_strictfp, left, right, res, tmp)); }   // strictfp divide
2035 void rem (LIR_Opr left, LIR_Opr right, LIR_Opr res, CodeEmitInfo* info = NULL) { append(new LIR_Op2(lir_rem, left, right, res, info)); }   // res = left % right
2036
// Memory access emitters (declarations only; bodies in the .cpp).
// patch_code marks accesses whose address must be patched at run time —
// presumably for fields of classes not yet resolved; confirm in the assembler.
2037 void volatile_load_mem_reg(LIR_Address* address, LIR_Opr dst, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2038 void volatile_load_unsafe_reg(LIR_Opr base, LIR_Opr offset, LIR_Opr dst, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code);
2039
2040 void load(LIR_Address* addr, LIR_Opr src, CodeEmitInfo* info = NULL, LIR_PatchCode patch_code = lir_patch_none);
2041
2042 void prefetch(LIR_Address* addr, bool is_store);
2043
2044 void store_mem_int(jint v, LIR_Opr base, int offset_in_bytes, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2045 void store_mem_oop(jobject o, LIR_Opr base, int offset_in_bytes, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2046 void store(LIR_Opr src, LIR_Address* addr, CodeEmitInfo* info = NULL, LIR_PatchCode patch_code = lir_patch_none);
2047 void volatile_store_mem_reg(LIR_Opr src, LIR_Address* address, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
|
899 , begin_op2
900 , lir_cmp
901 , lir_cmp_l2i
902 , lir_ucmp_fd2i
903 , lir_cmp_fd2i
904 , lir_cmove
905 , lir_add
906 , lir_sub
907 , lir_mul
908 , lir_mul_strictfp
909 , lir_div
910 , lir_div_strictfp
911 , lir_rem
912 , lir_sqrt
913 , lir_abs
914 , lir_sin
915 , lir_cos
916 , lir_tan
917 , lir_log
918 , lir_log10
919 , lir_exp
920 , lir_pow
921 , lir_logic_and
922 , lir_logic_or
923 , lir_logic_xor
924 , lir_shl
925 , lir_shr
926 , lir_ushr
927 , lir_alloc_array
928 , lir_throw
929 , lir_compare_to
930 , end_op2
931 , begin_op3
932 , lir_idiv
933 , lir_irem
934 , end_op3
935 , begin_opJavaCall
936 , lir_static_call
937 , lir_optvirtual_call
938 , lir_icvirtual_call
939 , lir_virtual_call
940 , lir_dynamic_call
// Profiling accessors for this type-check op. When should_profile() is set,
// type-profile data is associated with profiled_method() at profiled_bci()
// — presumably consumed when the check is emitted; confirm in LIR_Assembler.
1545 void set_should_profile(bool b) { _should_profile = b; }       // enable/disable profiling for this check
1546 ciMethod* profiled_method() const { return _profiled_method; } // method whose profile this check feeds
1547 int profiled_bci() const { return _profiled_bci; }             // bytecode index within that method
1548 bool should_profile() const { return _should_profile; }        // true if profile data should be recorded
1549
1550 virtual void emit_code(LIR_Assembler* masm);                   // generate machine code for this op
1551 virtual LIR_OpTypeCheck* as_OpTypeCheck() { return this; }     // downcast hook (used instead of dynamic_cast)
1552 void print_instr(outputStream* out) const PRODUCT_RETURN;      // debug printing; PRODUCT_RETURN elides it in product builds
1553 };
1554
1555 // LIR_Op2
1556 class LIR_Op2: public LIR_Op {
1557 friend class LIR_OpVisitState;
1558
1559 int _fpu_stack_size; // for sin/cos implementation on Intel
1560
1561 protected:
1562 LIR_Opr _opr1;
1563 LIR_Opr _opr2;
1564 BasicType _type;
1565 LIR_Opr _tmp1;
1566 LIR_Opr _tmp2;
1567 LIR_Opr _tmp3;
1568 LIR_Opr _tmp4;
1569 LIR_Opr _tmp5;
1570 LIR_Condition _condition;
1571
1572 void verify() const;
1573
1574 public:
1575 LIR_Op2(LIR_Code code, LIR_Condition condition, LIR_Opr opr1, LIR_Opr opr2, CodeEmitInfo* info = NULL)
1576 : LIR_Op(code, LIR_OprFact::illegalOpr, info)
1577 , _opr1(opr1)
1578 , _opr2(opr2)
1579 , _type(T_ILLEGAL)
1580 , _condition(condition)
1581 , _fpu_stack_size(0)
1582 , _tmp1(LIR_OprFact::illegalOpr)
1583 , _tmp2(LIR_OprFact::illegalOpr)
1584 , _tmp3(LIR_OprFact::illegalOpr)
1585 , _tmp4(LIR_OprFact::illegalOpr)
1586 , _tmp5(LIR_OprFact::illegalOpr) {
1587 assert(code == lir_cmp, "code check");
1588 }
1589
1590 LIR_Op2(LIR_Code code, LIR_Condition condition, LIR_Opr opr1, LIR_Opr opr2, LIR_Opr result, BasicType type)
1591 : LIR_Op(code, result, NULL)
1592 , _opr1(opr1)
1593 , _opr2(opr2)
1594 , _type(type)
1595 , _condition(condition)
1596 , _fpu_stack_size(0)
1597 , _tmp1(LIR_OprFact::illegalOpr)
1598 , _tmp2(LIR_OprFact::illegalOpr)
1599 , _tmp3(LIR_OprFact::illegalOpr)
1600 , _tmp4(LIR_OprFact::illegalOpr)
1601 , _tmp5(LIR_OprFact::illegalOpr) {
1602 assert(code == lir_cmove, "code check");
1603 assert(type != T_ILLEGAL, "cmove should have type");
1604 }
1605
1606 LIR_Op2(LIR_Code code, LIR_Opr opr1, LIR_Opr opr2, LIR_Opr result = LIR_OprFact::illegalOpr,
1607 CodeEmitInfo* info = NULL, BasicType type = T_ILLEGAL)
1608 : LIR_Op(code, result, info)
1609 , _opr1(opr1)
1610 , _opr2(opr2)
1611 , _type(type)
1612 , _condition(lir_cond_unknown)
1613 , _fpu_stack_size(0)
1614 , _tmp1(LIR_OprFact::illegalOpr)
1615 , _tmp2(LIR_OprFact::illegalOpr)
1616 , _tmp3(LIR_OprFact::illegalOpr)
1617 , _tmp4(LIR_OprFact::illegalOpr)
1618 , _tmp5(LIR_OprFact::illegalOpr) {
1619 assert(code != lir_cmp && is_in_range(code, begin_op2, end_op2), "code check");
1620 }
1621
1622 LIR_Op2(LIR_Code code, LIR_Opr opr1, LIR_Opr opr2, LIR_Opr result, LIR_Opr tmp1, LIR_Opr tmp2 = LIR_OprFact::illegalOpr,
1623 LIR_Opr tmp3 = LIR_OprFact::illegalOpr, LIR_Opr tmp4 = LIR_OprFact::illegalOpr, LIR_Opr tmp5 = LIR_OprFact::illegalOpr)
1624 : LIR_Op(code, result, NULL)
1625 , _opr1(opr1)
1626 , _opr2(opr2)
1627 , _type(T_ILLEGAL)
1628 , _condition(lir_cond_unknown)
1629 , _fpu_stack_size(0)
1630 , _tmp1(tmp1)
1631 , _tmp2(tmp2)
1632 , _tmp3(tmp3)
1633 , _tmp4(tmp4)
1634 , _tmp5(tmp5) {
1635 assert(code != lir_cmp && is_in_range(code, begin_op2, end_op2), "code check");
1636 }
1637
1638 LIR_Opr in_opr1() const { return _opr1; }
1639 LIR_Opr in_opr2() const { return _opr2; }
1640 BasicType type() const { return _type; }
1641 LIR_Opr tmp1_opr() const { return _tmp1; }
1642 LIR_Opr tmp2_opr() const { return _tmp2; }
1643 LIR_Opr tmp3_opr() const { return _tmp3; }
1644 LIR_Opr tmp4_opr() const { return _tmp4; }
1645 LIR_Opr tmp5_opr() const { return _tmp5; }
1646 LIR_Condition condition() const {
1647 assert(code() == lir_cmp || code() == lir_cmove, "only valid for cmp and cmove"); return _condition;
1648 }
1649 void set_condition(LIR_Condition condition) {
1650 assert(code() == lir_cmp || code() == lir_cmove, "only valid for cmp and cmove"); _condition = condition;
1651 }
1652
1653 void set_fpu_stack_size(int size) { _fpu_stack_size = size; }
1654 int fpu_stack_size() const { return _fpu_stack_size; }
1655
1656 void set_in_opr1(LIR_Opr opr) { _opr1 = opr; }
1657 void set_in_opr2(LIR_Opr opr) { _opr2 = opr; }
1658
1659 virtual void emit_code(LIR_Assembler* masm);
1660 virtual LIR_Op2* as_Op2() { return this; }
1661 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1662 };
1663
1664 class LIR_OpAllocArray : public LIR_Op {
1665 friend class LIR_OpVisitState;
2035 void cmp_reg_mem(LIR_Condition condition, LIR_Opr reg, LIR_Address* addr, CodeEmitInfo* info);
2036
2037 void cmove(LIR_Condition condition, LIR_Opr src1, LIR_Opr src2, LIR_Opr dst, BasicType type) {
2038 append(new LIR_Op2(lir_cmove, condition, src1, src2, dst, type));
2039 }
2040
// Atomic compare-and-swap emitters for jlong / oop / jint values.
// NOTE(review): bodies are in the .cpp — presumably *addr is replaced by
// new_value iff it equals cmp_value, with t1/t2 as platform temps and an
// optional result operand; confirm against the implementation.
2041 void cas_long(LIR_Opr addr, LIR_Opr cmp_value, LIR_Opr new_value,
2042 LIR_Opr t1, LIR_Opr t2, LIR_Opr result = LIR_OprFact::illegalOpr);
2043 void cas_obj(LIR_Opr addr, LIR_Opr cmp_value, LIR_Opr new_value,
2044 LIR_Opr t1, LIR_Opr t2, LIR_Opr result = LIR_OprFact::illegalOpr);
2045 void cas_int(LIR_Opr addr, LIR_Opr cmp_value, LIR_Opr new_value,
2046 LIR_Opr t1, LIR_Opr t2, LIR_Opr result = LIR_OprFact::illegalOpr);
2047
// Math intrinsic emitters. Operand packing into LIR_Op2 is irregular:
// abs/sqrt pass their scratch register in the opr2 slot; log/log10 leave
// opr2 illegal and pass the scratch via the constructor's tmp slot;
// sin/cos/tan use tmp1 -> opr2 and tmp2 -> tmp; exp routes tmp1 through
// opr2 and tmp2..tmp5 through the five-temp constructor, while pow has a
// real second input (arg2) and uses all five temp slots.
2048 void abs (LIR_Opr from, LIR_Opr to, LIR_Opr tmp) { append(new LIR_Op2(lir_abs , from, tmp, to)); }   // to = |from|
2049 void sqrt(LIR_Opr from, LIR_Opr to, LIR_Opr tmp) { append(new LIR_Op2(lir_sqrt, from, tmp, to)); }   // to = sqrt(from)
2050 void log (LIR_Opr from, LIR_Opr to, LIR_Opr tmp) { append(new LIR_Op2(lir_log, from, LIR_OprFact::illegalOpr, to, tmp)); }   // to = log(from)
2051 void log10 (LIR_Opr from, LIR_Opr to, LIR_Opr tmp) { append(new LIR_Op2(lir_log10, from, LIR_OprFact::illegalOpr, to, tmp)); }   // to = log10(from)
2052 void sin (LIR_Opr from, LIR_Opr to, LIR_Opr tmp1, LIR_Opr tmp2) { append(new LIR_Op2(lir_sin , from, tmp1, to, tmp2)); }   // to = sin(from)
2053 void cos (LIR_Opr from, LIR_Opr to, LIR_Opr tmp1, LIR_Opr tmp2) { append(new LIR_Op2(lir_cos , from, tmp1, to, tmp2)); }   // to = cos(from)
2054 void tan (LIR_Opr from, LIR_Opr to, LIR_Opr tmp1, LIR_Opr tmp2) { append(new LIR_Op2(lir_tan , from, tmp1, to, tmp2)); }   // to = tan(from)
2055 void exp (LIR_Opr from, LIR_Opr to, LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, LIR_Opr tmp4, LIR_Opr tmp5) { append(new LIR_Op2(lir_exp , from, tmp1, to, tmp2, tmp3, tmp4, tmp5)); }   // to = exp(from)
2056 void pow (LIR_Opr arg1, LIR_Opr arg2, LIR_Opr res, LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, LIR_Opr tmp4, LIR_Opr tmp5) { append(new LIR_Op2(lir_pow, arg1, arg2, res, tmp1, tmp2, tmp3, tmp4, tmp5)); }   // res = pow(arg1, arg2)
2057
// Binary arithmetic emitters. The optional CodeEmitInfo on sub/div/rem is
// forwarded to the LIR_Op2 'info' slot — presumably debug/deopt state for
// operations that can trap (e.g. division by zero); confirm in the back end.
// The strictfp variants carry a scratch register via the tmp slot.
2058 void add (LIR_Opr left, LIR_Opr right, LIR_Opr res) { append(new LIR_Op2(lir_add, left, right, res)); }   // res = left + right
2059 void sub (LIR_Opr left, LIR_Opr right, LIR_Opr res, CodeEmitInfo* info = NULL) { append(new LIR_Op2(lir_sub, left, right, res, info)); }   // res = left - right
2060 void mul (LIR_Opr left, LIR_Opr right, LIR_Opr res) { append(new LIR_Op2(lir_mul, left, right, res)); }   // res = left * right
2061 void mul_strictfp (LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_mul_strictfp, left, right, res, tmp)); }   // strictfp multiply
2062 void div (LIR_Opr left, LIR_Opr right, LIR_Opr res, CodeEmitInfo* info = NULL) { append(new LIR_Op2(lir_div, left, right, res, info)); }   // res = left / right
2063 void div_strictfp (LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_div_strictfp, left, right, res, tmp)); }   // strictfp divide
2064 void rem (LIR_Opr left, LIR_Opr right, LIR_Opr res, CodeEmitInfo* info = NULL) { append(new LIR_Op2(lir_rem, left, right, res, info)); }   // res = left % right
2065
// Memory access emitters (declarations only; bodies in the .cpp).
// patch_code marks accesses whose address must be patched at run time —
// presumably for fields of classes not yet resolved; confirm in the assembler.
2066 void volatile_load_mem_reg(LIR_Address* address, LIR_Opr dst, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2067 void volatile_load_unsafe_reg(LIR_Opr base, LIR_Opr offset, LIR_Opr dst, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code);
2068
2069 void load(LIR_Address* addr, LIR_Opr src, CodeEmitInfo* info = NULL, LIR_PatchCode patch_code = lir_patch_none);
2070
2071 void prefetch(LIR_Address* addr, bool is_store);
2072
2073 void store_mem_int(jint v, LIR_Opr base, int offset_in_bytes, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2074 void store_mem_oop(jobject o, LIR_Opr base, int offset_in_bytes, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2075 void store(LIR_Opr src, LIR_Address* addr, CodeEmitInfo* info = NULL, LIR_PatchCode patch_code = lir_patch_none);
2076 void volatile_store_mem_reg(LIR_Opr src, LIR_Address* address, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
|