963 , lir_cond_greater
964 , lir_cond_belowEqual
965 , lir_cond_aboveEqual
966 , lir_cond_always
967 , lir_cond_unknown = -1
968 };
969
970
// Patching requirement attached to a LIR move: tells the back end whether
// (and which part of) the emitted immediate must be fixed up later.
enum LIR_PatchCode {
  lir_patch_none   = 0,  // no patching needed
  lir_patch_low    = 1,  // presumably patch only the low word of a two-word value -- verify at uses
  lir_patch_high   = 2,  // presumably patch only the high word -- verify at uses
  lir_patch_normal = 3   // patch the full value
};
977
978
// Kind of a lir_move instruction, stored in the op's flag bits.
// NOTE(review): this revision of the enum was missing lir_move_wide, which the
// later revision in this file defines and its move_wide() emitters rely on.
// Added here for consistency; all pre-existing enumerator values are unchanged
// (lir_move_max_flag is a sentinel/count, so its shift from 3 to 4 is benign).
enum LIR_MoveKind {
  lir_move_normal    = 0,
  lir_move_volatile  = 1,  // move that must not be reordered/optimized away
  lir_move_unaligned = 2,  // move whose memory operand may be unaligned
  lir_move_wide      = 3,  // full-width move (used with compressed oops on 64-bit)
  lir_move_max_flag  = 4   // sentinel: number of move kinds
};
985
986
987 // --------------------------------------------------
988 // LIR_Op
989 // --------------------------------------------------
// NOTE(review): incomplete excerpt -- the rest of the LIR_Op class body lies
// outside this chunk, so only the visible head is documented.  The line
// numbers baked into the paste are preserved verbatim.
// Base class for all LIR instructions; allocated in the compilation arena
// (CompilationResourceObj).
990 class LIR_Op: public CompilationResourceObj {
991 friend class LIR_OpVisitState;
992
// Debug-only bookkeeping: presumably the C++ source position at which this
// op was created -- TODO confirm at the set sites.
993 #ifdef ASSERT
994 private:
995 const char * _file;
996 int _line;
997 #endif
998
999 protected:
// Result operand of this instruction.
1000 LIR_Opr _result;
// Opcode and per-op flag bits, stored narrow to keep LIR_Op small.
1001 unsigned short _code;
1002 unsigned short _flags;
1910 void nop() { append(new LIR_Op0(lir_nop)); }
1911 void build_frame() { append(new LIR_Op0(lir_build_frame)); }
1912
1913 void std_entry(LIR_Opr receiver) { append(new LIR_Op0(lir_std_entry, receiver)); }
1914 void osr_entry(LIR_Opr osrPointer) { append(new LIR_Op0(lir_osr_entry, osrPointer)); }
1915
1916 void branch_destination(Label* lbl) { append(new LIR_OpLabel(lbl)); }
1917
1918 void negate(LIR_Opr from, LIR_Opr to) { append(new LIR_Op1(lir_neg, from, to)); }
1919 void leal(LIR_Opr from, LIR_Opr result_reg) { append(new LIR_Op1(lir_leal, from, result_reg)); }
1920
1921 // result is a stack location for old backend and vreg for UseLinearScan
1922 // stack_loc_temp is an illegal register for old backend
1923 void roundfp(LIR_Opr reg, LIR_Opr stack_loc_temp, LIR_Opr result) { append(new LIR_OpRoundFP(reg, stack_loc_temp, result)); }
1924 void unaligned_move(LIR_Address* src, LIR_Opr dst) { append(new LIR_Op1(lir_move, LIR_OprFact::address(src), dst, dst->type(), lir_patch_none, NULL, lir_move_unaligned)); }
1925 void unaligned_move(LIR_Opr src, LIR_Address* dst) { append(new LIR_Op1(lir_move, src, LIR_OprFact::address(dst), src->type(), lir_patch_none, NULL, lir_move_unaligned)); }
1926 void unaligned_move(LIR_Opr src, LIR_Opr dst) { append(new LIR_Op1(lir_move, src, dst, dst->type(), lir_patch_none, NULL, lir_move_unaligned)); }
1927 void move(LIR_Opr src, LIR_Opr dst, CodeEmitInfo* info = NULL) { append(new LIR_Op1(lir_move, src, dst, dst->type(), lir_patch_none, info)); }
1928 void move(LIR_Address* src, LIR_Opr dst, CodeEmitInfo* info = NULL) { append(new LIR_Op1(lir_move, LIR_OprFact::address(src), dst, src->type(), lir_patch_none, info)); }
1929 void move(LIR_Opr src, LIR_Address* dst, CodeEmitInfo* info = NULL) { append(new LIR_Op1(lir_move, src, LIR_OprFact::address(dst), dst->type(), lir_patch_none, info)); }
1930
1931 void volatile_move(LIR_Opr src, LIR_Opr dst, BasicType type, CodeEmitInfo* info = NULL, LIR_PatchCode patch_code = lir_patch_none) { append(new LIR_Op1(lir_move, src, dst, type, patch_code, info, lir_move_volatile)); }
1932
1933 void oop2reg (jobject o, LIR_Opr reg) { append(new LIR_Op1(lir_move, LIR_OprFact::oopConst(o), reg)); }
1934 void oop2reg_patch(jobject o, LIR_Opr reg, CodeEmitInfo* info);
1935
1936 void return_op(LIR_Opr result) { append(new LIR_Op1(lir_return, result)); }
1937
1938 void safepoint(LIR_Opr tmp, CodeEmitInfo* info) { append(new LIR_Op1(lir_safepoint, tmp, info)); }
1939
1940 #ifdef PPC
1941 void convert(Bytecodes::Code code, LIR_Opr left, LIR_Opr dst, LIR_Opr tmp1, LIR_Opr tmp2) { append(new LIR_OpConvert(code, left, dst, NULL, tmp1, tmp2)); }
1942 #endif
1943 void convert(Bytecodes::Code code, LIR_Opr left, LIR_Opr dst, ConversionStub* stub = NULL/*, bool is_32bit = false*/) { append(new LIR_OpConvert(code, left, dst, stub)); }
1944
1945 void logical_and (LIR_Opr left, LIR_Opr right, LIR_Opr dst) { append(new LIR_Op2(lir_logic_and, left, right, dst)); }
1946 void logical_or (LIR_Opr left, LIR_Opr right, LIR_Opr dst) { append(new LIR_Op2(lir_logic_or, left, right, dst)); }
1947 void logical_xor (LIR_Opr left, LIR_Opr right, LIR_Opr dst) { append(new LIR_Op2(lir_logic_xor, left, right, dst)); }
1948
1949 void pack64(LIR_Opr src, LIR_Opr dst) { append(new LIR_Op1(lir_pack64, src, dst, T_LONG, lir_patch_none, NULL)); }
1950 void unpack64(LIR_Opr src, LIR_Opr dst) { append(new LIR_Op1(lir_unpack64, src, dst, T_LONG, lir_patch_none, NULL)); }
|
963 , lir_cond_greater
964 , lir_cond_belowEqual
965 , lir_cond_aboveEqual
966 , lir_cond_always
967 , lir_cond_unknown = -1
968 };
969
970
// How the immediate emitted for an instruction must be fixed up after
// code installation, if at all.
enum LIR_PatchCode {
  lir_patch_none   = 0,  // nothing to patch
  lir_patch_low    = 1,  // low word only (name-based reading -- confirm at uses)
  lir_patch_high   = 2,  // high word only (name-based reading -- confirm at uses)
  lir_patch_normal = 3   // whole value
};
977
978
// Kind of a lir_move instruction, carried in the op's flag bits.
enum LIR_MoveKind {
  lir_move_normal    = 0,
  lir_move_volatile  = 1,  // move that must not be reordered/split
  lir_move_unaligned = 2,  // memory operand may be unaligned
  lir_move_wide      = 3,  // full-width move (see move_wide(); used with compressed oops)
  lir_move_max_flag  = 4   // sentinel: number of move kinds
};
986
987
988 // --------------------------------------------------
989 // LIR_Op
990 // --------------------------------------------------
// NOTE(review): incomplete excerpt -- the rest of the LIR_Op class body lies
// outside this chunk, so only the visible head is documented.  The line
// numbers baked into the paste are preserved verbatim.
// Base class for all LIR instructions; allocated in the compilation arena
// (CompilationResourceObj).
991 class LIR_Op: public CompilationResourceObj {
992 friend class LIR_OpVisitState;
993
// Debug-only bookkeeping: presumably the C++ source position at which this
// op was created -- TODO confirm at the set sites.
994 #ifdef ASSERT
995 private:
996 const char * _file;
997 int _line;
998 #endif
999
1000 protected:
// Result operand of this instruction.
1001 LIR_Opr _result;
// Opcode and per-op flag bits, stored narrow to keep LIR_Op small.
1002 unsigned short _code;
1003 unsigned short _flags;
1911 void nop() { append(new LIR_Op0(lir_nop)); }
1912 void build_frame() { append(new LIR_Op0(lir_build_frame)); }
1913
1914 void std_entry(LIR_Opr receiver) { append(new LIR_Op0(lir_std_entry, receiver)); }
1915 void osr_entry(LIR_Opr osrPointer) { append(new LIR_Op0(lir_osr_entry, osrPointer)); }
1916
1917 void branch_destination(Label* lbl) { append(new LIR_OpLabel(lbl)); }
1918
1919 void negate(LIR_Opr from, LIR_Opr to) { append(new LIR_Op1(lir_neg, from, to)); }
1920 void leal(LIR_Opr from, LIR_Opr result_reg) { append(new LIR_Op1(lir_leal, from, result_reg)); }
1921
1922 // result is a stack location for old backend and vreg for UseLinearScan
1923 // stack_loc_temp is an illegal register for old backend
1924 void roundfp(LIR_Opr reg, LIR_Opr stack_loc_temp, LIR_Opr result) { append(new LIR_OpRoundFP(reg, stack_loc_temp, result)); }
1925 void unaligned_move(LIR_Address* src, LIR_Opr dst) { append(new LIR_Op1(lir_move, LIR_OprFact::address(src), dst, dst->type(), lir_patch_none, NULL, lir_move_unaligned)); }
1926 void unaligned_move(LIR_Opr src, LIR_Address* dst) { append(new LIR_Op1(lir_move, src, LIR_OprFact::address(dst), src->type(), lir_patch_none, NULL, lir_move_unaligned)); }
1927 void unaligned_move(LIR_Opr src, LIR_Opr dst) { append(new LIR_Op1(lir_move, src, dst, dst->type(), lir_patch_none, NULL, lir_move_unaligned)); }
1928 void move(LIR_Opr src, LIR_Opr dst, CodeEmitInfo* info = NULL) { append(new LIR_Op1(lir_move, src, dst, dst->type(), lir_patch_none, info)); }
1929 void move(LIR_Address* src, LIR_Opr dst, CodeEmitInfo* info = NULL) { append(new LIR_Op1(lir_move, LIR_OprFact::address(src), dst, src->type(), lir_patch_none, info)); }
1930 void move(LIR_Opr src, LIR_Address* dst, CodeEmitInfo* info = NULL) { append(new LIR_Op1(lir_move, src, LIR_OprFact::address(dst), dst->type(), lir_patch_none, info)); }
1931 void move_wide(LIR_Address* src, LIR_Opr dst, CodeEmitInfo* info = NULL) {
1932 #ifdef _LP64
1933 if (UseCompressedOops) {
1934 append(new LIR_Op1(lir_move, LIR_OprFact::address(src), dst, src->type(), lir_patch_none, info, lir_move_wide));
1935 } else
1936 #endif
1937 move(src, dst, info);
1938 }
1939 void move_wide(LIR_Opr src, LIR_Address* dst, CodeEmitInfo* info = NULL) {
1940 #ifdef _LP64
1941 if (UseCompressedOops) {
1942 append(new LIR_Op1(lir_move, src, LIR_OprFact::address(dst), dst->type(), lir_patch_none, info, lir_move_wide));
1943 } else
1944 #endif
1945 move(src, dst, info);
1946 }
1947 void volatile_move(LIR_Opr src, LIR_Opr dst, BasicType type, CodeEmitInfo* info = NULL, LIR_PatchCode patch_code = lir_patch_none) { append(new LIR_Op1(lir_move, src, dst, type, patch_code, info, lir_move_volatile)); }
1948
1949 void oop2reg (jobject o, LIR_Opr reg) { append(new LIR_Op1(lir_move, LIR_OprFact::oopConst(o), reg)); }
1950 void oop2reg_patch(jobject o, LIR_Opr reg, CodeEmitInfo* info);
1951
1952 void return_op(LIR_Opr result) { append(new LIR_Op1(lir_return, result)); }
1953
1954 void safepoint(LIR_Opr tmp, CodeEmitInfo* info) { append(new LIR_Op1(lir_safepoint, tmp, info)); }
1955
1956 #ifdef PPC
1957 void convert(Bytecodes::Code code, LIR_Opr left, LIR_Opr dst, LIR_Opr tmp1, LIR_Opr tmp2) { append(new LIR_OpConvert(code, left, dst, NULL, tmp1, tmp2)); }
1958 #endif
1959 void convert(Bytecodes::Code code, LIR_Opr left, LIR_Opr dst, ConversionStub* stub = NULL/*, bool is_32bit = false*/) { append(new LIR_OpConvert(code, left, dst, stub)); }
1960
1961 void logical_and (LIR_Opr left, LIR_Opr right, LIR_Opr dst) { append(new LIR_Op2(lir_logic_and, left, right, dst)); }
1962 void logical_or (LIR_Opr left, LIR_Opr right, LIR_Opr dst) { append(new LIR_Op2(lir_logic_or, left, right, dst)); }
1963 void logical_xor (LIR_Opr left, LIR_Opr right, LIR_Opr dst) { append(new LIR_Op2(lir_logic_xor, left, right, dst)); }
1964
1965 void pack64(LIR_Opr src, LIR_Opr dst) { append(new LIR_Op1(lir_pack64, src, dst, T_LONG, lir_patch_none, NULL)); }
1966 void unpack64(LIR_Opr src, LIR_Opr dst) { append(new LIR_Op1(lir_unpack64, src, dst, T_LONG, lir_patch_none, NULL)); }
|