src/share/vm/c1/c1_LIR.hpp

rev 10271 : 8147844: new method j.l.Runtime.onSpinWait() and the corresponding x86 hotspot intrinsic
Summary: adds c1 & c2 x86 intrinsics for j.l.Runtime.onSpinWait() that use the PAUSE instruction
Reviewed-by: iveresov, kvn, vlivanov
Contributed-by: Ivan Krylov <ivan@azul.com>, Yuri Gaevsky <ygaevsky@azul.com>
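Note on intent: onSpinWait() is designed to compile down to the x86 PAUSE instruction inside spin-wait loops. The self-contained C++ sketch below only illustrates the effect the emitted code is meant to have; _mm_pause() and the spin_until_set() helper are illustrative stand-ins, not part of this change.

#include <immintrin.h>   // _mm_pause(): wraps the x86 PAUSE instruction
#include <atomic>

// Illustration only: a hand-written spin-wait loop that issues PAUSE on
// every iteration, which is what JIT-compiled code calling the new
// onSpinWait() intrinsic is intended to do. PAUSE hints to the CPU that
// the loop is a spin-wait, reducing power use and avoiding the memory-order
// pipeline flush when the loop finally exits.
static void spin_until_set(const std::atomic<bool>& flag) {
  while (!flag.load(std::memory_order_acquire)) {
    _mm_pause();
  }
}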


--- old ---
 903       , lir_word_align
 904       , lir_label
 905       , lir_nop
 906       , lir_backwardbranch_target
 907       , lir_std_entry
 908       , lir_osr_entry
 909       , lir_build_frame
 910       , lir_fpop_raw
 911       , lir_24bit_FPU
 912       , lir_reset_FPU
 913       , lir_breakpoint
 914       , lir_rtcall
 915       , lir_membar
 916       , lir_membar_acquire
 917       , lir_membar_release
 918       , lir_membar_loadload
 919       , lir_membar_storestore
 920       , lir_membar_loadstore
 921       , lir_membar_storeload
 922       , lir_get_thread

 923   , end_op0
 924   , begin_op1
 925       , lir_fxch
 926       , lir_fld
 927       , lir_ffree
 928       , lir_push
 929       , lir_pop
 930       , lir_null_check
 931       , lir_return
 932       , lir_leal
 933       , lir_neg
 934       , lir_branch
 935       , lir_cond_float_branch
 936       , lir_move
 937       , lir_convert
 938       , lir_alloc_object
 939       , lir_monaddr
 940       , lir_roundfp
 941       , lir_safepoint
 942       , lir_pack64


2084                     address dest, LIR_OprList* arguments, CodeEmitInfo* info) {
2085     append(new LIR_OpJavaCall(lir_dynamic_call, method, receiver, result, dest, arguments, info));
2086   }
2087 
2088   void get_thread(LIR_Opr result)                { append(new LIR_Op0(lir_get_thread, result)); }
2089   void word_align()                              { append(new LIR_Op0(lir_word_align)); }
2090   void membar()                                  { append(new LIR_Op0(lir_membar)); }
2091   void membar_acquire()                          { append(new LIR_Op0(lir_membar_acquire)); }
2092   void membar_release()                          { append(new LIR_Op0(lir_membar_release)); }
2093   void membar_loadload()                         { append(new LIR_Op0(lir_membar_loadload)); }
2094   void membar_storestore()                       { append(new LIR_Op0(lir_membar_storestore)); }
2095   void membar_loadstore()                        { append(new LIR_Op0(lir_membar_loadstore)); }
2096   void membar_storeload()                        { append(new LIR_Op0(lir_membar_storeload)); }
2097 
2098   void nop()                                     { append(new LIR_Op0(lir_nop)); }
2099   void build_frame()                             { append(new LIR_Op0(lir_build_frame)); }
2100 
2101   void std_entry(LIR_Opr receiver)               { append(new LIR_Op0(lir_std_entry, receiver)); }
2102   void osr_entry(LIR_Opr osrPointer)             { append(new LIR_Op0(lir_osr_entry, osrPointer)); }
2103 


2104   void branch_destination(Label* lbl)            { append(new LIR_OpLabel(lbl)); }
2105 
2106   void negate(LIR_Opr from, LIR_Opr to)          { append(new LIR_Op1(lir_neg, from, to)); }
2107   void leal(LIR_Opr from, LIR_Opr result_reg)    { append(new LIR_Op1(lir_leal, from, result_reg)); }
2108 
2109   // result is a stack location for old backend and vreg for UseLinearScan
2110   // stack_loc_temp is an illegal register for old backend
2111   void roundfp(LIR_Opr reg, LIR_Opr stack_loc_temp, LIR_Opr result) { append(new LIR_OpRoundFP(reg, stack_loc_temp, result)); }
2112   void unaligned_move(LIR_Address* src, LIR_Opr dst) { append(new LIR_Op1(lir_move, LIR_OprFact::address(src), dst, dst->type(), lir_patch_none, NULL, lir_move_unaligned)); }
2113   void unaligned_move(LIR_Opr src, LIR_Address* dst) { append(new LIR_Op1(lir_move, src, LIR_OprFact::address(dst), src->type(), lir_patch_none, NULL, lir_move_unaligned)); }
2114   void unaligned_move(LIR_Opr src, LIR_Opr dst) { append(new LIR_Op1(lir_move, src, dst, dst->type(), lir_patch_none, NULL, lir_move_unaligned)); }
2115   void move(LIR_Opr src, LIR_Opr dst, CodeEmitInfo* info = NULL) { append(new LIR_Op1(lir_move, src, dst, dst->type(), lir_patch_none, info)); }
2116   void move(LIR_Address* src, LIR_Opr dst, CodeEmitInfo* info = NULL) { append(new LIR_Op1(lir_move, LIR_OprFact::address(src), dst, src->type(), lir_patch_none, info)); }
2117   void move(LIR_Opr src, LIR_Address* dst, CodeEmitInfo* info = NULL) { append(new LIR_Op1(lir_move, src, LIR_OprFact::address(dst), dst->type(), lir_patch_none, info)); }
2118   void move_wide(LIR_Address* src, LIR_Opr dst, CodeEmitInfo* info = NULL) {
2119     if (UseCompressedOops) {
2120       append(new LIR_Op1(lir_move, LIR_OprFact::address(src), dst, src->type(), lir_patch_none, info, lir_move_wide));
2121     } else {
2122       move(src, dst, info);
2123     }




--- new ---
 903       , lir_word_align
 904       , lir_label
 905       , lir_nop
 906       , lir_backwardbranch_target
 907       , lir_std_entry
 908       , lir_osr_entry
 909       , lir_build_frame
 910       , lir_fpop_raw
 911       , lir_24bit_FPU
 912       , lir_reset_FPU
 913       , lir_breakpoint
 914       , lir_rtcall
 915       , lir_membar
 916       , lir_membar_acquire
 917       , lir_membar_release
 918       , lir_membar_loadload
 919       , lir_membar_storestore
 920       , lir_membar_loadstore
 921       , lir_membar_storeload
 922       , lir_get_thread
 923       , lir_on_spin_wait
 924   , end_op0
 925   , begin_op1
 926       , lir_fxch
 927       , lir_fld
 928       , lir_ffree
 929       , lir_push
 930       , lir_pop
 931       , lir_null_check
 932       , lir_return
 933       , lir_leal
 934       , lir_neg
 935       , lir_branch
 936       , lir_cond_float_branch
 937       , lir_move
 938       , lir_convert
 939       , lir_alloc_object
 940       , lir_monaddr
 941       , lir_roundfp
 942       , lir_safepoint
 943       , lir_pack64
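Since lir_on_spin_wait is added to the zero-operand group (after lir_get_thread, before end_op0), the shared c1 machinery treats it as an ordinary LIR_Op0 and only the platform assembler needs a new dispatch case. The sketch below shows the shape of that dispatch; the on_spin_wait() assembler helper and its PAUSE emission are assumptions based on the changeset summary, not code shown in this file.

// Sketch only: how LIR_Assembler::emit_op0() could route the new opcode.
// The switch and its other cases already exist in c1_LIRAssembler.cpp;
// the lir_on_spin_wait case is the only addition implied here.
void LIR_Assembler::emit_op0(LIR_Op0* op) {
  switch (op->code()) {
    case lir_nop:
      assert(op->info() == NULL, "not supported");
      break;
    case lir_on_spin_wait:
      on_spin_wait();        // x86 back end: expected to emit PAUSE
      break;
    // ... remaining zero-operand opcodes handled as before ...
    default:
      ShouldNotReachHere();
  }
}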


2085                     address dest, LIR_OprList* arguments, CodeEmitInfo* info) {
2086     append(new LIR_OpJavaCall(lir_dynamic_call, method, receiver, result, dest, arguments, info));
2087   }
2088 
2089   void get_thread(LIR_Opr result)                { append(new LIR_Op0(lir_get_thread, result)); }
2090   void word_align()                              { append(new LIR_Op0(lir_word_align)); }
2091   void membar()                                  { append(new LIR_Op0(lir_membar)); }
2092   void membar_acquire()                          { append(new LIR_Op0(lir_membar_acquire)); }
2093   void membar_release()                          { append(new LIR_Op0(lir_membar_release)); }
2094   void membar_loadload()                         { append(new LIR_Op0(lir_membar_loadload)); }
2095   void membar_storestore()                       { append(new LIR_Op0(lir_membar_storestore)); }
2096   void membar_loadstore()                        { append(new LIR_Op0(lir_membar_loadstore)); }
2097   void membar_storeload()                        { append(new LIR_Op0(lir_membar_storeload)); }
2098 
2099   void nop()                                     { append(new LIR_Op0(lir_nop)); }
2100   void build_frame()                             { append(new LIR_Op0(lir_build_frame)); }
2101 
2102   void std_entry(LIR_Opr receiver)               { append(new LIR_Op0(lir_std_entry, receiver)); }
2103   void osr_entry(LIR_Opr osrPointer)             { append(new LIR_Op0(lir_osr_entry, osrPointer)); }
2104 
2105   void on_spin_wait()                            { append(new LIR_Op0(lir_on_spin_wait)); }
2106 
2107   void branch_destination(Label* lbl)            { append(new LIR_OpLabel(lbl)); }
2108 
2109   void negate(LIR_Opr from, LIR_Opr to)          { append(new LIR_Op1(lir_neg, from, to)); }
2110   void leal(LIR_Opr from, LIR_Opr result_reg)    { append(new LIR_Op1(lir_leal, from, result_reg)); }
2111 
2112   // result is a stack location for old backend and vreg for UseLinearScan
2113   // stack_loc_temp is an illegal register for old backend
2114   void roundfp(LIR_Opr reg, LIR_Opr stack_loc_temp, LIR_Opr result) { append(new LIR_OpRoundFP(reg, stack_loc_temp, result)); }
2115   void unaligned_move(LIR_Address* src, LIR_Opr dst) { append(new LIR_Op1(lir_move, LIR_OprFact::address(src), dst, dst->type(), lir_patch_none, NULL, lir_move_unaligned)); }
2116   void unaligned_move(LIR_Opr src, LIR_Address* dst) { append(new LIR_Op1(lir_move, src, LIR_OprFact::address(dst), src->type(), lir_patch_none, NULL, lir_move_unaligned)); }
2117   void unaligned_move(LIR_Opr src, LIR_Opr dst) { append(new LIR_Op1(lir_move, src, dst, dst->type(), lir_patch_none, NULL, lir_move_unaligned)); }
2118   void move(LIR_Opr src, LIR_Opr dst, CodeEmitInfo* info = NULL) { append(new LIR_Op1(lir_move, src, dst, dst->type(), lir_patch_none, info)); }
2119   void move(LIR_Address* src, LIR_Opr dst, CodeEmitInfo* info = NULL) { append(new LIR_Op1(lir_move, LIR_OprFact::address(src), dst, src->type(), lir_patch_none, info)); }
2120   void move(LIR_Opr src, LIR_Address* dst, CodeEmitInfo* info = NULL) { append(new LIR_Op1(lir_move, src, LIR_OprFact::address(dst), dst->type(), lir_patch_none, info)); }
2121   void move_wide(LIR_Address* src, LIR_Opr dst, CodeEmitInfo* info = NULL) {
2122     if (UseCompressedOops) {
2123       append(new LIR_Op1(lir_move, LIR_OprFact::address(src), dst, src->type(), lir_patch_none, info, lir_move_wide));
2124     } else {
2125       move(src, dst, info);
2126     }
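
Downstream of this header, LIR_List::on_spin_wait() is what a LIRGenerator would call once it recognizes the intrinsic. A rough sketch, assuming an intrinsic id named vmIntrinsics::_onSpinWait (the id and the exact placement of the case are not shown in this file):

// Sketch only: emitting the new op from LIRGenerator::do_Intrinsic().
// '__' is c1's usual shorthand for appending to the current LIR_List.
void LIRGenerator::do_Intrinsic(Intrinsic* x) {
  switch (x->id()) {
    case vmIntrinsics::_onSpinWait:
      __ on_spin_wait();     // appends new LIR_Op0(lir_on_spin_wait)
      break;
    // ... all other intrinsics handled exactly as before ...
    default:
      ShouldNotReachHere();
  }
}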