
src/hotspot/cpu/x86/macroAssembler_x86.hpp

rev 57232 : v2.00 -> v2.08 (CR8/v2.08/11-for-jdk14) patches combined into one; merge with jdk-14+25 snapshot; merge with jdk-14+26 snapshot.


 665 
 666   // Biased locking support
 667   // lock_reg and obj_reg must be loaded up with the appropriate values.
 668   // swap_reg must be rax, and is killed.
 669   // tmp_reg is optional. If it is supplied (i.e., != noreg) it will
 670   // be killed; if not supplied, push/pop will be used internally to
 671   // allocate a temporary (inefficient, avoid if possible).
 672   // Optional slow case is for implementations (interpreter and C1) which branch to
 673   // slow case directly. Leaves condition codes set for C2's Fast_Lock node.
 674   // Returns offset of first potentially-faulting instruction for null
 675   // check info (currently consumed only by C1). If
 676   // swap_reg_contains_mark is true then returns -1 as it is assumed
 677   // the calling code has already passed any potential faults.
 678   int biased_locking_enter(Register lock_reg, Register obj_reg,
 679                            Register swap_reg, Register tmp_reg,
 680                            bool swap_reg_contains_mark,
 681                            Label& done, Label* slow_case = NULL,
 682                            BiasedLockingCounters* counters = NULL);
 683   void biased_locking_exit (Register obj_reg, Register temp_reg, Label& done);
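
A minimal sketch of an interpreter-style call site for the two entry points above, assuming a MacroAssembler member function; the helper name and register choices are illustrative, not part of this patch:

    void MacroAssembler::sketch_lock_object(Register lock_reg) {
      const Register obj_reg  = rcx;  // object being locked
      const Register swap_reg = rax;  // must be rax; killed by the CAS
      const Register tmp_reg  = rbx;  // supplied so no push/pop temp is needed
      Label done, slow_case;
      if (UseBiasedLocking) {
        // swap_reg does not yet hold the mark word, so pass false; the
        // returned offset marks the first potentially-faulting instruction.
        biased_locking_enter(lock_reg, obj_reg, swap_reg, tmp_reg,
                             /*swap_reg_contains_mark*/ false,
                             done, &slow_case);
      }
      // ... stack-lock fast path here; slow_case calls into the runtime ...
      bind(done);
    }

biased_locking_exit plays the symmetric role on unlock: it tests the mark word and jumps to done if the object is still biased, so no CAS is needed on exit.
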
 684 #ifdef COMPILER2
 685   void inc_om_ref_count(Register obj_reg, Register om_reg, Register temp_reg, Label& done);
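
inc_om_ref_count at line 685 is the declaration this revision adds. Going by the Async Monitor Deletion patch series this webrev belongs to, it presumably increments an ObjectMonitor's ref_count so the monitor cannot be deflated out from under the inflated-lock paths in fast_lock/fast_unlock; the full description sits with the definition in macroAssembler_x86.cpp.
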
 686   // Code used by cmpFastLock and cmpFastUnlock mach instructions in .ad file.
 687   // See full description in macroAssembler_x86.cpp.
 688   void fast_lock(Register obj, Register box, Register tmp,
 689                  Register scr, Register cx1, Register cx2,
 690                  BiasedLockingCounters* counters,
 691                  RTMLockingCounters* rtm_counters,
 692                  RTMLockingCounters* stack_rtm_counters,
 693                  Metadata* method_data,
 694                  bool use_rtm, bool profile_rtm);
 695   void fast_unlock(Register obj, Register box, Register tmp, bool use_rtm);
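
As the comment above notes, these two are emitted from the cmpFastLock / cmpFastUnlock encodings in the .ad file. A paraphrase of the non-RTM ins_encode bodies, with the exact operand-to-register bindings and counter arguments being assumptions:

    // inside cmpFastLock's ins_encode %{ ... %} block:
    __ fast_lock($object$$Register, $box$$Register, $tmp$$Register,
                 $scr$$Register, noreg, noreg,   // cx1/cx2 only used with RTM
                 _counters, NULL, NULL, NULL,    // no RTM counters or MethodData
                 false, false);                  // use_rtm, profile_rtm

    // inside cmpFastUnlock's ins_encode %{ ... %} block:
    __ fast_unlock($object$$Register, $box$$Register, $tmp$$Register, false);

Both leave their result in the condition codes, which is why the matched nodes produce an rFlagsReg and the following branch simply tests it.
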
 696 #if INCLUDE_RTM_OPT
 697   void rtm_counters_update(Register abort_status, Register rtm_counters);
 698   void branch_on_random_using_rdtsc(Register tmp, Register scr, int count, Label& brLabel);
 699   void rtm_abort_ratio_calculation(Register tmp, Register rtm_counters_reg,
 700                                    RTMLockingCounters* rtm_counters,
 701                                    Metadata* method_data);
 702   void rtm_profiling(Register abort_status_Reg, Register rtm_counters_Reg,
 703                      RTMLockingCounters* rtm_counters, Metadata* method_data, bool profile_rtm);
 704   void rtm_retry_lock_on_abort(Register retry_count, Register abort_status, Label& retryLabel);
 705   void rtm_retry_lock_on_busy(Register retry_count, Register box, Register tmp, Register scr, Label& retryLabel);
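
The RTM helpers above implement abort-rate profiling for the rtm/stack_rtm counters. branch_on_random_using_rdtsc exists so a hot counter update can be sampled instead of taken on every lock; a minimal sketch of that idiom, assuming it runs inside a MacroAssembler member with tmp and scr free and RTMTotalCountIncrRate a power of two:

    Label L_noincrement;
    if (RTMTotalCountIncrRate > 1) {
      // Fall through only when RDTSC's low bits are 0 mod the rate, i.e.
      // roughly one execution in RTMTotalCountIncrRate; otherwise skip.
      branch_on_random_using_rdtsc(tmp, scr, RTMTotalCountIncrRate, L_noincrement);
    }
    // ... increment the RTM total-count counter here ...
    bind(L_noincrement);

rtm_retry_lock_on_abort and rtm_retry_lock_on_busy then consume the abort status: the former branches back to retryLabel while retries remain and the abort looks transient, the latter waits out a busy lock owner before retrying.
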

