
src/hotspot/cpu/x86/macroAssembler_x86.hpp

rev 58110 : v2.09a with 8235795, 8235931 and 8236035 extracted; rebased to jdk-14+28; merge with 8236035.patch.cr1; merge with 8235795.patch.cr1; merge with 8236035.patch.cr2; merge with 8235795.patch.cr2; merge with 8235795.patch.cr3.
rev 58111 : See CR9-to-CR10-changes; merge with jdk-15+11.


 663 
 664   // Biased locking support
 665   // lock_reg and obj_reg must be loaded up with the appropriate values.
 666   // swap_reg must be rax, and is killed.
 667   // tmp_reg is optional. If it is supplied (i.e., != noreg) it will
 668   // be killed; if not supplied, push/pop will be used internally to
 669   // allocate a temporary (inefficient, avoid if possible).
 670   // Optional slow case is for implementations (interpreter and C1) which branch to
 671   // slow case directly. Leaves condition codes set for C2's Fast_Lock node.
 672   // Returns offset of first potentially-faulting instruction for null
 673   // check info (currently consumed only by C1). If
 674   // swap_reg_contains_mark is true then returns -1 as it is assumed
 675   // the calling code has already passed any potential faults.
 676   int biased_locking_enter(Register lock_reg, Register obj_reg,
 677                            Register swap_reg, Register tmp_reg,
 678                            bool swap_reg_contains_mark,
 679                            Label& done, Label* slow_case = NULL,
 680                            BiasedLockingCounters* counters = NULL);
 681   void biased_locking_exit (Register obj_reg, Register temp_reg, Label& done);
 682 #ifdef COMPILER2
 683   void inc_om_ref_count(Register obj_reg, Register om_reg, Register temp_reg, Label& done);
 684   // Code used by cmpFastLock and cmpFastUnlock mach instructions in .ad file.
 685   // See full description in macroAssembler_x86.cpp.
 686   void fast_lock(Register obj, Register box, Register tmp,
 687                  Register scr, Register cx1, Register cx2,
 688                  BiasedLockingCounters* counters,
 689                  RTMLockingCounters* rtm_counters,
 690                  RTMLockingCounters* stack_rtm_counters,
 691                  Metadata* method_data,
 692                  bool use_rtm, bool profile_rtm);
 693   void fast_unlock(Register obj, Register box, Register tmp, bool use_rtm);
 694 #if INCLUDE_RTM_OPT
 695   void rtm_counters_update(Register abort_status, Register rtm_counters);
 696   void branch_on_random_using_rdtsc(Register tmp, Register scr, int count, Label& brLabel);
 697   void rtm_abort_ratio_calculation(Register tmp, Register rtm_counters_reg,
 698                                    RTMLockingCounters* rtm_counters,
 699                                    Metadata* method_data);
 700   void rtm_profiling(Register abort_status_Reg, Register rtm_counters_Reg,
 701                      RTMLockingCounters* rtm_counters, Metadata* method_data, bool profile_rtm);
 702   void rtm_retry_lock_on_abort(Register retry_count, Register abort_status, Label& retryLabel);
 703   void rtm_retry_lock_on_busy(Register retry_count, Register box, Register tmp, Register scr, Label& retryLabel);
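
To make the register contract in the comment block above concrete, here is a minimal, hypothetical caller sketch, modeled loosely on the interpreter's lock-object path and assuming the usual macroAssembler_x86.hpp context. The function name, register assignments and surrounding control flow are illustrative assumptions; only the biased_locking_enter() call itself follows the declaration above.

  // Illustrative sketch only -- not JDK source.
  static void lock_object_sketch(MacroAssembler* masm,
                                 Register lock_reg,   // BasicLock in the caller's frame
                                 Register obj_reg) {  // object being locked
    const Register swap_reg = rax;   // must be rax; killed by biased_locking_enter
    const Register tmp_reg  = rbx;   // optional temp; supplying it avoids the internal push/pop
    Label done, slow_case;

    if (UseBiasedLocking) {
      // The return value is the offset of the first potentially-faulting
      // instruction (used by C1 for implicit null-check info); it would be
      // -1 if swap_reg_contains_mark were passed as true.
      masm->biased_locking_enter(lock_reg, obj_reg, swap_reg, tmp_reg,
                                 /*swap_reg_contains_mark*/ false,
                                 done, &slow_case);
    }

    // ... ordinary stack-lock CAS on the mark word goes here ...

    masm->bind(slow_case);
    // ... branch/call into the runtime monitorenter path ...
    masm->bind(done);
  }

Supplying tmp_reg up front (rather than noreg) avoids the internal push/pop that the comment warns is inefficient.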




 663 
 664   // Biased locking support
 665   // lock_reg and obj_reg must be loaded up with the appropriate values.
 666   // swap_reg must be rax, and is killed.
 667   // tmp_reg is optional. If it is supplied (i.e., != noreg) it will
 668   // be killed; if not supplied, push/pop will be used internally to
 669   // allocate a temporary (inefficient, avoid if possible).
 670   // Optional slow case is for implementations (interpreter and C1) which branch to
 671   // slow case directly. Leaves condition codes set for C2's Fast_Lock node.
 672   // Returns offset of first potentially-faulting instruction for null
 673   // check info (currently consumed only by C1). If
 674   // swap_reg_contains_mark is true then returns -1 as it is assumed
 675   // the calling code has already passed any potential faults.
 676   int biased_locking_enter(Register lock_reg, Register obj_reg,
 677                            Register swap_reg, Register tmp_reg,
 678                            bool swap_reg_contains_mark,
 679                            Label& done, Label* slow_case = NULL,
 680                            BiasedLockingCounters* counters = NULL);
 681   void biased_locking_exit (Register obj_reg, Register temp_reg, Label& done);
 682 #ifdef COMPILER2

 683   // Code used by cmpFastLock and cmpFastUnlock mach instructions in .ad file.
 684   // See full description in macroAssembler_x86.cpp.
 685   void fast_lock(Register obj, Register box, Register tmp,
 686                  Register scr, Register cx1, Register cx2,
 687                  BiasedLockingCounters* counters,
 688                  RTMLockingCounters* rtm_counters,
 689                  RTMLockingCounters* stack_rtm_counters,
 690                  Metadata* method_data,
 691                  bool use_rtm, bool profile_rtm);
 692   void fast_unlock(Register obj, Register box, Register tmp, bool use_rtm);
 693 #if INCLUDE_RTM_OPT
 694   void rtm_counters_update(Register abort_status, Register rtm_counters);
 695   void branch_on_random_using_rdtsc(Register tmp, Register scr, int count, Label& brLabel);
 696   void rtm_abort_ratio_calculation(Register tmp, Register rtm_counters_reg,
 697                                    RTMLockingCounters* rtm_counters,
 698                                    Metadata* method_data);
 699   void rtm_profiling(Register abort_status_Reg, Register rtm_counters_Reg,
 700                      RTMLockingCounters* rtm_counters, Metadata* method_data, bool profile_rtm);
 701   void rtm_retry_lock_on_abort(Register retry_count, Register abort_status, Label& retryLabel);
 702   void rtm_retry_lock_on_busy(Register retry_count, Register box, Register tmp, Register scr, Label& retryLabel);
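
For the C2 path, the cmpFastLock and cmpFastUnlock mach instructions referenced in the comment above ultimately expand into calls of fast_lock() and fast_unlock(). The sketch below shows a plausible non-RTM expansion; the wrapper functions and the NULL/noreg argument choices are assumptions made for brevity, not the actual .ad encodings.

  // Illustrative sketch only -- roughly what a non-RTM cmpFastLock /
  // cmpFastUnlock encoding boils down to.
  static void emit_fast_lock_sketch(MacroAssembler* masm,
                                    Register obj, Register box,
                                    Register tmp, Register scr) {
    masm->fast_lock(obj, box, tmp, scr,
                    /*cx1*/ noreg, /*cx2*/ noreg,          // scratch regs only needed for RTM
                    /*counters*/ NULL,                     // optional BiasedLockingCounters*
                    /*rtm_counters*/ NULL,
                    /*stack_rtm_counters*/ NULL,
                    /*method_data*/ NULL,
                    /*use_rtm*/ false, /*profile_rtm*/ false);
    // The outcome is communicated to the matching branch through the
    // condition codes; see the full description in macroAssembler_x86.cpp.
  }

  static void emit_fast_unlock_sketch(MacroAssembler* masm,
                                      Register obj, Register box, Register tmp) {
    masm->fast_unlock(obj, box, tmp, /*use_rtm*/ false);
  }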

