664
665 // Biased locking support
666 // lock_reg and obj_reg must be loaded up with the appropriate values.
667 // swap_reg must be rax, and is killed.
668 // tmp_reg is optional. If it is supplied (i.e., != noreg) it will
669 // be killed; if not supplied, push/pop will be used internally to
670 // allocate a temporary (inefficient, avoid if possible).
671 // Optional slow case is for implementations (interpreter and C1) which branch to
672 // slow case directly. Leaves condition codes set for C2's Fast_Lock node.
673 // Returns offset of first potentially-faulting instruction for null
674 // check info (currently consumed only by C1). If
675 // swap_reg_contains_mark is true then returns -1 as it is assumed
676 // the calling code has already passed any potential faults.
677 int biased_locking_enter(Register lock_reg, Register obj_reg,
678 Register swap_reg, Register tmp_reg,
679 bool swap_reg_contains_mark,
680 Label& done, Label* slow_case = NULL,
681 BiasedLockingCounters* counters = NULL);
// NOTE(review): presumably the revocation counterpart of biased_locking_enter;
// semantics of temp_reg clobbering and the branch to `done` are defined in
// macroAssembler_x86.cpp — confirm there before relying on them.
682 void biased_locking_exit (Register obj_reg, Register temp_reg, Label& done);
683 #ifdef COMPILER2
684 // Code used by cmpFastLock and cmpFastUnlock mach instructions in .ad file.
685 // See full description in macroAssembler_x86.cpp.
686 void fast_lock(Register obj, Register box, Register tmp,
687 Register scr, Register cx1, Register cx2,
688 BiasedLockingCounters* counters,
689 RTMLockingCounters* rtm_counters,
690 RTMLockingCounters* stack_rtm_counters,
691 Metadata* method_data,
692 bool use_rtm, bool profile_rtm);
693 void fast_unlock(Register obj, Register box, Register tmp, bool use_rtm);
694 #if INCLUDE_RTM_OPT
// RTM (Restricted Transactional Memory) lock-elision helpers; only compiled
// when INCLUDE_RTM_OPT is set. Implementations live in macroAssembler_x86.cpp.
695 void rtm_counters_update(Register abort_status, Register rtm_counters);
696 void branch_on_random_using_rdtsc(Register tmp, Register scr, int count, Label& brLabel);
697 void rtm_abort_ratio_calculation(Register tmp, Register rtm_counters_reg,
698 RTMLockingCounters* rtm_counters,
699 Metadata* method_data);
700 void rtm_profiling(Register abort_status_Reg, Register rtm_counters_Reg,
701 RTMLockingCounters* rtm_counters, Metadata* method_data, bool profile_rtm);
702 void rtm_retry_lock_on_abort(Register retry_count, Register abort_status, Label& retryLabel);
703 void rtm_retry_lock_on_busy(Register retry_count, Register box, Register tmp, Register scr, Label& retryLabel);
|
664
665 // Biased locking support
666 // lock_reg and obj_reg must be loaded up with the appropriate values.
667 // swap_reg must be rax, and is killed.
668 // tmp_reg is optional. If it is supplied (i.e., != noreg) it will
669 // be killed; if not supplied, push/pop will be used internally to
670 // allocate a temporary (inefficient, avoid if possible).
671 // Optional slow case is for implementations (interpreter and C1) which branch to
672 // slow case directly. Leaves condition codes set for C2's Fast_Lock node.
673 // Returns offset of first potentially-faulting instruction for null
674 // check info (currently consumed only by C1). If
675 // swap_reg_contains_mark is true then returns -1 as it is assumed
676 // the calling code has already passed any potential faults.
677 int biased_locking_enter(Register lock_reg, Register obj_reg,
678 Register swap_reg, Register tmp_reg,
679 bool swap_reg_contains_mark,
680 Label& done, Label* slow_case = NULL,
681 BiasedLockingCounters* counters = NULL);
// NOTE(review): presumably the revocation counterpart of biased_locking_enter;
// semantics of temp_reg clobbering and the branch to `done` are defined in
// macroAssembler_x86.cpp — confirm there before relying on them.
682 void biased_locking_exit (Register obj_reg, Register temp_reg, Label& done);
683 #ifdef COMPILER2
// NOTE(review): new declaration with no comment — presumably increments an
// ObjectMonitor's ref_count (om_reg) to keep the monitor from being deflated
// while it is being used, branching to `done` on some condition; temp_reg is
// likely clobbered. Confirm exact contract in macroAssembler_x86.cpp.
684 void inc_om_ref_count(Register obj_reg, Register om_reg, Register temp_reg, Label& done);
685 // Code used by cmpFastLock and cmpFastUnlock mach instructions in .ad file.
686 // See full description in macroAssembler_x86.cpp.
687 void fast_lock(Register obj, Register box, Register tmp,
688 Register scr, Register cx1, Register cx2,
689 BiasedLockingCounters* counters,
690 RTMLockingCounters* rtm_counters,
691 RTMLockingCounters* stack_rtm_counters,
692 Metadata* method_data,
693 bool use_rtm, bool profile_rtm);
694 void fast_unlock(Register obj, Register box, Register tmp, bool use_rtm);
695 #if INCLUDE_RTM_OPT
// RTM (Restricted Transactional Memory) lock-elision helpers; only compiled
// when INCLUDE_RTM_OPT is set. Implementations live in macroAssembler_x86.cpp.
696 void rtm_counters_update(Register abort_status, Register rtm_counters);
697 void branch_on_random_using_rdtsc(Register tmp, Register scr, int count, Label& brLabel);
698 void rtm_abort_ratio_calculation(Register tmp, Register rtm_counters_reg,
699 RTMLockingCounters* rtm_counters,
700 Metadata* method_data);
701 void rtm_profiling(Register abort_status_Reg, Register rtm_counters_Reg,
702 RTMLockingCounters* rtm_counters, Metadata* method_data, bool profile_rtm);
703 void rtm_retry_lock_on_abort(Register retry_count, Register abort_status, Label& retryLabel);
704 void rtm_retry_lock_on_busy(Register retry_count, Register box, Register tmp, Register scr, Label& retryLabel);
|