
src/cpu/aarch64/vm/macroAssembler_aarch64.hpp

rev 10243 : 8150394: aarch64: add support for 8.1 LSE CAS instructions
Reviewed-by: aph
Contributed-by: ananth.jasty@caviumnetworks.com, edward.nevill@linaro.org


Old:

  void cmpxchgw(Register oldv, Register newv, Register addr, Register tmp,
                  Label &suceed, Label *fail);

  void atomic_add(Register prev, RegisterOrConstant incr, Register addr);
  void atomic_addw(Register prev, RegisterOrConstant incr, Register addr);

  void atomic_xchg(Register prev, Register newv, Register addr);
  void atomic_xchgw(Register prev, Register newv, Register addr);
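
The atomic_* helpers hand back the previous memory contents in prev, i.e. atomic_add reads as a fetch-and-add and atomic_xchg as a swap. For reference, the same contracts in standalone C++ (an illustration, not HotSpot code; the fetch-and-op reading of prev is inferred from the parameter names, not stated in this hunk):

#include <atomic>
#include <cstdint>

// Same contract as atomic_xchg above: store newv, return the prior value.
uint64_t xchg_demo(std::atomic<uint64_t>& mem, uint64_t newv) {
  return mem.exchange(newv);
}

// Same contract as atomic_add above: add incr, return the value before the add.
uint64_t add_demo(std::atomic<uint64_t>& mem, uint64_t incr) {
  return mem.fetch_add(incr);
}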

  void orptr(Address adr, RegisterOrConstant src) {
    ldr(rscratch2, adr);
    if (src.is_register())
      orr(rscratch2, rscratch2, src.as_register());
    else
      orr(rscratch2, rscratch2, src.as_constant());
    str(rscratch2, adr);
  }
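
Unlike the atomic_* helpers, orptr is a plain load/or/store through rscratch2, with no atomicity guarantee. In standalone C++ terms (a sketch only):

#include <cstdint>

// Non-atomic read-modify-write, mirroring orptr's ldr/orr/str sequence.
void orptr_demo(uint64_t* adr, uint64_t src) {
  uint64_t tmp = *adr;   // ldr rscratch2, adr
  tmp |= src;            // orr rscratch2, rscratch2, src
  *adr = tmp;            // str rscratch2, adr
}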

  // A generic CAS; success or failure is in the EQ flag.
  template <typename T1, typename T2>
  void cmpxchg(Register addr, Register expected, Register new_val,
               T1 load_insn,
               void (MacroAssembler::*cmp_insn)(Register, Register),
               T2 store_insn,
               Register tmp = rscratch1) {
    Label retry_load, done;
    bind(retry_load);
    (this->*load_insn)(tmp, addr);
    (this->*cmp_insn)(tmp, expected);
    br(Assembler::NE, done);
    (this->*store_insn)(tmp, new_val, addr);
    cbnzw(tmp, retry_load);
    bind(done);
  }
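
This template spells out the classic load-exclusive/store-exclusive retry loop: load the current value, bail out on a compare mismatch, attempt the conditional store, and retry while the store-exclusive status written into tmp is non-zero. The EQ flag advertised in the comment is simply whatever the compare left behind. The same observable behavior in standalone, runnable C++ (an illustration; on pre-8.1 AArch64 a compiler expands compare_exchange into essentially this ldaxr/cmp/stlxr/cbnz shape):

#include <atomic>
#include <cstdint>
#include <cstdio>

int main() {
  std::atomic<uint64_t> word{42};
  uint64_t expected = 42;
  // Success comes back as a bool here; the assembler version above
  // reports it in the EQ flag instead.
  bool ok = word.compare_exchange_strong(expected, 99);
  std::printf("swapped=%d value=%llu\n", ok, (unsigned long long)word.load());
  return 0;
}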

  // Calls

  address trampoline_call(Address entry, CodeBuffer *cbuf = NULL);

  static bool far_branches() {
    return ReservedCodeCacheSize > branch_range;
  }

  // Jumps that can reach anywhere in the code cache.
  // Trashes tmp.
  void far_call(Address entry, CodeBuffer *cbuf = NULL, Register tmp = rscratch1);
  void far_jump(Address entry, CodeBuffer *cbuf = NULL, Register tmp = rscratch1);

  static int far_branch_size() {
    if (far_branches()) {
      return 3 * 4;  // adrp, add, br
    } else {
      return 4;
    }
  }

New:

  void cmpxchgw(Register oldv, Register newv, Register addr, Register tmp,
                  Label &suceed, Label *fail);

  void atomic_add(Register prev, RegisterOrConstant incr, Register addr);
  void atomic_addw(Register prev, RegisterOrConstant incr, Register addr);

  void atomic_xchg(Register prev, Register newv, Register addr);
  void atomic_xchgw(Register prev, Register newv, Register addr);

  void orptr(Address adr, RegisterOrConstant src) {
    ldr(rscratch2, adr);
    if (src.is_register())
      orr(rscratch2, rscratch2, src.as_register());
    else
      orr(rscratch2, rscratch2, src.as_constant());
    str(rscratch2, adr);
  }

  // A generic CAS; success or failure is in the EQ flag.
  void cmpxchg(operand_size sz,
               Register oldv, Register newv, Register addr,
               bool acquire = true, bool release = true,
               Register tmp = rscratch1);
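
The member-pointer template is gone: callers now state an operand size and the acquire/release ordering they need, and the out-of-line implementation (in the .cpp, not part of this hunk) is free to emit either the exclusive-load/store loop or a single ARMv8.1 LSE CAS instruction. A hedged sketch of a call site, assuming an xword operand_size value and the usual __ shorthand for the active MacroAssembler, neither of which appears in this hunk:

// Hypothetical call site: full-barrier 64-bit CAS of *addr, oldv -> newv.
// With acquire and release both true, an 8.1 core can satisfy this with a
// single "casal"; older cores fall back to a ldaxr/stlxr retry loop.
// Either way, success is reported in the EQ flag, per the comment above.
__ cmpxchg(Assembler::xword, oldv, newv, addr,
           /*acquire*/ true, /*release*/ true);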

  // Calls

  address trampoline_call(Address entry, CodeBuffer *cbuf = NULL);

  static bool far_branches() {
    return ReservedCodeCacheSize > branch_range;
  }

  // Jumps that can reach anywhere in the code cache.
  // Trashes tmp.
  void far_call(Address entry, CodeBuffer *cbuf = NULL, Register tmp = rscratch1);
  void far_jump(Address entry, CodeBuffer *cbuf = NULL, Register tmp = rscratch1);

  static int far_branch_size() {
    if (far_branches()) {
      return 3 * 4;  // adrp, add, br
    } else {
      return 4;
    }
  }
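
The 3 * 4 in far_branch_size() is the byte count of the adrp/add/br sequence named in the comment; the short form is a single 4-byte direct branch, whose +/-128 MB reach is what the far_branches() test guards (on this port branch_range is that direct-branch reach, an assumption not shown in this hunk). A minimal standalone check of the same arithmetic (illustrative only):

#include <cassert>

// Mirrors far_branch_size() above: three 4-byte instructions (adrp to form
// the target page, add for the low 12 bits, br to jump) when the code cache
// outranges a direct branch, otherwise one 4-byte b/bl.
static int far_branch_size(bool far_branches) {
  return far_branches ? 3 * 4 : 4;
}

int main() {
  assert(far_branch_size(true)  == 12);
  assert(far_branch_size(false) == 4);
  return 0;
}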

