< prev index next >

src/cpu/aarch64/vm/macroAssembler_aarch64.hpp

Print this page
rev 9428 : 8144028: Use AArch64 bit-test instructions in C2
Reviewed-by: kvn


 // movw: 32-bit register-to-register move (Rd := Rn).
 // Register number 31 encodes as sp in ADD (immediate) but as zr in
 // ORR (shifted register), so a move involving sp must use the
 // add-with-zero form; everything else uses the canonical orr-with-zr
 // idiom.
 198   inline void movw(Register Rd, Register Rn) {
 199     if (Rd == sp || Rn == sp) {
 200       addw(Rd, Rn, 0U);
 201     } else {
 202       orrw(Rd, zr, Rn);
 203     }
 204   }
 // mov: 64-bit register-to-register move (Rd := Rn).
 // NOTE(review): r31_sp is rejected up front — presumably because the
 // raw encoding-31 register is ambiguous between sp and zr; confirm
 // against register_aarch64 definitions.  A self-move emits nothing.
 // As in movw, sp forces the ADD-immediate form because ORR reads
 // register 31 as zr rather than sp.
 205   inline void mov(Register Rd, Register Rn) {
 206     assert(Rd != r31_sp && Rn != r31_sp, "should be");
 207     if (Rd == Rn) {
 // Rd == Rn: nothing to emit.
 208     } else if (Rd == sp || Rn == sp) {
 209       add(Rd, Rn, 0U);
 210     } else {
 211       orr(Rd, zr, Rn);
 212     }
 213   }
 214 
 // Move an immediate into a register via ORR with zr (moviw = 32-bit,
 // movi = 64-bit).  imm must be encodable as an A64 logical ("bitmask")
 // immediate for the corresponding operand size.
 215   inline void moviw(Register Rd, unsigned imm) { orrw(Rd, zr, imm); }
 216   inline void movi(Register Rd, unsigned imm) { orr(Rd, zr, imm); }
 217 
 // TST: set the condition flags from Rd & imm, discarding the result
 // (ANDS with zr as the destination).  imm is a logical immediate.
 // NOTE(review): the 64-bit tst takes `unsigned`, so bitmask patterns
 // wider than 32 bits cannot be expressed; the updated revision later
 // in this listing widens the parameter to uint64_t.
 218   inline void tstw(Register Rd, unsigned imm) { andsw(zr, Rd, imm); }
 219   inline void tst(Register Rd, unsigned imm) { ands(zr, Rd, imm); }



 220 
 // BFI (bitfield insert): copy the low `width` bits of Rn into Rd
 // starting at bit `lsb`, leaving Rd's other bits unchanged.
 // Standard BFM alias: immr = (-lsb) mod regsize, imms = width - 1.
 221   inline void bfiw(Register Rd, Register Rn, unsigned lsb, unsigned width) {
 222     bfmw(Rd, Rn, ((32 - lsb) & 31), (width - 1));
 223   }
 224   inline void bfi(Register Rd, Register Rn, unsigned lsb, unsigned width) {
 225     bfm(Rd, Rn, ((64 - lsb) & 63), (width - 1));
 226   }
 227 
 // BFXIL (bitfield extract and insert low): copy bits [lsb, lsb+width)
 // of Rn into the low `width` bits of Rd, leaving Rd's upper bits
 // unchanged.  BFM alias with immr = lsb, imms = lsb + width - 1.
 228   inline void bfxilw(Register Rd, Register Rn, unsigned lsb, unsigned width) {
 229     bfmw(Rd, Rn, lsb, (lsb + width - 1));
 230   }
 231   inline void bfxil(Register Rd, Register Rn, unsigned lsb, unsigned width) {
 232     bfm(Rd, Rn, lsb , (lsb + width - 1));
 233   }
 234 
 // SBFIZ, 32-bit (signed bitfield insert in zeros): place the low
 // `width` bits of Rn at bit `lsb` of Rd, sign-extend above, zero
 // below.  SBFM alias: immr = (-lsb) mod 32, imms = width - 1.
 235   inline void sbfizw(Register Rd, Register Rn, unsigned lsb, unsigned width) {
 236     sbfmw(Rd, Rn, ((32 - lsb) & 31), (width - 1));
 237   }
 238   inline void sbfiz(Register Rd, Register Rn, unsigned lsb, unsigned width) {
 239     sbfm(Rd, Rn, ((64 - lsb) & 63), (width - 1));




 // movw: 32-bit register-to-register move (Rd := Rn).
 // Register number 31 encodes as sp in ADD (immediate) but as zr in
 // ORR (shifted register), so a move involving sp must use the
 // add-with-zero form; everything else uses the canonical orr-with-zr
 // idiom.
 198   inline void movw(Register Rd, Register Rn) {
 199     if (Rd == sp || Rn == sp) {
 200       addw(Rd, Rn, 0U);
 201     } else {
 202       orrw(Rd, zr, Rn);
 203     }
 204   }
 // mov: 64-bit register-to-register move (Rd := Rn).
 // NOTE(review): r31_sp is rejected up front — presumably because the
 // raw encoding-31 register is ambiguous between sp and zr; confirm
 // against register_aarch64 definitions.  A self-move emits nothing.
 // As in movw, sp forces the ADD-immediate form because ORR reads
 // register 31 as zr rather than sp.
 205   inline void mov(Register Rd, Register Rn) {
 206     assert(Rd != r31_sp && Rn != r31_sp, "should be");
 207     if (Rd == Rn) {
 // Rd == Rn: nothing to emit.
 208     } else if (Rd == sp || Rn == sp) {
 209       add(Rd, Rn, 0U);
 210     } else {
 211       orr(Rd, zr, Rn);
 212     }
 213   }
 214 
 // Move an immediate into a register via ORR with zr (moviw = 32-bit,
 // movi = 64-bit).  imm must be encodable as an A64 logical ("bitmask")
 // immediate for the corresponding operand size.
 215   inline void moviw(Register Rd, unsigned imm) { orrw(Rd, zr, imm); }
 216   inline void movi(Register Rd, unsigned imm) { orr(Rd, zr, imm); }
 217 
 // TST: set the condition flags from Rd AND the second operand,
 // discarding the result (ANDS with zr as the destination).
 // Register-operand and immediate-operand overloads.  The immediate
 // forms take uint64_t so that full 64-bit logical-immediate bitmask
 // patterns can be tested (changed in this rev for JDK-8144028).
 218   inline void tstw(Register Rd, Register Rn) { andsw(zr, Rd, Rn); }
 219   inline void tst(Register Rd, Register Rn) { ands(zr, Rd, Rn); }
 220 
 221   inline void tstw(Register Rd, uint64_t imm) { andsw(zr, Rd, imm); }
 222   inline void tst(Register Rd, uint64_t imm) { ands(zr, Rd, imm); }
 223 
 // BFI (bitfield insert): copy the low `width` bits of Rn into Rd
 // starting at bit `lsb`, leaving Rd's other bits unchanged.
 // Standard BFM alias: immr = (-lsb) mod regsize, imms = width - 1.
 224   inline void bfiw(Register Rd, Register Rn, unsigned lsb, unsigned width) {
 225     bfmw(Rd, Rn, ((32 - lsb) & 31), (width - 1));
 226   }
 227   inline void bfi(Register Rd, Register Rn, unsigned lsb, unsigned width) {
 228     bfm(Rd, Rn, ((64 - lsb) & 63), (width - 1));
 229   }
 230 
 // BFXIL (bitfield extract and insert low): copy bits [lsb, lsb+width)
 // of Rn into the low `width` bits of Rd, leaving Rd's upper bits
 // unchanged.  BFM alias with immr = lsb, imms = lsb + width - 1.
 231   inline void bfxilw(Register Rd, Register Rn, unsigned lsb, unsigned width) {
 232     bfmw(Rd, Rn, lsb, (lsb + width - 1));
 233   }
 234   inline void bfxil(Register Rd, Register Rn, unsigned lsb, unsigned width) {
 235     bfm(Rd, Rn, lsb , (lsb + width - 1));
 236   }
 237 
 // SBFIZ, 32-bit (signed bitfield insert in zeros): place the low
 // `width` bits of Rn at bit `lsb` of Rd, sign-extend above, zero
 // below.  SBFM alias: immr = (-lsb) mod 32, imms = width - 1.
 238   inline void sbfizw(Register Rd, Register Rn, unsigned lsb, unsigned width) {
 239     sbfmw(Rd, Rn, ((32 - lsb) & 31), (width - 1));
 240   }
 241   inline void sbfiz(Register Rd, Register Rn, unsigned lsb, unsigned width) {
 242     sbfm(Rd, Rn, ((64 - lsb) & 63), (width - 1));


< prev index next >