
src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp

rev 53399 : Redo: Avoid stub when calling to write-barrier from C2, remove all related code


 439 private:
 440 
 441   void mov_immediate64(Register dst, u_int64_t imm64);
 442   void mov_immediate32(Register dst, u_int32_t imm32);
 443 
 444   int push(unsigned int bitset, Register stack);
 445   int pop(unsigned int bitset, Register stack);
 446 
 447   void mov(Register dst, Address a);
 448 
 449 public:
 450   void push(RegSet regs, Register stack) { if (regs.bits()) push(regs.bits(), stack); }
 451   void pop(RegSet regs, Register stack) { if (regs.bits()) pop(regs.bits(), stack); }
 452 
 453   // Push and pop everything that might be clobbered by a native
 454   // runtime call except rscratch1 and rscratch2.  (They are always
 455   // scratch, so we don't have to protect them.)  Only save the lower
 456   // 64 bits of each vector register.
 457   void push_call_clobbered_registers();
 458   void pop_call_clobbered_registers();
 459   void push_call_clobbered_fp_registers();
 460   void pop_call_clobbered_fp_registers();
 461 
 462   // now mov instructions for loading absolute addresses and 32 or
 463   // 64 bit integers
 464 
 465   inline void mov(Register dst, address addr)
 466   {
 467     mov_immediate64(dst, (u_int64_t)addr);
 468   }
 469 
 470   inline void mov(Register dst, u_int64_t imm64)
 471   {
 472     mov_immediate64(dst, imm64);
 473   }
 474 
 475   inline void movw(Register dst, u_int32_t imm32)
 476   {
 477     mov_immediate32(dst, imm32);
 478   }
 479 
 480   inline void mov(Register dst, long l)
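A quick note on the RegSet overloads of push/pop shown above: they spill the registers named in the set to the given stack register and restore them again, and compile away entirely when the set is empty. A minimal usage sketch, assuming the usual "__" macro-assembler shorthand and the RegSet::of helpers from the aarch64 port (the registers chosen here are arbitrary):

  RegSet saved = RegSet::of(r0, r1) + RegSet::of(r2);
  __ push(saved, sp);   // spill r0-r2 before they get clobbered
  // ... code that clobbers r0, r1 and r2 ...
  __ pop(saved, sp);    // restore them in matching order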




 439 private:
 440 
 441   void mov_immediate64(Register dst, u_int64_t imm64);
 442   void mov_immediate32(Register dst, u_int32_t imm32);
 443 
 444   int push(unsigned int bitset, Register stack);
 445   int pop(unsigned int bitset, Register stack);
 446 
 447   void mov(Register dst, Address a);
 448 
 449 public:
 450   void push(RegSet regs, Register stack) { if (regs.bits()) push(regs.bits(), stack); }
 451   void pop(RegSet regs, Register stack) { if (regs.bits()) pop(regs.bits(), stack); }
 452 
 453   // Push and pop everything that might be clobbered by a native
 454   // runtime call except rscratch1 and rscratch2.  (They are always
 455   // scratch, so we don't have to protect them.)  Only save the lower
 456   // 64 bits of each vector register.
 457   void push_call_clobbered_registers();
 458   void pop_call_clobbered_registers();


 459 
 460   // now mov instructions for loading absolute addresses and 32 or
 461   // 64 bit integers
 462 
 463   inline void mov(Register dst, address addr)
 464   {
 465     mov_immediate64(dst, (u_int64_t)addr);
 466   }
 467 
 468   inline void mov(Register dst, u_int64_t imm64)
 469   {
 470     mov_immediate64(dst, imm64);
 471   }
 472 
 473   inline void movw(Register dst, u_int32_t imm32)
 474   {
 475     mov_immediate32(dst, imm32);
 476   }
 477 
 478   inline void mov(Register dst, long l)


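The mov overloads at the tail of both versions all funnel into mov_immediate64/mov_immediate32, which materialize the constant in the register (typically as a movz/movk sequence). Usage is simply as below; the constants are illustrative and "some_table" is a made-up symbol:

  __ mov(rscratch1, (u_int64_t)0x123456789abcdef0ULL);  // arbitrary 64-bit immediate
  __ movw(r2, 0x80000001u);                             // 32-bit immediate
  __ mov(r3, (address)some_table);                      // absolute address, cast to u_int64_t internally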