< prev index next >

src/hotspot/cpu/aarch64/nativeInst_aarch64.hpp

Print this page
rev 48555 : 8196064: AArch64: Merging ld/st into ldp/stp in macro-assembler
Reviewed-by: duke


  // Presumably tests for a 32-bit LDR whose destination is the zero
  // register (wzr) — definition lives in the .cpp; confirm there.
  static bool is_ldrw_to_zr(address instr);
 115 
 116   static bool is_call_at(address instr) {
 117     const uint32_t insn = (*(uint32_t*)instr);
 118     return (insn >> 26) == 0b100101;
 119   }
 120 
  // True iff the instruction this object overlays is a BL (see is_call_at).
  bool is_call() {
    return is_call_at(addr_at(0));
  }
 124 
  // Heuristic: true if `instr` could be the start of a constant-pool
  // reference, i.e. an ADRP or a PC-relative LDR (literal).
  static bool maybe_cpool_ref(address instr) {
    return is_adrp_at(instr) || is_ldr_literal_at(instr);
  }
 128 
  // True iff this instruction is a DMB barrier: bits 31..12 and 7..0 match
  // the "DMB <option>" encoding.  Bits 11..8 (CRm, the barrier kind) are
  // left unchecked — any kind matches; NativeMembar::get_kind() reads them.
  bool is_Membar() {
    unsigned int insn = uint_at(0);
    return Instruction_aarch64::extract(insn, 31, 12) == 0b11010101000000110011 &&
      Instruction_aarch64::extract(insn, 7, 0) == 0b10111111;
  }







 134 };
 135 
 136 inline NativeInstruction* nativeInstruction_at(address address) {
 137   return (NativeInstruction*)address;
 138 }
 139 
 140 // The natural type of an AArch64 instruction is uint32_t
 141 inline NativeInstruction* nativeInstruction_at(uint32_t *address) {
 142   return (NativeInstruction*)address;
 143 }
 144 
// Forward declaration (defined once NativeCall is complete).
inline NativeCall* nativeCall_at(address address);
// The NativeCall is an abstraction for accessing/manipulating native
// call instructions (used to manipulate inline caches, primitive &
// DSO calls, etc.).
 149 
 150 class NativeCall: public NativeInstruction {
 151  public:
 152   enum Aarch64_specific_constants {
 153     instruction_size            =    4,


 515   uint32_t *i = (uint32_t *)addr;
 516   return i[0] == 0x58000048 && i[1] == 0xd61f0100;
 517 }
 518 
 519 inline NativeCallTrampolineStub* nativeCallTrampolineStub_at(address addr) {
 520   assert(is_NativeCallTrampolineStub_at(addr), "no call trampoline found");
 521   return (NativeCallTrampolineStub*)addr;
 522 }
 523 
// Overlay for a DMB instruction (see NativeInstruction::is_Membar()).
// Bits 11..8 are the CRm field, which encodes the barrier kind.
class NativeMembar : public NativeInstruction {
public:
  // Read the 4-bit barrier kind (CRm field).
  unsigned int get_kind() { return Instruction_aarch64::extract(uint_at(0), 11, 8); }
  // Patch the 4-bit barrier kind in place; the rest of the word is untouched.
  void set_kind(int order_kind) { Instruction_aarch64::patch(addr_at(0), 11, 8, order_kind); }
};
 529 
 530 inline NativeMembar *NativeMembar_at(address addr) {
 531   assert(nativeInstruction_at(addr)->is_Membar(), "no membar found");
 532   return (NativeMembar*)addr;
 533 }
 534 





















































 535 #endif // CPU_AARCH64_VM_NATIVEINST_AARCH64_HPP


  // Presumably tests for a 32-bit LDR whose destination is the zero
  // register (wzr) — definition lives in the .cpp; confirm there.
  static bool is_ldrw_to_zr(address instr);
 115 
 116   static bool is_call_at(address instr) {
 117     const uint32_t insn = (*(uint32_t*)instr);
 118     return (insn >> 26) == 0b100101;
 119   }
 120 
  // True iff the instruction this object overlays is a BL (see is_call_at).
  bool is_call() {
    return is_call_at(addr_at(0));
  }
 124 
  // Heuristic: true if `instr` could be the start of a constant-pool
  // reference, i.e. an ADRP or a PC-relative LDR (literal).
  static bool maybe_cpool_ref(address instr) {
    return is_adrp_at(instr) || is_ldr_literal_at(instr);
  }
 128 
  // True iff this instruction is a DMB barrier: bits 31..12 and 7..0 match
  // the "DMB <option>" encoding.  Bits 11..8 (CRm, the barrier kind) are
  // left unchecked — any kind matches; NativeMembar::get_kind() reads them.
  bool is_Membar() {
    unsigned int insn = uint_at(0);
    return Instruction_aarch64::extract(insn, 31, 12) == 0b11010101000000110011 &&
      Instruction_aarch64::extract(insn, 7, 0) == 0b10111111;
  }
 134 
  // Coarse filter for an integer load/store with an immediate offset:
  // bits 29..27 == 0b111 select the load/store group, bit 26 == 0 excludes
  // the SIMD&FP forms, and bit 25 == 0 plus bit 23 == 0 (high bit of opc)
  // keep only plain LDR/STR encodings.  Note this does not distinguish the
  // unscaled-imm9 and unsigned-imm12 variants — NativeLdSt's private
  // predicates do the precise decode.
  bool is_Imm_LdSt() {
    unsigned int insn = uint_at(0);
    return Instruction_aarch64::extract(insn, 29, 27) == 0b111 &&
      Instruction_aarch64::extract(insn, 23, 23) == 0b0 &&
      Instruction_aarch64::extract(insn, 26, 25) == 0b00;
  }
 141 };
 142 
 143 inline NativeInstruction* nativeInstruction_at(address address) {
 144   return (NativeInstruction*)address;
 145 }
 146 
 147 // The natural type of an AArch64 instruction is uint32_t
 148 inline NativeInstruction* nativeInstruction_at(uint32_t *address) {
 149   return (NativeInstruction*)address;
 150 }
 151 
// Forward declaration (defined once NativeCall is complete).
inline NativeCall* nativeCall_at(address address);
// The NativeCall is an abstraction for accessing/manipulating native
// call instructions (used to manipulate inline caches, primitive &
// DSO calls, etc.).
 156 
 157 class NativeCall: public NativeInstruction {
 158  public:
 159   enum Aarch64_specific_constants {
 160     instruction_size            =    4,


 522   uint32_t *i = (uint32_t *)addr;
 523   return i[0] == 0x58000048 && i[1] == 0xd61f0100;
 524 }
 525 
 526 inline NativeCallTrampolineStub* nativeCallTrampolineStub_at(address addr) {
 527   assert(is_NativeCallTrampolineStub_at(addr), "no call trampoline found");
 528   return (NativeCallTrampolineStub*)addr;
 529 }
 530 
// Overlay for a DMB instruction (see NativeInstruction::is_Membar()).
// Bits 11..8 are the CRm field, which encodes the barrier kind.
class NativeMembar : public NativeInstruction {
public:
  // Read the 4-bit barrier kind (CRm field).
  unsigned int get_kind() { return Instruction_aarch64::extract(uint_at(0), 11, 8); }
  // Patch the 4-bit barrier kind in place; the rest of the word is untouched.
  void set_kind(int order_kind) { Instruction_aarch64::patch(addr_at(0), 11, 8, order_kind); }
};
 536 
 537 inline NativeMembar *NativeMembar_at(address addr) {
 538   assert(nativeInstruction_at(addr)->is_Membar(), "no membar found");
 539   return (NativeMembar*)addr;
 540 }
 541 
// Overlay for a single integer load/store with an immediate offset (see
// NativeInstruction::is_Imm_LdSt()).  Decodes the two immediate addressing
// forms the macro-assembler merges into ldp/stp: unscaled 9-bit
// ("ldur/stur") and scaled unsigned 12-bit ("ldr/str").  Pre-/post-index
// forms are deliberately unsupported (offset() asserts on them).
class NativeLdSt : public NativeInstruction {
private:
  // size field (bits 31..30): log2 of the access size in bytes.
  int32_t size() { return Instruction_aarch64::extract(uint_at(0), 31, 30); }
  // Check whether instruction is with unscaled offset.
  // Bits 29..21 select LDUR (0b111000010, opc=01) or STUR (0b111000000,
  // opc=00); bits 11..10 == 00 complete the unscaled-imm9 encoding.
  bool is_ldst_ur() {
    return (Instruction_aarch64::extract(uint_at(0), 29, 21) == 0b111000010 ||
            Instruction_aarch64::extract(uint_at(0), 29, 21) == 0b111000000) &&
      Instruction_aarch64::extract(uint_at(0), 11, 10) == 0b00;
  }
  // Unsigned-imm12 form: bits 29..22 select LDR (0b11100101) or STR
  // (0b11100100).
  bool is_ldst_unsigned_offset() {
    return Instruction_aarch64::extract(uint_at(0), 29, 22) == 0b11100101 ||
      Instruction_aarch64::extract(uint_at(0), 29, 22) == 0b11100100;
  }
public:
  // Transferred register, Rt (bits 4..0); encoding 31 means zr here.
  Register target() {
    uint32_t r = Instruction_aarch64::extract(uint_at(0), 4, 0);
    return r == 0x1f ? zr : as_Register(r);
  }
  // Base address register, Rn (bits 9..5); encoding 31 means sp here.
  Register base() {
    uint32_t b = Instruction_aarch64::extract(uint_at(0), 9, 5);
    return b == 0x1f ? sp : as_Register(b);
  }
  // Byte offset from the base register: signed imm9 for the unscaled form,
  // imm12 scaled by the access size for the unsigned form.
  int64_t offset() {
    if (is_ldst_ur()) {
      return Instruction_aarch64::sextract(uint_at(0), 20, 12);
    } else if (is_ldst_unsigned_offset()) {
      return Instruction_aarch64::extract(uint_at(0), 21, 10) << size();
    } else {
      // others like: pre-index or post-index.
      ShouldNotReachHere();
      return 0;
    }
  }
  // Access size in bytes (1, 2, 4 or 8).
  size_t size_in_bytes() { return 1 << size(); }
  // True for the two supported forms, i.e. offset() will not assert.
  bool is_not_pre_post_index() { return (is_ldst_ur() || is_ldst_unsigned_offset()); }
  // opc (bits 23..22): 01 = load, 00 = store; anything else is not a
  // plain LDR/STR and trips the assert.
  bool is_load() {
    assert(Instruction_aarch64::extract(uint_at(0), 23, 22) == 0b01 ||
           Instruction_aarch64::extract(uint_at(0), 23, 22) == 0b00, "must be ldr or str");

    return Instruction_aarch64::extract(uint_at(0), 23, 22) == 0b01;
  }
  bool is_store() {
    assert(Instruction_aarch64::extract(uint_at(0), 23, 22) == 0b01 ||
           Instruction_aarch64::extract(uint_at(0), 23, 22) == 0b00, "must be ldr or str");

    return Instruction_aarch64::extract(uint_at(0), 23, 22) == 0b00;
  }
};
 590 
 591 inline NativeLdSt *NativeLdSt_at(address addr) {
 592   assert(nativeInstruction_at(addr)->is_Imm_LdSt(), "no immediate load/store found");
 593   return (NativeLdSt*)addr;
 594 }
 595 #endif // CPU_AARCH64_VM_NATIVEINST_AARCH64_HPP
< prev index next >