< prev index next >

src/cpu/x86/vm/nativeInst_x86.hpp

Print this page




  43 // - - NativeGeneralJump
  44 // - - NativeReturn
  45 // - - NativeReturnX (return with argument)
  46 // - - NativePushConst
  47 // - - NativeTstRegMem
  48 
  49 // The base class for different kinds of native instruction abstractions.
  50 // Provides the primitive operations to manipulate code relative to this.
  51 
  52 class NativeInstruction VALUE_OBJ_CLASS_SPEC {
  53   friend class Relocation;
  54 
  55  public:
  56   enum Intel_specific_constants {
  57     nop_instruction_code        = 0x90,
  58     nop_instruction_size        =    1
  59   };
  60 
  61   bool is_nop()                        { return ubyte_at(0) == nop_instruction_code; }
  62   inline bool is_call();

  63   inline bool is_illegal();
  64   inline bool is_return();
  65   inline bool is_jump();
  66   inline bool is_cond_jump();
  67   inline bool is_safepoint_poll();
  68   inline bool is_mov_literal64();
  69 
  70  protected:
  71   address addr_at(int offset) const    { return address(this) + offset; }
  72 
  73   s_char sbyte_at(int offset) const    { return *(s_char*) addr_at(offset); }
  74   u_char ubyte_at(int offset) const    { return *(u_char*) addr_at(offset); }
  75 
  76   jint int_at(int offset) const         { return *(jint*) addr_at(offset); }
  77 
  78   intptr_t ptr_at(int offset) const    { return *(intptr_t*) addr_at(offset); }
  79 
  80   oop  oop_at (int offset) const       { return *(oop*) addr_at(offset); }
  81 
  82 


 163 
 164   static void replace_mt_safe(address instr_addr, address code_buffer);
 165 };
 166 
      // Wrap the call instruction whose FIRST byte is at 'address' as a
      // NativeCall*.  The parameter deliberately reuses the 'address' typedef
      // name (existing HotSpot idiom in this file).  In debug builds,
      // verify() sanity-checks the instruction encoding before the wrapper
      // is handed back.  NOTE(review): NativeCall itself is elided from this
      // hunk; instruction_offset is presumably 0 -- confirm in full header.
 167 inline NativeCall* nativeCall_at(address address) {
 168   NativeCall* call = (NativeCall*)(address - NativeCall::instruction_offset);
 169 #ifdef ASSERT
 170   call->verify();
 171 #endif
 172   return call;
 173 }
 174 
      // Wrap the call instruction that ENDS at 'return_address' -- i.e. the
      // call whose pushed return address equals 'return_address' -- as a
      // NativeCall*.  Debug builds verify the encoding.
 175 inline NativeCall* nativeCall_before(address return_address) {
 176   NativeCall* call = (NativeCall*)(return_address - NativeCall::return_address_offset);
 177 #ifdef ASSERT
 178   call->verify();
 179 #endif
 180   return call;
 181 }
 182 


















 183 // An interface for accessing/manipulating native mov reg, imm32 instructions.
 184 // (used to manipulate inlined 32bit data dll calls, etc.)
 185 class NativeMovConstReg: public NativeInstruction {
 186 #ifdef AMD64
 187   static const bool has_rex = true;
 188   static const int rex_size = 1;
 189 #else
 190   static const bool has_rex = false;
 191   static const int rex_size = 0;
 192 #endif // AMD64
 193  public:
 194   enum Intel_specific_constants {
 195     instruction_code            = 0xB8,
 196     instruction_size            =    1 + rex_size + wordSize,
 197     instruction_offset          =    0,
 198     data_offset                 =    1 + rex_size,
 199     next_instruction_offset     =    instruction_size,
 200     register_mask               = 0x07
 201   };
 202 


 502     instruction_size            =    2,
 503     instruction_offset          =    0,
 504     next_instruction_offset     =    2
 505   };
 506 };
 507 
 508 // Simple test vs memory
      // Encoding constants used to recognize a TEST instruction with a memory
      // operand (primary opcode byte 0x85).  On AMD64 an optional REX prefix
      // may precede the opcode; the rex_prefix mask/value pair below detects
      // it by its high nibble (Assembler::REX is presumably 0x40 -- confirm
      // against assembler_x86.hpp, which is not visible in this hunk).
 509 class NativeTstRegMem: public NativeInstruction {
 510  public:
 511   enum Intel_specific_constants {
 512     instruction_rex_prefix_mask = 0xF0,
 513     instruction_rex_prefix      = Assembler::REX,
      // TEST r/m32, r32 primary opcode.
 514     instruction_code_memXregl   = 0x85,
 515     modrm_mask                  = 0x38, // select reg from the ModRM byte
 516     modrm_reg                   = 0x00  // rax
 517   };
 518 };
 519 
      // First-byte (or first-word) opcode tests used to classify the
      // instruction at 'this'.  Each compares raw code bytes against the
      // Intel_specific_constants of the corresponding Native* class.
      // is_illegal() compares only the low 16 bits of the first word.
 520 inline bool NativeInstruction::is_illegal()      { return (short)int_at(0) == (short)NativeIllegalInstruction::instruction_code; }
 521 inline bool NativeInstruction::is_call()         { return ubyte_at(0) == NativeCall::instruction_code; }



      // Return: plain return opcode or return-with-immediate opcode.
 522 inline bool NativeInstruction::is_return()       { return ubyte_at(0) == NativeReturn::instruction_code ||
 523                                                           ubyte_at(0) == NativeReturnX::instruction_code; }
      // Jump: near jump opcode or the 0xEB short-jump opcode.
 524 inline bool NativeInstruction::is_jump()         { return ubyte_at(0) == NativeJump::instruction_code ||
 525                                                           ubyte_at(0) == 0xEB; /* short jump */ }
      // Conditional jump: two-byte 0x0F 0x8x form (0x800F when the first two
      // code bytes are read little-endian via int_at(0)) or one-byte 0x7x.
 526 inline bool NativeInstruction::is_cond_jump()    { return (int_at(0) & 0xF0FF) == 0x800F /* long jump */ ||
 527                                                           (ubyte_at(0) & 0xF0) == 0x70;  /* short jump */ }
      // Does the instruction at 'this' look like a safepoint polling read?
      // AMD64 distinguishes the "far" polling page (address does not fit in
      // a 32-bit displacement, reached through a base register) from the
      // near, RIP-relative form; 32-bit x86 always uses an absolute disp32.
 528 inline bool NativeInstruction::is_safepoint_poll() {
 529 #ifdef AMD64
 530   if (Assembler::is_polling_page_far()) {







 531     // two cases, depending on the choice of the base register in the address.
      // Case 1: REX-prefixed TEST (prefix, opcode, ModRM with reg==rax);
      // Case 2: un-prefixed TEST (opcode, ModRM with reg==rax).
      // NOTE(review): the second '&&' pair is not parenthesized inside the
      // '||'; precedence gives the intended grouping, but compilers warn
      // (-Wparentheses) -- worth adding explicit parens.
 532     if (((ubyte_at(0) & NativeTstRegMem::instruction_rex_prefix_mask) == NativeTstRegMem::instruction_rex_prefix &&
 533          ubyte_at(1) == NativeTstRegMem::instruction_code_memXregl &&
 534          (ubyte_at(2) & NativeTstRegMem::modrm_mask) == NativeTstRegMem::modrm_reg) ||
 535         ubyte_at(0) == NativeTstRegMem::instruction_code_memXregl &&
 536         (ubyte_at(1) & NativeTstRegMem::modrm_mask) == NativeTstRegMem::modrm_reg) {

 537       return true;
 538     } else {
 539       return false;
 540     }
 541   } else {
      // Near form: TEST with ModRM 0x05 (mod=00, reg=rax, r/m=101), i.e. a
      // RIP-relative disp32.  The faulting address is end-of-instruction
      // (addr_at(6): presumably 2 code bytes + 4 displacement bytes --
      // confirm against emitter) plus the displacement read at offset 2.
 542     if (ubyte_at(0) == NativeTstRegMem::instruction_code_memXregl &&
 543         ubyte_at(1) == 0x05) { // 00 rax 101
 544       address fault = addr_at(6) + int_at(2);
 545       return os::is_poll_address(fault);
 546     } else {
 547       return false;
 548     }
 549   }
 550 #else
      // 32-bit: accept either a mov-from-memory or a TEST, with ModRM
      // selecting an absolute disp32, whose target is the polling page.
 551   return ( ubyte_at(0) == NativeMovRegMem::instruction_code_mem2reg ||
 552            ubyte_at(0) == NativeTstRegMem::instruction_code_memXregl ) &&
 553            (ubyte_at(1)&0xC7) == 0x05 && /* Mod R/M == disp32 */
 554            (os::is_poll_address((address)int_at(2)));
 555 #endif // AMD64
 556 }
 557 
      // Does the instruction at 'this' look like 'mov reg, imm64'?
      // AMD64 only: a REX.W (or REX.WB) prefix followed by opcode 0xB8+reg;
      // the low three register-selector bits of the opcode byte are masked
      // off via NativeMovConstReg::register_mask before comparing.
 558 inline bool NativeInstruction::is_mov_literal64() {
 559 #ifdef AMD64
 560   return ((ubyte_at(0) == Assembler::REX_W || ubyte_at(0) == Assembler::REX_WB) &&
 561           (ubyte_at(1) & (0xff ^ NativeMovConstReg::register_mask)) == 0xB8);
 562 #else
 563   return false;
 564 #endif // AMD64
 565 }
 566 
 567 #endif // CPU_X86_VM_NATIVEINST_X86_HPP


  43 // - - NativeGeneralJump
  44 // - - NativeReturn
  45 // - - NativeReturnX (return with argument)
  46 // - - NativePushConst
  47 // - - NativeTstRegMem
  48 
  49 // The base class for different kinds of native instruction abstractions.
  50 // Provides the primitive operations to manipulate code relative to this.
  51 
  52 class NativeInstruction VALUE_OBJ_CLASS_SPEC {
  53   friend class Relocation;
  54 
  55  public:
  56   enum Intel_specific_constants {
  57     nop_instruction_code        = 0x90,
  58     nop_instruction_size        =    1
  59   };
  60 
  61   bool is_nop()                        { return ubyte_at(0) == nop_instruction_code; }
  62   inline bool is_call();
  63   inline bool is_call_reg();
  64   inline bool is_illegal();
  65   inline bool is_return();
  66   inline bool is_jump();
  67   inline bool is_cond_jump();
  68   inline bool is_safepoint_poll();
  69   inline bool is_mov_literal64();
  70 
  71  protected:
  72   address addr_at(int offset) const    { return address(this) + offset; }
  73 
  74   s_char sbyte_at(int offset) const    { return *(s_char*) addr_at(offset); }
  75   u_char ubyte_at(int offset) const    { return *(u_char*) addr_at(offset); }
  76 
  77   jint int_at(int offset) const         { return *(jint*) addr_at(offset); }
  78 
  79   intptr_t ptr_at(int offset) const    { return *(intptr_t*) addr_at(offset); }
  80 
  81   oop  oop_at (int offset) const       { return *(oop*) addr_at(offset); }
  82 
  83 


 164 
 165   static void replace_mt_safe(address instr_addr, address code_buffer);
 166 };
 167 
      // Wrap the call instruction whose FIRST byte is at 'address' as a
      // NativeCall*.  The parameter deliberately reuses the 'address' typedef
      // name (existing HotSpot idiom).  Debug builds verify the encoding.
 168 inline NativeCall* nativeCall_at(address address) {
 169   NativeCall* call = (NativeCall*)(address - NativeCall::instruction_offset);
 170 #ifdef ASSERT
 171   call->verify();
 172 #endif
 173   return call;
 174 }
 175 
      // Wrap the call instruction that ENDS at 'return_address' (the call
      // whose pushed return address equals 'return_address') as a
      // NativeCall*.  Debug builds verify the encoding.
 176 inline NativeCall* nativeCall_before(address return_address) {
 177   NativeCall* call = (NativeCall*)(return_address - NativeCall::return_address_offset);
 178 #ifdef ASSERT
 179   call->verify();
 180 #endif
 181   return call;
 182 }
 183 
      // Abstraction for a register-indirect call ('call *reg', opcode 0xFF).
      // The instruction is 2 bytes without a REX prefix and 3 bytes with one,
      // hence the two return_address_offset_* constants.
 184 class NativeCallReg: public NativeInstruction {
 185  public:
 186   enum Intel_specific_constants {
 187     instruction_code            = 0xFF,
 188     instruction_offset          =    0,
 189     return_address_offset_norex =    2,
 190     return_address_offset_rex   =    3
 191   };
 192 
      // Offset of the next instruction: if byte 0 is already the 0xFF opcode
      // there is no REX prefix (2 bytes); otherwise byte 0 is assumed to be
      // a REX prefix and the instruction is 3 bytes.
 193   int next_instruction_offset() const  {
 194     if (ubyte_at(0) == NativeCallReg::instruction_code) {
 195       return return_address_offset_norex;
 196     } else {
 197       return return_address_offset_rex;
 198     }
 199   }
 200 };
 201 
 202 // An interface for accessing/manipulating native mov reg, imm32 instructions.
 203 // (used to manipulate inlined 32bit data dll calls, etc.)
 204 class NativeMovConstReg: public NativeInstruction {
 205 #ifdef AMD64
 206   static const bool has_rex = true;
 207   static const int rex_size = 1;
 208 #else
 209   static const bool has_rex = false;
 210   static const int rex_size = 0;
 211 #endif // AMD64
 212  public:
 213   enum Intel_specific_constants {
 214     instruction_code            = 0xB8,
 215     instruction_size            =    1 + rex_size + wordSize,
 216     instruction_offset          =    0,
 217     data_offset                 =    1 + rex_size,
 218     next_instruction_offset     =    instruction_size,
 219     register_mask               = 0x07
 220   };
 221 


 521     instruction_size            =    2,
 522     instruction_offset          =    0,
 523     next_instruction_offset     =    2
 524   };
 525 };
 526 
 527 // Simple test vs memory
      // Encoding constants used to recognize a TEST instruction with a memory
      // operand (primary opcode byte 0x85).  On AMD64 an optional REX prefix
      // may precede the opcode; the rex_prefix mask/value pair below detects
      // it by its high nibble (Assembler::REX is presumably 0x40 -- confirm
      // against assembler_x86.hpp, which is not visible in this hunk).
 528 class NativeTstRegMem: public NativeInstruction {
 529  public:
 530   enum Intel_specific_constants {
 531     instruction_rex_prefix_mask = 0xF0,
 532     instruction_rex_prefix      = Assembler::REX,
      // TEST r/m32, r32 primary opcode.
 533     instruction_code_memXregl   = 0x85,
 534     modrm_mask                  = 0x38, // select reg from the ModRM byte
 535     modrm_reg                   = 0x00  // rax
 536   };
 537 };
 538 
      // First-byte (or first-word) opcode tests used to classify the
      // instruction at 'this'.  Each compares raw code bytes against the
      // Intel_specific_constants of the corresponding Native* class.
      // is_illegal() compares only the low 16 bits of the first word.
 539 inline bool NativeInstruction::is_illegal()      { return (short)int_at(0) == (short)NativeIllegalInstruction::instruction_code; }
 540 inline bool NativeInstruction::is_call()         { return ubyte_at(0) == NativeCall::instruction_code; }
      // Register-indirect call: either the bare 0xFF opcode at byte 0, or a
      // REX/REX.B prefix at byte 0 followed by 0xFF at byte 1.
 541 inline bool NativeInstruction::is_call_reg()     { return ubyte_at(0) == NativeCallReg::instruction_code ||
 542                                                           (ubyte_at(1) == NativeCallReg::instruction_code &&
 543                                                            (ubyte_at(0) == Assembler::REX || ubyte_at(0) == Assembler::REX_B)); }
      // Return: plain return opcode or return-with-immediate opcode.
 544 inline bool NativeInstruction::is_return()       { return ubyte_at(0) == NativeReturn::instruction_code ||
 545                                                           ubyte_at(0) == NativeReturnX::instruction_code; }
      // Jump: near jump opcode or the 0xEB short-jump opcode.
 546 inline bool NativeInstruction::is_jump()         { return ubyte_at(0) == NativeJump::instruction_code ||
 547                                                           ubyte_at(0) == 0xEB; /* short jump */ }
      // Conditional jump: two-byte 0x0F 0x8x form (0x800F when the first two
      // code bytes are read little-endian via int_at(0)) or one-byte 0x7x.
 548 inline bool NativeInstruction::is_cond_jump()    { return (int_at(0) & 0xF0FF) == 0x800F /* long jump */ ||
 549                                                           (ubyte_at(0) & 0xF0) == 0x70;  /* short jump */ }
      // Does the instruction at 'this' look like a safepoint polling read?
      // Reworked from the old version: both the near (RIP-relative) and far
      // (base-register) decodings are always attempted, with NOT_JVMCI
      // asserts cross-checking against Assembler::is_polling_page_far().
 550 inline bool NativeInstruction::is_safepoint_poll() {
 551 #ifdef AMD64
 552   // Try decoding a near safepoint first:
      // TEST with ModRM 0x05 (mod=00, reg=rax, r/m=101), i.e. RIP-relative
      // disp32; the faulting address is end-of-instruction (addr_at(6):
      // presumably 2 code bytes + 4 displacement bytes -- confirm against
      // the emitter) plus the displacement read at offset 2.
 553   if (ubyte_at(0) == NativeTstRegMem::instruction_code_memXregl &&
 554       ubyte_at(1) == 0x05) { // 00 rax 101
 555     address fault = addr_at(6) + int_at(2);
 556     NOT_JVMCI(assert(!Assembler::is_polling_page_far(), "unexpected poll encoding");)
 557     return os::is_poll_address(fault);
 558   }
 559   // Now try decoding a far safepoint:
 560   // two cases, depending on the choice of the base register in the address.
      // Case 1: REX-prefixed TEST (prefix, opcode, ModRM with reg==rax);
      // Case 2: un-prefixed TEST (opcode, ModRM with reg==rax).
      // NOTE(review): the second '&&' pair is not parenthesized inside the
      // '||'; precedence gives the intended grouping, but compilers warn
      // (-Wparentheses) -- worth adding explicit parens.
 561   if (((ubyte_at(0) & NativeTstRegMem::instruction_rex_prefix_mask) == NativeTstRegMem::instruction_rex_prefix &&
 562        ubyte_at(1) == NativeTstRegMem::instruction_code_memXregl &&
 563        (ubyte_at(2) & NativeTstRegMem::modrm_mask) == NativeTstRegMem::modrm_reg) ||
 564       ubyte_at(0) == NativeTstRegMem::instruction_code_memXregl &&
 565       (ubyte_at(1) & NativeTstRegMem::modrm_mask) == NativeTstRegMem::modrm_reg) {
 566     NOT_JVMCI(assert(Assembler::is_polling_page_far(), "unexpected poll encoding");)
 567     return true;


 568   }






 569   return false;


 570 #else
      // 32-bit: accept either a mov-from-memory or a TEST, with ModRM
      // selecting an absolute disp32, whose target is the polling page.
 571   return ( ubyte_at(0) == NativeMovRegMem::instruction_code_mem2reg ||
 572            ubyte_at(0) == NativeTstRegMem::instruction_code_memXregl ) &&
 573            (ubyte_at(1)&0xC7) == 0x05 && /* Mod R/M == disp32 */
 574            (os::is_poll_address((address)int_at(2)));
 575 #endif // AMD64
 576 }
 577 
      // Does the instruction at 'this' look like 'mov reg, imm64'?
      // AMD64 only: a REX.W (or REX.WB) prefix followed by opcode 0xB8+reg;
      // the low three register-selector bits of the opcode byte are masked
      // off via NativeMovConstReg::register_mask before comparing.
 578 inline bool NativeInstruction::is_mov_literal64() {
 579 #ifdef AMD64
 580   return ((ubyte_at(0) == Assembler::REX_W || ubyte_at(0) == Assembler::REX_WB) &&
 581           (ubyte_at(1) & (0xff ^ NativeMovConstReg::register_mask)) == 0xB8);
 582 #else
 583   return false;
 584 #endif // AMD64
 585 }
 586 
 587 #endif // CPU_X86_VM_NATIVEINST_X86_HPP
< prev index next >