1 /* 2 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved. 3 * Copyright (c) 2014, Red Hat Inc. All rights reserved. 4 * Copyright (c) 2015, Linaro Ltd. All rights reserved. 5 * Copyright (c) 2015-2018, Azul Systems, Inc. All rights reserved. 6 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 7 * 8 * This code is free software; you can redistribute it and/or modify it 9 * under the terms of the GNU General Public License version 2 only, as 10 * published by the Free Software Foundation. 11 * 12 * This code is distributed in the hope that it will be useful, but WITHOUT 13 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 14 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 15 * version 2 for more details (a copy is included in the LICENSE file that 16 * accompanied this code). 17 * 18 * You should have received a copy of the GNU General Public License version 19 * 2 along with this work; if not, write to the Free Software Foundation, 20 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 21 * 22 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 23 * or visit www.oracle.com if you need additional information or have any 24 * questions. 25 * 26 */ 27 28 #ifndef CPU_AARCH32_VM_NATIVEINST_AARCH32_HPP 29 #define CPU_AARCH32_VM_NATIVEINST_AARCH32_HPP 30 31 #include "asm/assembler.hpp" 32 #include "runtime/icache.hpp" 33 #include "runtime/os.hpp" 34 35 // We have interfaces for the following instructions: 36 // - NativeInstruction 37 // - - NativeCall 38 // - - NativeMovConstReg 39 // - - NativeMovRegMem 40 // - - NativeMovRegMemPatching 41 // - - NativeJump 42 // - - NativeIllegalOpCode 43 // - - NativeGeneralJump 44 // - - NativeReturn 45 // - - NativeReturnX (return with argument) 46 // - - NativePushConst 47 // - - NativeTstRegMem 48 49 // The base class for different kinds of native instruction abstractions. 
50 // Provides the primitive operations to manipulate code relative to this. 51 52 class NativeInstruction { 53 friend class Relocation; 54 friend bool is_NativeCallTrampolineStub_at(address); 55 public: 56 enum { arm_insn_sz = 4 }; 57 58 inline bool is_nop(); 59 inline bool is_barrer(); 60 inline bool is_illegal(); 61 inline bool is_return(); 62 inline bool is_jump_or_nop(); 63 inline bool is_cond_jump(); 64 bool is_safepoint_poll(); 65 bool is_movt(); 66 bool is_orr(); 67 bool is_sigill_zombie_not_entrant(); 68 69 bool is_movt(Register dst, unsigned imm, Assembler::Condition cond = Assembler::C_DFLT); 70 bool is_movw(Register dst, unsigned imm, Assembler::Condition cond = Assembler::C_DFLT); 71 bool is_ldr(Register dst, Address addr, Assembler::Condition cond = Assembler::C_DFLT); 72 73 inline bool is_jump() const; 74 inline bool is_call() const; 75 76 inline bool is_mov_const_reg() const; 77 inline bool is_reg_call() const; 78 inline bool is_imm_call() const; 79 inline bool is_reg_jump() const; 80 inline bool is_imm_jump() const; 81 82 protected: 83 address addr() const { return address(this); } 84 // TODO remove this, every command is 4byte long 85 86 address addr_at(int offset) const { return addr() + offset; } 87 88 s_char sbyte_at(int offset) const { return *(s_char*) addr_at(offset); } 89 u_char ubyte_at(int offset) const { return *(u_char*) addr_at(offset); } 90 91 jint int_at(int offset) const { return *(jint*) addr_at(offset); } 92 juint uint_at(int offset) const { return *(juint*) addr_at(offset); } 93 94 address ptr_at(int offset) const { return *(address*) addr_at(offset); } 95 96 oop oop_at (int offset) const { return *(oop*) addr_at(offset); } 97 98 99 void set_char_at(int offset, char c) { *addr_at(offset) = (u_char)c; } 100 void set_int_at(int offset, jint i) { *(jint*)addr_at(offset) = i; } 101 void set_uint_at(int offset, jint i) { *(juint*)addr_at(offset) = i; } 102 void set_ptr_at (int offset, address ptr) { *(address*) addr_at(offset) = ptr; 
} 103 void set_oop_at (int offset, oop o) { *(oop*) addr_at(offset) = o; } 104 105 static juint as_uint(address addr) { 106 return *(juint *) addr; 107 } 108 109 juint as_uint() const { 110 return as_uint(addr()); 111 } 112 113 void set_uint(juint v) { 114 *(juint *) addr() = v; 115 } 116 117 public: 118 119 // unit test stuff 120 static void test() {} // override for testing 121 122 static bool is_at(address address); 123 static NativeInstruction* from(address address); 124 125 }; 126 127 inline NativeInstruction* nativeInstruction_at(address addr) { 128 return NativeInstruction::from(addr); 129 } 130 131 inline NativeInstruction* nativeInstruction_at(uint32_t *addr) { 132 return NativeInstruction::from(address(addr)); 133 } 134 135 class NativeBranchType: public NativeInstruction { 136 protected: 137 static bool is_branch_type(uint32_t insn); 138 void patch_offset_to(address addr); 139 public: 140 enum { 141 instruction_size = arm_insn_sz, 142 }; 143 144 address next_instruction_address() const { 145 return addr() + arm_insn_sz; 146 } 147 }; 148 149 class NativeFarLdr: public NativeInstruction { 150 private: 151 static address skip_patching_prolog(address addr); 152 public: 153 static bool is_at(address addr); 154 static NativeFarLdr* from(address addr); 155 intptr_t *data_addr(); 156 void set_data_addr(intptr_t *data_addr); 157 address next_instruction_address() const; 158 }; 159 160 class NativeMovConstReg: public NativeInstruction { 161 friend class Relocation; 162 friend class NativeMovRegMem; 163 friend class NativeGeneralJump; 164 friend class NativeFarLdr; 165 166 protected: 167 static bool is_ldr_literal_at(address instr, Register from = r15_pc); 168 static bool is_far_ldr_literal_at(address instr); 169 static bool is_movw_movt_at(address instr); 170 static bool is_mov_n_three_orr_at(address instr); 171 public: 172 enum { 173 ldr_sz = 1 * arm_insn_sz, 174 far_ldr_sz = 2 * arm_insn_sz, 175 movw_movt_pair_sz = 2 * arm_insn_sz, 176 mov_n_three_orr_sz = 4 * 
arm_insn_sz, 177 min_instruction_size = 1 * arm_insn_sz, 178 max_instruction_size = 4 * arm_insn_sz, 179 }; 180 181 address next_instruction_address() const { 182 if (is_ldr_literal_at(addr())) { 183 return addr() + ldr_sz; 184 } else if (is_far_ldr_literal_at(addr())) { 185 return NativeFarLdr::from(addr())->next_instruction_address();; 186 } else if (is_movw_movt_at(addr())) { 187 return addr() + movw_movt_pair_sz; 188 } else if (is_mov_n_three_orr_at(addr())) { 189 return addr() + mov_n_three_orr_sz; 190 } 191 192 // Unknown instruction in NativeMovConstReg 193 ShouldNotReachHere(); 194 return NULL; 195 } 196 197 intptr_t data() const; 198 void set_data(intptr_t x); 199 200 Register destination() const; 201 void set_destination(Register r); 202 203 void flush() { 204 ICache::invalidate_range(addr(), max_instruction_size); 205 } 206 207 void verify(); 208 void print(); 209 210 // unit test stuff 211 static void test() {} 212 213 // Creation 214 inline friend NativeMovConstReg* nativeMovConstReg_at(address address); 215 216 static NativeMovConstReg* before(address addr) { 217 address mov = NULL; 218 if (is_ldr_literal_at(addr - ldr_sz)) { 219 mov = addr - ldr_sz; 220 } else if (is_far_ldr_literal_at(addr - far_ldr_sz)) { 221 mov = addr - far_ldr_sz; 222 } else if (is_movw_movt_at(addr - movw_movt_pair_sz)) { 223 mov = addr - movw_movt_pair_sz; 224 } else if (is_mov_n_three_orr_at(addr - mov_n_three_orr_sz)) { 225 mov = addr - mov_n_three_orr_sz; 226 } 227 guarantee(mov, "Can't find NativeMovConstReg before"); 228 return NativeMovConstReg::from(mov); 229 } 230 231 static bool is_at(address instr); 232 static NativeMovConstReg* from(address addr); 233 }; 234 235 inline NativeMovConstReg* nativeMovConstReg_at(address address) { 236 return NativeMovConstReg::from(address); 237 } 238 239 class NativeTrampolineCall: public NativeInstruction { 240 public: 241 // NativeTrampolineCall size is always equal to NativeCall::instruction_size 242 address destination() const; 243 
  void set_destination(address dest);
  // MT-safe destination update; assert_lock disables the lock assertion
  // during code generation (cf. NativeCall::set_destination_mt_safe).
  void set_destination_mt_safe(address dest, bool assert_lock = true);

  static bool is_at(address address);
  static NativeTrampolineCall* from(address address);

  address next_instruction_address() const;
};

// Call through a register (destination register is patchable).
class NativeRegCall: public NativeBranchType {
 public:

  Register destination() const;
  void set_destination(Register r);

  static bool is_at(address address);
  static NativeRegCall* from(address address);
};

// Facade over the several call shapes this port emits; see the list below.
class NativeCall: public NativeInstruction {
  friend class Relocation;
 protected:
  NativeInstruction* is_long_jump_or_call_at(address addr);

  // NativeCall represents:
  //   NativeImmCall,
  //   NativeMovConstReg + NativeBranchType,
  //   NativeTrampolineCall
 public:
  enum {
    max_instruction_size = 5 * arm_insn_sz
  };

  // Selected at runtime by init(); not a compile-time constant on this port.
  static int instruction_size;
#ifdef ASSERT
  // The two patchable mov-const + reg-call shapes must fit in max_instruction_size.
  STATIC_ASSERT(NativeMovConstReg::movw_movt_pair_sz
      + NativeRegCall::instruction_size <= (int) max_instruction_size);
  STATIC_ASSERT(NativeMovConstReg::mov_n_three_orr_sz
      + NativeRegCall::instruction_size <= (int) max_instruction_size);
#endif

  address destination() const;
  void set_destination(address dest);

  static void init();
  void verify_alignment() { ; }
  void verify();
  void print();

  address instruction_address() const { return addr_at(0); }
  address next_instruction_address() const;
  address return_address() const;

  // MT-safe patching of a call instruction.
  static void insert(address code_pos, address entry);

  // Similar to replace_mt_safe, but just changes the destination. The
  // important thing is that free-running threads are able to execute
  // this call instruction at all times. If the call is an immediate BL
  // instruction we can simply rely on atomicity of 32-bit writes to
  // make sure other threads will see no intermediate states.

  // We cannot rely on locks here, since the free-running threads must run at
  // full speed.
  //
  // Used in the runtime linkage of calls; see class CompiledIC.
  // (Cf. 4506997 and 4479829, where threads witnessed garbage displacements.)

  // The parameter assert_lock disables the assertion during code generation.
  void set_destination_mt_safe(address dest, bool assert_lock = true);

  static bool is_at(address instr);
  static NativeCall* from(address instr);

  static bool is_call_before(address return_address);
};

inline address NativeTrampolineCall::next_instruction_address() const {
  assert(is_at(addr()), "not call");
  // Trampoline calls occupy exactly one NativeCall slot.
  return addr() + NativeCall::instruction_size;
}

inline NativeCall* nativeCall_at(address address) {
  return NativeCall::from(address);
}

// NOTE(review): the comment below describes x86 instruction forms (movb, fld_s,
// xor-before-movb, ...) and appears to be inherited verbatim from the x86
// nativeInst header; the aarch32 sequences differ — confirm against the .cpp.
//
// An interface for accessing/manipulating native moves of the form:
//       mov[b/w/l/q] [reg + offset], reg   (instruction_code_reg2mem)
//       mov[b/w/l/q] reg, [reg+offset]     (instruction_code_mem2reg
//       mov[s/z]x[w/b/q] [reg + offset], reg
//       fld_s  [reg+offset]
//       fld_d  [reg+offset]
//       fstp_s [reg + offset]
//       fstp_d [reg + offset]
//       mov_literal64  scratch,<pointer> ; mov[b/w/l/q] 0(scratch),reg | mov[b/w/l/q] reg,0(scratch)
//
// Warning: These routines must be able to handle any instruction sequences
// that are generated as a result of the load/store byte,word,long
// macros.  For example: The load_unsigned_byte instruction generates
// an xor reg,reg inst prior to generating the movb instruction.  This
// class must skip the xor instruction.


// Accessor for a register<->memory move whose displacement can be patched.
// TODO Review
class NativeMovRegMem: public NativeInstruction {
 public:
  enum {
    instruction_size = 2 * arm_insn_sz, // TODO check this
  };
  // helper
  int instruction_start() const;

  address instruction_address() const;

  address next_instruction_address() const;

  // The (patchable) memory-operand displacement, in bytes.
  int offset() const;

  void set_offset(int x);

  // Adjust the displacement by a delta (read-modify-write of offset()).
  void add_offset_in_bytes(int add_offset) { set_offset ( ( offset() + add_offset ) ); }

  void verify();
  void print ();

  // unit test stuff
  static void test() {}

 private:
  inline friend NativeMovRegMem* nativeMovRegMem_at (address address);
};

inline NativeMovRegMem* nativeMovRegMem_at (address address) {
  NativeMovRegMem* test = (NativeMovRegMem*) address;
#ifdef ASSERT
  test->verify();   // verify() only in debug builds
#endif
  return test;
}

// Patching variant; accessor is unimplemented on this port.
class NativeMovRegMemPatching: public NativeMovRegMem {
 private:
  friend NativeMovRegMemPatching* nativeMovRegMemPatching_at (address address) {Unimplemented(); return 0; }
};

// An unconditional jump: movw/movt to a scratch register plus a register branch
// (see instruction_size below).
class NativeJump: public NativeInstruction {
 public:
  enum {
    instruction_size = NativeMovConstReg::movw_movt_pair_sz + NativeBranchType::instruction_size,
  };
  address instruction_address() const {
    return addr();
  }

  address next_instruction_address() const;

  address jump_destination() const;
  void set_jump_destination(address dest);

  // Creation
  inline friend NativeJump* nativeJump_at(address address);

  void verify();

  // Unit testing stuff
  static void test() {}

  // Insertion of native jump instruction
  static void insert(address code_pos, address entry);
  // MT-safe insertion of native jump at verified method entry
  static void check_verified_entry_alignment(address entry, address verified_entry);
  static void patch_verified_entry(address entry, address verified_entry, address dest);

  static bool is_at(address instr);
  static NativeJump* from(address instr);
};

inline NativeJump* nativeJump_at(address addr) {
  return NativeJump::from(addr);
}

// TODO We don't really need NativeGeneralJump, NativeJump should be able to do
// everything that General Jump would. Make this only interface to NativeJump
// from share code (c1_Runtime)
class NativeGeneralJump: public NativeJump {
 public:
  enum {
    instruction_size = arm_insn_sz,
  };

  static void insert_unconditional(address code_pos, address entry);
  static void replace_mt_safe(address instr_addr, address code_buffer);
  static void verify();
};

inline NativeGeneralJump* nativeGeneralJump_at(address address) {
  NativeGeneralJump* jump = (NativeGeneralJump*)(address);
  debug_only(jump->verify();)   // verify() only in debug builds
  return jump;
}

class NativePopReg : public NativeInstruction {
 public:
  // Insert a pop instruction
  static void insert(address code_pos, Register reg);
};


class NativeIllegalInstruction: public NativeInstruction {
 public:
  // Insert illegal opcode at the specified address
  static void insert(address code_pos);
};

// return instruction that does not pop values of the stack
class NativeReturn: public NativeInstruction {
 public:
};

// return instruction that does pop values of the stack
class NativeReturnX: public NativeInstruction {
 public:
};

// Simple test vs memory
class NativeTstRegMem: public NativeInstruction {
 public:
};

// NOP: mov r0, r0 under any condition code (condition bits are masked off).
inline bool NativeInstruction::is_nop() {
  return (as_uint() & 0x0fffffff) == 0x0320f000;
}

// NOTE(review): "barrer" is a long-standing typo for "barrier"; renaming would
// break the .cpp and other callers, so the spelling is kept.
inline bool NativeInstruction::is_barrer() {
  return (as_uint() == 0xf57ff05b /* dmb ish */ ||
          as_uint() == 0xee070fba /* mcr 15, 0, r0, cr7, cr10, {5} */);
}

inline bool NativeInstruction::is_jump_or_nop() {
  return is_nop() || is_jump();
}

// Call with an immediate (pc-relative) target.
class NativeImmCall: public NativeBranchType {
 public:
  address destination() const;
  void set_destination(address dest);

  static bool is_at(address address);
  static NativeImmCall* from(address address);
};

// Jump with an immediate (pc-relative) target.
class NativeImmJump: public NativeBranchType {
 public:

  address destination() const;
  void set_destination(address r);

  static bool is_at(address address);
  static NativeImmJump* from(address address);
};

// Jump through a register.
class NativeRegJump: public NativeBranchType {
 public:

  Register destination() const;
  void set_destination(Register r);

  static bool is_at(address address);
  static NativeRegJump* from(address address);
};

// Dispatch the NativeInstruction predicates to the concrete classes' is_at().
inline bool NativeInstruction::is_call() const          { return NativeCall::is_at(addr()); }
inline bool NativeInstruction::is_jump() const          { return NativeJump::is_at(addr()); }
inline bool NativeInstruction::is_mov_const_reg() const { return NativeMovConstReg::is_at(addr()); }
inline bool NativeInstruction::is_imm_call() const      { return NativeImmCall::is_at(addr()); }
inline bool NativeInstruction::is_reg_call() const      { return NativeRegCall::is_at(addr()); }
inline bool NativeInstruction::is_imm_jump() const      { return NativeImmJump::is_at(addr()); }
inline bool NativeInstruction::is_reg_jump() const      { return NativeRegJump::is_at(addr()); }

// Recover the NativeCall that ends at return_address, trying each call shape
// this port emits (see NativeCall). Guarantees a match or asserts.
inline NativeCall* nativeCall_before(address return_address) {
  // Full-size call slot: trampoline call.
  if (NativeTrampolineCall::is_at(return_address - NativeCall::instruction_size)) {
    return NativeCall::from(return_address - NativeCall::instruction_size);
  }
  // Full-size call slot: mov-const into a register followed by a call through
  // that same register.
  if (NativeMovConstReg::is_at(return_address - NativeCall::instruction_size)) {
    NativeMovConstReg *nm = NativeMovConstReg::from(return_address - NativeCall::instruction_size);
    address next_instr = nm->next_instruction_address();
    if (NativeRegCall::is_at(next_instr) &&
        NativeRegCall::from(next_instr)->destination() == nm->destination()) {
      return NativeCall::from(return_address - NativeCall::instruction_size);
    }
  }
  // Single-word immediate BL.
  if (NativeImmCall::is_at(return_address - NativeBranchType::instruction_size)) {
    return NativeCall::from(return_address - NativeBranchType::instruction_size);
  }

  ShouldNotReachHere();
  return NULL;
}

#endif // CPU_AARCH32_VM_NATIVEINST_AARCH32_HPP