1 /*
2 * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
3 * Copyright (c) 2014, Red Hat Inc. All rights reserved.
4 * Copyright (c) 2015, Linaro Ltd. All rights reserved.
5 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
6 *
7 * This code is free software; you can redistribute it and/or modify it
8 * under the terms of the GNU General Public License version 2 only, as
9 * published by the Free Software Foundation.
10 *
11 * This code is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 * version 2 for more details (a copy is included in the LICENSE file that
15 * accompanied this code).
16 *
17 * You should have received a copy of the GNU General Public License version
18 * 2 along with this work; if not, write to the Free Software Foundation,
19 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
20 *
21 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
22 * or visit www.oracle.com if you need additional information or have any
23 * questions.
24 *
25 */
26
27 #ifndef CPU_AARCH32_VM_NATIVEINST_AARCH32_HPP
28 #define CPU_AARCH32_VM_NATIVEINST_AARCH32_HPP
29
30 #include "asm/assembler.hpp"
31 #include "memory/allocation.hpp"
32 #include "runtime/icache.hpp"
33 #include "runtime/os.hpp"
34 #include "utilities/top.hpp"
35
// We have interfaces for the following instructions:
// - NativeInstruction
// - - NativeCall
// - - NativeMovConstReg
// - - NativeMovRegMem
// - - NativeMovRegMemPatching
// - - NativeJump
// - - NativeGeneralJump
// - - NativeIllegalInstruction
// - - NativePopReg
// - - NativeReturn
// - - NativeReturnX (return with argument)
// - - NativeTstRegMem
// - - NativeBranchType (NativeTrampolineCall, NativeImmCall, NativeRegCall,
//                       NativeImmJump, NativeRegJump)
49
50 // The base class for different kinds of native instruction abstractions.
51 // Provides the primitive operations to manipulate code relative to this.
52
53 class NativeInstruction VALUE_OBJ_CLASS_SPEC {
54 friend class Relocation;
55 friend bool is_NativeCallTrampolineStub_at(address);
56 public:
57 enum { arm_insn_sz = 4 };
58
59 inline bool is_nop();
60 inline bool is_illegal();
61 inline bool is_return();
62 inline bool is_jump_or_nop();
63 inline bool is_cond_jump();
64 bool is_safepoint_poll();
65 bool is_movt();
66 bool is_orr();
67 bool is_sigill_zombie_not_entrant();
68
69 bool is_movt(Register dst, unsigned imm, Assembler::Condition cond = Assembler::C_DFLT);
70 bool is_movw(Register dst, unsigned imm, Assembler::Condition cond = Assembler::C_DFLT);
71 bool is_ldr(Register dst, Address addr, Assembler::Condition cond = Assembler::C_DFLT);
72 bool is_patched_already() const;
73
74 inline bool is_jump() const;
75 inline bool is_call() const;
76
77 inline bool is_mov_const_reg() const;
78 inline bool is_reg_call() const;
79 inline bool is_imm_call() const;
80 inline bool is_reg_jump() const;
81 inline bool is_imm_jump() const;
82
83 protected:
84 address addr() const { return address(this); }
85 // TODO remove this, every command is 4byte long
86 #if 1
87 address addr_at(int offset) const { return addr() + offset; }
88
89 s_char sbyte_at(int offset) const { return *(s_char*) addr_at(offset); }
90 u_char ubyte_at(int offset) const { return *(u_char*) addr_at(offset); }
91
92 jint int_at(int offset) const { return *(jint*) addr_at(offset); }
93 juint uint_at(int offset) const { return *(juint*) addr_at(offset); }
94
95 address ptr_at(int offset) const { return *(address*) addr_at(offset); }
96
97 oop oop_at (int offset) const { return *(oop*) addr_at(offset); }
98
99
100 void set_char_at(int offset, char c) { *addr_at(offset) = (u_char)c; }
101 void set_int_at(int offset, jint i) { *(jint*)addr_at(offset) = i; }
102 void set_uint_at(int offset, jint i) { *(juint*)addr_at(offset) = i; }
103 void set_ptr_at (int offset, address ptr) { *(address*) addr_at(offset) = ptr; }
104 void set_oop_at (int offset, oop o) { *(oop*) addr_at(offset) = o; }
105 #endif
106
107 static juint as_uint(address addr) {
108 return *(juint *) addr;
109 }
110
111 juint as_uint() const {
112 return as_uint(addr());
113 }
114
115 void set_uint(juint v) {
116 *(juint *) addr() = v;
117 }
118
119 void atomic_set_ulong_at(int offset, julong v) {
120 address a = addr() + offset;
121 assert(((uintptr_t) a) % 8 == 0, "should be aligned");
122 Atomic::store(v, (volatile jlong *) a);
123 }
124
125 public:
126
127 // unit test stuff
128 static void test() {} // override for testing
129
130 static bool is_at(address address);
131 static NativeInstruction* from(address address);
132
133 };
134
// Overlay a NativeInstruction on the code at 'addr'.
inline NativeInstruction* nativeInstruction_at(address addr) {
  return NativeInstruction::from(addr);
}

// Convenience overload for instruction-word pointers.
inline NativeInstruction* nativeInstruction_at(uint32_t *addr) {
  return NativeInstruction::from(address(addr));
}
142
// Common base for single-instruction control transfers; subclasses
// discriminate the immediate vs. register forms of calls and jumps.
class NativeBranchType: public NativeInstruction {
 protected:
  // True if 'insn' encodes one of the recognized branch forms (see .cpp).
  static bool is_branch_type(uint32_t insn);
  // Re-encode the branch offset so it targets 'addr'.
  void patch_offset_to(address addr);
 public:
  enum {
    instruction_size = arm_insn_sz,
  };

  address next_instruction_address() const {
    return addr() + arm_insn_sz;
  }
};
156
// Load of a constant into a register, encoded either as a movw/movt
// pair or as a single literal ldr.
class NativeMovConstReg: public NativeInstruction {
 protected:
  // Recognizers for the two accepted encodings.
  static bool is_movw_movt_at(address instr);
  static bool is_ldr_literal_at(address instr);
 public:
  enum {
    movw_movt_pair_sz = 2 * arm_insn_sz,
    ldr_sz = arm_insn_sz,
    max_instruction_size = movw_movt_pair_sz,
    min_instruction_size = ldr_sz,
  };

  address next_instruction_address() const {
    if (is_movw_movt_at(addr())) {
      return addr() + movw_movt_pair_sz;
    } else if (is_ldr_literal_at(addr())) {
      return addr() + ldr_sz;
    }

    // Unknown instruction in NativeMovConstReg
    ShouldNotReachHere();
    return NULL;
  }

  // The constant currently encoded at this site.
  intptr_t data() const;
  void set_data(intptr_t x);

  Register destination() const;
  void set_destination(Register r);

  // Flush the instruction cache over the maximal patched range.
  void flush() {
    ICache::invalidate_range(addr(), max_instruction_size);
  }

  void verify();
  void print();

  // unit test stuff
  static void test() {}

  // Creation
  inline friend NativeMovConstReg* nativeMovConstReg_at(address address);
  inline friend NativeMovConstReg* nativeMovConstReg_before(address address);

  static bool is_at(address instr);

  static NativeMovConstReg* from(address addr);
};
205
// Overlay a NativeMovConstReg on the code at 'address'.
inline NativeMovConstReg* nativeMovConstReg_at(address address) {
  return NativeMovConstReg::from(address);
}
209
210 inline NativeMovConstReg* nativeMovConstReg_before(address addr) {
211 address mov_addr = NULL;
212 if (NativeMovConstReg::is_movw_movt_at(addr - NativeMovConstReg::movw_movt_pair_sz)) {
213 mov_addr = addr - NativeMovConstReg::movw_movt_pair_sz;
214 } else if (NativeMovConstReg::is_ldr_literal_at(addr - NativeMovConstReg::ldr_sz)) {
215 mov_addr = addr - NativeMovConstReg::ldr_sz;
216 } else {
217 ShouldNotReachHere();
218 }
219
220 NativeMovConstReg* test = (NativeMovConstReg*) mov_addr;
221 #ifdef ASSERT
222 test->verify();
223 #endif
224 return test;
225 }
226
// A call that reaches its final target via an intermediate trampoline;
// the site spans three instruction words (see instruction_size).
class NativeTrampolineCall: public NativeBranchType {
 public:
  enum {
    instruction_size = 3 * arm_insn_sz
  };
  address destination() const;
  void set_destination(address dest);
  // MT-safe destination update; 'assert_lock' disables the locking
  // assertion during code generation (cf. NativeCall::set_destination_mt_safe).
  void set_destination_mt_safe(address dest, bool assert_lock = true);

  static bool is_at(address address);
  static NativeTrampolineCall* from(address address);

  address next_instruction_address() const {
    assert(is_at(addr()), "not call");
    return addr() + instruction_size;
  }
};
244
// A call that transfers control through a register.
class NativeRegCall: public NativeBranchType {
 public:

  // The register holding the call target.
  Register destination() const;
  void set_destination(Register r);

  static bool is_at(address address);
  static NativeRegCall* from(address address);
};
254
// The most general abstraction of a call site; see the list of concrete
// representations below.
class NativeCall: public NativeInstruction {
  friend class Relocation;
 protected:
  // Classification helper for the long (multi-instruction) forms;
  // implemented in the .cpp.
  NativeInstruction* is_long_jump_or_call_at(address addr);

  // NativeCall represents:
  //   NativeImmCall,
  //   NativeMovConstReg + NativeBranchType,
  //   NativeTrampolineCall
 public:
  enum {
    instruction_size = 3 * arm_insn_sz
  };
#ifdef ASSERT
  // Compile-time checks that each representable call form fits within
  // instruction_size.
  StaticAssert<(int) NativeTrampolineCall::instruction_size <= (int) instruction_size> dummy1;
  StaticAssert<NativeMovConstReg::movw_movt_pair_sz
      + NativeRegCall::instruction_size <= (int) instruction_size> dummy2;
#endif

  address destination() const;
  void set_destination(address dest);

  void verify_alignment() { ; } // intentionally a no-op on this port
  void verify();
  void print();

  address instruction_address() const { return addr_at(0); }
  address next_instruction_address() const;
  address return_address() const;

  // MT-safe patching of a call instruction.
  static void insert(address code_pos, address entry);

  // Similar to replace_mt_safe, but just changes the destination. The
  // important thing is that free-running threads are able to execute
  // this call instruction at all times. If the call is an immediate BL
  // instruction we can simply rely on atomicity of 32-bit writes to
  // make sure other threads will see no intermediate states.

  // We cannot rely on locks here, since the free-running threads must run at
  // full speed.
  //
  // Used in the runtime linkage of calls; see class CompiledIC.
  // (Cf. 4506997 and 4479829, where threads witnessed garbage displacements.)

  // The parameter assert_lock disables the assertion during code generation.
  void set_destination_mt_safe(address dest, bool assert_lock = true);

  static bool is_at(address instr);
  static NativeCall* from(address instr);

  // True if a recognizable call sequence ends at 'return_address'.
  static bool is_call_before(address return_address);
};
308
// Overlay a NativeCall on the code at 'address'.
inline NativeCall* nativeCall_at(address address) {
  return NativeCall::from(address);
}
312
313 inline NativeCall* nativeCall_before(address return_address) {
314 address call_addr = NULL;
315 if (NativeCall::is_at(return_address - NativeBranchType::instruction_size)) {
316 call_addr = return_address - NativeBranchType::instruction_size;
317 } else if (NativeCall::is_at(return_address - NativeCall::instruction_size)) {
318 call_addr = return_address - NativeCall::instruction_size;
319 } else {
320 ShouldNotReachHere();
321 }
322
323 return NativeCall::from(call_addr);
324 }
325
326
// An interface for accessing/manipulating native loads and stores of the
// form [reg + offset] <-> reg.
//
// (The list of forms below was inherited from the x86 port and uses x86
// mnemonics; it is kept for reference only.)
//   mov[b/w/l/q] [reg + offset], reg   (instruction_code_reg2mem)
//   mov[b/w/l/q] reg, [reg+offset]     (instruction_code_mem2reg)
//   mov[s/z]x[w/b/q] [reg + offset], reg
//   fld_s [reg+offset]
//   fld_d [reg+offset]
//   fstp_s [reg + offset]
//   fstp_d [reg + offset]
//   mov_literal64 scratch,<pointer> ; mov[b/w/l/q] 0(scratch),reg | mov[b/w/l/q] reg,0(scratch)
//
// Warning: These routines must be able to handle any instruction sequences
// that are generated as a result of the load/store byte,word,long
// macros. For example: The load_unsigned_byte instruction generates
// an xor reg,reg inst prior to generating the movb instruction. This
// class must skip the xor instruction.
342
343
344 // TODO Review
// TODO Review
// A memory-access instruction whose immediate offset can be read and
// patched in place.
class NativeMovRegMem: public NativeInstruction {
 public:
  enum {
    instruction_size = 2 * arm_insn_sz, // TODO check this
  };
  // helper
  int instruction_start() const;

  address instruction_address() const;

  address next_instruction_address() const;

  // The immediate offset currently encoded in the instruction.
  int offset() const;

  void set_offset(int x);

  void add_offset_in_bytes(int add_offset) { set_offset ( ( offset() + add_offset ) ); }

  void verify();
  void print ();

  // unit test stuff
  static void test() {}

 private:
  inline friend NativeMovRegMem* nativeMovRegMem_at (address address);
};
372
373 inline NativeMovRegMem* nativeMovRegMem_at (address address) {
374 NativeMovRegMem* test = (NativeMovRegMem*) address;
375 #ifdef ASSERT
376 test->verify();
377 #endif
378 return test;
379 }
380
// Patching variant of NativeMovRegMem; not implemented for this port.
class NativeMovRegMemPatching: public NativeMovRegMem {
 private:
  friend NativeMovRegMemPatching* nativeMovRegMemPatching_at (address address) {Unimplemented(); return 0; }
};
385
// An unconditional jump built as a movw/movt load of the target followed
// by a branch-type instruction (see instruction_size).
class NativeJump: public NativeInstruction {
 public:
  enum {
    instruction_size = NativeMovConstReg::movw_movt_pair_sz + NativeBranchType::instruction_size,
  };
  address instruction_address() const {
    return addr();
  }

  address next_instruction_address() const;

  address jump_destination() const;
  void set_jump_destination(address dest);

  // Creation
  inline friend NativeJump* nativeJump_at(address address);

  void verify();

  // Unit testing stuff
  static void test() {}

  // Insertion of native jump instruction
  static void insert(address code_pos, address entry);
  // MT-safe insertion of native jump at verified method entry
  static void check_verified_entry_alignment(address entry, address verified_entry);
  static void patch_verified_entry(address entry, address verified_entry, address dest);

  static bool is_at(address instr);
  static NativeJump* from(address instr);
};
417
// Overlay a NativeJump on the code at 'addr'.
inline NativeJump* nativeJump_at(address addr) {
  return NativeJump::from(addr);
}
421
// TODO We don't really need NativeGeneralJump; NativeJump should be able to
// do everything that NativeGeneralJump does. Make this the only interface to
// NativeJump from shared code (c1_Runtime).
// Single-instruction unconditional jump used by shared runtime code
// (note: instruction_size deliberately differs from NativeJump's).
class NativeGeneralJump: public NativeJump {
 public:
  enum {
    instruction_size = arm_insn_sz,
  };

  // Plant an unconditional jump to 'entry' at 'code_pos'.
  static void insert_unconditional(address code_pos, address entry);
  // MT-safe replacement of the instruction at 'instr_addr' with the
  // contents of 'code_buffer' (see .cpp for the patching protocol).
  static void replace_mt_safe(address instr_addr, address code_buffer);
  static void verify();
};
435
436 inline NativeGeneralJump* nativeGeneralJump_at(address address) {
437 NativeGeneralJump* jump = (NativeGeneralJump*)(address);
438 debug_only(jump->verify();)
439 return jump;
440 }
441
class NativePopReg : public NativeInstruction {
 public:
  // Insert a pop into 'reg' at 'code_pos'.
  static void insert(address code_pos, Register reg);
};
447
448
class NativeIllegalInstruction: public NativeInstruction {
 public:
  // Insert an illegal opcode at the specified address.
  static void insert(address code_pos);
};
454
455 // return instruction that does not pop values of the stack
class NativeReturn: public NativeInstruction {
 public:
  // Intentionally empty: no port-specific accessors are defined here.
};
459
460 // return instruction that does pop values of the stack
class NativeReturnX: public NativeInstruction {
 public:
  // Intentionally empty: no port-specific accessors are defined here.
};
464
465 // Simple test vs memory
class NativeTstRegMem: public NativeInstruction {
 public:
  // Intentionally empty: no port-specific accessors are defined here.
};
469
inline bool NativeInstruction::is_nop() {
  // Mask off the condition field (bits 31..28) and compare against the
  // ARM NOP hint encoding (0xe320f000 with the condition cleared).
  return (as_uint() & 0x0fffffff) == 0x0320f000;
}
473
474 inline bool NativeInstruction::is_jump_or_nop() {
475 return is_nop() || is_jump();
476 }
477
// A call whose destination is encoded as a PC-relative immediate
// (e.g. ARM BL).
class NativeImmCall: public NativeBranchType {
 public:
  address destination() const;
  void set_destination(address dest);

  static bool is_at(address address);
  static NativeImmCall* from(address address);
};
486
// A jump whose destination is encoded as a PC-relative immediate.
class NativeImmJump: public NativeBranchType {
 public:

  address destination() const;
  void set_destination(address r);

  static bool is_at(address address);
  static NativeImmJump* from(address address);
};
496
// A jump that transfers control through a register.
class NativeRegJump: public NativeBranchType {
 public:

  // The register holding the jump target.
  Register destination() const;
  void set_destination(Register r);

  static bool is_at(address address);
  static NativeRegJump* from(address address);
};
506
// Definitions of the NativeInstruction classifiers declared above; each
// delegates to the corresponding class's is_at() recognizer.
inline bool NativeInstruction::is_call() const          { return NativeCall::is_at(addr()); }
inline bool NativeInstruction::is_jump() const          { return NativeJump::is_at(addr()); }
inline bool NativeInstruction::is_mov_const_reg() const { return NativeMovConstReg::is_at(addr()); }
inline bool NativeInstruction::is_imm_call() const      { return NativeImmCall::is_at(addr()); }
inline bool NativeInstruction::is_reg_call() const      { return NativeRegCall::is_at(addr()); }
inline bool NativeInstruction::is_imm_jump() const      { return NativeImmJump::is_at(addr()); }
inline bool NativeInstruction::is_reg_jump() const      { return NativeRegJump::is_at(addr()); }
514
515 #endif // CPU_AARCH32_VM_NATIVEINST_AARCH32_HPP
--- EOF ---