243 void translate_ot(Register dst, Register src, uint mask);
244 void translate_to(Register dst, Register src, uint mask);
245 void translate_tt(Register dst, Register src, uint mask);
246
247 // Crypto instructions.
248 // Being interruptible, these instructions need a retry-loop.
249 void cksm(Register crcBuff, Register srcBuff);
250 void km( Register dstBuff, Register srcBuff);
251 void kmc(Register dstBuff, Register srcBuff);
252 void kimd(Register srcBuff);
253 void klmd(Register srcBuff);
254 void kmac(Register srcBuff);
255
256 // nop padding
257 void align(int modulus);
258 void align_address(int modulus);
259
260 //
261 // Constants, loading constants, TOC support
262 //
263 // Safepoint check factored out.
264 void generate_safepoint_check(Label& slow_path, Register scratch = noreg, bool may_relocate = true);
265
266 // Load generic address: d <- base(a) + index(a) + disp(a).
267 inline void load_address(Register d, const Address &a);
268 // Load absolute address (and try to optimize).
269 void load_absolute_address(Register d, address addr);
270
271 // Address of Z_ARG1 and argument_offset.
272 // If temp_reg == arg_slot, arg_slot will be overwritten.
273 Address argument_address(RegisterOrConstant arg_slot,
274 Register temp_reg = noreg,
275 int64_t extra_slot_offset = 0);
276
277 // Load a narrow ptr constant (oop or klass ptr).
278 void load_narrow_oop( Register t, narrowOop a);
279 void load_narrow_klass(Register t, Klass* k);
280
281 static bool is_load_const_32to64(address pos);
// A narrow oop constant and a narrow klass constant are both materialized
// with the same 32-to-64-bit load-constant instruction sequence, so the two
// predicates below simply delegate to is_load_const_32to64.
282 static bool is_load_narrow_oop(address pos) { return is_load_const_32to64(pos); }
283 static bool is_load_narrow_klass(address pos) { return is_load_const_32to64(pos); }
284
426 Register r2,
427 Assembler::branch_condition cond,
428 Label& branch_target,
429 bool len64,
430 bool has_sign);
431
432 //
433 // Support for frame handling
434 //
435 // Specify the register that should be stored as the return pc in the
436 // current frame (default is R14).
437 inline void save_return_pc(Register pc = Z_R14);
438 inline void restore_return_pc();
439
440 // Get current PC.
441 address get_PC(Register result);
442
443 // Get current PC + offset. Offset given in bytes, must be even!
444 address get_PC(Register result, int64_t offset);
445
446 // Accessing, and in particular modifying, a stack location is only safe if
447 // the stack pointer (Z_SP) is set such that the accessed stack location is
448 // in the reserved range.
449 //
450 // From a performance point of view, it is desirable not to change the SP
451 // first and then immediately use it to access the freshly reserved space.
452 // That opens a small gap, though. If, just after storing some value (the
453 // frame pointer) into the to-be-reserved space, an interrupt is caught,
454 // the handler might use the space beyond Z_SP for its own purpose.
455 // If that happens, the stored value might get altered.
456
457 // Resize current frame either relatively wrt current SP or absolute.
458 void resize_frame_sub(Register offset, Register fp, bool load_fp=true);
459 void resize_frame_abs_with_offset(Register newSP, Register fp, int offset, bool load_fp);
460 void resize_frame_absolute(Register addr, Register fp, bool load_fp);
461 void resize_frame(RegisterOrConstant offset, Register fp, bool load_fp=true);
462
463 // Push a frame of size bytes, if copy_sp is false, old_sp must already
464 // contain a copy of Z_SP.
465 void push_frame(Register bytes, Register old_sp, bool copy_sp = true, bool bytes_with_inverted_sign = false);
623 mask_profiling = 0xba // 186 (dec)
624 };
625
626 // Read from the polling page.
627 void load_from_polling_page(Register polling_page_address, int64_t offset = 0);
628
629 // Check if given instruction is a read from the polling page
630 // as emitted by load_from_polling_page.
631 static bool is_load_from_polling_page(address instr_loc);
632 // Extract poll address from instruction and ucontext.
633 static address get_poll_address(address instr_loc, void* ucontext);
634 // Extract poll register from instruction.
635 static uint get_poll_register(address instr_loc);
636
637 // Check if instruction is a write access to the memory serialization page
638 // realized by one of the instructions stw, stwu, stwx, or stwux.
// NOTE(review): stw/stwu/stwx/stwux are PPC mnemonics, not s390 — this comment
// looks copied from the PPC port; verify against the s390 implementation.
639 static bool is_memory_serialization(int instruction, JavaThread* thread, void* ucontext);
640
641 // Support for serializing memory accesses between threads.
642 void serialize_memory(Register thread, Register tmp1, Register tmp2);
643
644 // Stack overflow checking
645 void bang_stack_with_offset(int offset);
646
647 // Check for reserved stack access in method being exited. If the reserved
648 // stack area was accessed, protect it again and throw StackOverflowError.
649 // Uses Z_R1.
650 void reserved_stack_check(Register return_pc);
651
652 // Atomics
653 // -- none?
654
655 void tlab_allocate(Register obj, // Result: pointer to object after successful allocation
656 Register var_size_in_bytes, // Object size in bytes if unknown at compile time; invalid otherwise.
657 int con_size_in_bytes, // Object size in bytes if known at compile time.
658 Register t1, // temp register
659 Label& slow_case); // Continuation point if fast allocation fails.
660
661 // Emitter for interface method lookup.
662 // input: recv_klass, intf_klass, itable_index
|
243 void translate_ot(Register dst, Register src, uint mask);
244 void translate_to(Register dst, Register src, uint mask);
245 void translate_tt(Register dst, Register src, uint mask);
246
247 // Crypto instructions.
248 // Being interruptible, these instructions need a retry-loop.
249 void cksm(Register crcBuff, Register srcBuff);
250 void km( Register dstBuff, Register srcBuff);
251 void kmc(Register dstBuff, Register srcBuff);
252 void kimd(Register srcBuff);
253 void klmd(Register srcBuff);
254 void kmac(Register srcBuff);
255
256 // nop padding
257 void align(int modulus);
258 void align_address(int modulus);
259
260 //
261 // Constants, loading constants, TOC support
262 //
263
264 // Load generic address: d <- base(a) + index(a) + disp(a).
265 inline void load_address(Register d, const Address &a);
266 // Load absolute address (and try to optimize).
267 void load_absolute_address(Register d, address addr);
268
269 // Address of Z_ARG1 and argument_offset.
270 // If temp_reg == arg_slot, arg_slot will be overwritten.
271 Address argument_address(RegisterOrConstant arg_slot,
272 Register temp_reg = noreg,
273 int64_t extra_slot_offset = 0);
274
275 // Load a narrow ptr constant (oop or klass ptr).
276 void load_narrow_oop( Register t, narrowOop a);
277 void load_narrow_klass(Register t, Klass* k);
278
279 static bool is_load_const_32to64(address pos);
// A narrow oop constant and a narrow klass constant are both materialized
// with the same 32-to-64-bit load-constant instruction sequence, so the two
// predicates below simply delegate to is_load_const_32to64.
280 static bool is_load_narrow_oop(address pos) { return is_load_const_32to64(pos); }
281 static bool is_load_narrow_klass(address pos) { return is_load_const_32to64(pos); }
282
424 Register r2,
425 Assembler::branch_condition cond,
426 Label& branch_target,
427 bool len64,
428 bool has_sign);
429
430 //
431 // Support for frame handling
432 //
433 // Specify the register that should be stored as the return pc in the
434 // current frame (default is R14).
435 inline void save_return_pc(Register pc = Z_R14);
436 inline void restore_return_pc();
437
438 // Get current PC.
439 address get_PC(Register result);
440
441 // Get current PC + offset. Offset given in bytes, must be even!
442 address get_PC(Register result, int64_t offset);
443
444 // Replace pc of valid code by next pc.
445 void instr_size(Register size, Register pc);
446
447 // Accessing, and in particular modifying, a stack location is only safe if
448 // the stack pointer (Z_SP) is set such that the accessed stack location is
449 // in the reserved range.
450 //
451 // From a performance point of view, it is desirable not to change the SP
452 // first and then immediately use it to access the freshly reserved space.
453 // That opens a small gap, though. If, just after storing some value (the
454 // frame pointer) into the to-be-reserved space, an interrupt is caught,
455 // the handler might use the space beyond Z_SP for its own purpose.
456 // If that happens, the stored value might get altered.
457
458 // Resize current frame either relatively wrt current SP or absolute.
459 void resize_frame_sub(Register offset, Register fp, bool load_fp=true);
460 void resize_frame_abs_with_offset(Register newSP, Register fp, int offset, bool load_fp);
461 void resize_frame_absolute(Register addr, Register fp, bool load_fp);
462 void resize_frame(RegisterOrConstant offset, Register fp, bool load_fp=true);
463
464 // Push a frame of size bytes, if copy_sp is false, old_sp must already
465 // contain a copy of Z_SP.
466 void push_frame(Register bytes, Register old_sp, bool copy_sp = true, bool bytes_with_inverted_sign = false);
624 mask_profiling = 0xba // 186 (dec)
625 };
626
627 // Read from the polling page.
628 void load_from_polling_page(Register polling_page_address, int64_t offset = 0);
629
630 // Check if given instruction is a read from the polling page
631 // as emitted by load_from_polling_page.
632 static bool is_load_from_polling_page(address instr_loc);
633 // Extract poll address from instruction and ucontext.
634 static address get_poll_address(address instr_loc, void* ucontext);
635 // Extract poll register from instruction.
636 static uint get_poll_register(address instr_loc);
637
638 // Check if instruction is a write access to the memory serialization page
639 // realized by one of the instructions stw, stwu, stwx, or stwux.
// NOTE(review): stw/stwu/stwx/stwux are PPC mnemonics, not s390 — this comment
// looks copied from the PPC port; verify against the s390 implementation.
640 static bool is_memory_serialization(int instruction, JavaThread* thread, void* ucontext);
641
642 // Support for serializing memory accesses between threads.
643 void serialize_memory(Register thread, Register tmp1, Register tmp2);
644
645 void safepoint_poll(Label& slow_path, Register temp_reg);
646
647 // Stack overflow checking
648 void bang_stack_with_offset(int offset);
649
650 // Check for reserved stack access in method being exited. If the reserved
651 // stack area was accessed, protect it again and throw StackOverflowError.
652 // Uses Z_R1.
653 void reserved_stack_check(Register return_pc);
654
655 // Atomics
656 // -- none?
657
658 void tlab_allocate(Register obj, // Result: pointer to object after successful allocation
659 Register var_size_in_bytes, // Object size in bytes if unknown at compile time; invalid otherwise.
660 int con_size_in_bytes, // Object size in bytes if known at compile time.
661 Register t1, // temp register
662 Label& slow_case); // Continuation point if fast allocation fails.
663
664 // Emitter for interface method lookup.
665 // input: recv_klass, intf_klass, itable_index
|