22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/assembler.hpp"
27 #include "c1/c1_Defs.hpp"
28 #include "c1/c1_MacroAssembler.hpp"
29 #include "c1/c1_Runtime1.hpp"
30 #include "interpreter/interpreter.hpp"
31 #include "nativeInst_x86.hpp"
32 #include "oops/compiledICHolder.hpp"
33 #include "oops/oop.inline.hpp"
34 #include "prims/jvmtiExport.hpp"
35 #include "register_x86.hpp"
36 #include "runtime/sharedRuntime.hpp"
37 #include "runtime/signature.hpp"
38 #include "runtime/vframeArray.hpp"
39 #include "utilities/macros.hpp"
40 #include "vmreg_x86.inline.hpp"
41 #if INCLUDE_ALL_GCS
42 #include "gc/g1/g1SATBCardTableModRefBS.hpp"
43 #endif
44
45
46 // Implementation of StubAssembler
47
48 int StubAssembler::call_RT(Register oop_result1, Register metadata_result, address entry, int args_size) {
49 // setup registers
50 const Register thread = NOT_LP64(rdi) LP64_ONLY(r15_thread); // is callee-saved register (Visual C++ calling conventions)
51 assert(!(oop_result1->is_valid() || metadata_result->is_valid()) || oop_result1 != metadata_result, "registers must be different");
52 assert(oop_result1 != thread && metadata_result != thread, "registers must be different");
53 assert(args_size >= 0, "illegal args_size");
54 bool align_stack = false;
55 #ifdef _LP64
56 // At a method handle call, the stack may not be properly aligned
57 // when returning with an exception.
58 align_stack = (stub_id() == Runtime1::handle_exception_from_callee_id);
59 #endif
60
61 #ifdef _LP64
1640
1641 __ bind(return0);
1642 __ fpop();
1643 #ifndef _LP64
1644 __ xorptr(rdx,rdx);
1645 __ xorptr(rax,rax);
1646 #else
1647 __ xorptr(rax, rax);
1648 #endif // _LP64
1649
1650 __ bind(do_return);
1651 __ addptr(rsp, 32);
1652 LP64_ONLY(__ pop(rdx);)
1653 __ pop(rcx);
1654 __ pop(rsi);
1655 __ ret(0);
1656 }
1657 break;
1658
1659 #if INCLUDE_ALL_GCS
1660 case g1_pre_barrier_slow_id:
1661 {
// Slow-path stub for the G1 SATB (snapshot-at-the-beginning) pre-write
// barrier: records the previous value of a field before it is overwritten.
1662 StubFrame f(sasm, "g1_pre_barrier", dont_gc_arguments);
1663 // arg0 : previous value of memory
1664
// This stub is only meaningful under G1's SATB card-table barrier set;
// for any other collector, trap through unimplemented_entry.
1665 BarrierSet* bs = Universe::heap()->barrier_set();
1666 if (bs->kind() != BarrierSet::G1SATBCTLogging) {
1667 __ movptr(rax, (int)id);
1668 __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), rax);
1669 __ should_not_reach_here();
1670 break;
1671 }
// Save the two scratch registers used below (pre_val/rax, tmp/rdx);
// they are restored before the stub returns.
1672 __ push(rax);
1673 __ push(rdx);
1674
1675 const Register pre_val = rax;
// On 32-bit the thread pointer must be loaded into a register; on LP64
// it is kept in r15 (see the thread-register convention used by call_RT).
1676 const Register thread = NOT_LP64(rax) LP64_ONLY(r15_thread);
1677 const Register tmp = rdx;
1678
1679 NOT_LP64(__ get_thread(thread);)
1680
// Per-thread SATB mark queue: current index and buffer base, addressed
// off the thread register.
1681 Address queue_index(thread, in_bytes(JavaThread::satb_mark_queue_offset() +
1682 PtrQueue::byte_offset_of_index()));
1683 Address buffer(thread, in_bytes(JavaThread::satb_mark_queue_offset() +
1684 PtrQueue::byte_offset_of_buf()));
1685
1686 Label done;
// NOTE(review): original lines 1687-1723 are elided from this excerpt;
// presumably they hold the queue-enqueue fast path and the runtime-call
// slow path (plus the pushes matched by the pops below) -- confirm
// against the full file.
1724 __ pop(r10);
1725 __ pop(r9);
1726 __ pop(r8);
1727 #endif
1728 __ pop(rcx);
1729 __ bind(done);
1730
// Restore the scratch registers saved on entry.
1731 __ pop(rdx);
1732 __ pop(rax);
1733 }
1734 break;
1735
1736 case g1_post_barrier_slow_id:
1737 {
1738 StubFrame f(sasm, "g1_post_barrier", dont_gc_arguments);
1739
1740
1741 // arg0: store_address
1742 Address store_addr(rbp, 2*BytesPerWord);
1743
1744 CardTableModRefBS* ct =
1745 barrier_set_cast<CardTableModRefBS>(Universe::heap()->barrier_set());
1746 assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
1747
1748 Label done;
1749 Label enqueued;
1750 Label runtime;
1751
1752 // At this point we know new_value is non-NULL and the new_value crosses regions.
1753 // Must check to see if card is already dirty
1754
1755 const Register thread = NOT_LP64(rax) LP64_ONLY(r15_thread);
1756
1757 Address queue_index(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
1758 PtrQueue::byte_offset_of_index()));
1759 Address buffer(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
1760 PtrQueue::byte_offset_of_buf()));
1761
1762 __ push(rax);
1763 __ push(rcx);
1764
1765 const Register cardtable = rax;
|
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/assembler.hpp"
27 #include "c1/c1_Defs.hpp"
28 #include "c1/c1_MacroAssembler.hpp"
29 #include "c1/c1_Runtime1.hpp"
30 #include "interpreter/interpreter.hpp"
31 #include "nativeInst_x86.hpp"
32 #include "oops/compiledICHolder.hpp"
33 #include "oops/oop.inline.hpp"
34 #include "prims/jvmtiExport.hpp"
35 #include "register_x86.hpp"
36 #include "runtime/sharedRuntime.hpp"
37 #include "runtime/signature.hpp"
38 #include "runtime/vframeArray.hpp"
39 #include "utilities/macros.hpp"
40 #include "vmreg_x86.inline.hpp"
41 #if INCLUDE_ALL_GCS
42 #include "gc/shenandoah/shenandoahBarrierSet.hpp"
43 #include "gc/g1/g1SATBCardTableModRefBS.hpp"
44 #endif
45
46
47 // Implementation of StubAssembler
48
49 int StubAssembler::call_RT(Register oop_result1, Register metadata_result, address entry, int args_size) {
50 // setup registers
51 const Register thread = NOT_LP64(rdi) LP64_ONLY(r15_thread); // is callee-saved register (Visual C++ calling conventions)
52 assert(!(oop_result1->is_valid() || metadata_result->is_valid()) || oop_result1 != metadata_result, "registers must be different");
53 assert(oop_result1 != thread && metadata_result != thread, "registers must be different");
54 assert(args_size >= 0, "illegal args_size");
55 bool align_stack = false;
56 #ifdef _LP64
57 // At a method handle call, the stack may not be properly aligned
58 // when returning with an exception.
59 align_stack = (stub_id() == Runtime1::handle_exception_from_callee_id);
60 #endif
61
62 #ifdef _LP64
1641
1642 __ bind(return0);
1643 __ fpop();
1644 #ifndef _LP64
1645 __ xorptr(rdx,rdx);
1646 __ xorptr(rax,rax);
1647 #else
1648 __ xorptr(rax, rax);
1649 #endif // _LP64
1650
1651 __ bind(do_return);
1652 __ addptr(rsp, 32);
1653 LP64_ONLY(__ pop(rdx);)
1654 __ pop(rcx);
1655 __ pop(rsi);
1656 __ ret(0);
1657 }
1658 break;
1659
1660 #if INCLUDE_ALL_GCS
1661 case shenandoah_write_barrier_slow_id:
1662 {
// Slow-path stub for the Shenandoah write barrier: reached from
// C1-compiled code when the inline fast path cannot handle the object.
1663 StubFrame f(sasm, "shenandoah_write_barrier", dont_gc_arguments);
1664
// Preserve all live registers, call the runtime leaf with the object in
// rax, and restore everything except rax, which carries the result oop.
1665 save_live_registers(sasm, 1);
// NOTE(review): r15_thread is used unconditionally here, unlike the
// NOT_LP64(...) LP64_ONLY(...) pattern used elsewhere in this file --
// confirm this stub is LP64-only or guard it accordingly.
1666 __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahBarrierSet::write_barrier_c1), r15_thread, rax);
1667 restore_live_registers_except_rax(sasm);
// Sanity-check that rax now holds a well-formed oop (debug builds).
1668 __ verify_oop(rax);
1669
1670 }
1671 break;
1672 case g1_pre_barrier_slow_id:
1673 {
// Slow-path stub for the SATB (snapshot-at-the-beginning) pre-write
// barrier: records the previous value of a field before it is overwritten.
1674 StubFrame f(sasm, "g1_pre_barrier", dont_gc_arguments);
1675 // arg0 : previous value of memory
1676
// Accept both G1 and Shenandoah barrier sets -- NOTE(review): Shenandoah
// is admitted here, presumably because it shares the per-thread SATB
// queue machinery used below; confirm against ShenandoahBarrierSet.
// Any other collector traps through unimplemented_entry.
1677 BarrierSet* bs = Universe::heap()->barrier_set();
1678 if (bs->kind() != BarrierSet::G1SATBCTLogging && bs->kind() != BarrierSet::ShenandoahBarrierSet) {
1679 __ movptr(rax, (int)id);
1680 __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), rax);
1681 __ should_not_reach_here();
1682 break;
1683 }
// Save the two scratch registers used below (pre_val/rax, tmp/rdx);
// they are restored before the stub returns.
1684 __ push(rax);
1685 __ push(rdx);
1686
1687 const Register pre_val = rax;
// On 32-bit the thread pointer must be loaded into a register; on LP64
// it is kept in r15 (see the thread-register convention used by call_RT).
1688 const Register thread = NOT_LP64(rax) LP64_ONLY(r15_thread);
1689 const Register tmp = rdx;
1690
1691 NOT_LP64(__ get_thread(thread);)
1692
// Per-thread SATB mark queue: current index and buffer base, addressed
// off the thread register.
1693 Address queue_index(thread, in_bytes(JavaThread::satb_mark_queue_offset() +
1694 PtrQueue::byte_offset_of_index()));
1695 Address buffer(thread, in_bytes(JavaThread::satb_mark_queue_offset() +
1696 PtrQueue::byte_offset_of_buf()));
1697
1698 Label done;
// NOTE(review): original lines 1699-1735 are elided from this excerpt;
// presumably they hold the queue-enqueue fast path and the runtime-call
// slow path (plus the pushes matched by the pops below) -- confirm
// against the full file.
1736 __ pop(r10);
1737 __ pop(r9);
1738 __ pop(r8);
1739 #endif
1740 __ pop(rcx);
1741 __ bind(done);
1742
// Restore the scratch registers saved on entry.
1743 __ pop(rdx);
1744 __ pop(rax);
1745 }
1746 break;
1747
1748 case g1_post_barrier_slow_id:
1749 {
1750 StubFrame f(sasm, "g1_post_barrier", dont_gc_arguments);
1751
1752
1753 // arg0: store_address
1754 Address store_addr(rbp, 2*BytesPerWord);
1755
1756 BarrierSet* bs = Universe::heap()->barrier_set();
1757 if (bs->kind() == BarrierSet::ShenandoahBarrierSet) {
1758 __ movptr(rax, (int)id);
1759 __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), rax);
1760 __ should_not_reach_here();
1761 break;
1762 }
1763 CardTableModRefBS* ct =
1764 barrier_set_cast<CardTableModRefBS>(bs);
1765 assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
1766
1767 Label done;
1768 Label enqueued;
1769 Label runtime;
1770
1771 // At this point we know new_value is non-NULL and the new_value crosses regions.
1772 // Must check to see if card is already dirty
1773
1774 const Register thread = NOT_LP64(rax) LP64_ONLY(r15_thread);
1775
1776 Address queue_index(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
1777 PtrQueue::byte_offset_of_index()));
1778 Address buffer(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
1779 PtrQueue::byte_offset_of_buf()));
1780
1781 __ push(rax);
1782 __ push(rcx);
1783
1784 const Register cardtable = rax;
|