22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/assembler.hpp"
27 #include "c1/c1_Defs.hpp"
28 #include "c1/c1_MacroAssembler.hpp"
29 #include "c1/c1_Runtime1.hpp"
30 #include "interpreter/interpreter.hpp"
31 #include "nativeInst_x86.hpp"
32 #include "oops/compiledICHolder.hpp"
33 #include "oops/oop.inline.hpp"
34 #include "prims/jvmtiExport.hpp"
35 #include "register_x86.hpp"
36 #include "runtime/sharedRuntime.hpp"
37 #include "runtime/signature.hpp"
38 #include "runtime/vframeArray.hpp"
39 #include "utilities/macros.hpp"
40 #include "vmreg_x86.inline.hpp"
41 #if INCLUDE_ALL_GCS
42 #include "gc/g1/g1SATBCardTableModRefBS.hpp"
43 #endif
44
45
46 // Implementation of StubAssembler
47
// Emit a call from C1 stub code into the VM runtime at 'entry', passing
// 'args_size' outgoing arguments, and move any returned oop / Metadata*
// into oop_result1 / metadata_result (callers pass noreg to skip one).
// NOTE(review): only the prologue is visible in this excerpt; the int
// return is presumably the pc-offset of the emitted call — confirm
// against the full source before relying on it.
48 int StubAssembler::call_RT(Register oop_result1, Register metadata_result, address entry, int args_size) {
49 // setup registers
// Thread register: rdi on 32-bit, the dedicated r15_thread on 64-bit.
50 const Register thread = NOT_LP64(rdi) LP64_ONLY(r15_thread); // is callee-saved register (Visual C++ calling conventions)
// Result registers must not alias each other or the thread register.
51 assert(!(oop_result1->is_valid() || metadata_result->is_valid()) || oop_result1 != metadata_result, "registers must be different");
52 assert(oop_result1 != thread && metadata_result != thread, "registers must be different");
53 assert(args_size >= 0, "illegal args_size");
54 bool align_stack = false;
55 #ifdef _LP64
56 // At a method handle call, the stack may not be properly aligned
57 // when returning with an exception.
// 64-bit only: force re-alignment of rsp for the
// handle_exception_from_callee stub, where the incoming stack may be
// misaligned (see comment above).
58 align_stack = (stub_id() == Runtime1::handle_exception_from_callee_id);
59 #endif
60
61 #ifdef _LP64
1640
// Zero-result path: discard the x87 stack value and zero the integer
// result registers (rdx:rax carries a 64-bit result on 32-bit; rax
// alone suffices on 64-bit).
1641 __ bind(return0);
1642 __ fpop();
1643 #ifndef _LP64
1644 __ xorptr(rdx,rdx);
1645 __ xorptr(rax,rax);
1646 #else
1647 __ xorptr(rax, rax);
1648 #endif // _LP64
1649
// Common epilogue: release the 32-byte scratch area allocated by this
// stub, restore the registers saved on entry (rdx saved only on
// 64-bit, per the LP64_ONLY push that must mirror this pop), and return.
1650 __ bind(do_return);
1651 __ addptr(rsp, 32);
1652 LP64_ONLY(__ pop(rdx);)
1653 __ pop(rcx);
1654 __ pop(rsi);
1655 __ ret(0);
1656 }
1657 break;
1658
1659 #if INCLUDE_ALL_GCS
// G1 SATB pre-barrier slow path: record the previous value of a field
// (arg0, already in pre_val/rax) in the thread-local SATB mark queue so
// the concurrent marker observes it.
1660 case g1_pre_barrier_slow_id:
1661 {
1662 StubFrame f(sasm, "g1_pre_barrier", dont_gc_arguments);
1663 // arg0 : previous value of memory
1664
// Guard: this stub only makes sense under G1's SATB card-table
// logging barrier set; any other collector traps into
// unimplemented_entry with the stub id as the argument.
1665 BarrierSet* bs = Universe::heap()->barrier_set();
1666 if (bs->kind() != BarrierSet::G1SATBCTLogging) {
1667 __ movptr(rax, (int)id);
1668 __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), rax);
1669 __ should_not_reach_here();
1670 break;
1671 }
// rax/rdx are clobbered as pre_val/tmp below; preserve them and
// restore at the end of this case (the matching pops follow 'done').
1672 __ push(rax);
1673 __ push(rdx);
1674
1675 const Register pre_val = rax;
// On 32-bit the thread is materialized into rax via get_thread();
// on 64-bit it lives permanently in r15_thread.
1676 const Register thread = NOT_LP64(rax) LP64_ONLY(r15_thread);
1677 const Register tmp = rdx;
1678
1679 NOT_LP64(__ get_thread(thread);)
1680
// Thread-local SATB queue state: current fill index and buffer base.
1681 Address queue_index(thread, in_bytes(JavaThread::satb_mark_queue_offset() +
1682 PtrQueue::byte_offset_of_index()));
1683 Address buffer(thread, in_bytes(JavaThread::satb_mark_queue_offset() +
1684 PtrQueue::byte_offset_of_buf()));
1685
1686 Label done;
// Runtime-call cleanup (the call itself is outside this excerpt):
// restore the caller-saved registers pushed around it; r8-r10 exist
// only on 64-bit, hence the #endif closing an LP64-only region.
1724 __ pop(r10);
1725 __ pop(r9);
1726 __ pop(r8);
1727 #endif
1728 __ pop(rcx);
1729 __ bind(done);
1730
// Undo the rax/rdx saves from the top of this case.
1731 __ pop(rdx);
1732 __ pop(rax);
1733 }
1734 break;
1735
// G1 post-barrier slow path: mark the card covering store_address
// dirty and enqueue it on the thread-local dirty-card queue for the
// concurrent refinement threads.
1736 case g1_post_barrier_slow_id:
1737 {
1738 StubFrame f(sasm, "g1_post_barrier", dont_gc_arguments);
1739
1740
1741 // arg0: store_address
// Incoming argument is read from the caller's frame (rbp-relative).
1742 Address store_addr(rbp, 2*BytesPerWord);
1743
1744 CardTableModRefBS* ct =
1745 barrier_set_cast<CardTableModRefBS>(Universe::heap()->barrier_set());
// The card-marking code below writes single bytes into the card table.
1746 assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
1747
1748 Label done;
1749 Label enqueued;
1750 Label runtime;
1751
1752 // At this point we know new_value is non-NULL and the new_value crosses regions.
1753 // Must check to see if card is already dirty
1754
1755 const Register thread = NOT_LP64(rax) LP64_ONLY(r15_thread);
1756
// Thread-local dirty-card queue state: current fill index and buffer base.
1757 Address queue_index(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
1758 PtrQueue::byte_offset_of_index()));
1759 Address buffer(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
1760 PtrQueue::byte_offset_of_buf()));
1761
// rax/rcx are used as scratch (cardtable, and presumably the card
// index) below; save them first.
1762 __ push(rax);
1763 __ push(rcx);
1764
1765 const Register cardtable = rax;
|
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/assembler.hpp"
27 #include "c1/c1_Defs.hpp"
28 #include "c1/c1_MacroAssembler.hpp"
29 #include "c1/c1_Runtime1.hpp"
30 #include "interpreter/interpreter.hpp"
31 #include "nativeInst_x86.hpp"
32 #include "oops/compiledICHolder.hpp"
33 #include "oops/oop.inline.hpp"
34 #include "prims/jvmtiExport.hpp"
35 #include "register_x86.hpp"
36 #include "runtime/sharedRuntime.hpp"
37 #include "runtime/signature.hpp"
38 #include "runtime/vframeArray.hpp"
39 #include "utilities/macros.hpp"
40 #include "vmreg_x86.inline.hpp"
41 #if INCLUDE_ALL_GCS
42 #include "gc/shenandoah/shenandoahBarrierSet.hpp"
43 #include "gc/shenandoah/shenandoahHeap.hpp"
44 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
45 #include "gc/g1/g1SATBCardTableModRefBS.hpp"
46 #endif
47
48
49 // Implementation of StubAssembler
50
// Emit a call from C1 stub code into the VM runtime at 'entry', passing
// 'args_size' outgoing arguments, and move any returned oop / Metadata*
// into oop_result1 / metadata_result (callers pass noreg to skip one).
// NOTE(review): only the prologue is visible in this excerpt; the int
// return is presumably the pc-offset of the emitted call — confirm
// against the full source before relying on it.
51 int StubAssembler::call_RT(Register oop_result1, Register metadata_result, address entry, int args_size) {
52 // setup registers
// Thread register: rdi on 32-bit, the dedicated r15_thread on 64-bit.
53 const Register thread = NOT_LP64(rdi) LP64_ONLY(r15_thread); // is callee-saved register (Visual C++ calling conventions)
// Result registers must not alias each other or the thread register.
54 assert(!(oop_result1->is_valid() || metadata_result->is_valid()) || oop_result1 != metadata_result, "registers must be different");
55 assert(oop_result1 != thread && metadata_result != thread, "registers must be different");
56 assert(args_size >= 0, "illegal args_size");
57 bool align_stack = false;
58 #ifdef _LP64
59 // At a method handle call, the stack may not be properly aligned
60 // when returning with an exception.
// 64-bit only: force re-alignment of rsp for the
// handle_exception_from_callee stub, where the incoming stack may be
// misaligned (see comment above).
61 align_stack = (stub_id() == Runtime1::handle_exception_from_callee_id);
62 #endif
63
64 #ifdef _LP64
1643
// Zero-result path: discard the x87 stack value and zero the integer
// result registers (rdx:rax carries a 64-bit result on 32-bit; rax
// alone suffices on 64-bit).
1644 __ bind(return0);
1645 __ fpop();
1646 #ifndef _LP64
1647 __ xorptr(rdx,rdx);
1648 __ xorptr(rax,rax);
1649 #else
1650 __ xorptr(rax, rax);
1651 #endif // _LP64
1652
// Common epilogue: release the 32-byte scratch area allocated by this
// stub, restore the registers saved on entry (rdx saved only on
// 64-bit, per the LP64_ONLY push that must mirror this pop), and return.
1653 __ bind(do_return);
1654 __ addptr(rsp, 32);
1655 LP64_ONLY(__ pop(rdx);)
1656 __ pop(rcx);
1657 __ pop(rsi);
1658 __ ret(0);
1659 }
1660 break;
1661
1662 #if INCLUDE_ALL_GCS
// Shenandoah write-barrier slow path: call into the runtime to resolve
// (and possibly copy/evacuate) the object whose oop is in rax; the
// resulting oop comes back in rax while all other live registers are
// saved across the call and restored afterwards.
1663 case shenandoah_write_barrier_slow_id:
1664 {
1665 StubFrame f(sasm, "shenandoah_write_barrier", dont_gc_arguments);
1666
1667 save_live_registers(sasm, 1);
// Leaf call (no safepoint transition): arguments are the current
// thread and the oop in rax. NOTE(review): r15_thread is 64-bit only,
// so this case presumably compiles for LP64 builds only — confirm.
1668 __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahBarrierSet::resolve_and_maybe_copy_oop_c1), r15_thread, rax);
1669 restore_live_registers_except_rax(sasm);
// Sanity-check the returned oop in debug builds.
1670 __ verify_oop(rax);
1671
1672 }
1673 break;
// SATB pre-barrier slow path: record the previous value of a field
// (arg0, already in pre_val/rax) in the thread-local SATB mark queue so
// the concurrent marker observes it. Shenandoah shares this SATB
// pre-barrier with G1, hence the widened guard below.
1674 case g1_pre_barrier_slow_id:
1675 {
1676 StubFrame f(sasm, "g1_pre_barrier", dont_gc_arguments);
1677 // arg0 : previous value of memory
1678
// Guard: only G1's SATB logging barrier set and Shenandoah's barrier
// set use this stub; any other collector traps into
// unimplemented_entry with the stub id as the argument.
1679 BarrierSet* bs = Universe::heap()->barrier_set();
1680 if (bs->kind() != BarrierSet::G1SATBCTLogging && bs->kind() != BarrierSet::ShenandoahBarrierSet) {
1681 __ movptr(rax, (int)id);
1682 __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), rax);
1683 __ should_not_reach_here();
1684 break;
1685 }
// rax/rdx are clobbered as pre_val/tmp below; preserve them and
// restore at the end of this case (the matching pops follow 'done').
1686 __ push(rax);
1687 __ push(rdx);
1688
1689 const Register pre_val = rax;
// On 32-bit the thread is materialized into rax via get_thread();
// on 64-bit it lives permanently in r15_thread.
1690 const Register thread = NOT_LP64(rax) LP64_ONLY(r15_thread);
1691 const Register tmp = rdx;
1692
1693 NOT_LP64(__ get_thread(thread);)
1694
// Thread-local SATB queue state: current fill index and buffer base.
1695 Address queue_index(thread, in_bytes(JavaThread::satb_mark_queue_offset() +
1696 PtrQueue::byte_offset_of_index()));
1697 Address buffer(thread, in_bytes(JavaThread::satb_mark_queue_offset() +
1698 PtrQueue::byte_offset_of_buf()));
1699
1700 Label done;
// Runtime-call cleanup (the call itself is outside this excerpt):
// restore the caller-saved registers pushed around it; r8-r10 exist
// only on 64-bit, hence the #endif closing an LP64-only region.
1738 __ pop(r10);
1739 __ pop(r9);
1740 __ pop(r8);
1741 #endif
1742 __ pop(rcx);
1743 __ bind(done);
1744
// Undo the rax/rdx saves from the top of this case.
1745 __ pop(rdx);
1746 __ pop(rax);
1747 }
1748 break;
1749
// G1 post-barrier slow path: mark the card covering store_address
// dirty and enqueue it on the thread-local dirty-card queue for the
// concurrent refinement threads.
1750 case g1_post_barrier_slow_id:
1751 {
1752 StubFrame f(sasm, "g1_post_barrier", dont_gc_arguments);
1753
1754
1755 // arg0: store_address
// Incoming argument is read from the caller's frame (rbp-relative).
1756 Address store_addr(rbp, 2*BytesPerWord);
1757
// Shenandoah has no card-table post-barrier: if this stub is ever
// reached under Shenandoah, trap into unimplemented_entry instead of
// casting its barrier set to CardTableModRefBS below.
1758 BarrierSet* bs = Universe::heap()->barrier_set();
1759 if (bs->kind() == BarrierSet::ShenandoahBarrierSet) {
1760 __ movptr(rax, (int)id);
1761 __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), rax);
1762 __ should_not_reach_here();
1763 break;
1764 }
1765 CardTableModRefBS* ct =
1766 barrier_set_cast<CardTableModRefBS>(bs);
// The card-marking code below writes single bytes into the card table.
1767 assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
1768
1769 Label done;
1770 Label enqueued;
1771 Label runtime;
1772
1773 // At this point we know new_value is non-NULL and the new_value crosses regions.
1774 // Must check to see if card is already dirty
1775
1776 const Register thread = NOT_LP64(rax) LP64_ONLY(r15_thread);
1777
// Thread-local dirty-card queue state: current fill index and buffer base.
1778 Address queue_index(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
1779 PtrQueue::byte_offset_of_index()));
1780 Address buffer(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
1781 PtrQueue::byte_offset_of_buf()));
1782
// rax/rcx are used as scratch (cardtable, and presumably the card
// index) below; save them first.
1783 __ push(rax);
1784 __ push(rcx);
1785
1786 const Register cardtable = rax;
|