30
31 source %{
32
// Emit the ZGC load-barrier fast path for the reference just loaded into
// 'ref'. Creates an out-of-line C2 stub, tests the reference against the
// per-thread address bad mask, and branches to the stub when any bad bit
// is set; the stub resumes execution at the continuation label bound here.
// 'tmp' is clobbered (written by both the mask load and the AND below).
33 static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, bool weak) {
34 ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, weak); // out-of-line slow path
35 __ ldr(tmp, Address(rthread, ZThreadLocalData::address_bad_mask_offset())); // per-thread bad mask
36 __ andr(tmp, tmp, ref); // isolate bad bits of the loaded reference
37 __ cbnz(tmp, *stub->entry()); // any bad bit set -> take slow path
38 __ bind(*stub->continuation()); // slow path jumps back here
39 }
40
// Emit an unconditional (no fast-path mask test) load barrier: creates a
// strong (weak == false) stub and branches straight to its entry, then binds
// the continuation label the stub returns to. Callers of this helper are
// outside this chunk, so when it is used cannot be stated here.
41 static void z_load_barrier_slow_path(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp) {
42 ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, false /* weak */);
43 __ b(*stub->entry()); // always take the slow path
44 __ bind(*stub->continuation());
45 }
46
47 %}
48
49 // Load Pointer
// Matches a non-acquiring oop load under ZGC whose barrier data is
// ZLoadBarrierStrong. 'dst' is declared TEMP because it doubles as the
// barrier's reference register; 'cr' is declared killed by the effect below.
50 instruct zLoadP(iRegPNoSp dst, memory mem, rFlagsReg cr)
51 %{
52 match(Set dst (LoadP mem));
53 predicate(UseZGC && !needs_acquiring_load(n) && (n->as_Load()->barrier_data() == ZLoadBarrierStrong));
54 effect(TEMP dst, KILL cr);
55
56 ins_cost(4 * INSN_COST);
57
58 format %{ "ldr $dst, $mem" %}
59
60 ins_encode %{
61 const Address ref_addr = mem2address($mem->opcode(), as_Register($mem$$base), $mem$$index, $mem$$scale, $mem$$disp);
62 __ ldr($dst$$Register, ref_addr); // plain load of the reference
// NOTE(review): the predicate requires ZLoadBarrierStrong, yet elision is
// re-checked here — presumably barrier_data() can become ZLoadBarrierElided
// after matching; confirm against the ZBarrierSetC2 analysis phase.
63 if (barrier_data() != ZLoadBarrierElided) {
64 z_load_barrier(_masm, this, ref_addr, $dst$$Register, rscratch2 /* tmp */, false /* weak */);
65 }
66 %}
67
68 ins_pipe(iload_reg_mem);
69 %}
70
71 // Load Weak Pointer
// Matches a non-acquiring oop load under ZGC whose barrier data is
// ZLoadBarrierWeak. Unlike zLoadP there is no elision check: the weak
// barrier is emitted unconditionally after the load.
72 instruct zLoadWeakP(iRegPNoSp dst, memory mem, rFlagsReg cr)
73 %{
74 match(Set dst (LoadP mem));
75 predicate(UseZGC && !needs_acquiring_load(n) && (n->as_Load()->barrier_data() == ZLoadBarrierWeak));
76 effect(TEMP dst, KILL cr);
77
78 ins_cost(4 * INSN_COST);
79
80 format %{ "ldr $dst, $mem" %}
81
82 ins_encode %{
83 const Address ref_addr = mem2address($mem->opcode(), as_Register($mem$$base), $mem$$index, $mem$$scale, $mem$$disp);
84 __ ldr($dst$$Register, ref_addr); // plain load of the reference
85 z_load_barrier(_masm, this, ref_addr, $dst$$Register, rscratch2 /* tmp */, true /* weak */); // always barrier, weak semantics
86 %}
87
88 ins_pipe(iload_reg_mem);
89 %}
90
91 // Load Pointer Volatile
92 instruct zLoadPVolatile(iRegPNoSp dst, indirect mem /* sync_memory */, rFlagsReg cr)
|
30
31 source %{
32
// Emit the ZGC load-barrier fast path for the reference just loaded into
// 'ref'. Creates an out-of-line C2 stub, tests the reference against the
// per-thread address bad mask, and branches to the stub when any bad bit
// is set; the stub resumes execution at the continuation label bound here.
// 'tmp' is clobbered (written by both the mask load and the AND below).
33 static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, bool weak) {
34 ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, weak); // out-of-line slow path
35 __ ldr(tmp, Address(rthread, ZThreadLocalData::address_bad_mask_offset())); // per-thread bad mask
36 __ andr(tmp, tmp, ref); // isolate bad bits of the loaded reference
37 __ cbnz(tmp, *stub->entry()); // any bad bit set -> take slow path
38 __ bind(*stub->continuation()); // slow path jumps back here
39 }
40
// Emit an unconditional (no fast-path mask test) load barrier: creates a
// strong (weak == false) stub and branches straight to its entry, then binds
// the continuation label the stub returns to. Callers of this helper are
// outside this chunk, so when it is used cannot be stated here.
41 static void z_load_barrier_slow_path(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp) {
42 ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, false /* weak */);
43 __ b(*stub->entry()); // always take the slow path
44 __ bind(*stub->continuation());
45 }
46
47 %}
48
49 // Load Pointer
// Matches a non-acquiring oop load under ZGC whose barrier data is
// ZLoadBarrierStrong. This variant takes the sized 'memory8' operand.
// 'dst' is declared TEMP because it doubles as the barrier's reference
// register; 'cr' is declared killed by the effect below.
50 instruct zLoadP(iRegPNoSp dst, memory8 mem, rFlagsReg cr)
51 %{
52 match(Set dst (LoadP mem));
53 predicate(UseZGC && !needs_acquiring_load(n) && (n->as_Load()->barrier_data() == ZLoadBarrierStrong));
54 effect(TEMP dst, KILL cr);
55
56 ins_cost(4 * INSN_COST);
57
58 format %{ "ldr $dst, $mem" %}
59
60 ins_encode %{
61 const Address ref_addr = mem2address($mem->opcode(), as_Register($mem$$base), $mem$$index, $mem$$scale, $mem$$disp);
62 __ ldr($dst$$Register, ref_addr); // plain load of the reference
// NOTE(review): the predicate requires ZLoadBarrierStrong, yet elision is
// re-checked here — presumably barrier_data() can become ZLoadBarrierElided
// after matching; confirm against the ZBarrierSetC2 analysis phase.
63 if (barrier_data() != ZLoadBarrierElided) {
64 z_load_barrier(_masm, this, ref_addr, $dst$$Register, rscratch2 /* tmp */, false /* weak */);
65 }
66 %}
67
68 ins_pipe(iload_reg_mem);
69 %}
70
71 // Load Weak Pointer
// Matches a non-acquiring oop load under ZGC whose barrier data is
// ZLoadBarrierWeak. This variant takes the sized 'memory8' operand.
// Unlike zLoadP there is no elision check: the weak barrier is emitted
// unconditionally after the load.
72 instruct zLoadWeakP(iRegPNoSp dst, memory8 mem, rFlagsReg cr)
73 %{
74 match(Set dst (LoadP mem));
75 predicate(UseZGC && !needs_acquiring_load(n) && (n->as_Load()->barrier_data() == ZLoadBarrierWeak));
76 effect(TEMP dst, KILL cr);
77
78 ins_cost(4 * INSN_COST);
79
80 format %{ "ldr $dst, $mem" %}
81
82 ins_encode %{
83 const Address ref_addr = mem2address($mem->opcode(), as_Register($mem$$base), $mem$$index, $mem$$scale, $mem$$disp);
84 __ ldr($dst$$Register, ref_addr); // plain load of the reference
85 z_load_barrier(_masm, this, ref_addr, $dst$$Register, rscratch2 /* tmp */, true /* weak */); // always barrier, weak semantics
86 %}
87
88 ins_pipe(iload_reg_mem);
89 %}
90
91 // Load Pointer Volatile
92 instruct zLoadPVolatile(iRegPNoSp dst, indirect mem /* sync_memory */, rFlagsReg cr)
|