
src/hotspot/cpu/x86/gc/z/z_x86_64.ad

rev 54995 : 8224675: Late GC barrier insertion for ZGC
Reviewed-by:

*** 43,53 ****
          rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
          rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
          rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{
    match(Set dst (LoadBarrierSlowReg src));
!   predicate(UseAVX <= 2);
    effect(DEF dst, KILL cr,
           KILL x0, KILL x1, KILL x2, KILL x3,
           KILL x4, KILL x5, KILL x6, KILL x7,
           KILL x8, KILL x9, KILL x10, KILL x11,
--- 43,53 ----
          rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
          rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
          rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{
    match(Set dst (LoadBarrierSlowReg src));
!   predicate((UseAVX <= 2) && !n->as_LoadBarrierSlowReg()->is_weak());
    effect(DEF dst, KILL cr,
           KILL x0, KILL x1, KILL x2, KILL x3,
           KILL x4, KILL x5, KILL x6, KILL x7,
           KILL x8, KILL x9, KILL x10, KILL x11,
*** 72,82 ****
          rxmm20 x20, rxmm21 x21, rxmm22 x22, rxmm23 x23,
          rxmm24 x24, rxmm25 x25, rxmm26 x26, rxmm27 x27,
          rxmm28 x28, rxmm29 x29, rxmm30 x30, rxmm31 x31) %{
    match(Set dst (LoadBarrierSlowReg src));
!   predicate(UseAVX == 3);
    effect(DEF dst, KILL cr,
           KILL x0, KILL x1, KILL x2, KILL x3,
           KILL x4, KILL x5, KILL x6, KILL x7,
           KILL x8, KILL x9, KILL x10, KILL x11,
--- 72,82 ----
          rxmm20 x20, rxmm21 x21, rxmm22 x22, rxmm23 x23,
          rxmm24 x24, rxmm25 x25, rxmm26 x26, rxmm27 x27,
          rxmm28 x28, rxmm29 x29, rxmm30 x30, rxmm31 x31) %{
    match(Set dst (LoadBarrierSlowReg src));
!   predicate((UseAVX == 3) && !n->as_LoadBarrierSlowReg()->is_weak());
    effect(DEF dst, KILL cr,
           KILL x0, KILL x1, KILL x2, KILL x3,
           KILL x4, KILL x5, KILL x6, KILL x7,
           KILL x8, KILL x9, KILL x10, KILL x11,
*** 100,111 ****
          rxmm0 x0, rxmm1 x1, rxmm2 x2,rxmm3 x3,
          rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
          rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
          rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{
!   match(Set dst (LoadBarrierWeakSlowReg src));
!   predicate(UseAVX <= 2);
    effect(DEF dst, KILL cr,
           KILL x0, KILL x1, KILL x2, KILL x3,
           KILL x4, KILL x5, KILL x6, KILL x7,
           KILL x8, KILL x9, KILL x10, KILL x11,
--- 100,111 ----
          rxmm0 x0, rxmm1 x1, rxmm2 x2,rxmm3 x3,
          rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
          rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
          rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{
!   match(Set dst (LoadBarrierSlowReg src));
!   predicate((UseAVX <= 2) && n->as_LoadBarrierSlowReg()->is_weak());
    effect(DEF dst, KILL cr,
           KILL x0, KILL x1, KILL x2, KILL x3,
           KILL x4, KILL x5, KILL x6, KILL x7,
           KILL x8, KILL x9, KILL x10, KILL x11,
*** 129,140 ****
          rxmm16 x16, rxmm17 x17, rxmm18 x18, rxmm19 x19,
          rxmm20 x20, rxmm21 x21, rxmm22 x22, rxmm23 x23,
          rxmm24 x24, rxmm25 x25, rxmm26 x26, rxmm27 x27,
          rxmm28 x28, rxmm29 x29, rxmm30 x30, rxmm31 x31) %{
!   match(Set dst (LoadBarrierWeakSlowReg src));
!   predicate(UseAVX == 3);
    effect(DEF dst, KILL cr,
           KILL x0, KILL x1, KILL x2, KILL x3,
           KILL x4, KILL x5, KILL x6, KILL x7,
           KILL x8, KILL x9, KILL x10, KILL x11,
--- 129,140 ----
          rxmm16 x16, rxmm17 x17, rxmm18 x18, rxmm19 x19,
          rxmm20 x20, rxmm21 x21, rxmm22 x22, rxmm23 x23,
          rxmm24 x24, rxmm25 x25, rxmm26 x26, rxmm27 x27,
          rxmm28 x28, rxmm29 x29, rxmm30 x30, rxmm31 x31) %{
!   match(Set dst (LoadBarrierSlowReg src));
!   predicate((UseAVX == 3) && n->as_LoadBarrierSlowReg()->is_weak());
    effect(DEF dst, KILL cr,
           KILL x0, KILL x1, KILL x2, KILL x3,
           KILL x4, KILL x5, KILL x6, KILL x7,
           KILL x8, KILL x9, KILL x10, KILL x11,
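
Editor's note: the four hunks above fold the old LoadBarrierWeakSlowReg match rules into LoadBarrierSlowReg; strong and weak barrier stubs now match the same ideal node, and each predicate selects its variant via n->as_LoadBarrierSlowReg()->is_weak(). The short self-contained C++ sketch below shows that shape only; apart from the is_weak() accessor named in the predicates, every name here is an assumption for illustration, not the actual zBarrierSetC2 declaration.

// Illustration only: one node type carrying a weak flag, as the new predicates assume.
#include <cassert>

struct LoadBarrierSlowRegNode {
  bool _is_weak;   // weak (e.g. Reference.get) vs. strong load barrier
  explicit LoadBarrierSlowRegNode(bool weak) : _is_weak(weak) {}
  bool is_weak() const { return _is_weak; }
};

// Mirrors predicate((UseAVX <= 2) && !n->as_LoadBarrierSlowReg()->is_weak());
static bool matches_strong_xmm_ymm(const LoadBarrierSlowRegNode* n, int use_avx) {
  return (use_avx <= 2) && !n->is_weak();
}

// Mirrors predicate((UseAVX <= 2) && n->as_LoadBarrierSlowReg()->is_weak());
static bool matches_weak_xmm_ymm(const LoadBarrierSlowRegNode* n, int use_avx) {
  return (use_avx <= 2) && n->is_weak();
}

int main() {
  LoadBarrierSlowRegNode strong(false), weak(true);
  assert(matches_strong_xmm_ymm(&strong, 2) && !matches_strong_xmm_ymm(&weak, 2));
  assert(matches_weak_xmm_ymm(&weak, 2) && !matches_weak_xmm_ymm(&strong, 2));
  return 0;
}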
*** 150,154 ****
--- 150,209 ----
      z_load_barrier_slow_reg(_masm, $dst$$Register, $src$$Address, true /* weak */);
    %}
    ins_pipe(pipe_slow);
  %}
+ 
+ // Specialized versions of compareAndExchangeP that adds a keepalive that is consumed
+ // but doesn't affect output.
+ 
+ instruct z_compareAndExchangeP(
+         memory mem_ptr,
+         rax_RegP oldval, rRegP newval, rRegP keepalive,
+         rFlagsReg cr) %{
+   predicate(VM_Version::supports_cx8());
+   match(Set oldval (ZCompareAndExchangeP (Binary mem_ptr keepalive) (Binary oldval newval)));
+   effect(KILL cr);
+ 
+   format %{ "cmpxchgq $mem_ptr,$newval\t# "
+             "If rax == $mem_ptr then store $newval into $mem_ptr\n\t" %}
+   opcode(0x0F, 0xB1);
+   ins_encode(lock_prefix,
+              REX_reg_mem_wide(newval, mem_ptr),
+              OpcP, OpcS,
+              reg_mem(newval, mem_ptr)  // lock cmpxchg
+   );
+   ins_pipe( pipe_cmpxchg );
+ %}
+ 
+ instruct z_compareAndSwapP(rRegI res,
+                            memory mem_ptr,
+                            rax_RegP oldval, rRegP newval, rRegP keepalive,
+                            rFlagsReg cr) %{
+   predicate(VM_Version::supports_cx8());
+   match(Set res (ZCompareAndSwapP (Binary mem_ptr keepalive) (Binary oldval newval)));
+   match(Set res (ZWeakCompareAndSwapP (Binary mem_ptr keepalive) (Binary oldval newval)));
+   effect(KILL cr, KILL oldval);
+ 
+   format %{ "cmpxchgq $mem_ptr,$newval\t# "
+             "If rax == $mem_ptr then store $newval into $mem_ptr\n\t"
+             "sete $res\n\t"
+             "movzbl $res, $res" %}
+   opcode(0x0F, 0xB1);
+   ins_encode(lock_prefix,
+              REX_reg_mem_wide(newval, mem_ptr),
+              OpcP, OpcS,
+              reg_mem(newval, mem_ptr),
+              REX_breg(res), Opcode(0x0F), Opcode(0x94), reg(res), // sete
+              REX_reg_breg(res, res), // movzbl
+              Opcode(0xF), Opcode(0xB6), reg_reg(res, res));
+   ins_pipe( pipe_cmpxchg );
+ %}
+ 
+ instruct z_xchgP( memory mem, rRegP newval, rRegP keepalive) %{
+   match(Set newval (ZGetAndSetP mem (Binary newval keepalive)));
+   format %{ "XCHGQ $newval,[$mem]" %}
+   ins_encode %{
+     __ xchgq($newval$$Register, $mem$$Address);
+   %}
+   ins_pipe( pipe_cmpxchg );
+ %}
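
Editor's note on the keepalive operand in the added instructs: the (Binary mem_ptr keepalive) and (Binary newval keepalive) inputs make the matcher treat the original oop as consumed, so it stays live across the atomic operation, while the emitted cmpxchgq/xchgq never actually reads it. A rough stand-alone analogue of that "consumed but unused" idea, using GCC/Clang inline asm with a dummy input operand (purely an illustration of the concept under that assumption, not how C2 wires the extra edge):

#include <cstdint>

// Exchanges *mem and newval, returning the old contents. The extra "r"(keepalive)
// input is never referenced in the asm template, yet it forces the compiler to keep
// that value live up to the exchange -- the role the keepalive edge plays for
// ZCompareAndSwapP / ZGetAndSetP above.
static inline uint64_t xchg_with_keepalive(uint64_t* mem, uint64_t newval,
                                           uint64_t keepalive) {
  uint64_t old = newval;
  asm volatile("xchgq %0, %1"
               : "+r"(old), "+m"(*mem)   // old <-> *mem
               : "r"(keepalive)          // consumed, but does not affect the output
               : "memory");
  return old;
}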