
src/cpu/aarch64/vm/aarch64_ad.m4

rev 11914 : 8221658: aarch64: add necessary predicate for ubfx patterns
Reviewed-by: aph

*** 179,213 ****
  // Bitfield extract with shift & mask
  define(`BFX_INSN',
  `instruct $3$1(iReg$1NoSp dst, iReg$1`'ORL2I($1) src, immI rshift, imm$1_bitmask mask)
  %{
    match(Set dst (And$1 ($2$1 src rshift) mask));

    ins_cost(INSN_COST);
    format %{ "$3 $dst, $src, $mask" %}
    ins_encode %{
!     int rshift = $rshift$$constant;
      long mask = $mask$$constant;
!     int width = exact_log2(mask+1);
      __ $3(as_Register($dst$$reg),
              as_Register($src$$reg), rshift, width);
    %}
    ins_pipe(ialu_reg_shift);
  %}')
! BFX_INSN(I,URShift,ubfxw)
! BFX_INSN(L,URShift,ubfx)

  // We can use ubfx when extending an And with a mask when we know mask
  // is positive.  We know that because immI_bitmask guarantees it.
  instruct ubfxIConvI2L(iRegLNoSp dst, iRegIorL2I src, immI rshift, immI_bitmask mask)
  %{
    match(Set dst (ConvI2L (AndI (URShiftI src rshift) mask)));

    ins_cost(INSN_COST * 2);
    format %{ "ubfx $dst, $src, $mask" %}
    ins_encode %{
!     int rshift = $rshift$$constant;
      long mask = $mask$$constant;
      int width = exact_log2(mask+1);
      __ ubfx(as_Register($dst$$reg),
              as_Register($src$$reg), rshift, width);
    %}
--- 179,217 ----
  // Bitfield extract with shift & mask
  define(`BFX_INSN',
  `instruct $3$1(iReg$1NoSp dst, iReg$1`'ORL2I($1) src, immI rshift, imm$1_bitmask mask)
  %{
    match(Set dst (And$1 ($2$1 src rshift) mask));
+   // Make sure we are not going to exceed what $3 can do.
+   predicate((exact_log2$6(n->in(2)->get_$5() + 1) + (n->in(1)->in(2)->get_int() & $4)) <= ($4 + 1));

    ins_cost(INSN_COST);
    format %{ "$3 $dst, $src, $mask" %}
    ins_encode %{
!     int rshift = $rshift$$constant & $4;
      long mask = $mask$$constant;
!     int width = exact_log2$6(mask+1);
      __ $3(as_Register($dst$$reg),
              as_Register($src$$reg), rshift, width);
    %}
    ins_pipe(ialu_reg_shift);
  %}')
! BFX_INSN(I, URShift, ubfxw, 31, int)
! BFX_INSN(L, URShift, ubfx, 63, long, _long)

  // We can use ubfx when extending an And with a mask when we know mask
  // is positive.  We know that because immI_bitmask guarantees it.
  instruct ubfxIConvI2L(iRegLNoSp dst, iRegIorL2I src, immI rshift, immI_bitmask mask)
  %{
    match(Set dst (ConvI2L (AndI (URShiftI src rshift) mask)));
+   // Make sure we are not going to exceed what ubfxw can do.
+   predicate((exact_log2(n->in(1)->in(2)->get_int() + 1) + (n->in(1)->in(1)->in(2)->get_int() & 31)) <= (31 + 1));

    ins_cost(INSN_COST * 2);
    format %{ "ubfx $dst, $src, $mask" %}
    ins_encode %{
!     int rshift = $rshift$$constant & 31;
      long mask = $mask$$constant;
      int width = exact_log2(mask+1);
      __ ubfx(as_Register($dst$$reg),
              as_Register($src$$reg), rshift, width);
    %}
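
For context on what the new predicate guards against: an AArch64 ubfx/ubfxw encoding is only valid when the field's low bit position plus its width does not run past the register (32 bits for ubfxw, 64 for ubfx), so matches where the shifted-and-masked field would extend beyond the top bit must be rejected. The standalone C++ sketch below, written for this review rather than taken from HotSpot, models that check; the helper names (ubfx_encodable, log2_exact) are hypothetical.

    #include <cstdint>
    #include <cassert>

    // Width of a contiguous low-bit mask: mask + 1 is a power of two because the
    // imm*_bitmask operands only accept such masks (mirrors HotSpot's exact_log2).
    static int log2_exact(uint64_t x) {
      int n = 0;
      while (x > 1) { x >>= 1; n++; }
      return n;
    }

    // True when the (rshift, mask) pair fits a single bitfield extract of the
    // given register size: lsb + width must not exceed 32 (ubfxw) or 64 (ubfx).
    static bool ubfx_encodable(int rshift, uint64_t mask, int regsize) {
      int lsb   = rshift & (regsize - 1);   // the shift is taken modulo the register size
      int width = log2_exact(mask + 1);
      return lsb + width <= regsize;
    }

    int main() {
      assert(ubfx_encodable(16, 0xFFFF, 32));   // bits [31:16] of a 32-bit value: ok
      assert(!ubfx_encodable(24, 0xFFFF, 32));  // 24 + 16 > 32: must not match ubfxw
      assert(ubfx_encodable(48, 0xFFFF, 64));   // fits the 64-bit ubfx form
      return 0;
    }

In the ADL predicate the same comparison is written as exact_log2(mask + 1) + (rshift & 31) <= 31 + 1 (63 for the long form), with the mask and shift constants fetched from the node inputs via n->in(...).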