
src/cpu/aarch64/vm/c1_LIRAssembler_aarch64.cpp

rev 10528 : 8151775: aarch64: add support for 8.1 LSE atomic operations
Reviewed-by: aph


1539     if (dst != obj) {
1540       __ mov(dst, obj);
1541     }
1542   } else if (code == lir_instanceof) {
1543     Register obj = op->object()->as_register();
1544     Register dst = op->result_opr()->as_register();
1545     Label success, failure, done;
1546     emit_typecheck_helper(op, &success, &failure, &failure);
1547     __ bind(failure);
1548     __ mov(dst, zr);
1549     __ b(done);
1550     __ bind(success);
1551     __ mov(dst, 1);
1552     __ bind(done);
1553   } else {
1554     ShouldNotReachHere();
1555   }
1556 }
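
The lir_instanceof arm above materializes a boolean through C1's label idiom: emit_typecheck_helper branches to success or failure, each label writes dst, and both paths join at done. The same control-flow shape in C++, purely for orientation (illustrative names, not HotSpot code):

    // Shape of the instanceof lowering: two labelled writes to dst
    // followed by a common join point.
    int instanceof_result(bool typecheck_passed) {
      int dst;
      if (typecheck_passed) {
        dst = 1;        // bind(success); mov(dst, 1)
      } else {
        dst = 0;        // bind(failure); mov(dst, zr)
      }
      return dst;        // bind(done)
    }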
1557 
1558 void LIR_Assembler::casw(Register addr, Register newval, Register cmpval) {
1559   if (UseLSE) {
1560     __ mov(rscratch1, cmpval);
1561     __ casal(Assembler::word, rscratch1, newval, addr);
1562     __ cmpw(rscratch1, cmpval);
1563     __ cset(rscratch1, Assembler::NE);
1564   } else {
1565     Label retry_load, nope;
1566     // flush and load exclusive from the memory location
1567     // and fail if it is not what we expect
1568     __ prfm(Address(addr), PSTL1STRM);
1569     __ bind(retry_load);
1570     __ ldaxrw(rscratch1, addr);
1571     __ cmpw(rscratch1, cmpval);
1572     __ cset(rscratch1, Assembler::NE);
1573     __ br(Assembler::NE, nope);
1574     // if we store+flush with no intervening write, rscratch1 will be zero
1575     __ stlxrw(rscratch1, newval, addr);
1576     // retry so we only ever return after a compare fails on a fresh load;
1577     // this ensures we don't return a stale value after a failed write.
1578     __ cbnzw(rscratch1, retry_load);
1579     __ bind(nope);
1580   }
1581   __ membar(__ AnyAny);
1582 }
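
In the version above, casw handles the choice inline: with UseLSE it issues a single casal and compares the returned old value; otherwise it runs the classic load-exclusive/store-exclusive retry loop, where a failed stlxrw (non-zero status) branches back to reload. casl below is the same logic at xword width. As a semantic reference point, here is a minimal C++ sketch of the contract using std::atomic (a hypothetical helper, not HotSpot code); on LL/SC-only hardware compare_exchange_strong compiles to just such a loop, and with LSE to a single CAS:

    #include <atomic>
    #include <cstdint>

    // Returns 0 on success, 1 on failure, mirroring cset(rscratch1, NE).
    // A strong CAS retries internally when the store-exclusive fails, so
    // it reports failure only after comparing a freshly loaded value --
    // the property the retry comment above is careful to preserve.
    inline int cas_word(std::atomic<uint32_t>& loc,
                        uint32_t cmpval, uint32_t newval) {
      uint32_t expected = cmpval;
      bool ok = loc.compare_exchange_strong(expected, newval,
                                            std::memory_order_seq_cst);
      return ok ? 0 : 1;
    }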
1583 
1584 void LIR_Assembler::casl(Register addr, Register newval, Register cmpval) {
1585   if (UseLSE) {
1586     __ mov(rscratch1, cmpval);
1587     __ casal(Assembler::xword, rscratch1, newval, addr);
1588     __ cmp(rscratch1, cmpval);
1589     __ cset(rscratch1, Assembler::NE);
1590   } else {
1591     Label retry_load, nope;
1592     // flush and load exclusive from the memory location
1593     // and fail if it is not what we expect
1594     __ prfm(Address(addr), PSTL1STRM);
1595     __ bind(retry_load);
1596     __ ldaxr(rscratch1, addr);
1597     __ cmp(rscratch1, cmpval);
1598     __ cset(rscratch1, Assembler::NE);
1599     __ br(Assembler::NE, nope);
1600     // if we store+flush with no intervening write, rscratch1 will be zero
1601     __ stlxr(rscratch1, newval, addr);
1602     // retry so we only ever return after a compare fails on a fresh load;
1603     // this ensures we don't return a stale value after a failed write.
1604     __ cbnz(rscratch1, retry_load);
1605     __ bind(nope);
1606   }
1607   __ membar(__ AnyAny);
1608 }
1609 
1610 
1611 void LIR_Assembler::emit_compare_and_swap(LIR_OpCompareAndSwap* op) {
1612   assert(VM_Version::supports_cx8(), "wrong machine");
1613   Register addr = as_reg(op->addr());
1614   Register newval = as_reg(op->new_value());
1615   Register cmpval = as_reg(op->cmp_value());
1616   Label succeed, fail, around;
1617 
1618   if (op->code() == lir_cas_obj) {
1619     if (UseCompressedOops) {
1620       Register t1 = op->tmp1()->as_register();
1621       assert(op->tmp1()->is_valid(), "must be");
1622       __ encode_heap_oop(t1, cmpval);
1623       cmpval = t1;
1624       __ encode_heap_oop(rscratch2, newval);
1625       newval = rscratch2;
1626       casw(addr, newval, cmpval);
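
Both cmpval and newval are re-encoded before the CAS because, with UseCompressedOops, the field being updated holds a 32-bit narrow oop; the word-sized casw must therefore compare encoded values, not raw 64-bit pointers. A rough sketch of the usual encoding (simplified; the real encode_heap_oop also handles null and the zero-base/zero-shift configurations):

    #include <cstdint>

    // Simplified narrow-oop encoding: subtract the heap base, then
    // shift right by the compression shift (3 for 8-byte alignment).
    inline uint32_t narrow_oop_sketch(uint64_t oop, uint64_t heap_base,
                                      unsigned shift) {
      return (uint32_t)((oop - heap_base) >> shift);
    }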


3104                             new LIR_Op2(lir_cmp, lir_cond_always,
3105                                         LIR_OprFact::intConst(tableswitch_count),
3106                                         reg_opr));
3107         inst->insert_before(start_insn + 1, new LIR_OpLabel(&sw->_branches));
3108         tableswitch_count++;
3109       }
3110       reg = noreg;
3111       last_key = -2147483648;
3112     }
3113   next_state:
3114     ;
3115   }
3116 #endif
3117 }
3118 
3119 void LIR_Assembler::atomic_op(LIR_Code code, LIR_Opr src, LIR_Opr data, LIR_Opr dest, LIR_Opr tmp_op) {
3120   Address addr = as_Address(src->as_address_ptr(), noreg);
3121   BasicType type = src->type();
3122   bool is_oop = type == T_OBJECT || type == T_ARRAY;
3123 
3124   void (MacroAssembler::* lda)(Register Rd, Register Ra);
3125   void (MacroAssembler::* add)(Register Rd, Register Rn, RegisterOrConstant increment);
3126   void (MacroAssembler::* stl)(Register Rs, Register Rt, Register Rn);
3127 
3128   switch(type) {
3129   case T_INT:
3130     lda = &MacroAssembler::ldaxrw;
3131     add = &MacroAssembler::addw;
3132     stl = &MacroAssembler::stlxrw;
3133     break;
3134   case T_LONG:
3135     lda = &MacroAssembler::ldaxr;
3136     add = &MacroAssembler::add;
3137     stl = &MacroAssembler::stlxr;
3138     break;
3139   case T_OBJECT:
3140   case T_ARRAY:
3141     if (UseCompressedOops) {
3142       lda = &MacroAssembler::ldaxrw;
3143       add = &MacroAssembler::addw;
3144       stl = &MacroAssembler::stlxrw;
3145     } else {
3146       lda = &MacroAssembler::ldaxr;
3147       add = &MacroAssembler::add;
3148       stl = &MacroAssembler::stlxr;
3149     }
3150     break;
3151   default:
3152     ShouldNotReachHere();
3153     lda = &MacroAssembler::ldaxr;
3154     add = &MacroAssembler::add;
3155     stl = &MacroAssembler::stlxr;  // unreachable
3156   }
3157 
3158   switch (code) {
3159   case lir_xadd:
3160     {
3161       RegisterOrConstant inc;
3162       Register tmp = as_reg(tmp_op);
3163       Register dst = as_reg(dest);
3164       if (data->is_constant()) {
3165         inc = RegisterOrConstant(as_long(data));
3166         assert_different_registers(dst, addr.base(), tmp,
3167                                    rscratch1, rscratch2);
3168       } else {
3169         inc = RegisterOrConstant(as_reg(data));
3170         assert_different_registers(inc.as_register(), dst, addr.base(), tmp,
3171                                    rscratch1, rscratch2);
3172       }
3173       Label again;
3174       __ lea(tmp, addr);
3175       __ prfm(Address(tmp), PSTL1STRM);
3176       __ bind(again);
3177       (_masm->*lda)(dst, tmp);
3178       (_masm->*add)(rscratch1, dst, inc);
3179       (_masm->*stl)(rscratch2, rscratch1, tmp);
3180       __ cbnzw(rscratch2, again);
3181       break;
3182     }
3183   case lir_xchg:
3184     {
3185       Register tmp = tmp_op->as_register();
3186       Register obj = as_reg(data);
3187       Register dst = as_reg(dest);
3188       if (is_oop && UseCompressedOops) {
3189         __ encode_heap_oop(rscratch1, obj);
3190         obj = rscratch1;
3191       }
3192       assert_different_registers(obj, addr.base(), tmp, rscratch2, dst);
3193       Label again;
3194       __ lea(tmp, addr);
3195       __ prfm(Address(tmp), PSTL1STRM);
3196       __ bind(again);
3197       (_masm->*lda)(dst, tmp);
3198       (_masm->*stl)(rscratch2, obj, tmp);
3199       __ cbnzw(rscratch2, again);
3200       if (is_oop && UseCompressedOops) {
3201         __ decode_heap_oop(dst);
3202       }
3203     }
3204     break;
3205   default:
3206     ShouldNotReachHere();
3207   }
3208   __ membar(__ AnyAny);
3209 }
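
The old atomic_op selects its three emitters once, by operand width, then drives one generic LL/SC loop through pointers to member functions, keeping the lir_xadd and lir_xchg bodies width-agnostic. A compact, self-contained C++ sketch of that dispatch idiom (the Emitter class is illustrative, not HotSpot code):

    #include <cstdio>

    struct Emitter {
      void addw(int d, int n) { std::printf("addw %d, %d\n", d, n); }
      void add (int d, int n) { std::printf("add  %d, %d\n", d, n); }
    };

    // Pick the emitter by width once, then call through the object --
    // the same shape as (_masm->*add)(rscratch1, dst, inc) above.
    void emit_add(Emitter* masm, bool is64, int d, int n) {
      void (Emitter::*op)(int, int) = is64 ? &Emitter::add
                                           : &Emitter::addw;
      (masm->*op)(d, n);
    }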
3210 
3211 #undef __


1539     if (dst != obj) {
1540       __ mov(dst, obj);
1541     }
1542   } else if (code == lir_instanceof) {
1543     Register obj = op->object()->as_register();
1544     Register dst = op->result_opr()->as_register();
1545     Label success, failure, done;
1546     emit_typecheck_helper(op, &success, &failure, &failure);
1547     __ bind(failure);
1548     __ mov(dst, zr);
1549     __ b(done);
1550     __ bind(success);
1551     __ mov(dst, 1);
1552     __ bind(done);
1553   } else {
1554     ShouldNotReachHere();
1555   }
1556 }
1557 
1558 void LIR_Assembler::casw(Register addr, Register newval, Register cmpval) {
1559   __ cmpxchg(addr, cmpval, newval, Assembler::word, /* acquire*/ true, /* release*/ true, rscratch1);
1560   __ cset(rscratch1, Assembler::NE);
1561   __ membar(__ AnyAny);
1562 }
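
The updated casw delegates the whole UseLSE decision to MacroAssembler::cmpxchg, requesting both acquire and release: the helper emits a single casal under LSE, or the ldaxrw/stlxrw retry loop otherwise, and leaves the flags set so the following cset still yields the 0/1 result. For comparison, the equivalent semantics through the GCC/Clang builtin (a sketch, not the MacroAssembler implementation); compiled with -march=armv8.1-a this typically becomes a single CAS-family instruction:

    #include <cstdint>

    // Returns the value found at *addr; the caller checks it against
    // cmpval, just as casw tests rscratch1 after the exchange.
    inline uint32_t cas_return_old(uint32_t* addr,
                                   uint32_t cmpval, uint32_t newval) {
      uint32_t expected = cmpval;
      __atomic_compare_exchange_n(addr, &expected, newval,
                                  /*weak=*/false,
                                  __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
      return expected;  // old value on failure, == cmpval on success
    }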
1563 
1564 void LIR_Assembler::casl(Register addr, Register newval, Register cmpval) {
1565   __ cmpxchg(addr, cmpval, newval, Assembler::xword, /* acquire*/ true, /* release*/ true, rscratch1);
1566   __ cset(rscratch1, Assembler::NE);
1567   __ membar(__ AnyAny);
1568 }
1569 
1570 
1571 void LIR_Assembler::emit_compare_and_swap(LIR_OpCompareAndSwap* op) {
1572   assert(VM_Version::supports_cx8(), "wrong machine");
1573   Register addr = as_reg(op->addr());
1574   Register newval = as_reg(op->new_value());
1575   Register cmpval = as_reg(op->cmp_value());
1576   Label succeed, fail, around;
1577 
1578   if (op->code() == lir_cas_obj) {
1579     if (UseCompressedOops) {
1580       Register t1 = op->tmp1()->as_register();
1581       assert(op->tmp1()->is_valid(), "must be");
1582       __ encode_heap_oop(t1, cmpval);
1583       cmpval = t1;
1584       __ encode_heap_oop(rscratch2, newval);
1585       newval = rscratch2;
1586       casw(addr, newval, cmpval);


3064                             new LIR_Op2(lir_cmp, lir_cond_always,
3065                                         LIR_OprFact::intConst(tableswitch_count),
3066                                         reg_opr));
3067         inst->insert_before(start_insn + 1, new LIR_OpLabel(&sw->_branches));
3068         tableswitch_count++;
3069       }
3070       reg = noreg;
3071       last_key = -2147483648;
3072     }
3073   next_state:
3074     ;
3075   }
3076 #endif
3077 }
3078 
3079 void LIR_Assembler::atomic_op(LIR_Code code, LIR_Opr src, LIR_Opr data, LIR_Opr dest, LIR_Opr tmp_op) {
3080   Address addr = as_Address(src->as_address_ptr(), noreg);
3081   BasicType type = src->type();
3082   bool is_oop = type == T_OBJECT || type == T_ARRAY;
3083 
3084   void (MacroAssembler::* add)(Register prev, RegisterOrConstant incr, Register addr);
3085   void (MacroAssembler::* xchg)(Register prev, Register newv, Register addr);
3086 
3087   switch(type) {
3088   case T_INT:
3089     xchg = &MacroAssembler::atomic_xchgalw;
3090     add = &MacroAssembler::atomic_addalw;
3091     break;
3092   case T_LONG:
3093     xchg = &MacroAssembler::atomic_xchgal;
3094     add = &MacroAssembler::atomic_addal;
3095     break;
3096   case T_OBJECT:
3097   case T_ARRAY:
3098     if (UseCompressedOops) {
3099       xchg = &MacroAssembler::atomic_xchgalw;
3100       add = &MacroAssembler::atomic_addalw;
3101     } else {
3102       xchg = &MacroAssembler::atomic_xchgal;
3103       add = &MacroAssembler::atomic_addal;
3104     }
3105     break;
3106   default:
3107     ShouldNotReachHere();
3108     xchg = &MacroAssembler::atomic_xchgal;
3109     add = &MacroAssembler::atomic_addal; // unreachable
3110   }
3111 
3112   switch (code) {
3113   case lir_xadd:
3114     {
3115       RegisterOrConstant inc;
3116       Register tmp = as_reg(tmp_op);
3117       Register dst = as_reg(dest);
3118       if (data->is_constant()) {
3119         inc = RegisterOrConstant(as_long(data));
3120         assert_different_registers(dst, addr.base(), tmp,
3121                                    rscratch1, rscratch2);
3122       } else {
3123         inc = RegisterOrConstant(as_reg(data));
3124         assert_different_registers(inc.as_register(), dst, addr.base(), tmp,
3125                                    rscratch1, rscratch2);
3126       }
3127       __ lea(tmp, addr);
3128       (_masm->*add)(dst, inc, tmp);
3129       break;
3130     }
3131   case lir_xchg:
3132     {
3133       Register tmp = tmp_op->as_register();
3134       Register obj = as_reg(data);
3135       Register dst = as_reg(dest);
3136       if (is_oop && UseCompressedOops) {
3137         __ encode_heap_oop(rscratch1, obj);
3138         obj = rscratch1;
3139       }
3140       assert_different_registers(obj, addr.base(), tmp, rscratch2, dst);
3141       __ lea(tmp, addr);
3142       (_masm->*xchg)(dst, obj, tmp);
3143       if (is_oop && UseCompressedOops) {
3144         __ decode_heap_oop(dst);
3145       }
3146     }
3147     break;
3148   default:
3149     ShouldNotReachHere();
3150   }
3151   __ membar(__ AnyAny);
3152 }
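
The rewritten atomic_op replaces the per-width lda/add/stl triples and their explicit retry loops with two MacroAssembler helpers per width, atomic_addal(w) and atomic_xchgal(w), each returning the previous value in prev, which is exactly what lir_xadd and lir_xchg must deliver in dest. Under UseLSE these become single LDADDAL/SWPAL instructions; without it they expand to the familiar LL/SC loop inside the helper. A semantic sketch via the GCC/Clang builtins (illustrative, not the HotSpot helpers); with -march=armv8.1-a these typically compile to ldaddal and swpal:

    #include <cstdint>

    // Fetch-and-add returning the previous value, like atomic_addal.
    inline uint64_t xadd64(uint64_t* addr, uint64_t inc) {
      return __atomic_fetch_add(addr, inc, __ATOMIC_SEQ_CST);
    }

    // Atomic swap returning the previous value, like atomic_xchgal.
    inline uint64_t xchg64(uint64_t* addr, uint64_t newv) {
      return __atomic_exchange_n(addr, newv, __ATOMIC_SEQ_CST);
    }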
3153 
3154 #undef __