1539 if (dst != obj) {
1540 __ mov(dst, obj);
1541 }
1542 } else if (code == lir_instanceof) {
1543 Register obj = op->object()->as_register();
1544 Register dst = op->result_opr()->as_register();
1545 Label success, failure, done;
1546 emit_typecheck_helper(op, &success, &failure, &failure);
1547 __ bind(failure);
1548 __ mov(dst, zr);
1549 __ b(done);
1550 __ bind(success);
1551 __ mov(dst, 1);
1552 __ bind(done);
1553 } else {
1554 ShouldNotReachHere();
1555 }
1556 }
1557
1558 void LIR_Assembler::casw(Register addr, Register newval, Register cmpval) {
1559 Label retry_load, nope;
1560 // flush and load exclusive from the memory location
1561 // and fail if it is not what we expect
1562 __ bind(retry_load);
1563 __ ldaxrw(rscratch1, addr);
1564 __ cmpw(rscratch1, cmpval);
1565 __ cset(rscratch1, Assembler::NE);
1566 __ br(Assembler::NE, nope);
1567 // if we store+flush with no intervening write rscratch1 wil be zero
1568 __ stlxrw(rscratch1, newval, addr);
1569 // retry so we only ever return after a load fails to compare
1570 // ensures we don't return a stale value after a failed write.
1571 __ cbnzw(rscratch1, retry_load);
1572 __ bind(nope);
1573 __ membar(__ AnyAny);
1574 }
1575
1576 void LIR_Assembler::casl(Register addr, Register newval, Register cmpval) {
1577 Label retry_load, nope;
1578 // flush and load exclusive from the memory location
1579 // and fail if it is not what we expect
1580 __ bind(retry_load);
1581 __ ldaxr(rscratch1, addr);
1582 __ cmp(rscratch1, cmpval);
1583 __ cset(rscratch1, Assembler::NE);
1584 __ br(Assembler::NE, nope);
1585 // if we store+flush with no intervening write rscratch1 wil be zero
1586 __ stlxr(rscratch1, newval, addr);
1587 // retry so we only ever return after a load fails to compare
1588 // ensures we don't return a stale value after a failed write.
1589 __ cbnz(rscratch1, retry_load);
1590 __ bind(nope);
1591 __ membar(__ AnyAny);
1592 }
1593
1594
1595 void LIR_Assembler::emit_compare_and_swap(LIR_OpCompareAndSwap* op) {
1596 assert(VM_Version::supports_cx8(), "wrong machine");
1597 Register addr = as_reg(op->addr());
1598 Register newval = as_reg(op->new_value());
1599 Register cmpval = as_reg(op->cmp_value());
1600 Label succeed, fail, around;
1601
1602 if (op->code() == lir_cas_obj) {
1603 if (UseCompressedOops) {
1604 Register t1 = op->tmp1()->as_register();
1605 assert(op->tmp1()->is_valid(), "must be");
1606 __ encode_heap_oop(t1, cmpval);
1607 cmpval = t1;
1608 __ encode_heap_oop(rscratch2, newval);
1609 newval = rscratch2;
1610 casw(addr, newval, cmpval);
|
1539 if (dst != obj) {
1540 __ mov(dst, obj);
1541 }
1542 } else if (code == lir_instanceof) {
1543 Register obj = op->object()->as_register();
1544 Register dst = op->result_opr()->as_register();
1545 Label success, failure, done;
1546 emit_typecheck_helper(op, &success, &failure, &failure);
1547 __ bind(failure);
1548 __ mov(dst, zr);
1549 __ b(done);
1550 __ bind(success);
1551 __ mov(dst, 1);
1552 __ bind(done);
1553 } else {
1554 ShouldNotReachHere();
1555 }
1556 }
1557
// Emit a 32-bit compare-and-swap of the word at [addr]: if the memory
// word equals cmpval, store newval. On exit rscratch1 == 0 on success,
// non-zero on failure (callers test rscratch1). The trailing full
// barrier gives the CAS its required ordering on both paths.
void LIR_Assembler::casw(Register addr, Register newval, Register cmpval) {
  if (UseLSE) {
    // v8.1 Large System Extensions: one CASALW instruction does the
    // whole compare-and-swap with acquire/release semantics.
    // CASALW returns the old memory value in its first operand, so
    // save the expected value, then compare old vs. expected to derive
    // the success flag in rscratch1.
    __ mov(rscratch1, cmpval);
    __ casalw(cmpval, newval, addr);
    __ cmpw(rscratch1, cmpval);
    __ cset(rscratch1, Assembler::NE);
  } else {
    // Fallback: load-acquire-exclusive / store-release-exclusive loop.
    Label retry_load, nope;
    // flush and load exclusive from the memory location
    // and fail if it is not what we expect
    __ bind(retry_load);
    __ ldaxrw(rscratch1, addr);
    __ cmpw(rscratch1, cmpval);
    __ cset(rscratch1, Assembler::NE);
    __ br(Assembler::NE, nope);
    // if we store+flush with no intervening write rscratch1 will be zero
    __ stlxrw(rscratch1, newval, addr);
    // retry so we only ever return after a load fails to compare
    // ensures we don't return a stale value after a failed write.
    __ cbnzw(rscratch1, retry_load);
    __ bind(nope);
  }
  __ membar(__ AnyAny);
}
1582
// Emit a 64-bit compare-and-swap of the doubleword at [addr]: if memory
// equals cmpval, store newval. On exit rscratch1 == 0 on success,
// non-zero on failure (callers test rscratch1). The trailing full
// barrier gives the CAS its required ordering on both paths.
void LIR_Assembler::casl(Register addr, Register newval, Register cmpval) {
  if (UseLSE) {
    // v8.1 Large System Extensions: one CASAL instruction does the
    // whole compare-and-swap with acquire/release semantics.
    // CASAL returns the old memory value in its first operand, so
    // save the expected value, then compare old vs. expected to derive
    // the success flag in rscratch1.
    __ mov(rscratch1, cmpval);
    __ casal(cmpval, newval, addr);
    __ cmp(rscratch1, cmpval);
    __ cset(rscratch1, Assembler::NE);
  } else {
    // Fallback: load-acquire-exclusive / store-release-exclusive loop.
    Label retry_load, nope;
    // flush and load exclusive from the memory location
    // and fail if it is not what we expect
    __ bind(retry_load);
    __ ldaxr(rscratch1, addr);
    __ cmp(rscratch1, cmpval);
    __ cset(rscratch1, Assembler::NE);
    __ br(Assembler::NE, nope);
    // if we store+flush with no intervening write rscratch1 will be zero
    __ stlxr(rscratch1, newval, addr);
    // retry so we only ever return after a load fails to compare
    // ensures we don't return a stale value after a failed write.
    __ cbnz(rscratch1, retry_load);
    __ bind(nope);
  }
  __ membar(__ AnyAny);
}
1607
1608
1609 void LIR_Assembler::emit_compare_and_swap(LIR_OpCompareAndSwap* op) {
1610 assert(VM_Version::supports_cx8(), "wrong machine");
1611 Register addr = as_reg(op->addr());
1612 Register newval = as_reg(op->new_value());
1613 Register cmpval = as_reg(op->cmp_value());
1614 Label succeed, fail, around;
1615
1616 if (op->code() == lir_cas_obj) {
1617 if (UseCompressedOops) {
1618 Register t1 = op->tmp1()->as_register();
1619 assert(op->tmp1()->is_valid(), "must be");
1620 __ encode_heap_oop(t1, cmpval);
1621 cmpval = t1;
1622 __ encode_heap_oop(rscratch2, newval);
1623 newval = rscratch2;
1624 casw(addr, newval, cmpval);
|