
src/hotspot/cpu/arm/macroAssembler_arm.cpp





1640 
1641 FixedSizeCodeBlock::~FixedSizeCodeBlock() {
1642   if (_enabled) {
1643     address curr_pc = _masm->pc();
1644 
1645     assert(_start < curr_pc, "invalid current pc");
1646     guarantee(curr_pc <= _start + _size_in_instrs * Assembler::InstructionSize, "code block is too long");
1647 
1648     int nops_count = (_start - curr_pc) / Assembler::InstructionSize + _size_in_instrs;
1649     for (int i = 0; i < nops_count; i++) {
1650       _masm->nop();
1651     }
1652   }
1653 }
1654 
1655 #ifdef AARCH64
1656 
1657 // Serializes memory.
1658 // The tmp register is not used on AArch64; this parameter is provided solely for compatibility with 32-bit ARM
1659 void MacroAssembler::membar(Membar_mask_bits order_constraint, Register tmp) {
1660   if (!os::is_MP()) return;
1661 
1662   // TODO-AARCH64 investigate dsb vs dmb effects
1663   if (order_constraint == StoreStore) {
1664     dmb(DMB_st);
1665   } else if ((order_constraint & ~(LoadLoad | LoadStore)) == 0) {
1666     dmb(DMB_ld);
1667   } else {
1668     dmb(DMB_all);
1669   }
1670 }
1671 
1672 #else
1673 
1674 // Serializes memory. Potentially blows flags and reg.
1675 // tmp is a scratch register for the v6 co-processor write op (could be noreg for other architecture versions)
1676 // preserve_flags takes a longer path in the LoadStore case (dmb rather than a control dependency) to preserve status flags. Optional.
1677 // load_tgt is an ordered load target, used in the LoadStore case only to create a dependency between the load operation and the conditional branch. Optional.
1678 void MacroAssembler::membar(Membar_mask_bits order_constraint,
1679                             Register tmp,
1680                             bool preserve_flags,
1681                             Register load_tgt) {
1682   if (!os::is_MP()) return;
1683 
1684   if (order_constraint == StoreStore) {
1685     dmb(DMB_st, tmp);
1686   } else if ((order_constraint & StoreLoad)  ||
1687              (order_constraint & LoadLoad)   ||
1688              (order_constraint & StoreStore) ||
1689              (load_tgt == noreg)             ||
1690              preserve_flags) {
1691     dmb(DMB_all, tmp);
1692   } else {
1693     // LoadStore: speculative store reordering is prohibited
1694 
1695     // By providing an ordered load target register, we avoid an extra memory load reference
1696     Label not_taken;
1697     bind(not_taken);
1698     cmp(load_tgt, load_tgt);
1699     b(not_taken, ne);
1700   }
1701 }
1702 
1703 #endif // AARCH64
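
The padding arithmetic in the FixedSizeCodeBlock destructor above reduces to "reserved slots minus instructions already emitted". A minimal standalone sketch of that computation (hypothetical addresses and sizes, not HotSpot code):

#include <cassert>
#include <cstdio>

int main() {
  const int instruction_size = 4;   // fixed instruction width on ARM/AArch64
  const int size_in_instrs   = 4;   // slots reserved for the block (assumed)
  long start   = 0x1000;            // pc when the block was opened (assumed)
  long curr_pc = 0x1008;            // pc in the destructor, after 2 instructions

  assert(start < curr_pc && curr_pc <= start + size_in_instrs * instruction_size);
  // Same value as (start - curr_pc) / instruction_size + size_in_instrs above.
  int nops_count = size_in_instrs - (int)((curr_pc - start) / instruction_size);
  printf("pad with %d nop(s)\n", nops_count);   // prints 2
  return 0;
}
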




1640 
1641 FixedSizeCodeBlock::~FixedSizeCodeBlock() {
1642   if (_enabled) {
1643     address curr_pc = _masm->pc();
1644 
1645     assert(_start < curr_pc, "invalid current pc");
1646     guarantee(curr_pc <= _start + _size_in_instrs * Assembler::InstructionSize, "code block is too long");
1647 
1648     int nops_count = (_start - curr_pc) / Assembler::InstructionSize + _size_in_instrs;
1649     for (int i = 0; i < nops_count; i++) {
1650       _masm->nop();
1651     }
1652   }
1653 }
1654 
1655 #ifdef AARCH64
1656 
1657 // Serializes memory.
1658 // The tmp register is not used on AArch64; this parameter is provided solely for compatibility with 32-bit ARM
1659 void MacroAssembler::membar(Membar_mask_bits order_constraint, Register tmp) {


1660   // TODO-AARCH64 investigate dsb vs dmb effects
1661   if (order_constraint == StoreStore) {
1662     dmb(DMB_st);
1663   } else if ((order_constraint & ~(LoadLoad | LoadStore)) == 0) {
1664     dmb(DMB_ld);
1665   } else {
1666     dmb(DMB_all);
1667   }
1668 }
1669 
1670 #else
1671 
1672 // Serializes memory. Potentially blows flags and reg.
1673 // tmp is a scratch register for the v6 co-processor write op (could be noreg for other architecture versions)
1674 // preserve_flags takes a longer path in the LoadStore case (dmb rather than a control dependency) to preserve status flags. Optional.
1675 // load_tgt is an ordered load target, used in the LoadStore case only to create a dependency between the load operation and the conditional branch. Optional.
1676 void MacroAssembler::membar(Membar_mask_bits order_constraint,
1677                             Register tmp,
1678                             bool preserve_flags,
1679                             Register load_tgt) {


1680   if (order_constraint == StoreStore) {
1681     dmb(DMB_st, tmp);
1682   } else if ((order_constraint & StoreLoad)  ||
1683              (order_constraint & LoadLoad)   ||
1684              (order_constraint & StoreStore) ||
1685              (load_tgt == noreg)             ||
1686              preserve_flags) {
1687     dmb(DMB_all, tmp);
1688   } else {
1689     // LoadStore: speculative store reordering is prohibited
1690 
1691     // By providing an ordered load target register, we avoid an extra memory load reference
1692     Label not_taken;
1693     bind(not_taken);
1694     cmp(load_tgt, load_tgt);
1695     b(not_taken, ne);
1696   }
1697 }
1698 
1699 #endif // AARCH64
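
On the AArch64 side, the test (order_constraint & ~(LoadLoad | LoadStore)) == 0 means "only load-ordering bits are set", so the barrier selection collapses to three cases. A hedged standalone model of that selection (hypothetical flag values; the real Membar_mask_bits come from the assembler header):

#include <cstdio>

enum Membar_mask_bits {            // hypothetical values, for the sketch only
  LoadLoad   = 1 << 0,
  StoreLoad  = 1 << 1,
  LoadStore  = 1 << 2,
  StoreStore = 1 << 3
};

const char* aarch64_barrier(int order_constraint) {
  if (order_constraint == StoreStore)                    return "DMB_st";  // stores only
  if ((order_constraint & ~(LoadLoad | LoadStore)) == 0) return "DMB_ld";  // load-ordering only
  return "DMB_all";                                                        // anything else
}

int main() {
  printf("%s\n", aarch64_barrier(LoadLoad | LoadStore));   // DMB_ld
  printf("%s\n", aarch64_barrier(StoreStore));             // DMB_st
  printf("%s\n", aarch64_barrier(StoreLoad));              // DMB_all
  return 0;
}
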


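The 32-bit LoadStore fallback relies on the ARM guarantee that a store which is control-dependent on a load (through a conditional branch whose condition uses the loaded value) cannot become visible before that load. The compare of load_tgt with itself is always equal, so the ne branch is never taken, but it still creates the dependency; this is also why preserve_flags forces the dmb path, since the cmp clobbers the condition flags. A hedged inline-assembly sketch of the same idiom for 32-bit ARM with GCC/Clang (hypothetical helper, not the HotSpot code):

// Orders the load from p before any subsequent store, using a control
// dependency instead of a full dmb. 32-bit ARM (ARMv7) only.
static inline int load_with_loadstore_order(const int* p) {
  int v;
  __asm__ volatile(
      "ldr   %0, [%1]     \n\t"   // the load whose result we depend on
      "cmp   %0, %0       \n\t"   // always equal, but uses the loaded value
      "bne   1f           \n\t"   // never taken; creates the control dependency
      "1:                 \n\t"
      : "=&r"(v)
      : "r"(p)
      : "cc", "memory");          // cmp clobbers flags; keep the compiler from moving stores
  return v;
}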