src/hotspot/share/opto/escape.cpp

Old version:

1709 
1710   for (EdgeIterator j(jobj); j.has_next(); j.next()) {
1711     if (j.get()->is_Arraycopy()) {
1712       continue;
1713     }
1714 
1715     // Non-escaping object node should point only to field nodes.
1716     FieldNode* field = j.get()->as_Field();
1717     int offset = field->as_Field()->offset();
1718 
1719     // 4. An object is not scalar replaceable if it has a field with unknown
1720     // offset (an array element is accessed in a loop).
1721     if (offset == Type::OffsetBot) {
1722       jobj->set_scalar_replaceable(false);
1723       return;
1724     }
1725     // 5. Currently an object is not scalar replaceable if a LoadStore node
1726     // accesses its field since the field value is unknown after it.
1727     //
1728     Node* n = field->ideal_node();
1729     for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
1730       Node* u = n->fast_out(i);
1731       if (u->is_LoadStore() || (u->is_Mem() && u->as_Mem()->is_mismatched_access())) {
1732         jobj->set_scalar_replaceable(false);
1733         return;
1734       }
1735     }
1736 
1737     // 6. Or the address may point to more than one object. This may produce
1738     // a false positive result (set not scalar replaceable)
1739     // since the flow-insensitive escape analysis can't separate
1740     // the case when stores overwrite the field's value from the case
1741     // when stores happened on different control branches.
1742     //
1743     // Note: it will disable scalar replacement in some cases:
1744     //
1745     //    Point p[] = new Point[1];
1746     //    p[0] = new Point(); // Will not be scalar replaced
1747     //
1748     // but it will save us from incorrect optimizations in the following cases:

New version:

1709 
1710   for (EdgeIterator j(jobj); j.has_next(); j.next()) {
1711     if (j.get()->is_Arraycopy()) {
1712       continue;
1713     }
1714 
1715     // Non-escaping object node should point only to field nodes.
1716     FieldNode* field = j.get()->as_Field();
1717     int offset = field->as_Field()->offset();
1718 
1719     // 4. An object is not scalar replaceable if it has a field with unknown
1720     // offset (an array element is accessed in a loop).
1721     if (offset == Type::OffsetBot) {
1722       jobj->set_scalar_replaceable(false);
1723       return;
1724     }
1725     // 5. Currently an object is not scalar replaceable if a LoadStore node
1726     // accesses its field since the field value is unknown after it.
1727     //
1728     Node* n = field->ideal_node();
1729 
1730     assert(n->is_AddP(), "expect an address computation");
1731     if (n->in(AddPNode::Base)->is_top() &&
1732         n->in(AddPNode::Address)->Opcode() == Op_CheckCastPP) {
1733       assert(n->in(AddPNode::Address)->bottom_type()->isa_rawptr(), "raw address so raw cast expected");
1734       assert(_igvn->type(n->in(AddPNode::Address)->in(1))->isa_oopptr(), "cast pattern at unsafe access expected");
1735       jobj->set_scalar_replaceable(false);
1736       return;
1737     }
1738 
1739     for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
1740       Node* u = n->fast_out(i);
1741       if (u->is_LoadStore() || (u->is_Mem() && u->as_Mem()->is_mismatched_access())) {
1742         jobj->set_scalar_replaceable(false);
1743         return;
1744       }
1745     }
1746 
1747     // 6. Or the address may point to more than one object. This may produce
1748     // a false positive result (set not scalar replaceable)
1749     // since the flow-insensitive escape analysis can't separate
1750     // the case when stores overwrite the field's value from the case
1751     // when stores happened on different control branches.
1752     //
1753     // Note: it will disable scalar replacement in some cases:
1754     //
1755     //    Point p[] = new Point[1];
1756     //    p[0] = new Point(); // Will not be scalar replaced
1757     //
1758     // but it will save us from incorrect optimizations in the following cases:
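
A few hedged Java sketches follow, illustrating the shapes these rules guard against. The check added at new lines 1730-1737 keys on the address shape C2 builds for Unsafe accesses: an AddP whose Base input is top and whose Address input is a CheckCastPP that casts an oop to a raw pointer, as the two asserts spell out. Below is a minimal sketch of the kind of code that performs such a raw store on an otherwise non-escaping object; the class, field, and method names are made up for illustration, and whether a given access produces exactly this address shape depends on how the Unsafe intrinsic is expanded.

    import java.lang.reflect.Field;
    import sun.misc.Unsafe;

    public class UnsafeStoreOnLocal {
        // Hypothetical example types and names, used only to illustrate the shape.
        static class Point { int x; int y; }

        private static final Unsafe U;
        private static final long X_OFFSET;
        static {
            try {
                Field f = Unsafe.class.getDeclaredField("theUnsafe");
                f.setAccessible(true);
                U = (Unsafe) f.get(null);
                X_OFFSET = U.objectFieldOffset(Point.class.getDeclaredField("x"));
            } catch (ReflectiveOperationException e) {
                throw new ExceptionInInitializerError(e);
            }
        }

        static int test() {
            Point p = new Point();            // allocation does not escape test()
            U.putInt(p, X_OFFSET, 42);        // raw store through an Unsafe-computed address
            return p.x;
        }

        public static void main(String[] args) {
            int sum = 0;
            for (int i = 0; i < 100_000; i++) {   // warm up so C2 compiles test()
                sum += test();
            }
            System.out.println(sum);
        }
    }

With the patch, an object whose field address is built this way is conservatively marked not scalar replaceable, in the same spirit as rule 5 above.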

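Rule 5 (a LoadStore user of the field's address) is typically produced by atomic read-modify-write operations, which C2 models with LoadStore nodes such as CompareAndSwapI or GetAndSetI. A hedged sketch, assuming the VarHandle compareAndSet is intrinsified; the names are illustrative only.

    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.VarHandle;

    public class CasOnLocal {
        // Hypothetical example class, used only to illustrate the shape.
        static class Counter { volatile int value; }

        private static final VarHandle VALUE;
        static {
            try {
                VALUE = MethodHandles.lookup()
                        .findVarHandle(Counter.class, "value", int.class);
            } catch (ReflectiveOperationException e) {
                throw new ExceptionInInitializerError(e);
            }
        }

        static int test() {
            Counter c = new Counter();        // allocation does not escape test()
            VALUE.compareAndSet(c, 0, 1);     // CAS on c.value becomes a LoadStore node
            return c.value;
        }
    }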

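Rule 4 (a field with offset Type::OffsetBot) is what escape analysis sees when an element of a non-escaping array is addressed with a non-constant index, for example inside a loop. A minimal sketch of that shape; the method is illustrative only.

    public class VarIndexArray {
        static int test(int n) {
            int[] a = new int[8];             // allocation does not escape test()
            for (int i = 0; i < n; i++) {
                a[i & 7] += i;                // non-constant index -> unknown (OffsetBot) offset
            }
            return a[0];
        }
    }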