// Tail of the preceding function (its definition starts before this chunk):
// if the noop_null constant node ended up with no users, remove it from the
// GVN hash table so it is not kept alive by the value-numbering table.
1578 if (noop_null->outcnt() == 0)
1579 igvn->hash_delete(noop_null);
1580 }
1581
// Entry point of C2 escape analysis (earlier revision of this function).
// Builds the Connection Graph (CG) over the Ideal graph, propagates escape
// states, and (in code beyond this chunk) reports whether any allocation is
// a scalar-replacement candidate.
// NOTE(review): this is a partial listing — the embedded numbering jumps
// 1640->1707 and 1751->1855, so stages 3/4 and part of the post-stage-6
// code are not visible here; comments below hedge accordingly.
1582 bool ConnectionGraph::compute_escape() {
1583 Compile* C = _compile;
1584
1585 // 1. Populate Connection Graph (CG) with Ideal nodes.
1586
1587 Unique_Node_List worklist_init;
1588 worklist_init.map(C->unique(), NULL); // preallocate space
1589
1590 // Initialize worklist
1591 if (C->root() != NULL) {
1592 worklist_init.push(C->root());
1593 }
1594
// Side worklists filled during the initial walk; consumed by later stages.
1595 GrowableArray<Node*> alloc_worklist;
1596 GrowableArray<Node*> addp_worklist;
1597 GrowableArray<Node*> ptr_cmp_worklist;
1598 PhaseGVN* igvn = _igvn;
1599
1600 // Push all useful nodes onto CG list and set their type.
// The list grows while we iterate (users are pushed at the bottom of the
// loop body), so this visits the transitive closure of nodes reachable
// from root; Unique_Node_List suppresses duplicates.
1601 for( uint next = 0; next < worklist_init.size(); ++next ) {
1602 Node* n = worklist_init.at(next);
1603 record_for_escape_analysis(n, igvn);
1604 // Only allocations and java static calls results are checked
1605 // for an escape status. See process_call_result() below.
// NOTE(review): this condition relies on '&&' binding tighter than '||',
// i.e. it parses as is_Allocate() || (is_CallStaticJava() && JavaObject).
// That matches the comment's intent, but explicit parentheses would avoid
// the -Wparentheses warning and make the grouping obvious.
1606 if (n->is_Allocate() || n->is_CallStaticJava() &&
1607 ptnode_adr(n->_idx)->node_type() == PointsToNode::JavaObject) {
1608 alloc_worklist.append(n);
1609 } else if(n->is_AddP()) {
1610 // Collect address nodes. Use them during stage 3 below
1611 // to build initial connection graph field edges.
1612 addp_worklist.append(n);
1613 } else if (n->is_MergeMem()) {
1614 // Collect all MergeMem nodes to add memory slices for
1615 // scalar replaceable objects in split_unique_types().
1616 _mergemem_worklist.append(n->as_MergeMem());
1617 } else if (OptimizePtrCompare && n->is_Cmp() &&
1618 (n->Opcode() == Op_CmpP || n->Opcode() == Op_CmpN)) {
1619 // Compare pointers nodes
1620 ptr_cmp_worklist.append(n);
1621 }
// Push all users so the closure above eventually reaches every node.
1622 for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
1623 Node* m = n->fast_out(i); // Get user
1624 worklist_init.push(m);
1625 }
1626 }
1627
// No allocations or object-returning static calls: nothing escape analysis
// could ever scalar-replace or eliminate, so bail out early.
1628 if (alloc_worklist.length() == 0) {
1629 _collecting = false;
1630 return false; // Nothing to do.
1631 }
1632
1633 // 2. First pass to create simple CG edges (doesn't require to walk CG).
1634 uint delayed_size = _delayed_worklist.size();
1635 for( uint next = 0; next < delayed_size; ++next ) {
1636 Node* n = _delayed_worklist.at(next);
1637 build_connection_graph(n, igvn);
1638 }
1639
1640 // 3. Pass to create initial fields edges (JavaObject -F-> AddP)
// [listing gap: embedded lines 1641-1706 are not shown. Stages 3 and 4
// (the iterative CG construction, bounded by CG_BUILD_ITER_LIMIT) live
// there; 'worklist', 'cg_worklist' and 'addp_length' used below are
// presumably declared in that omitted region — confirm against the full
// file. The 'return false' here appears to be the iteration-limit bailout.]
1707 return false;
1708 }
1709 #undef CG_BUILD_ITER_LIMIT
1710
1711 // 5. Propagate escaped states.
1712 worklist.clear();
1713
1714 // mark all nodes reachable from GlobalEscape nodes
1715 (void)propagate_escape_state(&cg_worklist, &worklist, PointsToNode::GlobalEscape);
1716
1717 // mark all nodes reachable from ArgEscape nodes
// Return value remembers whether anything stayed below ArgEscape.
1718 bool has_non_escaping_obj = propagate_escape_state(&cg_worklist, &worklist, PointsToNode::ArgEscape);
1719
// Resource-area-backed visited set for find_init_values() traversals.
1720 Arena* arena = Thread::current()->resource_area();
1721 VectorSet visited(arena);
1722
1723 // 6. Find fields initializing values for not escaped allocations
1724 uint alloc_length = alloc_worklist.length();
1725 for (uint next = 0; next < alloc_length; ++next) {
1726 Node* n = alloc_worklist.at(next);
1727 if (ptnode_adr(n->_idx)->escape_state() == PointsToNode::NoEscape) {
1728 has_non_escaping_obj = true;
1729 if (n->is_Allocate()) {
1730 find_init_values(n, &visited, igvn);
1731 }
1732 }
1733 }
1734
1735 uint cg_length = cg_worklist.length();
1736
1737 // Skip the rest of code if all objects escaped.
// Zeroing the cached lengths disables the following worklist passes
// without restructuring the control flow.
1738 if (!has_non_escaping_obj) {
1739 cg_length = 0;
1740 addp_length = 0;
1741 }
1742
// Give value-less LocalVar/Field CG nodes a conservative points-to edge.
1743 for (uint next = 0; next < cg_length; ++next) {
1744 int ni = cg_worklist.at(next);
1745 PointsToNode* ptn = ptnode_adr(ni);
1746 PointsToNode::NodeType nt = ptn->node_type();
1747 if (nt == PointsToNode::LocalVar || nt == PointsToNode::Field) {
1748 if (ptn->edge_count() == 0) {
1749 // No values were found. Assume the value was set
1750 // outside this method - add edge to phantom object.
1751 add_pointsto_edge(ni, _phantom_object);
// [listing gap: embedded lines 1752-1854 are not shown. The ptr-compare
// loop below is presumably inside an OptimizePtrCompare guard opened in
// the omitted code — note the extra closing brace after its cleanup.]
1855 while (ptr_cmp_worklist.length() != 0) {
1856 Node *n = ptr_cmp_worklist.pop();
// optimize_ptr_compare() folds CmpP/CmpN to a constant result when the
// CG proves the operands can(not) alias; NULL means "don't know".
1857 Node *res = optimize_ptr_compare(n);
1858 if (res != NULL) {
1859 #ifndef PRODUCT
1860 if (PrintOptimizePtrCompare) {
1861 tty->print_cr("++++ Replaced: %d %s(%d,%d) --> %s", n->_idx, (n->Opcode() == Op_CmpP ? "CmpP" : "CmpN"), n->in(1)->_idx, n->in(2)->_idx, (res == _pcmp_eq ? "EQ" : "NotEQ"));
1862 if (Verbose) {
1863 n->dump(1);
1864 }
1865 }
1866 #endif
1867 _igvn->replace_node(n, res);
1868 }
1869 }
1870 // cleanup
// Drop the constant result nodes from the GVN hash table if unused.
1871 if (_pcmp_neq->outcnt() == 0)
1872 igvn->hash_delete(_pcmp_neq);
1873 if (_pcmp_eq->outcnt() == 0)
1874 igvn->hash_delete(_pcmp_eq);
1875 }
1876
1877 #ifndef PRODUCT
1878 if (PrintEscapeAnalysis) {
1879 dump(); // Dump ConnectionGraph
1880 }
1881 #endif
1882
// Scan allocations for scalar-replacement candidates; one hit is enough.
1883 bool has_scalar_replaceable_candidates = false;
1884 alloc_length = alloc_worklist.length();
1885 for (uint next = 0; next < alloc_length; ++next) {
1886 Node* n = alloc_worklist.at(next);
1887 PointsToNode* ptn = ptnode_adr(n->_idx);
// NOTE(review): asserting NoEscape for every remaining entry implies
// alloc_worklist was filtered to non-escaping entries in the omitted
// region above — TODO confirm against the full file.
1888 assert(ptn->escape_state() == PointsToNode::NoEscape, "sanity");
1889 if (ptn->scalar_replaceable()) {
1890 has_scalar_replaceable_candidates = true;
1891 break;
1892 }
1893 }
1894
|
// End of the function preceding compute_escape() (definition starts before
// this chunk): unhash the noop_null constant from GVN once it is user-less.
1578 if (noop_null->outcnt() == 0)
1579 igvn->hash_delete(noop_null);
1580 }
1581
// Later revision of ConnectionGraph::compute_escape(). Relative to the
// earlier revision it additionally collects MemBarStoreStore nodes, marks
// non-/arg-escaping Allocates with set_does_not_escape() so expansion emits
// no store-store barrier, and weakens collected MemBarStoreStore nodes
// whose allocation does not globally escape.
// NOTE(review): partial listing — embedded numbering jumps 1646->1713 and
// 1766->1870, so stages 3/4 and part of the post-stage-6 code are omitted.
1582 bool ConnectionGraph::compute_escape() {
1583 Compile* C = _compile;
1584
1585 // 1. Populate Connection Graph (CG) with Ideal nodes.
1586
1587 Unique_Node_List worklist_init;
1588 worklist_init.map(C->unique(), NULL); // preallocate space
1589
1590 // Initialize worklist
1591 if (C->root() != NULL) {
1592 worklist_init.push(C->root());
1593 }
1594
// Side worklists filled during the initial walk; consumed by later stages.
1595 GrowableArray<Node*> alloc_worklist;
1596 GrowableArray<Node*> addp_worklist;
1597 GrowableArray<Node*> ptr_cmp_worklist;
1598 GrowableArray<Node*> storestore_worklist;
1599 PhaseGVN* igvn = _igvn;
1600
1601 // Push all useful nodes onto CG list and set their type.
// Worklist grows during iteration (users pushed below): transitive closure
// of everything reachable from root, de-duplicated by Unique_Node_List.
1602 for( uint next = 0; next < worklist_init.size(); ++next ) {
1603 Node* n = worklist_init.at(next);
1604 record_for_escape_analysis(n, igvn);
1605 // Only allocations and java static calls results are checked
1606 // for an escape status. See process_call_result() below.
// NOTE(review): parses as is_Allocate() || (is_CallStaticJava() && ...)
// because '&&' binds tighter than '||' — intended, but worth explicit
// parentheses to silence -Wparentheses and document the grouping.
1607 if (n->is_Allocate() || n->is_CallStaticJava() &&
1608 ptnode_adr(n->_idx)->node_type() == PointsToNode::JavaObject) {
1609 alloc_worklist.append(n);
1610 } else if(n->is_AddP()) {
1611 // Collect address nodes. Use them during stage 3 below
1612 // to build initial connection graph field edges.
1613 addp_worklist.append(n);
1614 } else if (n->is_MergeMem()) {
1615 // Collect all MergeMem nodes to add memory slices for
1616 // scalar replaceable objects in split_unique_types().
1617 _mergemem_worklist.append(n->as_MergeMem());
1618 } else if (OptimizePtrCompare && n->is_Cmp() &&
1619 (n->Opcode() == Op_CmpP || n->Opcode() == Op_CmpN)) {
1620 // Compare pointers nodes
1621 ptr_cmp_worklist.append(n);
1622 } else if (n->is_MemBarStoreStore()) {
1623 // Collect all MemBarStoreStore nodes so that depending on the
1624 // escape status of the associated Allocate node some of them
1625 // may be eliminated.
1626 storestore_worklist.append(n);
1627 }
// Enqueue users to keep the closure growing.
1628 for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
1629 Node* m = n->fast_out(i); // Get user
1630 worklist_init.push(m);
1631 }
1632 }
1633
// No allocations / object-returning static calls: nothing to analyze.
1634 if (alloc_worklist.length() == 0) {
1635 _collecting = false;
1636 return false; // Nothing to do.
1637 }
1638
1639 // 2. First pass to create simple CG edges (doesn't require to walk CG).
1640 uint delayed_size = _delayed_worklist.size();
1641 for( uint next = 0; next < delayed_size; ++next ) {
1642 Node* n = _delayed_worklist.at(next);
1643 build_connection_graph(n, igvn);
1644 }
1645
1646 // 3. Pass to create initial fields edges (JavaObject -F-> AddP)
// [listing gap: embedded lines 1647-1712 not shown. Stages 3/4 (iterative
// CG build under CG_BUILD_ITER_LIMIT) are there; 'worklist', 'cg_worklist'
// and 'addp_length' used below are presumably declared in that omitted
// region. This 'return false' looks like the iteration-limit bailout.]
1713 return false;
1714 }
1715 #undef CG_BUILD_ITER_LIMIT
1716
1717 // 5. Propagate escaped states.
1718 worklist.clear();
1719
1720 // mark all nodes reachable from GlobalEscape nodes
1721 (void)propagate_escape_state(&cg_worklist, &worklist, PointsToNode::GlobalEscape);
1722
1723 // mark all nodes reachable from ArgEscape nodes
// Also records whether any object remained below ArgEscape.
1724 bool has_non_escaping_obj = propagate_escape_state(&cg_worklist, &worklist, PointsToNode::ArgEscape);
1725
// Resource-area visited set reused across find_init_values() calls.
1726 Arena* arena = Thread::current()->resource_area();
1727 VectorSet visited(arena);
1728
1729 // 6. Find fields initializing values for not escaped allocations
1730 uint alloc_length = alloc_worklist.length();
1731 for (uint next = 0; next < alloc_length; ++next) {
1732 Node* n = alloc_worklist.at(next);
1733 PointsToNode::EscapeState es = ptnode_adr(n->_idx)->escape_state();
1734 if (es == PointsToNode::NoEscape) {
1735 has_non_escaping_obj = true;
1736 if (n->is_Allocate()) {
1737 find_init_values(n, &visited, igvn);
1738 // The object allocated by this Allocate node will never be
1739 // seen by an other thread. Mark it so that when it is
1740 // expanded no MemBarStoreStore is added.
1741 n->as_Allocate()->initialization()->set_does_not_escape();
1742 }
1743 } else if ((es == PointsToNode::ArgEscape) && n->is_Allocate()) {
1744 // Same as above. Mark this Allocate node so that when it is
1745 // expanded no MemBarStoreStore is added.
1746 n->as_Allocate()->initialization()->set_does_not_escape();
1747 }
1748 }
1749
1750 uint cg_length = cg_worklist.length();
1751
1752 // Skip the rest of code if all objects escaped.
// Zeroing the cached lengths short-circuits the remaining passes.
1753 if (!has_non_escaping_obj) {
1754 cg_length = 0;
1755 addp_length = 0;
1756 }
1757
// Give value-less LocalVar/Field CG nodes a conservative phantom edge.
1758 for (uint next = 0; next < cg_length; ++next) {
1759 int ni = cg_worklist.at(next);
1760 PointsToNode* ptn = ptnode_adr(ni);
1761 PointsToNode::NodeType nt = ptn->node_type();
1762 if (nt == PointsToNode::LocalVar || nt == PointsToNode::Field) {
1763 if (ptn->edge_count() == 0) {
1764 // No values were found. Assume the value was set
1765 // outside this method - add edge to phantom object.
1766 add_pointsto_edge(ni, _phantom_object);
// [listing gap: embedded lines 1767-1869 not shown. The loop below is
// presumably inside an OptimizePtrCompare guard opened in the omitted
// code — note the extra closing brace after its cleanup.]
1870 while (ptr_cmp_worklist.length() != 0) {
1871 Node *n = ptr_cmp_worklist.pop();
// Fold CmpP/CmpN to a known EQ/NotEQ constant when the CG can prove it.
1872 Node *res = optimize_ptr_compare(n);
1873 if (res != NULL) {
1874 #ifndef PRODUCT
1875 if (PrintOptimizePtrCompare) {
1876 tty->print_cr("++++ Replaced: %d %s(%d,%d) --> %s", n->_idx, (n->Opcode() == Op_CmpP ? "CmpP" : "CmpN"), n->in(1)->_idx, n->in(2)->_idx, (res == _pcmp_eq ? "EQ" : "NotEQ"));
1877 if (Verbose) {
1878 n->dump(1);
1879 }
1880 }
1881 #endif
1882 _igvn->replace_node(n, res);
1883 }
1884 }
1885 // cleanup
// Unhash the constant result nodes from GVN if nothing uses them.
1886 if (_pcmp_neq->outcnt() == 0)
1887 igvn->hash_delete(_pcmp_neq);
1888 if (_pcmp_eq->outcnt() == 0)
1889 igvn->hash_delete(_pcmp_eq);
1890 }
1891
1892 // For MemBarStoreStore nodes added in library_call.cpp, check
1893 // escape status of associated AllocateNode and optimize out
1894 // MemBarStoreStore node if the allocated object never escapes.
1895 while (storestore_worklist.length() != 0) {
1896 Node *n = storestore_worklist.pop();
// NOTE(review): cosmetic — stray space in 'n ->as_MemBarStoreStore()'.
1897 MemBarStoreStoreNode *storestore = n ->as_MemBarStoreStore();
// The barrier's Precedent input's in(0) is taken to be the AllocateNode
// it protects (asserted just below).
1898 Node *alloc = storestore->in(MemBarNode::Precedent)->in(0);
1899 assert (alloc->is_Allocate(), "storestore should point to AllocateNode");
1900 PointsToNode::EscapeState es = ptnode_adr(alloc->_idx)->escape_state();
1901 if (es == PointsToNode::NoEscape || es == PointsToNode::ArgEscape) {
// Replace the store-store barrier with a MemBarCPUOrder: compiler-only
// ordering is kept while the hardware fence is dropped, preserving the
// barrier's original Memory and Control edges.
1902 MemBarNode* mb = MemBarNode::make(C, Op_MemBarCPUOrder, Compile::AliasIdxBot);
1903 mb->init_req(TypeFunc::Memory, storestore->in(TypeFunc::Memory));
1904 mb->init_req(TypeFunc::Control, storestore->in(TypeFunc::Control));
1905
1906 _igvn->register_new_node_with_optimizer(mb);
1907 _igvn->replace_node(storestore, mb);
1908 }
1909 }
1910
1911 #ifndef PRODUCT
1912 if (PrintEscapeAnalysis) {
1913 dump(); // Dump ConnectionGraph
1914 }
1915 #endif
1916
// One scalar-replaceable, non-escaping allocation is enough to proceed.
1917 bool has_scalar_replaceable_candidates = false;
1918 alloc_length = alloc_worklist.length();
1919 for (uint next = 0; next < alloc_length; ++next) {
1920 Node* n = alloc_worklist.at(next);
1921 PointsToNode* ptn = ptnode_adr(n->_idx);
// NOTE(review): the assert implies alloc_worklist was filtered down to
// NoEscape entries in the omitted region above — TODO confirm.
1922 assert(ptn->escape_state() == PointsToNode::NoEscape, "sanity");
1923 if (ptn->scalar_replaceable()) {
1924 has_scalar_replaceable_candidates = true;
1925 break;
1926 }
1927 }
1928
|