// NOTE(review): fragment of C2 escape analysis, connection-graph BUILD pass
// ("before" column of a two-column listing; the embedded 447-486 numbers are
// listing artifacts). The span starts mid-statement: the switch head and the
// definitions of `n`, `opcode`, `igvn`, `delayed_worklist` are outside this view.
447 igvn->type(n->in(TypeFunc::Parms))->isa_oopptr()) {
448 // Treat Return value as LocalVar with GlobalEscape escape state.
449 add_local_var_and_edge(n, PointsToNode::GlobalEscape,
450 n->in(TypeFunc::Parms), delayed_worklist);
451 }
452 break;
453 }
// GetAndSet is both a load and a store: record the load side now, then fall
// through to the store handling below (the `// fallthrough` marker is original).
454 case Op_GetAndSetP:
455 case Op_GetAndSetN: {
456 add_objload_to_connection_graph(n, delayed_worklist);
457 // fallthrough
458 }
459 case Op_StoreP:
460 case Op_StoreN:
461 case Op_StorePConditional:
462 case Op_CompareAndSwapP:
463 case Op_CompareAndSwapN: {
464 Node* adr = n->in(MemNode::Address);
465 const Type *adr_type = igvn->type(adr);
466 adr_type = adr_type->make_ptr();
// NOTE(review): latent defect — make_ptr() can return NULL (the revised
// column of this listing inserts `if (adr_type == NULL) break; // skip dead
// nodes` at exactly this point), yet the next line dereferences adr_type
// unconditionally.
// Precedence note: `A || B && C` parses as `A || (B && C)`; the extra clause
// only admits StoreP/StoreN of a raw address into a fresh allocation
// (a store captured by an Initialize node — see the ASSERT below).
467 if (adr_type->isa_oopptr() ||
468 (opcode == Op_StoreP || opcode == Op_StoreN) &&
469 (adr_type == TypeRawPtr::NOTNULL &&
470 adr->in(AddPNode::Address)->is_Proj() &&
471 adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
472 delayed_worklist->push(n); // Process it later.
473 #ifdef ASSERT
474 assert(adr->is_AddP(), "expecting an AddP");
475 if (adr_type == TypeRawPtr::NOTNULL) {
476 // Verify a raw address for a store captured by Initialize node.
477 int offs = (int)igvn->find_intptr_t_con(adr->in(AddPNode::Offset), Type::OffsetBot);
478 assert(offs != Type::OffsetBot, "offset must be a constant");
479 }
480 #endif
481 } else {
482 // Ignore copy the displaced header to the BoxNode (OSR compilation).
483 if (adr->is_BoxLock())
484 break;
485 // Stored value escapes in unsafe access.
486 if ((opcode == Op_StoreP) && (adr_type == TypeRawPtr::BOTTOM)) {
// NOTE(review): fragment of the second, FINAL-EDGES pass of the escape
// analysis ("before" column; `_igvn` and a NULL worklist argument distinguish
// it from the delayed-worklist build pass above). Span starts mid-scope.
635 }
636 case Op_Rethrow: // Exception object escapes
637 case Op_Return: {
638 if (n->req() > TypeFunc::Parms &&
639 _igvn->type(n->in(TypeFunc::Parms))->isa_oopptr()) {
640 // Treat Return value as LocalVar with GlobalEscape escape state.
641 add_local_var_and_edge(n, PointsToNode::GlobalEscape,
642 n->in(TypeFunc::Parms), NULL);
643 break;
644 }
// ELSE_FAIL: the node was expected to have been filtered/registered by the
// first pass; reaching here is a processing-order failure.
645 ELSE_FAIL("Op_Return");
646 }
647 case Op_StoreP:
648 case Op_StoreN:
649 case Op_StorePConditional:
650 case Op_CompareAndSwapP:
651 case Op_CompareAndSwapN:
652 case Op_GetAndSetP:
653 case Op_GetAndSetN: {
654 Node* adr = n->in(MemNode::Address);
// GetAndSet also produces the old value: model it as a NoEscape local var
// fed from the address. The `t->make_ptr() != NULL` guard skips results with
// no pointer form (the revised column drops this guard and instead asserts a
// non-NULL adr_type up front).
655 if (opcode == Op_GetAndSetP || opcode == Op_GetAndSetN) {
656 const Type* t = _igvn->type(n);
657 if (t->make_ptr() != NULL) {
658 add_local_var_and_edge(n, PointsToNode::NoEscape, adr, NULL);
659 }
660 }
661 const Type *adr_type = _igvn->type(adr);
662 adr_type = adr_type->make_ptr();
// NOTE(review): same latent issue as the build pass — make_ptr() may return
// NULL and the next line dereferences it; the revised column adds an ASSERT
// block ("dead node should not be on list") at this point.
663 if (adr_type->isa_oopptr() ||
664 (opcode == Op_StoreP || opcode == Op_StoreN) &&
665 (adr_type == TypeRawPtr::NOTNULL &&
666 adr->in(AddPNode::Address)->is_Proj() &&
667 adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
668 // Point Address to Value
669 PointsToNode* adr_ptn = ptnode_adr(adr->_idx);
670 assert(adr_ptn != NULL &&
671 adr_ptn->as_Field()->is_oop(), "node should be registered");
672 Node *val = n->in(MemNode::ValueIn);
673 PointsToNode* ptn = ptnode_adr(val->_idx);
674 assert(ptn != NULL, "node should be registered");
675 add_edge(adr_ptn, ptn);
676 break;
// Raw StoreP with BOTTOM raw address = unsafe access: the stored value is
// treated as globally escaping.
677 } else if ((opcode == Op_StoreP) && (adr_type == TypeRawPtr::BOTTOM)) {
678 // Stored value escapes in unsafe access.
679 Node *val = n->in(MemNode::ValueIn);
680 PointsToNode* ptn = ptnode_adr(val->_idx);
681 assert(ptn != NULL, "node should be registered");
682 ptn->set_escape_state(PointsToNode::GlobalEscape);
// NOTE(review): fragment of the pointer-compare optimization ("before"
// column). Returns _pcmp_eq / _pcmp_neq when the points-to analysis can prove
// the comparison, or NULL when it cannot decide. `jobj1`/`jobj2`/`ptn1`/
// `ptn2` are defined above this view.
1765 return _pcmp_neq; // This includes nullness check.
1766 }
1767 }
1768 }
// Symmetric case: jobj2 is a non-escaping allocation that ptn1 cannot point
// to, so the two operands can never be the same object (nor NULL == it).
1769 if (jobj2 != NULL) {
1770 if (jobj2->escape_state() == PointsToNode::NoEscape) {
1771 Node* obj = jobj2->ideal_node();
1772 // Comparing not escaping allocation.
1773 if ((obj->is_Allocate() || obj->is_CallStaticJava()) &&
1774 !ptn1->points_to(jobj2)) {
1775 return _pcmp_neq; // This includes nullness check.
1776 }
1777 }
1778 }
1779 if (jobj1 != NULL && jobj1 != phantom_obj &&
1780 jobj2 != NULL && jobj2 != phantom_obj &&
1781 jobj1->ideal_node()->is_Con() &&
1782 jobj2->ideal_node()->is_Con()) {
1783 // Klass or String constants compare. Need to be careful with
1784 // compressed pointers - compare types of ConN and ConP instead of nodes.
// NOTE(review): the revised column replaces these two bottom_type()->make_ptr()
// calls (plus the sanity assert) with get_ptr_type(). Also, t1/t2 already
// come from make_ptr() here, so the second make_ptr() on the next `if` looks
// redundant — presumably idempotent on pointer types; verify before changing.
1785 const Type* t1 = jobj1->ideal_node()->bottom_type()->make_ptr();
1786 const Type* t2 = jobj2->ideal_node()->bottom_type()->make_ptr();
1787 assert(t1 != NULL && t2 != NULL, "sanity");
1788 if (t1->make_ptr() == t2->make_ptr()) {
1789 return _pcmp_eq;
1790 } else {
1791 return _pcmp_neq;
1792 }
1793 }
// If the points-to sets intersect, nothing can be concluded.
1794 if (ptn1->meet(ptn2)) {
1795 return NULL; // Sets are not disjoint
1796 }
1797
1798 // Sets are disjoint.
1799 bool set1_has_unknown_ptr = ptn1->points_to(phantom_obj);
1800 bool set2_has_unknown_ptr = ptn2->points_to(phantom_obj);
1801 bool set1_has_null_ptr = ptn1->points_to(null_obj);
1802 bool set2_has_null_ptr = ptn2->points_to(null_obj);
// Disjoint sets are still inconclusive if one side may be an unknown object
// and the other may be NULL (the unknown object could itself be NULL).
// Precedence note: `A && B || C && D` parses as `(A && B) || (C && D)`.
1803 if (set1_has_unknown_ptr && set2_has_null_ptr ||
1804 set2_has_unknown_ptr && set1_has_null_ptr) {
1805 // Check nullness of unknown object.
1806 return NULL;
1807 }
|
// NOTE(review): revised ("after") column of the connection-graph BUILD pass
// fragment. Differs from the left column by the explicit dead-node skip after
// make_ptr() below; otherwise identical.
447 igvn->type(n->in(TypeFunc::Parms))->isa_oopptr()) {
448 // Treat Return value as LocalVar with GlobalEscape escape state.
449 add_local_var_and_edge(n, PointsToNode::GlobalEscape,
450 n->in(TypeFunc::Parms), delayed_worklist);
451 }
452 break;
453 }
// GetAndSet: record the load side, then fall through to the store handling.
454 case Op_GetAndSetP:
455 case Op_GetAndSetN: {
456 add_objload_to_connection_graph(n, delayed_worklist);
457 // fallthrough
458 }
459 case Op_StoreP:
460 case Op_StoreN:
461 case Op_StorePConditional:
462 case Op_CompareAndSwapP:
463 case Op_CompareAndSwapN: {
464 Node* adr = n->in(MemNode::Address);
465 const Type *adr_type = igvn->type(adr);
466 adr_type = adr_type->make_ptr();
// The fix: make_ptr() yields NULL for dead nodes — bail out instead of
// dereferencing NULL below. This also keeps dead nodes off delayed_worklist.
467 if (adr_type == NULL) {
468 break; // skip dead nodes
469 }
// `A || B && C` parses as `A || (B && C)`: only StoreP/StoreN of a raw
// address into a fresh allocation qualify via the second clause.
470 if (adr_type->isa_oopptr() ||
471 (opcode == Op_StoreP || opcode == Op_StoreN) &&
472 (adr_type == TypeRawPtr::NOTNULL &&
473 adr->in(AddPNode::Address)->is_Proj() &&
474 adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
475 delayed_worklist->push(n); // Process it later.
476 #ifdef ASSERT
477 assert(adr->is_AddP(), "expecting an AddP");
478 if (adr_type == TypeRawPtr::NOTNULL) {
479 // Verify a raw address for a store captured by Initialize node.
480 int offs = (int)igvn->find_intptr_t_con(adr->in(AddPNode::Offset), Type::OffsetBot);
481 assert(offs != Type::OffsetBot, "offset must be a constant");
482 }
483 #endif
484 } else {
485 // Ignore copy the displaced header to the BoxNode (OSR compilation).
486 if (adr->is_BoxLock())
487 break;
488 // Stored value escapes in unsafe access.
489 if ((opcode == Op_StoreP) && (adr_type == TypeRawPtr::BOTTOM)) {
// NOTE(review): revised ("after") column of the FINAL-EDGES pass fragment.
// Differences vs the left column: a debug-only NULL assert on adr_type, and
// the GetAndSet edge is added unconditionally (the t->make_ptr() guard is gone).
638 }
639 case Op_Rethrow: // Exception object escapes
640 case Op_Return: {
641 if (n->req() > TypeFunc::Parms &&
642 _igvn->type(n->in(TypeFunc::Parms))->isa_oopptr()) {
643 // Treat Return value as LocalVar with GlobalEscape escape state.
644 add_local_var_and_edge(n, PointsToNode::GlobalEscape,
645 n->in(TypeFunc::Parms), NULL);
646 break;
647 }
648 ELSE_FAIL("Op_Return");
649 }
650 case Op_StoreP:
651 case Op_StoreN:
652 case Op_StorePConditional:
653 case Op_CompareAndSwapP:
654 case Op_CompareAndSwapN:
655 case Op_GetAndSetP:
656 case Op_GetAndSetN: {
657 Node* adr = n->in(MemNode::Address);
658 const Type *adr_type = _igvn->type(adr);
659 adr_type = adr_type->make_ptr();
// Debug-only check: the build pass now skips dead nodes (NULL adr_type)
// before pushing onto the worklist, so seeing one here is a bug. In product
// builds this block compiles away and the code relies on that invariant.
660 #ifdef ASSERT
661 if (adr_type == NULL) {
662 n->dump(1);
663 assert(adr_type != NULL, "dead node should not be on list");
664 break;
665 }
666 #endif
// GetAndSet result = old value at the address: NoEscape local var edge.
667 if (opcode == Op_GetAndSetP || opcode == Op_GetAndSetN) {
668 add_local_var_and_edge(n, PointsToNode::NoEscape, adr, NULL);
669 }
670 if (adr_type->isa_oopptr() ||
671 (opcode == Op_StoreP || opcode == Op_StoreN) &&
672 (adr_type == TypeRawPtr::NOTNULL &&
673 adr->in(AddPNode::Address)->is_Proj() &&
674 adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
675 // Point Address to Value
676 PointsToNode* adr_ptn = ptnode_adr(adr->_idx);
677 assert(adr_ptn != NULL &&
678 adr_ptn->as_Field()->is_oop(), "node should be registered");
679 Node *val = n->in(MemNode::ValueIn);
680 PointsToNode* ptn = ptnode_adr(val->_idx);
681 assert(ptn != NULL, "node should be registered");
682 add_edge(adr_ptn, ptn);
683 break;
// Raw StoreP with BOTTOM raw address = unsafe access: stored value escapes.
684 } else if ((opcode == Op_StoreP) && (adr_type == TypeRawPtr::BOTTOM)) {
685 // Stored value escapes in unsafe access.
686 Node *val = n->in(MemNode::ValueIn);
687 PointsToNode* ptn = ptnode_adr(val->_idx);
688 assert(ptn != NULL, "node should be registered");
689 ptn->set_escape_state(PointsToNode::GlobalEscape);
// NOTE(review): revised ("after") column of the pointer-compare optimization
// fragment. Differs from the left column only in the constant-compare case:
// get_ptr_type() replaces bottom_type()->make_ptr() and the sanity assert.
1772 return _pcmp_neq; // This includes nullness check.
1773 }
1774 }
1775 }
// jobj2 is a non-escaping allocation ptn1 cannot point to: never equal.
1776 if (jobj2 != NULL) {
1777 if (jobj2->escape_state() == PointsToNode::NoEscape) {
1778 Node* obj = jobj2->ideal_node();
1779 // Comparing not escaping allocation.
1780 if ((obj->is_Allocate() || obj->is_CallStaticJava()) &&
1781 !ptn1->points_to(jobj2)) {
1782 return _pcmp_neq; // This includes nullness check.
1783 }
1784 }
1785 }
1786 if (jobj1 != NULL && jobj1 != phantom_obj &&
1787 jobj2 != NULL && jobj2 != phantom_obj &&
1788 jobj1->ideal_node()->is_Con() &&
1789 jobj2->ideal_node()->is_Con()) {
1790 // Klass or String constants compare. Need to be careful with
1791 // compressed pointers - compare types of ConN and ConP instead of nodes.
// get_ptr_type() fetches the pointer type of a constant node directly
// (presumably handling the ConN/ConP distinction internally — the extra
// make_ptr() below normalizes both to plain pointer types for comparison).
1792 const Type* t1 = jobj1->ideal_node()->get_ptr_type();
1793 const Type* t2 = jobj2->ideal_node()->get_ptr_type();
1794 if (t1->make_ptr() == t2->make_ptr()) {
1795 return _pcmp_eq;
1796 } else {
1797 return _pcmp_neq;
1798 }
1799 }
// Intersecting points-to sets: cannot decide.
1800 if (ptn1->meet(ptn2)) {
1801 return NULL; // Sets are not disjoint
1802 }
1803
1804 // Sets are disjoint.
1805 bool set1_has_unknown_ptr = ptn1->points_to(phantom_obj);
1806 bool set2_has_unknown_ptr = ptn2->points_to(phantom_obj);
1807 bool set1_has_null_ptr = ptn1->points_to(null_obj);
1808 bool set2_has_null_ptr = ptn2->points_to(null_obj);
// Still inconclusive if one side may be an unknown object while the other
// may be NULL — the unknown object could itself be NULL.
1809 if (set1_has_unknown_ptr && set2_has_null_ptr ||
1810 set2_has_unknown_ptr && set1_has_null_ptr) {
1811 // Check nullness of unknown object.
1812 return NULL;
1813 }
|