28 #include "gc/shared/c2/barrierSetC2.hpp"
29 #include "libadt/vectset.hpp"
30 #include "memory/allocation.hpp"
31 #include "memory/resourceArea.hpp"
32 #include "opto/c2compiler.hpp"
33 #include "opto/arraycopynode.hpp"
34 #include "opto/callnode.hpp"
35 #include "opto/cfgnode.hpp"
36 #include "opto/compile.hpp"
37 #include "opto/escape.hpp"
38 #include "opto/phaseX.hpp"
39 #include "opto/movenode.hpp"
40 #include "opto/rootnode.hpp"
41 #include "utilities/macros.hpp"
42 #if INCLUDE_G1GC
43 #include "gc/g1/g1ThreadLocalData.hpp"
44 #endif // INCLUDE_G1GC
45 #if INCLUDE_ZGC
46 #include "gc/z/c2/zBarrierSetC2.hpp"
47 #endif
48
49 ConnectionGraph::ConnectionGraph(Compile * C, PhaseIterGVN *igvn) :
50 _nodes(C->comp_arena(), C->unique(), C->unique(), NULL),
51 _in_worklist(C->comp_arena()),
52 _next_pidx(0),
53 _collecting(true),
54 _verify(false),
55 _compile(C),
56 _igvn(igvn),
57 _node_map(C->comp_arena()) {
58 // Add unknown java object.
59 add_java_object(C->top(), PointsToNode::GlobalEscape);
60 phantom_obj = ptnode_adr(C->top()->_idx)->as_JavaObject();
61 // Add ConP(#NULL) and ConN(#NULL) nodes.
62 Node* oop_null = igvn->zerocon(T_OBJECT);
63 assert(oop_null->_idx < nodes_size(), "should be created already");
64 add_java_object(oop_null, PointsToNode::NoEscape);
65 null_obj = ptnode_adr(oop_null->_idx)->as_JavaObject();
66 if (UseCompressedOops) {
67 Node* noop_null = igvn->zerocon(T_NARROWOOP);
495 else if (UseZGC) {
496 if (n->as_Proj()->_con == LoadBarrierNode::Oop && n->in(0)->is_LoadBarrier()) {
497 add_local_var_and_edge(n, PointsToNode::NoEscape, n->in(0)->in(LoadBarrierNode::Oop), delayed_worklist);
498 }
499 }
500 #endif
501 break;
502 }
503 case Op_Rethrow: // Exception object escapes
504 case Op_Return: {
505 if (n->req() > TypeFunc::Parms &&
506 igvn->type(n->in(TypeFunc::Parms))->isa_oopptr()) {
507 // Treat Return value as LocalVar with GlobalEscape escape state.
508 add_local_var_and_edge(n, PointsToNode::GlobalEscape,
509 n->in(TypeFunc::Parms), delayed_worklist);
510 }
511 break;
512 }
513 case Op_CompareAndExchangeP:
514 case Op_CompareAndExchangeN:
515 case Op_GetAndSetP:
516 case Op_GetAndSetN: {
517 add_objload_to_connection_graph(n, delayed_worklist);
518 // fallthrough
519 }
520 case Op_StoreP:
521 case Op_StoreN:
522 case Op_StoreNKlass:
523 case Op_StorePConditional:
524 case Op_WeakCompareAndSwapP:
525 case Op_WeakCompareAndSwapN:
526 case Op_CompareAndSwapP:
527 case Op_CompareAndSwapN: {
528 Node* adr = n->in(MemNode::Address);
529 const Type *adr_type = igvn->type(adr);
530 adr_type = adr_type->make_ptr();
531 if (adr_type == NULL) {
532 break; // skip dead nodes
533 }
534 if ( adr_type->isa_oopptr()
535 || ( (opcode == Op_StoreP || opcode == Op_StoreN || opcode == Op_StoreNKlass)
536 && adr_type == TypeRawPtr::NOTNULL
537 && adr->in(AddPNode::Address)->is_Proj()
538 && adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
539 delayed_worklist->push(n); // Process it later.
540 #ifdef ASSERT
541 assert(adr->is_AddP(), "expecting an AddP");
542 if (adr_type == TypeRawPtr::NOTNULL) {
543 // Verify a raw address for a store captured by Initialize node.
544 int offs = (int)igvn->find_intptr_t_con(adr->in(AddPNode::Offset), Type::OffsetBot);
545 assert(offs != Type::OffsetBot, "offset must be a constant");
546 }
547 #endif
548 } else {
549 // Ignore copy the displaced header to the BoxNode (OSR compilation).
550 if (adr->is_BoxLock())
551 break;
552 // Stored value escapes in unsafe access.
553 if ((opcode == Op_StoreP) && adr_type->isa_rawptr()) {
554 // Pointer stores in G1 barriers looks like unsafe access.
555 // Ignore such stores to be able scalar replace non-escaping
556 // allocations.
557 #if INCLUDE_G1GC
558 if (UseG1GC && adr->is_AddP()) {
559 Node* base = get_addp_base(adr);
560 if (base->Opcode() == Op_LoadP &&
561 base->in(MemNode::Address)->is_AddP()) {
562 adr = base->in(MemNode::Address);
563 Node* tls = get_addp_base(adr);
564 if (tls->Opcode() == Op_ThreadLocal) {
565 int offs = (int)igvn->find_intptr_t_con(adr->in(AddPNode::Offset), Type::OffsetBot);
566 if (offs == in_bytes(G1ThreadLocalData::satb_mark_queue_buffer_offset())) {
567 break; // G1 pre barrier previous oop value store.
568 }
569 if (offs == in_bytes(G1ThreadLocalData::dirty_card_queue_buffer_offset())) {
570 break; // G1 post barrier card address store.
571 }
572 }
573 }
574 }
575 #endif
576 delayed_worklist->push(n); // Process unsafe access later.
577 break;
578 }
579 #ifdef ASSERT
580 n->dump(1);
581 assert(false, "not unsafe or G1 barrier raw StoreP");
582 #endif
583 }
584 break;
585 }
586 case Op_AryEq:
587 case Op_HasNegatives:
588 case Op_StrComp:
589 case Op_StrEquals:
590 case Op_StrIndexOf:
591 case Op_StrIndexOfChar:
592 case Op_StrInflatedCopy:
593 case Op_StrCompressedCopy:
594 case Op_EncodeISOArray: {
595 add_local_var(n, PointsToNode::ArgEscape);
596 delayed_worklist->push(n); // Process it later.
597 break;
598 }
599 case Op_ThreadLocal: {
600 add_java_object(n, PointsToNode::ArgEscape);
601 break;
602 }
603 default:
604 ; // Do nothing for nodes not related to EA.
605 }
606 return;
607 }
608
609 #ifdef ASSERT
// ELSE_FAIL(name): shared tail for switch cases in add_final_edges().
// Debug builds dump the offending node and assert with 'name' as the
// message; product builds silently break. Note the expansion is a bare
// statement list ending in an unscoped 'break;', so this macro is only
// valid directly inside a switch (or loop) body, and it relies on a
// local 'n' (the Node* being processed) being in scope at the use site.
610 #define ELSE_FAIL(name) \
611   /* Should not be called for not pointer type. */ \
612   n->dump(1); \
613   assert(false, name); \
614   break;
615 #else
// Product build: no verification, just leave the switch case.
616 #define ELSE_FAIL(name) \
617   break;
618 #endif
619
620 // Add final simple edges to graph.
621 void ConnectionGraph::add_final_edges(Node *n) {
622 PointsToNode* n_ptn = ptnode_adr(n->_idx);
723 case Op_Return: {
724 if (n->req() > TypeFunc::Parms &&
725 _igvn->type(n->in(TypeFunc::Parms))->isa_oopptr()) {
726 // Treat Return value as LocalVar with GlobalEscape escape state.
727 add_local_var_and_edge(n, PointsToNode::GlobalEscape,
728 n->in(TypeFunc::Parms), NULL);
729 break;
730 }
731 ELSE_FAIL("Op_Return");
732 }
733 case Op_StoreP:
734 case Op_StoreN:
735 case Op_StoreNKlass:
736 case Op_StorePConditional:
737 case Op_CompareAndExchangeP:
738 case Op_CompareAndExchangeN:
739 case Op_CompareAndSwapP:
740 case Op_CompareAndSwapN:
741 case Op_WeakCompareAndSwapP:
742 case Op_WeakCompareAndSwapN:
743 case Op_GetAndSetP:
744 case Op_GetAndSetN: {
745 Node* adr = n->in(MemNode::Address);
746 const Type *adr_type = _igvn->type(adr);
747 adr_type = adr_type->make_ptr();
748 #ifdef ASSERT
749 if (adr_type == NULL) {
750 n->dump(1);
751 assert(adr_type != NULL, "dead node should not be on list");
752 break;
753 }
754 #endif
755 if (opcode == Op_GetAndSetP || opcode == Op_GetAndSetN ||
756 opcode == Op_CompareAndExchangeN || opcode == Op_CompareAndExchangeP) {
757 add_local_var_and_edge(n, PointsToNode::NoEscape, adr, NULL);
758 }
759 if ( adr_type->isa_oopptr()
760 || ( (opcode == Op_StoreP || opcode == Op_StoreN || opcode == Op_StoreNKlass)
761 && adr_type == TypeRawPtr::NOTNULL
762 && adr->in(AddPNode::Address)->is_Proj()
763 && adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
764 // Point Address to Value
765 PointsToNode* adr_ptn = ptnode_adr(adr->_idx);
766 assert(adr_ptn != NULL &&
767 adr_ptn->as_Field()->is_oop(), "node should be registered");
768 Node *val = n->in(MemNode::ValueIn);
769 PointsToNode* ptn = ptnode_adr(val->_idx);
770 assert(ptn != NULL, "node should be registered");
771 add_edge(adr_ptn, ptn);
772 break;
773 } else if ((opcode == Op_StoreP) && adr_type->isa_rawptr()) {
774 // Stored value escapes in unsafe access.
775 Node *val = n->in(MemNode::ValueIn);
798 case Op_EncodeISOArray: {
799 // char[]/byte[] arrays passed to string intrinsic do not escape but
800 // they are not scalar replaceable. Adjust escape state for them.
801 // Start from in(2) edge since in(1) is memory edge.
802 for (uint i = 2; i < n->req(); i++) {
803 Node* adr = n->in(i);
804 const Type* at = _igvn->type(adr);
805 if (!adr->is_top() && at->isa_ptr()) {
806 assert(at == Type::TOP || at == TypePtr::NULL_PTR ||
807 at->isa_ptr() != NULL, "expecting a pointer");
808 if (adr->is_AddP()) {
809 adr = get_addp_base(adr);
810 }
811 PointsToNode* ptn = ptnode_adr(adr->_idx);
812 assert(ptn != NULL, "node should be registered");
813 add_edge(n_ptn, ptn);
814 }
815 }
816 break;
817 }
818 default: {
819 // This method should be called only for EA specific nodes which may
820 // miss some edges when they were created.
821 #ifdef ASSERT
822 n->dump(1);
823 #endif
824 guarantee(false, "unknown node");
825 }
826 }
827 return;
828 }
829
830 void ConnectionGraph::add_call_node(CallNode* call) {
831 assert(call->returns_pointer(), "only for call which returns pointer");
832 uint call_idx = call->_idx;
833 if (call->is_Allocate()) {
834 Node* k = call->in(AllocateNode::KlassNode);
835 const TypeKlassPtr* kt = k->bottom_type()->isa_klassptr();
836 assert(kt != NULL, "TypeKlassPtr required.");
837 ciKlass* cik = kt->klass();
2096 BasicType bt = T_INT;
2097 if (offset == Type::OffsetBot) {
2098 // Check only oop fields.
2099 if (!adr_type->isa_aryptr() ||
2100 (adr_type->isa_aryptr()->klass() == NULL) ||
2101 adr_type->isa_aryptr()->klass()->is_obj_array_klass()) {
2102 // OffsetBot is used to reference array's element. Ignore first AddP.
2103 if (find_second_addp(n, n->in(AddPNode::Base)) == NULL) {
2104 bt = T_OBJECT;
2105 }
2106 }
2107 } else if (offset != oopDesc::klass_offset_in_bytes()) {
2108 if (adr_type->isa_instptr()) {
2109 ciField* field = _compile->alias_type(adr_type->isa_instptr())->field();
2110 if (field != NULL) {
2111 bt = field->layout_type();
2112 } else {
2113 // Check for unsafe oop field access
2114 if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
2115 n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
2116 n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN)) {
2117 bt = T_OBJECT;
2118 (*unsafe) = true;
2119 }
2120 }
2121 } else if (adr_type->isa_aryptr()) {
2122 if (offset == arrayOopDesc::length_offset_in_bytes()) {
2123 // Ignore array length load.
2124 } else if (find_second_addp(n, n->in(AddPNode::Base)) != NULL) {
2125 // Ignore first AddP.
2126 } else {
2127 const Type* elemtype = adr_type->isa_aryptr()->elem();
2128 bt = elemtype->array_element_basic_type();
2129 }
2130 } else if (adr_type->isa_rawptr() || adr_type->isa_klassptr()) {
2131 // Allocation initialization, ThreadLocal field access, unsafe access
2132 if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
2133 n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
2134 n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN)) {
2135 bt = T_OBJECT;
2361 // AddP ( base == top )
2362 //
2363 Node *base = addp->in(AddPNode::Base);
2364 if (base->uncast()->is_top()) { // The AddP case #3 and #6 and #9.
2365 base = addp->in(AddPNode::Address);
2366 while (base->is_AddP()) {
2367 // Case #6 (unsafe access) may have several chained AddP nodes.
2368 assert(base->in(AddPNode::Base)->uncast()->is_top(), "expected unsafe access address only");
2369 base = base->in(AddPNode::Address);
2370 }
2371 if (base->Opcode() == Op_CheckCastPP &&
2372 base->bottom_type()->isa_rawptr() &&
2373 _igvn->type(base->in(1))->isa_oopptr()) {
2374 base = base->in(1); // Case #9
2375 } else {
2376 Node* uncast_base = base->uncast();
2377 int opcode = uncast_base->Opcode();
2378 assert(opcode == Op_ConP || opcode == Op_ThreadLocal ||
2379 opcode == Op_CastX2P || uncast_base->is_DecodeNarrowPtr() ||
2380 (uncast_base->is_Mem() && (uncast_base->bottom_type()->isa_rawptr() != NULL)) ||
2381 (uncast_base->is_Proj() && uncast_base->in(0)->is_Allocate()), "sanity");
2382 }
2383 }
2384 return base;
2385 }
2386
2387 Node* ConnectionGraph::find_second_addp(Node* addp, Node* n) {
2388 assert(addp->is_AddP() && addp->outcnt() > 0, "Don't process dead nodes");
2389 Node* addp2 = addp->raw_out(0);
2390 if (addp->outcnt() == 1 && addp2->is_AddP() &&
2391 addp2->in(AddPNode::Base) == n &&
2392 addp2->in(AddPNode::Address) == addp) {
2393 assert(addp->in(AddPNode::Base) == n, "expecting the same base");
2394 //
2395 // Find array's offset to push it on worklist first and
2396 // as result process an array's element offset first (pushed second)
2397 // to avoid CastPP for the array's offset.
2398 // Otherwise the inserted CastPP (LocalVar) will point to what
2399 // the AddP (Field) points to. Which would be wrong since
2400 // the algorithm expects the CastPP has the same point as
2401 // as AddP's base CheckCastPP (LocalVar).
|
28 #include "gc/shared/c2/barrierSetC2.hpp"
29 #include "libadt/vectset.hpp"
30 #include "memory/allocation.hpp"
31 #include "memory/resourceArea.hpp"
32 #include "opto/c2compiler.hpp"
33 #include "opto/arraycopynode.hpp"
34 #include "opto/callnode.hpp"
35 #include "opto/cfgnode.hpp"
36 #include "opto/compile.hpp"
37 #include "opto/escape.hpp"
38 #include "opto/phaseX.hpp"
39 #include "opto/movenode.hpp"
40 #include "opto/rootnode.hpp"
41 #include "utilities/macros.hpp"
42 #if INCLUDE_G1GC
43 #include "gc/g1/g1ThreadLocalData.hpp"
44 #endif // INCLUDE_G1GC
45 #if INCLUDE_ZGC
46 #include "gc/z/c2/zBarrierSetC2.hpp"
47 #endif
48 #if INCLUDE_SHENANDOAHGC
49 #include "gc/shenandoah/c2/shenandoahBarrierSetC2.hpp"
50 #endif
51
52 ConnectionGraph::ConnectionGraph(Compile * C, PhaseIterGVN *igvn) :
53 _nodes(C->comp_arena(), C->unique(), C->unique(), NULL),
54 _in_worklist(C->comp_arena()),
55 _next_pidx(0),
56 _collecting(true),
57 _verify(false),
58 _compile(C),
59 _igvn(igvn),
60 _node_map(C->comp_arena()) {
61 // Add unknown java object.
62 add_java_object(C->top(), PointsToNode::GlobalEscape);
63 phantom_obj = ptnode_adr(C->top()->_idx)->as_JavaObject();
64 // Add ConP(#NULL) and ConN(#NULL) nodes.
65 Node* oop_null = igvn->zerocon(T_OBJECT);
66 assert(oop_null->_idx < nodes_size(), "should be created already");
67 add_java_object(oop_null, PointsToNode::NoEscape);
68 null_obj = ptnode_adr(oop_null->_idx)->as_JavaObject();
69 if (UseCompressedOops) {
70 Node* noop_null = igvn->zerocon(T_NARROWOOP);
498 else if (UseZGC) {
499 if (n->as_Proj()->_con == LoadBarrierNode::Oop && n->in(0)->is_LoadBarrier()) {
500 add_local_var_and_edge(n, PointsToNode::NoEscape, n->in(0)->in(LoadBarrierNode::Oop), delayed_worklist);
501 }
502 }
503 #endif
504 break;
505 }
506 case Op_Rethrow: // Exception object escapes
507 case Op_Return: {
508 if (n->req() > TypeFunc::Parms &&
509 igvn->type(n->in(TypeFunc::Parms))->isa_oopptr()) {
510 // Treat Return value as LocalVar with GlobalEscape escape state.
511 add_local_var_and_edge(n, PointsToNode::GlobalEscape,
512 n->in(TypeFunc::Parms), delayed_worklist);
513 }
514 break;
515 }
516 case Op_CompareAndExchangeP:
517 case Op_CompareAndExchangeN:
518 #if INCLUDE_SHENANDOAHGC
519 case Op_ShenandoahCompareAndExchangeP:
520 case Op_ShenandoahCompareAndExchangeN:
521 #endif
522 case Op_GetAndSetP:
523 case Op_GetAndSetN: {
524 add_objload_to_connection_graph(n, delayed_worklist);
525 // fallthrough
526 }
527 case Op_StoreP:
528 case Op_StoreN:
529 case Op_StoreNKlass:
530 case Op_StorePConditional:
531 #if INCLUDE_SHENANDOAHGC
532 case Op_ShenandoahWeakCompareAndSwapP:
533 case Op_ShenandoahWeakCompareAndSwapN:
534 case Op_ShenandoahCompareAndSwapP:
535 case Op_ShenandoahCompareAndSwapN:
536 #endif
537 case Op_WeakCompareAndSwapP:
538 case Op_WeakCompareAndSwapN:
539 case Op_CompareAndSwapP:
540 case Op_CompareAndSwapN: {
541 Node* adr = n->in(MemNode::Address);
542 const Type *adr_type = igvn->type(adr);
543 adr_type = adr_type->make_ptr();
544 if (adr_type == NULL) {
545 break; // skip dead nodes
546 }
547 if ( adr_type->isa_oopptr()
548 || ( (opcode == Op_StoreP || opcode == Op_StoreN || opcode == Op_StoreNKlass)
549 && adr_type == TypeRawPtr::NOTNULL
550 && adr->in(AddPNode::Address)->is_Proj()
551 && adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
552 delayed_worklist->push(n); // Process it later.
553 #ifdef ASSERT
554 assert(adr->is_AddP(), "expecting an AddP");
555 if (adr_type == TypeRawPtr::NOTNULL) {
556 // Verify a raw address for a store captured by Initialize node.
557 int offs = (int)igvn->find_intptr_t_con(adr->in(AddPNode::Offset), Type::OffsetBot);
558 assert(offs != Type::OffsetBot, "offset must be a constant");
559 }
560 #endif
561 } else {
562 // Ignore copy the displaced header to the BoxNode (OSR compilation).
563 if (adr->is_BoxLock())
564 break;
565 // Stored value escapes in unsafe access.
566 if ((opcode == Op_StoreP) && adr_type->isa_rawptr()) {
567 // Pointer stores in G1 barriers looks like unsafe access.
568 // Ignore such stores to be able scalar replace non-escaping
569 // allocations.
570 #if INCLUDE_G1GC || INCLUDE_SHENANDOAHGC
571 if ((UseG1GC || UseShenandoahGC) && adr->is_AddP()) {
572 Node* base = get_addp_base(adr);
573 if (base->Opcode() == Op_LoadP &&
574 base->in(MemNode::Address)->is_AddP()) {
575 adr = base->in(MemNode::Address);
576 Node* tls = get_addp_base(adr);
577 if (tls->Opcode() == Op_ThreadLocal) {
578 int offs = (int)igvn->find_intptr_t_con(adr->in(AddPNode::Offset), Type::OffsetBot);
579 #if INCLUDE_G1GC && INCLUDE_SHENANDOAHGC
580 const int buf_offset = in_bytes(UseG1GC ? G1ThreadLocalData::satb_mark_queue_buffer_offset()
581 : ShenandoahThreadLocalData::satb_mark_queue_buffer_offset());
582 #elif INCLUDE_G1GC
583 const int buf_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_buffer_offset());
584 #else
585 const int buf_offset = in_bytes(ShenandoahThreadLocalData::satb_mark_queue_buffer_offset());
586 #endif
587 if (offs == buf_offset) {
588 break; // G1 pre barrier previous oop value store.
589 }
590 if (offs == in_bytes(G1ThreadLocalData::dirty_card_queue_buffer_offset())) {
591 break; // G1 post barrier card address store.
592 }
593 }
594 }
595 }
596 #endif
597 delayed_worklist->push(n); // Process unsafe access later.
598 break;
599 }
600 #ifdef ASSERT
601 n->dump(1);
602 assert(false, "not unsafe or G1 barrier raw StoreP");
603 #endif
604 }
605 break;
606 }
607 case Op_AryEq:
608 case Op_HasNegatives:
609 case Op_StrComp:
610 case Op_StrEquals:
611 case Op_StrIndexOf:
612 case Op_StrIndexOfChar:
613 case Op_StrInflatedCopy:
614 case Op_StrCompressedCopy:
615 case Op_EncodeISOArray: {
616 add_local_var(n, PointsToNode::ArgEscape);
617 delayed_worklist->push(n); // Process it later.
618 break;
619 }
620 case Op_ThreadLocal: {
621 add_java_object(n, PointsToNode::ArgEscape);
622 break;
623 }
624 #if INCLUDE_SHENANDOAHGC
625 case Op_ShenandoahEnqueueBarrier:
626 add_local_var_and_edge(n, PointsToNode::NoEscape, n->in(1), delayed_worklist);
627 break;
628 case Op_ShenandoahLoadReferenceBarrier:
629 add_local_var_and_edge(n, PointsToNode::NoEscape, n->in(ShenandoahLoadReferenceBarrierNode::ValueIn), delayed_worklist);
630 #endif
631 default:
632 ; // Do nothing for nodes not related to EA.
633 }
634 return;
635 }
636
637 #ifdef ASSERT
// ELSE_FAIL(name): shared tail for switch cases in add_final_edges().
// Debug builds dump the offending node and assert with 'name' as the
// message; product builds silently break. Note the expansion is a bare
// statement list ending in an unscoped 'break;', so this macro is only
// valid directly inside a switch (or loop) body, and it relies on a
// local 'n' (the Node* being processed) being in scope at the use site.
638 #define ELSE_FAIL(name) \
639   /* Should not be called for not pointer type. */ \
640   n->dump(1); \
641   assert(false, name); \
642   break;
643 #else
// Product build: no verification, just leave the switch case.
644 #define ELSE_FAIL(name) \
645   break;
646 #endif
647
648 // Add final simple edges to graph.
649 void ConnectionGraph::add_final_edges(Node *n) {
650 PointsToNode* n_ptn = ptnode_adr(n->_idx);
751 case Op_Return: {
752 if (n->req() > TypeFunc::Parms &&
753 _igvn->type(n->in(TypeFunc::Parms))->isa_oopptr()) {
754 // Treat Return value as LocalVar with GlobalEscape escape state.
755 add_local_var_and_edge(n, PointsToNode::GlobalEscape,
756 n->in(TypeFunc::Parms), NULL);
757 break;
758 }
759 ELSE_FAIL("Op_Return");
760 }
761 case Op_StoreP:
762 case Op_StoreN:
763 case Op_StoreNKlass:
764 case Op_StorePConditional:
765 case Op_CompareAndExchangeP:
766 case Op_CompareAndExchangeN:
767 case Op_CompareAndSwapP:
768 case Op_CompareAndSwapN:
769 case Op_WeakCompareAndSwapP:
770 case Op_WeakCompareAndSwapN:
771 #if INCLUDE_SHENANDOAHGC
772 case Op_ShenandoahCompareAndExchangeP:
773 case Op_ShenandoahCompareAndExchangeN:
774 case Op_ShenandoahCompareAndSwapP:
775 case Op_ShenandoahCompareAndSwapN:
776 case Op_ShenandoahWeakCompareAndSwapP:
777 case Op_ShenandoahWeakCompareAndSwapN:
778 #endif
779 case Op_GetAndSetP:
780 case Op_GetAndSetN: {
781 Node* adr = n->in(MemNode::Address);
782 const Type *adr_type = _igvn->type(adr);
783 adr_type = adr_type->make_ptr();
784 #ifdef ASSERT
785 if (adr_type == NULL) {
786 n->dump(1);
787 assert(adr_type != NULL, "dead node should not be on list");
788 break;
789 }
790 #endif
791 if (opcode == Op_GetAndSetP || opcode == Op_GetAndSetN ||
792 #if INCLUDE_SHENANDOAHGC
793 opcode == Op_ShenandoahCompareAndExchangeN || opcode == Op_ShenandoahCompareAndExchangeP ||
794 #endif
795 opcode == Op_CompareAndExchangeN || opcode == Op_CompareAndExchangeP) {
796 add_local_var_and_edge(n, PointsToNode::NoEscape, adr, NULL);
797 }
798 if ( adr_type->isa_oopptr()
799 || ( (opcode == Op_StoreP || opcode == Op_StoreN || opcode == Op_StoreNKlass)
800 && adr_type == TypeRawPtr::NOTNULL
801 && adr->in(AddPNode::Address)->is_Proj()
802 && adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
803 // Point Address to Value
804 PointsToNode* adr_ptn = ptnode_adr(adr->_idx);
805 assert(adr_ptn != NULL &&
806 adr_ptn->as_Field()->is_oop(), "node should be registered");
807 Node *val = n->in(MemNode::ValueIn);
808 PointsToNode* ptn = ptnode_adr(val->_idx);
809 assert(ptn != NULL, "node should be registered");
810 add_edge(adr_ptn, ptn);
811 break;
812 } else if ((opcode == Op_StoreP) && adr_type->isa_rawptr()) {
813 // Stored value escapes in unsafe access.
814 Node *val = n->in(MemNode::ValueIn);
837 case Op_EncodeISOArray: {
838 // char[]/byte[] arrays passed to string intrinsic do not escape but
839 // they are not scalar replaceable. Adjust escape state for them.
840 // Start from in(2) edge since in(1) is memory edge.
841 for (uint i = 2; i < n->req(); i++) {
842 Node* adr = n->in(i);
843 const Type* at = _igvn->type(adr);
844 if (!adr->is_top() && at->isa_ptr()) {
845 assert(at == Type::TOP || at == TypePtr::NULL_PTR ||
846 at->isa_ptr() != NULL, "expecting a pointer");
847 if (adr->is_AddP()) {
848 adr = get_addp_base(adr);
849 }
850 PointsToNode* ptn = ptnode_adr(adr->_idx);
851 assert(ptn != NULL, "node should be registered");
852 add_edge(n_ptn, ptn);
853 }
854 }
855 break;
856 }
857 #if INCLUDE_SHENANDOAHGC
858 case Op_ShenandoahEnqueueBarrier:
859 add_local_var_and_edge(n, PointsToNode::NoEscape, n->in(1), NULL);
860 break;
861 case Op_ShenandoahLoadReferenceBarrier:
862 add_local_var_and_edge(n, PointsToNode::NoEscape, n->in(ShenandoahLoadReferenceBarrierNode::ValueIn), NULL);
863 break;
864 #endif
865 default: {
866 // This method should be called only for EA specific nodes which may
867 // miss some edges when they were created.
868 #ifdef ASSERT
869 n->dump(1);
870 #endif
871 guarantee(false, "unknown node");
872 }
873 }
874 return;
875 }
876
877 void ConnectionGraph::add_call_node(CallNode* call) {
878 assert(call->returns_pointer(), "only for call which returns pointer");
879 uint call_idx = call->_idx;
880 if (call->is_Allocate()) {
881 Node* k = call->in(AllocateNode::KlassNode);
882 const TypeKlassPtr* kt = k->bottom_type()->isa_klassptr();
883 assert(kt != NULL, "TypeKlassPtr required.");
884 ciKlass* cik = kt->klass();
2143 BasicType bt = T_INT;
2144 if (offset == Type::OffsetBot) {
2145 // Check only oop fields.
2146 if (!adr_type->isa_aryptr() ||
2147 (adr_type->isa_aryptr()->klass() == NULL) ||
2148 adr_type->isa_aryptr()->klass()->is_obj_array_klass()) {
2149 // OffsetBot is used to reference array's element. Ignore first AddP.
2150 if (find_second_addp(n, n->in(AddPNode::Base)) == NULL) {
2151 bt = T_OBJECT;
2152 }
2153 }
2154 } else if (offset != oopDesc::klass_offset_in_bytes()) {
2155 if (adr_type->isa_instptr()) {
2156 ciField* field = _compile->alias_type(adr_type->isa_instptr())->field();
2157 if (field != NULL) {
2158 bt = field->layout_type();
2159 } else {
2160 // Check for unsafe oop field access
2161 if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
2162 n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
2163 #if INCLUDE_SHENANDOAHGC
2164 n->has_out_with(Op_ShenandoahCompareAndExchangeP) || n->has_out_with(Op_ShenandoahCompareAndExchangeN) ||
2165 n->has_out_with(Op_ShenandoahCompareAndSwapP, Op_ShenandoahCompareAndSwapN, Op_ShenandoahWeakCompareAndSwapP, Op_ShenandoahWeakCompareAndSwapN) ||
2166 #endif
2167 n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN)) {
2168 bt = T_OBJECT;
2169 (*unsafe) = true;
2170 }
2171 }
2172 } else if (adr_type->isa_aryptr()) {
2173 if (offset == arrayOopDesc::length_offset_in_bytes()) {
2174 // Ignore array length load.
2175 } else if (find_second_addp(n, n->in(AddPNode::Base)) != NULL) {
2176 // Ignore first AddP.
2177 } else {
2178 const Type* elemtype = adr_type->isa_aryptr()->elem();
2179 bt = elemtype->array_element_basic_type();
2180 }
2181 } else if (adr_type->isa_rawptr() || adr_type->isa_klassptr()) {
2182 // Allocation initialization, ThreadLocal field access, unsafe access
2183 if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
2184 n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
2185 n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN)) {
2186 bt = T_OBJECT;
2412 // AddP ( base == top )
2413 //
2414 Node *base = addp->in(AddPNode::Base);
2415 if (base->uncast()->is_top()) { // The AddP case #3 and #6 and #9.
2416 base = addp->in(AddPNode::Address);
2417 while (base->is_AddP()) {
2418 // Case #6 (unsafe access) may have several chained AddP nodes.
2419 assert(base->in(AddPNode::Base)->uncast()->is_top(), "expected unsafe access address only");
2420 base = base->in(AddPNode::Address);
2421 }
2422 if (base->Opcode() == Op_CheckCastPP &&
2423 base->bottom_type()->isa_rawptr() &&
2424 _igvn->type(base->in(1))->isa_oopptr()) {
2425 base = base->in(1); // Case #9
2426 } else {
2427 Node* uncast_base = base->uncast();
2428 int opcode = uncast_base->Opcode();
2429 assert(opcode == Op_ConP || opcode == Op_ThreadLocal ||
2430 opcode == Op_CastX2P || uncast_base->is_DecodeNarrowPtr() ||
2431 (uncast_base->is_Mem() && (uncast_base->bottom_type()->isa_rawptr() != NULL)) ||
2432 (uncast_base->is_Proj() && uncast_base->in(0)->is_Allocate()) ||
2433 uncast_base->Opcode() == Op_ShenandoahLoadReferenceBarrier, "sanity");
2434 }
2435 }
2436 return base;
2437 }
2438
2439 Node* ConnectionGraph::find_second_addp(Node* addp, Node* n) {
2440 assert(addp->is_AddP() && addp->outcnt() > 0, "Don't process dead nodes");
2441 Node* addp2 = addp->raw_out(0);
2442 if (addp->outcnt() == 1 && addp2->is_AddP() &&
2443 addp2->in(AddPNode::Base) == n &&
2444 addp2->in(AddPNode::Address) == addp) {
2445 assert(addp->in(AddPNode::Base) == n, "expecting the same base");
2446 //
2447 // Find array's offset to push it on worklist first and
2448 // as result process an array's element offset first (pushed second)
2449 // to avoid CastPP for the array's offset.
2450 // Otherwise the inserted CastPP (LocalVar) will point to what
2451 // the AddP (Field) points to. Which would be wrong since
2452 // the algorithm expects the CastPP has the same point as
2453 // as AddP's base CheckCastPP (LocalVar).
|