< prev index next >

src/share/vm/opto/escape.cpp

Print this page
rev 8961 : [mq]: diff-shenandoah.patch


  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/bcEscapeAnalyzer.hpp"
  27 #include "compiler/compileLog.hpp"
  28 #include "libadt/vectset.hpp"
  29 #include "memory/allocation.hpp"
  30 #include "opto/c2compiler.hpp"
  31 #include "opto/arraycopynode.hpp"
  32 #include "opto/callnode.hpp"
  33 #include "opto/cfgnode.hpp"
  34 #include "opto/compile.hpp"
  35 #include "opto/escape.hpp"
  36 #include "opto/phaseX.hpp"
  37 #include "opto/movenode.hpp"
  38 #include "opto/rootnode.hpp"

  39 
  40 ConnectionGraph::ConnectionGraph(Compile * C, PhaseIterGVN *igvn) :
  41   _nodes(C->comp_arena(), C->unique(), C->unique(), NULL),
  42   _in_worklist(C->comp_arena()),
  43   _next_pidx(0),
  44   _collecting(true),
  45   _verify(false),
  46   _compile(C),
  47   _igvn(igvn),
  48   _node_map(C->comp_arena()) {
  49   // Add unknown java object.
  50   add_java_object(C->top(), PointsToNode::GlobalEscape);
  51   phantom_obj = ptnode_adr(C->top()->_idx)->as_JavaObject();
  52   // Add ConP(#NULL) and ConN(#NULL) nodes.
  53   Node* oop_null = igvn->zerocon(T_OBJECT);
  54   assert(oop_null->_idx < nodes_size(), "should be created already");
  55   add_java_object(oop_null, PointsToNode::NoEscape);
  56   null_obj = ptnode_adr(oop_null->_idx)->as_JavaObject();
  57   if (UseCompressedOops) {
  58     Node* noop_null = igvn->zerocon(T_NARROWOOP);


 513                          adr->in(AddPNode::Address)->is_Proj() &&
 514                          adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
 515         delayed_worklist->push(n); // Process it later.
 516 #ifdef ASSERT
 517         assert(adr->is_AddP(), "expecting an AddP");
 518         if (adr_type == TypeRawPtr::NOTNULL) {
 519           // Verify a raw address for a store captured by Initialize node.
 520           int offs = (int)igvn->find_intptr_t_con(adr->in(AddPNode::Offset), Type::OffsetBot);
 521           assert(offs != Type::OffsetBot, "offset must be a constant");
 522         }
 523 #endif
 524       } else {
 525         // Ignore copying of the displaced header to the BoxNode (OSR compilation).
 526         if (adr->is_BoxLock())
 527           break;
 528         // Stored value escapes in unsafe access.
 529         if ((opcode == Op_StoreP) && adr_type->isa_rawptr()) {
 530           // Pointer stores in G1 barriers look like unsafe accesses.
 531           // Ignore such stores so that non-escaping allocations can
 532           // still be scalar-replaced.
 533           if (UseG1GC && adr->is_AddP()) {
 534             Node* base = get_addp_base(adr);
 535             if (base->Opcode() == Op_LoadP &&
 536                 base->in(MemNode::Address)->is_AddP()) {
 537               adr = base->in(MemNode::Address);
 538               Node* tls = get_addp_base(adr);
 539               if (tls->Opcode() == Op_ThreadLocal) {
 540                 int offs = (int)igvn->find_intptr_t_con(adr->in(AddPNode::Offset), Type::OffsetBot);
 541                 if (offs == in_bytes(JavaThread::satb_mark_queue_offset() +
 542                                      PtrQueue::byte_offset_of_buf())) {
 543                   break; // G1 pre barrier previous oop value store.
 544                 }
 545                 if (offs == in_bytes(JavaThread::dirty_card_queue_offset() +
 546                                      PtrQueue::byte_offset_of_buf())) {
 547                   break; // G1 post barrier card address store.
 548                 }
 549               }
 550             }
 551           }
 552           delayed_worklist->push(n); // Process unsafe access later.
 553           break;


 555 #ifdef ASSERT
 556         n->dump(1);
 557         assert(false, "not unsafe or G1 barrier raw StoreP");
 558 #endif
 559       }
 560       break;
 561     }
 562     case Op_AryEq:
 563     case Op_StrComp:
 564     case Op_StrEquals:
 565     case Op_StrIndexOf:
 566     case Op_EncodeISOArray: {
 567       add_local_var(n, PointsToNode::ArgEscape);
 568       delayed_worklist->push(n); // Process it later.
 569       break;
 570     }
 571     case Op_ThreadLocal: {
 572       add_java_object(n, PointsToNode::ArgEscape);
 573       break;
 574     }






 575     default:
 576       ; // Do nothing for nodes not related to EA.
 577   }
 578   return;
 579 }
 580 
// ELSE_FAIL(name): debug-build helper for switch statements — dumps the
// offending node and asserts with the given message before breaking out of
// the enclosing case; in product builds it reduces to a plain 'break'.
// Its uses are outside this hunk (presumably in add_final_edges() below —
// confirm against the full file).
 581 #ifdef ASSERT
 582 #define ELSE_FAIL(name)                               \
 583       /* Should not be called for not pointer type. */  \
 584       n->dump(1);                                       \
 585       assert(false, name);                              \
 586       break;
 587 #else
 588 #define ELSE_FAIL(name) \
 589       break;
 590 #endif
 591 
 592 // Add final simple edges to graph.
 593 void ConnectionGraph::add_final_edges(Node *n) {
 594   PointsToNode* n_ptn = ptnode_adr(n->_idx);


 749     case Op_EncodeISOArray: {
 750       // char[] arrays passed to string intrinsic do not escape but
 751       // they are not scalar replaceable. Adjust escape state for them.
 752       // Start from in(2) edge since in(1) is memory edge.
 753       for (uint i = 2; i < n->req(); i++) {
 754         Node* adr = n->in(i);
 755         const Type* at = _igvn->type(adr);
 756         if (!adr->is_top() && at->isa_ptr()) {
 757           assert(at == Type::TOP || at == TypePtr::NULL_PTR ||
 758                  at->isa_ptr() != NULL, "expecting a pointer");
 759           if (adr->is_AddP()) {
 760             adr = get_addp_base(adr);
 761           }
 762           PointsToNode* ptn = ptnode_adr(adr->_idx);
 763           assert(ptn != NULL, "node should be registered");
 764           add_edge(n_ptn, ptn);
 765         }
 766       }
 767       break;
 768     }






 769     default: {
 770       // This method should be called only for EA specific nodes which may
 771       // miss some edges when they were created.
 772 #ifdef ASSERT
 773       n->dump(1);
 774 #endif
 775       guarantee(false, "unknown node");
 776     }
 777   }
 778   return;
 779 }
 780 
 781 void ConnectionGraph::add_call_node(CallNode* call) {
 782   assert(call->returns_pointer(), "only for call which returns pointer");
 783   uint call_idx = call->_idx;
 784   if (call->is_Allocate()) {
 785     Node* k = call->in(AllocateNode::KlassNode);
 786     const TypeKlassPtr* kt = k->bottom_type()->isa_klassptr();
 787     assert(kt != NULL, "TypeKlassPtr  required.");
 788     ciKlass* cik = kt->klass();


 868         const TypeTuple* d = call->tf()->domain();
 869         bool ret_arg = false;
 870         for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
 871           if (d->field_at(i)->isa_ptr() != NULL &&
 872               call_analyzer->is_arg_returned(i - TypeFunc::Parms)) {
 873             ret_arg = true;
 874             break;
 875           }
 876         }
 877         if (ret_arg) {
 878           add_local_var(call, PointsToNode::ArgEscape);
 879         } else {
 880           // Returns unknown object.
 881           map_ideal_node(call, phantom_obj);
 882         }
 883       }
 884     }
 885   } else {
 886     // Any other type of call; assume the worst case:
 887     // returned value is unknown and globally escapes.
 888     assert(call->Opcode() == Op_CallDynamicJava, "add failed case check");
 889     map_ideal_node(call, phantom_obj);
 890   }
 891 }
 892 
 893 void ConnectionGraph::process_call_arguments(CallNode *call) {
 894     bool is_arraycopy = false;
 895     switch (call->Opcode()) {
 896 #ifdef ASSERT
 897     case Op_Allocate:
 898     case Op_AllocateArray:
 899     case Op_Lock:
 900     case Op_Unlock:
 901       assert(false, "should be done already");
 902       break;
 903 #endif
 904     case Op_ArrayCopy:
 905     case Op_CallLeafNoFP:
 906       // Most array copies are ArrayCopy nodes at this point but there
 907       // are still a few direct calls to the copy subroutines (See
 908       // PhaseStringOpts::copy_string())


 944                               (aat->isa_oopptr()->klass() == NULL || aat->isa_instptr() ||
 945                                (aat->isa_aryptr() && aat->isa_aryptr()->klass()->is_obj_array_klass()));
 946           if (i == TypeFunc::Parms) {
 947             src_has_oops = arg_has_oops;
 948           }
 949           //
 950           // src or dst could be j.l.Object when other is basic type array:
 951           //
 952           //   arraycopy(char[],0,Object*,0,size);
 953           //   arraycopy(Object*,0,char[],0,size);
 954           //
 955           // Don't add edges in such cases.
 956           //
 957           bool arg_is_arraycopy_dest = src_has_oops && is_arraycopy &&
 958                                        arg_has_oops && (i > TypeFunc::Parms);
 959 #ifdef ASSERT
 960           if (!(is_arraycopy ||
 961                 (call->as_CallLeaf()->_name != NULL &&
 962                  (strcmp(call->as_CallLeaf()->_name, "g1_wb_pre")  == 0 ||
 963                   strcmp(call->as_CallLeaf()->_name, "g1_wb_post") == 0 ||



 964                   strcmp(call->as_CallLeaf()->_name, "updateBytesCRC32") == 0 ||
 965                   strcmp(call->as_CallLeaf()->_name, "updateBytesCRC32C") == 0 ||
 966                   strcmp(call->as_CallLeaf()->_name, "updateBytesAdler32") == 0 ||
 967                   strcmp(call->as_CallLeaf()->_name, "aescrypt_encryptBlock") == 0 ||
 968                   strcmp(call->as_CallLeaf()->_name, "aescrypt_decryptBlock") == 0 ||
 969                   strcmp(call->as_CallLeaf()->_name, "cipherBlockChaining_encryptAESCrypt") == 0 ||
 970                   strcmp(call->as_CallLeaf()->_name, "cipherBlockChaining_decryptAESCrypt") == 0 ||
 971                   strcmp(call->as_CallLeaf()->_name, "ghash_processBlocks") == 0 ||
 972                   strcmp(call->as_CallLeaf()->_name, "sha1_implCompress") == 0 ||
 973                   strcmp(call->as_CallLeaf()->_name, "sha1_implCompressMB") == 0 ||
 974                   strcmp(call->as_CallLeaf()->_name, "sha256_implCompress") == 0 ||
 975                   strcmp(call->as_CallLeaf()->_name, "sha256_implCompressMB") == 0 ||
 976                   strcmp(call->as_CallLeaf()->_name, "sha512_implCompress") == 0 ||
 977                   strcmp(call->as_CallLeaf()->_name, "sha512_implCompressMB") == 0 ||
 978                   strcmp(call->as_CallLeaf()->_name, "multiplyToLen") == 0 ||
 979                   strcmp(call->as_CallLeaf()->_name, "squareToLen") == 0 ||
 980                   strcmp(call->as_CallLeaf()->_name, "mulAdd") == 0 ||
 981                   strcmp(call->as_CallLeaf()->_name, "montgomery_multiply") == 0 ||
 982                   strcmp(call->as_CallLeaf()->_name, "montgomery_square") == 0)
 983                  ))) {


2037       // OffsetBot is used to reference array's element. Ignore first AddP.
2038       if (find_second_addp(n, n->in(AddPNode::Base)) == NULL) {
2039         bt = T_OBJECT;
2040       }
2041     }
2042   } else if (offset != oopDesc::klass_offset_in_bytes()) {
2043     if (adr_type->isa_instptr()) {
2044       ciField* field = _compile->alias_type(adr_type->isa_instptr())->field();
2045       if (field != NULL) {
2046         bt = field->layout_type();
2047       } else {
2048         // Check for unsafe oop field access
2049         if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN)) {
2050           bt = T_OBJECT;
2051           (*unsafe) = true;
2052         }
2053       }
2054     } else if (adr_type->isa_aryptr()) {
2055       if (offset == arrayOopDesc::length_offset_in_bytes()) {
2056         // Ignore array length load.



2057       } else if (find_second_addp(n, n->in(AddPNode::Base)) != NULL) {
2058         // Ignore first AddP.
2059       } else {
2060         const Type* elemtype = adr_type->isa_aryptr()->elem();
2061         bt = elemtype->array_element_basic_type();
2062       }
2063     } else if (adr_type->isa_rawptr() || adr_type->isa_klassptr()) {
2064       // Allocation initialization, ThreadLocal field access, unsafe access
2065       if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN)) {
2066         bt = T_OBJECT;
2067       }
2068     }
2069   }
2070   return (bt == T_OBJECT || bt == T_NARROWOOP || bt == T_ARRAY);
2071 }
2072 
2073 // Returns unique pointed java object or NULL.
2074 JavaObjectNode* ConnectionGraph::unique_java_object(Node *n) {
2075   assert(!_collecting, "should not call when contructed graph");
2076   // If the node was created after the escape computation we can't answer.


2625 }
2626 
2627 //
2628 // Search memory chain of "mem" to find a MemNode whose address
2629 // is the specified alias index.
2630 //
2631 Node* ConnectionGraph::find_inst_mem(Node *orig_mem, int alias_idx, GrowableArray<PhiNode *>  &orig_phis) {
2632   if (orig_mem == NULL)
2633     return orig_mem;
2634   Compile* C = _compile;
2635   PhaseGVN* igvn = _igvn;
2636   const TypeOopPtr *toop = C->get_adr_type(alias_idx)->isa_oopptr();
2637   bool is_instance = (toop != NULL) && toop->is_known_instance();
2638   Node *start_mem = C->start()->proj_out(TypeFunc::Memory);
2639   Node *prev = NULL;
2640   Node *result = orig_mem;
2641   while (prev != result) {
2642     prev = result;
2643     if (result == start_mem)
2644       break;  // hit one of our sentinels

2645     if (result->is_Mem()) {
2646       const Type *at = igvn->type(result->in(MemNode::Address));
2647       if (at == Type::TOP)
2648         break; // Dead
2649       assert (at->isa_ptr() != NULL, "pointer type required.");
2650       int idx = C->get_alias_index(at->is_ptr());
2651       if (idx == alias_idx)
2652         break; // Found
2653       if (!is_instance && (at->isa_oopptr() == NULL ||
2654                            !at->is_oopptr()->is_known_instance())) {
2655         break; // Do not skip store to general memory slice.
2656       }
2657       result = result->in(MemNode::Memory);
2658     }
2659     if (!is_instance)
2660       continue;  // don't search further for non-instance types
2661     // skip over a call which does not affect this memory slice
2662     if (result->is_Proj() && result->as_Proj()->_con == TypeFunc::Memory) {
2663       Node *proj_in = result->in(0);
2664       if (proj_in->is_Allocate() && proj_in->_idx == (uint)toop->instance_id()) {


2988           }
2989         }
2990       }
2991     } else if (n->is_AddP()) {
2992       JavaObjectNode* jobj = unique_java_object(get_addp_base(n));
2993       if (jobj == NULL || jobj == phantom_obj) {
2994 #ifdef ASSERT
2995         ptnode_adr(get_addp_base(n)->_idx)->dump();
2996         ptnode_adr(n->_idx)->dump();
2997         assert(jobj != NULL && jobj != phantom_obj, "escaped allocation");
2998 #endif
2999         _compile->record_failure(C2Compiler::retry_no_escape_analysis());
3000         return;
3001       }
3002       Node *base = get_map(jobj->idx());  // CheckCastPP node
3003       if (!split_AddP(n, base)) continue; // wrong type from dead path
3004     } else if (n->is_Phi() ||
3005                n->is_CheckCastPP() ||
3006                n->is_EncodeP() ||
3007                n->is_DecodeN() ||

3008                (n->is_ConstraintCast() && n->Opcode() == Op_CastPP)) {
3009       if (visited.test_set(n->_idx)) {
3010         assert(n->is_Phi(), "loops only through Phi's");
3011         continue;  // already processed
3012       }
3013       JavaObjectNode* jobj = unique_java_object(n);
3014       if (jobj == NULL || jobj == phantom_obj) {
3015 #ifdef ASSERT
3016         ptnode_adr(n->_idx)->dump();
3017         assert(jobj != NULL && jobj != phantom_obj, "escaped allocation");
3018 #endif
3019         _compile->record_failure(C2Compiler::retry_no_escape_analysis());
3020         return;
3021       } else {
3022         Node *val = get_map(jobj->idx());   // CheckCastPP node
3023         TypeNode *tn = n->as_Type();
3024         const TypeOopPtr* tinst = igvn->type(val)->isa_oopptr();
3025         assert(tinst != NULL && tinst->is_known_instance() &&
3026                tinst->instance_id() == jobj->idx() , "instance type expected.");
3027 


3058     // push allocation's users on appropriate worklist
3059     for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
3060       Node *use = n->fast_out(i);
3061       if(use->is_Mem() && use->in(MemNode::Address) == n) {
3062         // Load/store to instance's field
3063         memnode_worklist.append_if_missing(use);
3064       } else if (use->is_MemBar()) {
3065         if (use->in(TypeFunc::Memory) == n) { // Ignore precedent edge
3066           memnode_worklist.append_if_missing(use);
3067         }
3068       } else if (use->is_AddP() && use->outcnt() > 0) { // No dead nodes
3069         Node* addp2 = find_second_addp(use, n);
3070         if (addp2 != NULL) {
3071           alloc_worklist.append_if_missing(addp2);
3072         }
3073         alloc_worklist.append_if_missing(use);
3074       } else if (use->is_Phi() ||
3075                  use->is_CheckCastPP() ||
3076                  use->is_EncodeNarrowPtr() ||
3077                  use->is_DecodeNarrowPtr() ||

3078                  (use->is_ConstraintCast() && use->Opcode() == Op_CastPP)) {
3079         alloc_worklist.append_if_missing(use);
3080 #ifdef ASSERT
3081       } else if (use->is_Mem()) {
3082         assert(use->in(MemNode::Address) != n, "EA: missing allocation reference path");
3083       } else if (use->is_MergeMem()) {
3084         assert(_mergemem_worklist.contains(use->as_MergeMem()), "EA: missing MergeMem node in the worklist");
3085       } else if (use->is_SafePoint()) {
3086         // Look for MergeMem nodes for calls which reference unique allocation
3087         // (through CheckCastPP nodes) even for debug info.
3088         Node* m = use->in(TypeFunc::Memory);
3089         if (m->is_MergeMem()) {
3090           assert(_mergemem_worklist.contains(m->as_MergeMem()), "EA: missing MergeMem node in the worklist");
3091         }
3092       } else if (use->Opcode() == Op_EncodeISOArray) {
3093         if (use->in(MemNode::Memory) == n || use->in(3) == n) {
3094           // EncodeISOArray overwrites destination array
3095           memnode_worklist.append_if_missing(use);
3096         }
3097       } else {
3098         uint op = use->Opcode();
3099         if (!(op == Op_CmpP || op == Op_Conv2B ||
3100               op == Op_CastP2X || op == Op_StoreCM ||
3101               op == Op_FastLock || op == Op_AryEq || op == Op_StrComp ||
3102               op == Op_StrEquals || op == Op_StrIndexOf)) {

3103           n->dump();
3104           use->dump();
3105           assert(false, "EA: missing allocation reference path");
3106         }
3107 #endif
3108       }
3109     }
3110 
3111   }
3112 
3113   // Go over all ArrayCopy nodes and if one of the inputs has a unique
3114   // type, record it in the ArrayCopy node so we know what memory this
3115   // node uses/modified.
3116   for (int next = 0; next < arraycopy_worklist.length(); next++) {
3117     ArrayCopyNode* ac = arraycopy_worklist.at(next);
3118     Node* dest = ac->in(ArrayCopyNode::Dest);
3119     if (dest->is_AddP()) {
3120       dest = get_addp_base(dest);
3121     }
3122     JavaObjectNode* jobj = unique_java_object(dest);




  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/bcEscapeAnalyzer.hpp"
  27 #include "compiler/compileLog.hpp"
  28 #include "libadt/vectset.hpp"
  29 #include "memory/allocation.hpp"
  30 #include "opto/c2compiler.hpp"
  31 #include "opto/arraycopynode.hpp"
  32 #include "opto/callnode.hpp"
  33 #include "opto/cfgnode.hpp"
  34 #include "opto/compile.hpp"
  35 #include "opto/escape.hpp"
  36 #include "opto/phaseX.hpp"
  37 #include "opto/movenode.hpp"
  38 #include "opto/rootnode.hpp"
  39 #include "opto/shenandoahSupport.hpp"
  40 
  41 ConnectionGraph::ConnectionGraph(Compile * C, PhaseIterGVN *igvn) :
  42   _nodes(C->comp_arena(), C->unique(), C->unique(), NULL),
  43   _in_worklist(C->comp_arena()),
  44   _next_pidx(0),
  45   _collecting(true),
  46   _verify(false),
  47   _compile(C),
  48   _igvn(igvn),
  49   _node_map(C->comp_arena()) {
  50   // Add unknown java object.
  51   add_java_object(C->top(), PointsToNode::GlobalEscape);
  52   phantom_obj = ptnode_adr(C->top()->_idx)->as_JavaObject();
  53   // Add ConP(#NULL) and ConN(#NULL) nodes.
  54   Node* oop_null = igvn->zerocon(T_OBJECT);
  55   assert(oop_null->_idx < nodes_size(), "should be created already");
  56   add_java_object(oop_null, PointsToNode::NoEscape);
  57   null_obj = ptnode_adr(oop_null->_idx)->as_JavaObject();
  58   if (UseCompressedOops) {
  59     Node* noop_null = igvn->zerocon(T_NARROWOOP);


 514                          adr->in(AddPNode::Address)->is_Proj() &&
 515                          adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
 516         delayed_worklist->push(n); // Process it later.
 517 #ifdef ASSERT
 518         assert(adr->is_AddP(), "expecting an AddP");
 519         if (adr_type == TypeRawPtr::NOTNULL) {
 520           // Verify a raw address for a store captured by Initialize node.
 521           int offs = (int)igvn->find_intptr_t_con(adr->in(AddPNode::Offset), Type::OffsetBot);
 522           assert(offs != Type::OffsetBot, "offset must be a constant");
 523         }
 524 #endif
 525       } else {
 526         // Ignore copying of the displaced header to the BoxNode (OSR compilation).
 527         if (adr->is_BoxLock())
 528           break;
 529         // Stored value escapes in unsafe access.
 530         if ((opcode == Op_StoreP) && adr_type->isa_rawptr()) {
 531           // Pointer stores in G1 and Shenandoah barriers look like
 532           // unsafe accesses. Ignore such stores so that non-escaping
 533           // allocations can still be scalar-replaced.
 534           if ((UseG1GC || UseShenandoahGC) && adr->is_AddP()) {
 535             Node* base = get_addp_base(adr);
 536             if (base->Opcode() == Op_LoadP &&
 537                 base->in(MemNode::Address)->is_AddP()) {
 538               adr = base->in(MemNode::Address);
 539               Node* tls = get_addp_base(adr);
 540               if (tls->Opcode() == Op_ThreadLocal) {
 541                 int offs = (int)igvn->find_intptr_t_con(adr->in(AddPNode::Offset), Type::OffsetBot);
 542                 if (offs == in_bytes(JavaThread::satb_mark_queue_offset() +
 543                                      PtrQueue::byte_offset_of_buf())) {
 544                   break; // G1 pre barrier previous oop value store.
 545                 }
 546                 if (offs == in_bytes(JavaThread::dirty_card_queue_offset() +
 547                                      PtrQueue::byte_offset_of_buf())) {
 548                   break; // G1 post barrier card address store.
 549                 }
 550               }
 551             }
 552           }
 553           delayed_worklist->push(n); // Process unsafe access later.
 554           break;


 556 #ifdef ASSERT
 557         n->dump(1);
 558         assert(false, "not unsafe or G1 barrier raw StoreP");
 559 #endif
 560       }
 561       break;
 562     }
 563     case Op_AryEq:
 564     case Op_StrComp:
 565     case Op_StrEquals:
 566     case Op_StrIndexOf:
 567     case Op_EncodeISOArray: {
 568       add_local_var(n, PointsToNode::ArgEscape);
 569       delayed_worklist->push(n); // Process it later.
 570       break;
 571     }
 572     case Op_ThreadLocal: {
 573       add_java_object(n, PointsToNode::ArgEscape);
 574       break;
 575     }
 576     case Op_ShenandoahReadBarrier:
 577     case Op_ShenandoahWriteBarrier:
 578       // Barriers 'pass through' their argument, i.e. what goes in comes out.
 579       // The value does not escape.
 580       add_local_var_and_edge(n, PointsToNode::NoEscape, n->in(ShenandoahBarrierNode::ValueIn), delayed_worklist);
 581       break;
 582     default:
 583       ; // Do nothing for nodes not related to EA.
 584   }
 585   return;
 586 }
 587 
// ELSE_FAIL(name): debug-build helper for switch statements — dumps the
// offending node and asserts with the given message before breaking out of
// the enclosing case; in product builds it reduces to a plain 'break'.
// Its uses are outside this hunk (presumably in add_final_edges() below —
// confirm against the full file).
 588 #ifdef ASSERT
 589 #define ELSE_FAIL(name)                               \
 590       /* Should not be called for not pointer type. */  \
 591       n->dump(1);                                       \
 592       assert(false, name);                              \
 593       break;
 594 #else
 595 #define ELSE_FAIL(name) \
 596       break;
 597 #endif
 598 
 599 // Add final simple edges to graph.
 600 void ConnectionGraph::add_final_edges(Node *n) {
 601   PointsToNode* n_ptn = ptnode_adr(n->_idx);


 756     case Op_EncodeISOArray: {
 757       // char[] arrays passed to string intrinsic do not escape but
 758       // they are not scalar replaceable. Adjust escape state for them.
 759       // Start from in(2) edge since in(1) is memory edge.
 760       for (uint i = 2; i < n->req(); i++) {
 761         Node* adr = n->in(i);
 762         const Type* at = _igvn->type(adr);
 763         if (!adr->is_top() && at->isa_ptr()) {
 764           assert(at == Type::TOP || at == TypePtr::NULL_PTR ||
 765                  at->isa_ptr() != NULL, "expecting a pointer");
 766           if (adr->is_AddP()) {
 767             adr = get_addp_base(adr);
 768           }
 769           PointsToNode* ptn = ptnode_adr(adr->_idx);
 770           assert(ptn != NULL, "node should be registered");
 771           add_edge(n_ptn, ptn);
 772         }
 773       }
 774       break;
 775     }
 776     case Op_ShenandoahReadBarrier:
 777     case Op_ShenandoahWriteBarrier:
 778       // Barriers 'pass through' their argument, i.e. what goes in comes out.
 779       // The value does not escape.
 780       add_local_var_and_edge(n, PointsToNode::NoEscape, n->in(ShenandoahBarrierNode::ValueIn), NULL);
 781       break;
 782     default: {
 783       // This method should be called only for EA specific nodes which may
 784       // miss some edges when they were created.
 785 #ifdef ASSERT
 786       n->dump(1);
 787 #endif
 788       guarantee(false, "unknown node");
 789     }
 790   }
 791   return;
 792 }
 793 
 794 void ConnectionGraph::add_call_node(CallNode* call) {
 795   assert(call->returns_pointer(), "only for call which returns pointer");
 796   uint call_idx = call->_idx;
 797   if (call->is_Allocate()) {
 798     Node* k = call->in(AllocateNode::KlassNode);
 799     const TypeKlassPtr* kt = k->bottom_type()->isa_klassptr();
 800     assert(kt != NULL, "TypeKlassPtr  required.");
 801     ciKlass* cik = kt->klass();


 881         const TypeTuple* d = call->tf()->domain();
 882         bool ret_arg = false;
 883         for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
 884           if (d->field_at(i)->isa_ptr() != NULL &&
 885               call_analyzer->is_arg_returned(i - TypeFunc::Parms)) {
 886             ret_arg = true;
 887             break;
 888           }
 889         }
 890         if (ret_arg) {
 891           add_local_var(call, PointsToNode::ArgEscape);
 892         } else {
 893           // Returns unknown object.
 894           map_ideal_node(call, phantom_obj);
 895         }
 896       }
 897     }
 898   } else {
 899     // Any other type of call; assume the worst case:
 900     // returned value is unknown and globally escapes.
 901     assert(call->Opcode() == Op_CallDynamicJava || call->Opcode() == Op_CallLeaf, "add failed case check");
 902     map_ideal_node(call, phantom_obj);
 903   }
 904 }
 905 
 906 void ConnectionGraph::process_call_arguments(CallNode *call) {
 907     bool is_arraycopy = false;
 908     switch (call->Opcode()) {
 909 #ifdef ASSERT
 910     case Op_Allocate:
 911     case Op_AllocateArray:
 912     case Op_Lock:
 913     case Op_Unlock:
 914       assert(false, "should be done already");
 915       break;
 916 #endif
 917     case Op_ArrayCopy:
 918     case Op_CallLeafNoFP:
 919       // Most array copies are ArrayCopy nodes at this point but there
 920       // are still a few direct calls to the copy subroutines (See
 921       // PhaseStringOpts::copy_string())


 957                               (aat->isa_oopptr()->klass() == NULL || aat->isa_instptr() ||
 958                                (aat->isa_aryptr() && aat->isa_aryptr()->klass()->is_obj_array_klass()));
 959           if (i == TypeFunc::Parms) {
 960             src_has_oops = arg_has_oops;
 961           }
 962           //
 963           // src or dst could be j.l.Object when other is basic type array:
 964           //
 965           //   arraycopy(char[],0,Object*,0,size);
 966           //   arraycopy(Object*,0,char[],0,size);
 967           //
 968           // Don't add edges in such cases.
 969           //
 970           bool arg_is_arraycopy_dest = src_has_oops && is_arraycopy &&
 971                                        arg_has_oops && (i > TypeFunc::Parms);
 972 #ifdef ASSERT
 973           if (!(is_arraycopy ||
 974                 (call->as_CallLeaf()->_name != NULL &&
 975                  (strcmp(call->as_CallLeaf()->_name, "g1_wb_pre")  == 0 ||
 976                   strcmp(call->as_CallLeaf()->_name, "g1_wb_post") == 0 ||
 977                   strcmp(call->as_CallLeaf()->_name, "shenandoah_clone_barrier")  == 0 ||
 978                   strcmp(call->as_CallLeaf()->_name, "shenandoah_read_barrier")  == 0 ||
 979                   strcmp(call->as_CallLeaf()->_name, "shenandoah_cas_obj")  == 0 ||
 980                   strcmp(call->as_CallLeaf()->_name, "updateBytesCRC32") == 0 ||
 981                   strcmp(call->as_CallLeaf()->_name, "updateBytesCRC32C") == 0 ||
 982                   strcmp(call->as_CallLeaf()->_name, "updateBytesAdler32") == 0 ||
 983                   strcmp(call->as_CallLeaf()->_name, "aescrypt_encryptBlock") == 0 ||
 984                   strcmp(call->as_CallLeaf()->_name, "aescrypt_decryptBlock") == 0 ||
 985                   strcmp(call->as_CallLeaf()->_name, "cipherBlockChaining_encryptAESCrypt") == 0 ||
 986                   strcmp(call->as_CallLeaf()->_name, "cipherBlockChaining_decryptAESCrypt") == 0 ||
 987                   strcmp(call->as_CallLeaf()->_name, "ghash_processBlocks") == 0 ||
 988                   strcmp(call->as_CallLeaf()->_name, "sha1_implCompress") == 0 ||
 989                   strcmp(call->as_CallLeaf()->_name, "sha1_implCompressMB") == 0 ||
 990                   strcmp(call->as_CallLeaf()->_name, "sha256_implCompress") == 0 ||
 991                   strcmp(call->as_CallLeaf()->_name, "sha256_implCompressMB") == 0 ||
 992                   strcmp(call->as_CallLeaf()->_name, "sha512_implCompress") == 0 ||
 993                   strcmp(call->as_CallLeaf()->_name, "sha512_implCompressMB") == 0 ||
 994                   strcmp(call->as_CallLeaf()->_name, "multiplyToLen") == 0 ||
 995                   strcmp(call->as_CallLeaf()->_name, "squareToLen") == 0 ||
 996                   strcmp(call->as_CallLeaf()->_name, "mulAdd") == 0 ||
 997                   strcmp(call->as_CallLeaf()->_name, "montgomery_multiply") == 0 ||
 998                   strcmp(call->as_CallLeaf()->_name, "montgomery_square") == 0)
 999                  ))) {


2053       // OffsetBot is used to reference array's element. Ignore first AddP.
2054       if (find_second_addp(n, n->in(AddPNode::Base)) == NULL) {
2055         bt = T_OBJECT;
2056       }
2057     }
2058   } else if (offset != oopDesc::klass_offset_in_bytes()) {
2059     if (adr_type->isa_instptr()) {
2060       ciField* field = _compile->alias_type(adr_type->isa_instptr())->field();
2061       if (field != NULL) {
2062         bt = field->layout_type();
2063       } else {
2064         // Check for unsafe oop field access
2065         if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN)) {
2066           bt = T_OBJECT;
2067           (*unsafe) = true;
2068         }
2069       }
2070     } else if (adr_type->isa_aryptr()) {
2071       if (offset == arrayOopDesc::length_offset_in_bytes()) {
2072         // Ignore array length load.
2073       } else if (UseShenandoahGC && offset == -8) {
2074         // Shenandoah read barrier.
2075         bt = T_ARRAY;
2076       } else if (find_second_addp(n, n->in(AddPNode::Base)) != NULL) {
2077         // Ignore first AddP.
2078       } else {
2079         const Type* elemtype = adr_type->isa_aryptr()->elem();
2080         bt = elemtype->array_element_basic_type();
2081       }
2082     } else if (adr_type->isa_rawptr() || adr_type->isa_klassptr()) {
2083       // Allocation initialization, ThreadLocal field access, unsafe access
2084       if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN)) {
2085         bt = T_OBJECT;
2086       }
2087     }
2088   }
2089   return (bt == T_OBJECT || bt == T_NARROWOOP || bt == T_ARRAY);
2090 }
2091 
2092 // Returns unique pointed java object or NULL.
2093 JavaObjectNode* ConnectionGraph::unique_java_object(Node *n) {
2094   assert(!_collecting, "should not call when contructed graph");
2095   // If the node was created after the escape computation we can't answer.


2644 }
2645 
2646 //
2647 // Search memory chain of "mem" to find a MemNode whose address
2648 // is the specified alias index.
2649 //
2650 Node* ConnectionGraph::find_inst_mem(Node *orig_mem, int alias_idx, GrowableArray<PhiNode *>  &orig_phis) {
2651   if (orig_mem == NULL)
2652     return orig_mem;
2653   Compile* C = _compile;
2654   PhaseGVN* igvn = _igvn;
2655   const TypeOopPtr *toop = C->get_adr_type(alias_idx)->isa_oopptr();
2656   bool is_instance = (toop != NULL) && toop->is_known_instance();
2657   Node *start_mem = C->start()->proj_out(TypeFunc::Memory);
2658   Node *prev = NULL;
2659   Node *result = orig_mem;
2660   while (prev != result) {
2661     prev = result;
2662     if (result == start_mem)
2663       break;  // hit one of our sentinels
2664     assert(result->Opcode() != Op_ShenandoahWBMemProj, "unexpected memory slice");
2665     if (result->is_Mem()) {
2666       const Type *at = igvn->type(result->in(MemNode::Address));
2667       if (at == Type::TOP)
2668         break; // Dead
2669       assert (at->isa_ptr() != NULL, "pointer type required.");
2670       int idx = C->get_alias_index(at->is_ptr());
2671       if (idx == alias_idx)
2672         break; // Found
2673       if (!is_instance && (at->isa_oopptr() == NULL ||
2674                            !at->is_oopptr()->is_known_instance())) {
2675         break; // Do not skip store to general memory slice.
2676       }
2677       result = result->in(MemNode::Memory);
2678     }
2679     if (!is_instance)
2680       continue;  // don't search further for non-instance types
2681     // skip over a call which does not affect this memory slice
2682     if (result->is_Proj() && result->as_Proj()->_con == TypeFunc::Memory) {
2683       Node *proj_in = result->in(0);
2684       if (proj_in->is_Allocate() && proj_in->_idx == (uint)toop->instance_id()) {


3008           }
3009         }
3010       }
3011     } else if (n->is_AddP()) {
3012       JavaObjectNode* jobj = unique_java_object(get_addp_base(n));
3013       if (jobj == NULL || jobj == phantom_obj) {
3014 #ifdef ASSERT
3015         ptnode_adr(get_addp_base(n)->_idx)->dump();
3016         ptnode_adr(n->_idx)->dump();
3017         assert(jobj != NULL && jobj != phantom_obj, "escaped allocation");
3018 #endif
3019         _compile->record_failure(C2Compiler::retry_no_escape_analysis());
3020         return;
3021       }
3022       Node *base = get_map(jobj->idx());  // CheckCastPP node
3023       if (!split_AddP(n, base)) continue; // wrong type from dead path
3024     } else if (n->is_Phi() ||
3025                n->is_CheckCastPP() ||
3026                n->is_EncodeP() ||
3027                n->is_DecodeN() ||
3028                n->is_ShenandoahBarrier() ||
3029                (n->is_ConstraintCast() && n->Opcode() == Op_CastPP)) {
3030       if (visited.test_set(n->_idx)) {
3031         assert(n->is_Phi(), "loops only through Phi's");
3032         continue;  // already processed
3033       }
3034       JavaObjectNode* jobj = unique_java_object(n);
3035       if (jobj == NULL || jobj == phantom_obj) {
3036 #ifdef ASSERT
3037         ptnode_adr(n->_idx)->dump();
3038         assert(jobj != NULL && jobj != phantom_obj, "escaped allocation");
3039 #endif
3040         _compile->record_failure(C2Compiler::retry_no_escape_analysis());
3041         return;
3042       } else {
3043         Node *val = get_map(jobj->idx());   // CheckCastPP node
3044         TypeNode *tn = n->as_Type();
3045         const TypeOopPtr* tinst = igvn->type(val)->isa_oopptr();
3046         assert(tinst != NULL && tinst->is_known_instance() &&
3047                tinst->instance_id() == jobj->idx() , "instance type expected.");
3048 


3079     // push allocation's users on appropriate worklist
3080     for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
3081       Node *use = n->fast_out(i);
3082       if(use->is_Mem() && use->in(MemNode::Address) == n) {
3083         // Load/store to instance's field
3084         memnode_worklist.append_if_missing(use);
3085       } else if (use->is_MemBar()) {
3086         if (use->in(TypeFunc::Memory) == n) { // Ignore precedent edge
3087           memnode_worklist.append_if_missing(use);
3088         }
3089       } else if (use->is_AddP() && use->outcnt() > 0) { // No dead nodes
3090         Node* addp2 = find_second_addp(use, n);
3091         if (addp2 != NULL) {
3092           alloc_worklist.append_if_missing(addp2);
3093         }
3094         alloc_worklist.append_if_missing(use);
3095       } else if (use->is_Phi() ||
3096                  use->is_CheckCastPP() ||
3097                  use->is_EncodeNarrowPtr() ||
3098                  use->is_DecodeNarrowPtr() ||
3099                  use->is_ShenandoahBarrier() ||
3100                  (use->is_ConstraintCast() && use->Opcode() == Op_CastPP)) {
3101         alloc_worklist.append_if_missing(use);
3102 #ifdef ASSERT
3103       } else if (use->is_Mem()) {
3104         assert(use->in(MemNode::Address) != n, "EA: missing allocation reference path");
3105       } else if (use->is_MergeMem()) {
3106         assert(_mergemem_worklist.contains(use->as_MergeMem()), "EA: missing MergeMem node in the worklist");
3107       } else if (use->is_SafePoint()) {
3108         // Look for MergeMem nodes for calls which reference unique allocation
3109         // (through CheckCastPP nodes) even for debug info.
3110         Node* m = use->in(TypeFunc::Memory);
3111         if (m->is_MergeMem()) {
3112           assert(_mergemem_worklist.contains(m->as_MergeMem()), "EA: missing MergeMem node in the worklist");
3113         }
3114       } else if (use->Opcode() == Op_EncodeISOArray) {
3115         if (use->in(MemNode::Memory) == n || use->in(3) == n) {
3116           // EncodeISOArray overwrites destination array
3117           memnode_worklist.append_if_missing(use);
3118         }
3119       } else {
3120         uint op = use->Opcode();
3121         if (!(op == Op_CmpP || op == Op_Conv2B ||
3122               op == Op_CastP2X || op == Op_StoreCM ||
3123               op == Op_FastLock || op == Op_AryEq || op == Op_StrComp ||
3124               op == Op_StrEquals || op == Op_StrIndexOf ||
3125               op == Op_ShenandoahWBMemProj)) {
3126           n->dump();
3127           use->dump();
3128           assert(false, "EA: missing allocation reference path");
3129         }
3130 #endif
3131       }
3132     }
3133 
3134   }
3135 
3136   // Go over all ArrayCopy nodes and if one of the inputs has a unique
3137   // type, record it in the ArrayCopy node so we know what memory this
3138   // node uses/modified.
3139   for (int next = 0; next < arraycopy_worklist.length(); next++) {
3140     ArrayCopyNode* ac = arraycopy_worklist.at(next);
3141     Node* dest = ac->in(ArrayCopyNode::Dest);
3142     if (dest->is_AddP()) {
3143       dest = get_addp_base(dest);
3144     }
3145     JavaObjectNode* jobj = unique_java_object(dest);


< prev index next >