src/share/vm/opto/escape.cpp
6836054 Sdiff src/share/vm/opto





 888   //  Phase 1:  Process possible allocations from alloc_worklist.
 889   //  Create instance types for the CheckCastPP for allocations where possible.
 890   //
 891   // (Note: don't forget to change the order of the second AddP node on
 892   //  the alloc_worklist if the order of the worklist processing is changed,
 893   //  see the comment in find_second_addp().)
 894   //
 895   while (alloc_worklist.length() != 0) {
 896     Node *n = alloc_worklist.pop();
 897     uint ni = n->_idx;
 898     const TypeOopPtr* tinst = NULL;
 899     if (n->is_Call()) {
 900       CallNode *alloc = n->as_Call();
 901       // copy escape information to call node
 902       PointsToNode* ptn = ptnode_adr(alloc->_idx);
 903       PointsToNode::EscapeState es = escape_state(alloc, igvn);
 904       // We have an allocation or call which returns a Java object,
 905       // see if it is unescaped.
 906       if (es != PointsToNode::NoEscape || !ptn->_scalar_replaceable)
 907         continue;




 908       if (alloc->is_Allocate()) {
 909         // Set the scalar_replaceable flag before the next check.

 910         alloc->as_Allocate()->_is_scalar_replaceable = true;
 911       }
 912       // find CheckCastPP of call return value
 913       n = alloc->result_cast();
 914       if (n == NULL ||          // No uses except Initialize or
 915           !n->is_CheckCastPP()) // not unique CheckCastPP.
 916         continue;






 917       // The inline code for Object.clone() casts the allocation result to
 918       // java.lang.Object and then to the actual type of the allocated
 919       // object. Detect this case and use the second cast.
 920       // Also detect the j.l.reflect.Array.newInstance(jobject, jint) case, where
 921       // the allocation result is cast to java.lang.Object and then
 922       // to the actual Array type.
 923       if (alloc->is_Allocate() && n->as_Type()->type() == TypeInstPtr::NOTNULL
 924           && (alloc->is_AllocateArray() ||
 925               igvn->type(alloc->in(AllocateNode::KlassNode)) != TypeKlassPtr::OBJECT)) {
 926         Node *cast2 = NULL;
 927         for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
 928           Node *use = n->fast_out(i);
 929           if (use->is_CheckCastPP()) {
 930             cast2 = use;
 931             break;
 932           }
 933         }
 934         if (cast2 != NULL) {
 935           n = cast2;
 936         } else {


 937           continue;
 938         }
 939       }





 940       set_escape_state(n->_idx, es);
 941       // in order for an object to be scalar-replaceable, it must be:
 942       //   - a direct allocation (not a call returning an object)
 943       //   - non-escaping
 944       //   - eligible to be a unique type
 945       //   - not determined to be ineligible by escape analysis
 946       set_map(alloc->_idx, n);
 947       set_map(n->_idx, alloc);
 948       const TypeOopPtr *t = igvn->type(n)->isa_oopptr();
 949       if (t == NULL)
 950         continue;  // not a TypeInstPtr
 951       tinst = t->cast_to_exactness(true)->is_oopptr()->cast_to_instance_id(ni);
 952       igvn->hash_delete(n);
 953       igvn->set_type(n,  tinst);
 954       n->raise_bottom_type(tinst);
 955       igvn->hash_insert(n);
 956       record_for_optimizer(n);
 957       if (alloc->is_Allocate() && ptn->_scalar_replaceable &&
 958           (t->isa_instptr() || t->isa_aryptr())) {
 959 




 888   //  Phase 1:  Process possible allocations from alloc_worklist.
 889   //  Create instance types for the CheckCastPP for allocations where possible.
 890   //
 891   // (Note: don't forget to change the order of the second AddP node on
 892   //  the alloc_worklist if the order of the worklist processing is changed,
 893   //  see the comment in find_second_addp().)
 894   //
 895   while (alloc_worklist.length() != 0) {
 896     Node *n = alloc_worklist.pop();
 897     uint ni = n->_idx;
 898     const TypeOopPtr* tinst = NULL;
 899     if (n->is_Call()) {
 900       CallNode *alloc = n->as_Call();
 901       // copy escape information to call node
 902       PointsToNode* ptn = ptnode_adr(alloc->_idx);
 903       PointsToNode::EscapeState es = escape_state(alloc, igvn);
 904       // We have an allocation or call which returns a Java object,
 905       // see if it is unescaped.
 906       if (es != PointsToNode::NoEscape || !ptn->_scalar_replaceable)
 907         continue;
 908 
 909       // Find CheckCastPP for the allocate or call return value
 910       n = alloc->result_cast();
 911       if (n == NULL) {            // No uses except Initialize
 912         if (alloc->is_Allocate()) {
 913           // Set the scalar_replaceable flag for allocation
 914           // so it could be eliminated if it has no uses.
 915           alloc->as_Allocate()->_is_scalar_replaceable = true;
 916         }




 917         continue;
 918       }
 919       if (!n->is_CheckCastPP()) { // not unique CheckCastPP.
 920         assert(!alloc->is_Allocate(), "allocation should have unique type");
 921         continue;
 922       }
 923 
 924       // The inline code for Object.clone() casts the allocation result to
 925       // java.lang.Object and then to the actual type of the allocated
 926       // object. Detect this case and use the second cast.
 927       // Also detect the j.l.reflect.Array.newInstance(jobject, jint) case, where
 928       // the allocation result is cast to java.lang.Object and then
 929       // to the actual Array type.
 930       if (alloc->is_Allocate() && n->as_Type()->type() == TypeInstPtr::NOTNULL
 931           && (alloc->is_AllocateArray() ||
 932               igvn->type(alloc->in(AllocateNode::KlassNode)) != TypeKlassPtr::OBJECT)) {
 933         Node *cast2 = NULL;
 934         for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
 935           Node *use = n->fast_out(i);
 936           if (use->is_CheckCastPP()) {
 937             cast2 = use;
 938             break;
 939           }
 940         }
 941         if (cast2 != NULL) {
 942           n = cast2;
 943         } else {
 944           // Not scalar replaceable if the allocation does not have a precise type (reflection allocation);
 945           // the object can't be restored during deoptimization without a precise type.
 946           continue;
 947         }
 948       }
 949       if (alloc->is_Allocate()) {
 950         // Set the scalar_replaceable flag for allocation
 951         // so it could be eliminated.
 952         alloc->as_Allocate()->_is_scalar_replaceable = true;
 953       }
 954       set_escape_state(n->_idx, es);
 955       // in order for an object to be scalar-replaceable, it must be:
 956       //   - a direct allocation (not a call returning an object)
 957       //   - non-escaping
 958       //   - eligible to be a unique type
 959       //   - not determined to be ineligible by escape analysis
 960       set_map(alloc->_idx, n);
 961       set_map(n->_idx, alloc);
 962       const TypeOopPtr *t = igvn->type(n)->isa_oopptr();
 963       if (t == NULL)
 964         continue;  // not a TypeInstPtr
 965       tinst = t->cast_to_exactness(true)->is_oopptr()->cast_to_instance_id(ni);
 966       igvn->hash_delete(n);
 967       igvn->set_type(n,  tinst);
 968       n->raise_bottom_type(tinst);
 969       igvn->hash_insert(n);
 970       record_for_optimizer(n);
 971       if (alloc->is_Allocate() && ptn->_scalar_replaceable &&
 972           (t->isa_instptr() || t->isa_aryptr())) {
 973 
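
For readers unfamiliar with the cast patterns the comments above refer to, here is a minimal Java sketch (illustrative only, not part of this change; the class and method names are hypothetical). Both Object.clone() and java.lang.reflect.Array.newInstance(Class, int) are declared to return java.lang.Object, so the allocation result is first cast to Object and only a second cast (the second CheckCastPP the loop above searches for) narrows it to the concrete type. When no such cast to a precise type can be found, the allocation is skipped as non-scalar-replaceable, since it could not be restored during deoptimization.

    import java.lang.reflect.Array;

    public class CastShapes implements Cloneable {   // hypothetical example class
        private int[] payload = new int[8];

        CastShapes cloneTyped() throws CloneNotSupportedException {
            // Object.clone() returns java.lang.Object; the explicit cast below
            // roughly corresponds to the second CheckCastPP that recovers the
            // precise type of the cloned allocation.
            return (CastShapes) super.clone();
        }

        static int[] reflectiveArray(int len) {
            // Array.newInstance(Class, int) also returns java.lang.Object; the
            // allocation site itself carries no precise array type, so the
            // compiler relies on a later cast like the one below. If none is
            // found, the allocation stays non-scalar-replaceable.
            return (int[]) Array.newInstance(int.class, len);
        }
    }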

