src/share/vm/opto/escape.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hotspot Sdiff src/share/vm/opto

src/share/vm/opto/escape.cpp

Print this page
rev 7046 : imported patch ea.exact


2822       if (alloc->is_Allocate() && n->as_Type()->type() == TypeInstPtr::NOTNULL
2823           && (alloc->is_AllocateArray() ||
2824               igvn->type(alloc->in(AllocateNode::KlassNode)) != TypeKlassPtr::OBJECT)) {
2825         Node *cast2 = NULL;
2826         for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
2827           Node *use = n->fast_out(i);
2828           if (use->is_CheckCastPP()) {
2829             cast2 = use;
2830             break;
2831           }
2832         }
2833         if (cast2 != NULL) {
2834           n = cast2;
2835         } else {
2836           // Non-scalar replaceable if the allocation type is unknown statically
2837           // (reflection allocation), the object can't be restored during
2838           // deoptimization without precise type.
2839           continue;
2840         }
2841       }







2842       if (alloc->is_Allocate()) {
2843         // Set the scalar_replaceable flag for allocation
2844         // so it could be eliminated.
2845         alloc->as_Allocate()->_is_scalar_replaceable = true;
2846       }
2847       if (alloc->is_CallStaticJava()) {
2848         // Set the scalar_replaceable flag for boxing method
2849         // so it could be eliminated.
2850         alloc->as_CallStaticJava()->_is_scalar_replaceable = true;
2851       }
2852       set_escape_state(ptnode_adr(n->_idx), es); // CheckCastPP escape state
2853       // in order for an object to be scalar-replaceable, it must be:
2854       //   - a direct allocation (not a call returning an object)
2855       //   - non-escaping
2856       //   - eligible to be a unique type
2857       //   - not determined to be ineligible by escape analysis
2858       set_map(alloc, n);
2859       set_map(n, alloc);
2860       const TypeOopPtr *t = igvn->type(n)->isa_oopptr();
2861       if (t == NULL)
2862         continue;  // not a TypeOopPtr
2863       const TypeOopPtr* tinst = t->cast_to_exactness(true)->is_oopptr()->cast_to_instance_id(ni);
2864       igvn->hash_delete(n);
2865       igvn->set_type(n,  tinst);
2866       n->raise_bottom_type(tinst);
2867       igvn->hash_insert(n);
2868       record_for_optimizer(n);
2869       if (alloc->is_Allocate() && (t->isa_instptr() || t->isa_aryptr())) {
2870 
2871         // First, put on the worklist all Field edges from Connection Graph
2872         // which is more accurate than putting immediate users from Ideal Graph.
2873         for (EdgeIterator e(ptn); e.has_next(); e.next()) {
2874           PointsToNode* tgt = e.get();
2875           Node* use = tgt->ideal_node();
2876           assert(tgt->is_Field() && use->is_AddP(),
2877                  "only AddP nodes are Field edges in CG");
2878           if (use->outcnt() > 0) { // Don't process dead nodes
2879             Node* addp2 = find_second_addp(use, use->in(AddPNode::Base));
2880             if (addp2 != NULL) {
2881               assert(alloc->is_AllocateArray(),"array allocation was expected");
2882               alloc_worklist.append_if_missing(addp2);
2883             }




2822       if (alloc->is_Allocate() && n->as_Type()->type() == TypeInstPtr::NOTNULL
2823           && (alloc->is_AllocateArray() ||
2824               igvn->type(alloc->in(AllocateNode::KlassNode)) != TypeKlassPtr::OBJECT)) {
2825         Node *cast2 = NULL;
2826         for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
2827           Node *use = n->fast_out(i);
2828           if (use->is_CheckCastPP()) {
2829             cast2 = use;
2830             break;
2831           }
2832         }
2833         if (cast2 != NULL) {
2834           n = cast2;
2835         } else {
2836           // Non-scalar replaceable if the allocation type is unknown statically
2837           // (reflection allocation), the object can't be restored during
2838           // deoptimization without precise type.
2839           continue;
2840         }
2841       }
2842 
2843       const TypeOopPtr *t = igvn->type(n)->isa_oopptr();
2844       if (t == NULL)
2845         continue;  // not a TypeOopPtr
2846       if (!t->klass_is_exact())
2847         continue; // not a unique type
2848 
2849       if (alloc->is_Allocate()) {
2850         // Set the scalar_replaceable flag for allocation
2851         // so it could be eliminated.
2852         alloc->as_Allocate()->_is_scalar_replaceable = true;
2853       }
2854       if (alloc->is_CallStaticJava()) {
2855         // Set the scalar_replaceable flag for boxing method
2856         // so it could be eliminated.
2857         alloc->as_CallStaticJava()->_is_scalar_replaceable = true;
2858       }
2859       set_escape_state(ptnode_adr(n->_idx), es); // CheckCastPP escape state
2860       // in order for an object to be scalar-replaceable, it must be:
2861       //   - a direct allocation (not a call returning an object)
2862       //   - non-escaping
2863       //   - eligible to be a unique type
2864       //   - not determined to be ineligible by escape analysis
2865       set_map(alloc, n);
2866       set_map(n, alloc);
2867       const TypeOopPtr* tinst = t->cast_to_instance_id(ni);



2868       igvn->hash_delete(n);
2869       igvn->set_type(n,  tinst);
2870       n->raise_bottom_type(tinst);
2871       igvn->hash_insert(n);
2872       record_for_optimizer(n);
2873       if (alloc->is_Allocate() && (t->isa_instptr() || t->isa_aryptr())) {
2874 
2875         // First, put on the worklist all Field edges from Connection Graph
2876           // which is more accurate than putting immediate users from Ideal Graph.
2877         for (EdgeIterator e(ptn); e.has_next(); e.next()) {
2878           PointsToNode* tgt = e.get();
2879           Node* use = tgt->ideal_node();
2880           assert(tgt->is_Field() && use->is_AddP(),
2881                  "only AddP nodes are Field edges in CG");
2882           if (use->outcnt() > 0) { // Don't process dead nodes
2883             Node* addp2 = find_second_addp(use, use->in(AddPNode::Base));
2884             if (addp2 != NULL) {
2885               assert(alloc->is_AllocateArray(),"array allocation was expected");
2886               alloc_worklist.append_if_missing(addp2);
2887             }


src/share/vm/opto/escape.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File