src/share/vm/opto/compile.cpp — excerpt from a web diff view. The leading
four-digit number on each code line below is the original file's line number
from that view, not part of the code.




2866         if (nn != NULL) {
2867           // Decode a narrow oop to match address
2868           // [R12 + narrow_oop_reg<<3 + offset]
2869           if (t->isa_oopptr()) {
2870             nn = new (this) DecodeNNode(nn, t);
2871           } else {
2872             nn = new (this) DecodeNKlassNode(nn, t);
2873           }
2874           n->set_req(AddPNode::Base, nn);
2875           n->set_req(AddPNode::Address, nn);
2876           if (addp->outcnt() == 0) {
2877             addp->disconnect_inputs(NULL, this);
2878           }
2879         }
2880       }
2881     }
2882 #endif
2883     break;
2884   }
2885 
2886 #ifdef _LP64
// (Pre-patch form, 64-bit only — guarded by the #ifdef _LP64 above.)
// If this CastPP's input is a DecodeN (narrow-oop decode) and the matcher can
// generate implicit NULL checks on narrow oops, replace the CastPP by a clone
// of the DecodeN retyped to the CastPP's sharpened type, so the decode itself
// carries the null-check information.
2887   case Op_CastPP:
2888     if (n->in(1)->is_DecodeN() && Matcher::gen_narrow_oop_implicit_null_checks()) {
































2889       Node* in1 = n->in(1);
2890       const Type* t = n->bottom_type();
       // Clone the decode and give it the CastPP's (typically non-null) type.
2891       Node* new_in1 = in1->clone();
2892       new_in1->as_DecodeN()->set_type(t);
2893 
2894       if (!Matcher::narrow_oop_use_complex_address()) {
2895         //
2896         // x86, ARM and friends can handle 2 adds in addressing mode
2897         // and Matcher can fold a DecodeN node into address by using
2898         // a narrow oop directly and do implicit NULL check in address:
2899         //
2900         // [R12 + narrow_oop_reg<<3 + offset]
2901         // NullCheck narrow_oop_reg
2902         //
2903         // On other platforms (Sparc) we have to keep new DecodeN node and
2904         // use it to do implicit NULL check in address:
2905         //
2906         // decode_not_null narrow_oop_reg, base_reg
2907         // [base_reg + offset]
2908         // NullCheck base_reg
2909         //
2910         // Pin the new DecodeN node to non-null path on these platform (Sparc)
2911         // to keep the information to which NULL check the new DecodeN node
2912         // corresponds to use it as value in implicit_null_check().
2913         //
2914         new_in1->set_req(0, n->in(0));
2915       }
2916 
       // The retyped DecodeN clone takes over all uses of the CastPP; the
       // original decode is disconnected if this CastPP was its only user.
2917       n->subsume_by(new_in1, this);
2918       if (in1->outcnt() == 0) {
2919         in1->disconnect_inputs(NULL, this);
2920       }




2921     }
2922     break;
2923 

2924   case Op_CmpP:
2925     // Do this transformation here to preserve CmpPNode::sub() and
2926     // other TypePtr related Ideal optimizations (for example, ptr nullness).
2927     if (n->in(1)->is_DecodeNarrowPtr() || n->in(2)->is_DecodeNarrowPtr()) {
2928       Node* in1 = n->in(1);
2929       Node* in2 = n->in(2);
2930       if (!in1->is_DecodeNarrowPtr()) {
2931         in2 = in1;
2932         in1 = n->in(2);
2933       }
2934       assert(in1->is_DecodeNarrowPtr(), "sanity");
2935 
2936       Node* new_in2 = NULL;
2937       if (in2->is_DecodeNarrowPtr()) {
2938         assert(in2->Opcode() == in1->Opcode(), "must be same node type");
2939         new_in2 = in2->in(1);
2940       } else if (in2->Opcode() == Op_ConP) {
2941         const Type* t = in2->bottom_type();
2942         if (t == TypePtr::NULL_PTR) {
2943           assert(in1->is_DecodeN(), "compare klass to null?");




2866         if (nn != NULL) {
2867           // Decode a narrow oop to match address
2868           // [R12 + narrow_oop_reg<<3 + offset]
2869           if (t->isa_oopptr()) {
2870             nn = new (this) DecodeNNode(nn, t);
2871           } else {
2872             nn = new (this) DecodeNKlassNode(nn, t);
2873           }
2874           n->set_req(AddPNode::Base, nn);
2875           n->set_req(AddPNode::Address, nn);
2876           if (addp->outcnt() == 0) {
2877             addp->disconnect_inputs(NULL, this);
2878           }
2879         }
2880       }
2881     }
2882 #endif
2883     break;
2884   }
2885 

// (Post-patch form.) CastPP nodes are removed here, but the control dependency
// they encoded is first pushed down onto the memory operations that rely on it.
2886   case Op_CastPP:
2887   {
2888     // Remove CastPP nodes to gain more freedom during scheduling but
2889     // keep the dependency they encode as control or precedence edges
2890     // (if control is set already) on memory operations. Some CastPP
2891     // nodes don't have a control (don't carry a dependency): skip
2892     // those.
2893     if (n->in(0) != NULL) {
2894       ResourceMark rm;
2895       Unique_Node_List wq;
2896       wq.push(n);
       // Worklist walk over the CastPP's transitive users: memory ops and
       // narrow-ptr encodes get this CastPP's control attached (as control or
       // precedence edge); control-less address/cast intermediaries (AddP,
       // DecodeN/DecodeNKlass, CheckCastPP, CastPP) are walked through so the
       // dependency reaches the memory ops behind them.
2897       for (uint next = 0; next < wq.size(); ++next) {
2898         Node *m = wq.at(next);
2899         for (DUIterator_Fast imax, i = m->fast_outs(imax); i < imax; i++) {
2900           Node* use = m->fast_out(i);
2901           if (use->is_Mem() || use->is_EncodeNarrowPtr()) {
2902             use->ensure_control_or_add_prec(n->in(0));
2903           } else if (use->in(0) == NULL) {
2904             switch(use->Opcode()) {
2905             case Op_AddP:
2906             case Op_DecodeN:
2907             case Op_DecodeNKlass:
2908             case Op_CheckCastPP:
2909             case Op_CastPP:
2910               wq.push(use);
2911               break;
2912             }
2913           }
2914         }
2915       }
2916     }
       // LP64_ONLY/NOT_LP64 select a per-build constant, so the 64-bit branch
       // below folds away at compile time on 32-bit builds.
2917     const bool is_LP64 = LP64_ONLY(true) NOT_LP64(false);
2918     if (is_LP64 && n->in(1)->is_DecodeN() && Matcher::gen_narrow_oop_implicit_null_checks()) {
2919       Node* in1 = n->in(1);
2920       const Type* t = n->bottom_type();
       // Clone the decode and give it the CastPP's (typically non-null) type.
2921       Node* new_in1 = in1->clone();
2922       new_in1->as_DecodeN()->set_type(t);
2923 
2924       if (!Matcher::narrow_oop_use_complex_address()) {
2925         //
2926         // x86, ARM and friends can handle 2 adds in addressing mode
2927         // and Matcher can fold a DecodeN node into address by using
2928         // a narrow oop directly and do implicit NULL check in address:
2929         //
2930         // [R12 + narrow_oop_reg<<3 + offset]
2931         // NullCheck narrow_oop_reg
2932         //
2933         // On other platforms (Sparc) we have to keep new DecodeN node and
2934         // use it to do implicit NULL check in address:
2935         //
2936         // decode_not_null narrow_oop_reg, base_reg
2937         // [base_reg + offset]
2938         // NullCheck base_reg
2939         //
2940         // Pin the new DecodeN node to non-null path on these platform (Sparc)
2941         // to keep the information to which NULL check the new DecodeN node
2942         // corresponds to use it as value in implicit_null_check().
2943         //
2944         new_in1->set_req(0, n->in(0));
2945       }
2946 
       // The retyped DecodeN clone takes over all uses of the CastPP; the
       // original decode is disconnected if this CastPP was its only user.
2947       n->subsume_by(new_in1, this);
2948       if (in1->outcnt() == 0) {
2949         in1->disconnect_inputs(NULL, this);
2950       }
       // No DecodeN to fold the null check into: drop the CastPP entirely and
       // let its input stand in for it (its dependency was preserved above).
2951     } else {
2952       n->subsume_by(n->in(1), this);
2953       if (n->outcnt() == 0) {
2954         n->disconnect_inputs(NULL, this);
2955       }
2956     }
2957     break;
2958   }
2959 #ifdef _LP64
2960   case Op_CmpP:
2961     // Do this transformation here to preserve CmpPNode::sub() and
2962     // other TypePtr related Ideal optimizations (for example, ptr nullness).
2963     if (n->in(1)->is_DecodeNarrowPtr() || n->in(2)->is_DecodeNarrowPtr()) {
2964       Node* in1 = n->in(1);
2965       Node* in2 = n->in(2);
2966       if (!in1->is_DecodeNarrowPtr()) {
2967         in2 = in1;
2968         in1 = n->in(2);
2969       }
2970       assert(in1->is_DecodeNarrowPtr(), "sanity");
2971 
2972       Node* new_in2 = NULL;
2973       if (in2->is_DecodeNarrowPtr()) {
2974         assert(in2->Opcode() == in1->Opcode(), "must be same node type");
2975         new_in2 = in2->in(1);
2976       } else if (in2->Opcode() == Op_ConP) {
2977         const Type* t = in2->bottom_type();
2978         if (t == TypePtr::NULL_PTR) {
2979           assert(in1->is_DecodeN(), "compare klass to null?");