src/share/vm/opto/graphKit.cpp

rev 5462 : 8024069: replace_in_map() should operate on parent maps
Summary: type information gets lost because replace_in_map() doesn't update parent maps
Reviewed-by:
rev 5464 : 8024070: C2 needs some form of type speculation
Summary: record unused type profile information with the type system, then propagate and use it.
Reviewed-by:
rev 5465 : imported patch speculative-cleanup
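
To ground the summaries above, here is a minimal C++ sketch of what profile-driven type speculation buys. All names are hypothetical and this models the idea, not HotSpot's implementation: the compiled fast path assumes the one class the interpreter's profile recorded, and falls back (where the real VM would take an uncommon trap and deoptimize) when that assumption fails.

#include <cstdio>

struct Klass  { const char* name; };
struct Object { const Klass* klass; };

static const Klass StringKlass = { "java/lang/String" };
static const Klass OtherKlass  = { "java/lang/Object" };

// Stand-in for the MethodData: at this bytecode the interpreter saw
// exactly one receiver class (morphism == 1).
static const Klass* profiled_unique_klass() { return &StringKlass; }

static bool slow_path_instanceof(const Object* o, const Klass* k) {
  return o != NULL && o->klass == k;  // full subtype walk elided
}

static bool speculative_instanceof(const Object* o, const Klass* k) {
  const Klass* spec = profiled_unique_klass();
  if (o != NULL && o->klass == spec) {
    // Guard held: downstream code may treat 'o' as exactly 'spec',
    // so later checks against the same class fold away.
    return spec == k;
  }
  // Guard failed: the real VM would deoptimize here
  // (Reason_class_check) and recompile without the speculation.
  return slow_path_instanceof(o, k);
}

int main() {
  Object s = { &StringKlass };
  Object x = { &OtherKlass };
  std::printf("%d %d\n", speculative_instanceof(&s, &StringKlass),
                         speculative_instanceof(&x, &StringKlass));  // 1 0
  return 0;
}

The point of 8024070 is that such a guess can be attached to a value as a "speculative" part of its type and carried through the graph, instead of being usable only at the bytecode where the profile was collected.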


2081   return NULL;
2082 }
2083 
2084 
2085 void GraphKit::round_double_arguments(ciMethod* dest_method) {
2086   // (Note:  TypeFunc::make has a cache that makes this fast.)
2087   const TypeFunc* tf    = TypeFunc::make(dest_method);
2088   int             nargs = tf->_domain->_cnt - TypeFunc::Parms;
2089   for (int j = 0; j < nargs; j++) {
2090     const Type *targ = tf->_domain->field_at(j + TypeFunc::Parms);
2091     if( targ->basic_type() == T_DOUBLE ) {
2092       // If any parameters are doubles, they must be rounded before
2093       // the call, dstore_rounding does gvn.transform
2094       Node *arg = argument(j);
2095       arg = dstore_rounding(arg);
2096       set_argument(j, arg);
2097     }
2098   }
2099 }
2100 
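
Both round_double_arguments above and round_double_result below funnel through dstore_rounding for the same reason: with UseSSE == 0 (the x87 configuration tested in precision_rounding further down), doubles live in 80-bit registers and can carry an extended exponent that a strict caller must never observe; a store through memory truncates to the 64-bit format. A hand-rolled sketch of that effect follows; it is observable only when actually compiled for x87, e.g. 32-bit gcc with -mfpmath=387 and optimization keeping values in registers.

#include <cstdio>

// Forcing a value through a 64-bit memory slot rounds away any extra
// exponent/precision the x87 register file may be carrying.
static double store_rounding(double d) {
  volatile double slot = d;  // compiler must emit a real 64-bit store
  return slot;
}

int main() {
  double tiny   = 1.0e-308;
  double scaled = tiny * 1.0e-20;  // underflows the double exponent range
  // On x87 the product can survive in a register with an extended
  // exponent; after the forced store it is a genuine IEEE double
  // (here zero, since 1e-328 is below the smallest denormal). With
  // SSE both prints match.
  std::printf("raw: %g  rounded: %g\n", scaled, store_rounding(scaled));
  return 0;
}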
2101 void GraphKit::round_double_result(ciMethod* dest_method) {
2102   // A non-strict method may return a double value which has an extended
2103   // exponent, but this must not be visible in a caller which is 'strict'
2104   // If a strict caller invokes a non-strict callee, round a double result
2105 
2106   BasicType result_type = dest_method->return_type()->basic_type();
2107   assert( method() != NULL, "must have caller context");
2108   if( result_type == T_DOUBLE && method()->is_strict() && !dest_method->is_strict() ) {
2109     // Destination method's return value is on top of stack
2110     // dstore_rounding() does gvn.transform
2111     Node *result = pop_pair();
2112     result = dstore_rounding(result);
2113     push_pair(result);
2114   }
2115 }
2116 
2117 // rounding for strict float precision conformance
2118 Node* GraphKit::precision_rounding(Node* n) {
2119   return UseStrictFP && _method->flags().is_strict()
2120     && UseSSE == 0 && Matcher::strict_fp_requires_explicit_rounding


2618   if (UncommonNullCast               // Cutout for this technique
2619       && obj != null()               // And not the -Xcomp stupid case?
2620       && !too_many_traps(Deoptimization::Reason_null_check)
2621       ) {
2622     if (data == NULL)
2623       // Edge case:  no mature data.  Be optimistic here.
2624       return true;
2625     // If the profile has not seen a null, assume it won't happen.
2626     assert(java_bc() == Bytecodes::_checkcast ||
2627            java_bc() == Bytecodes::_instanceof ||
2628            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
2629     return !data->as_BitData()->null_seen();
2630   }
2631   return false;
2632 }
2633 
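
The null_seen logic above is itself a small speculation: the interpreter sets a sticky bit in the MDO the first time a null reaches this bytecode, and the compiler elides the null path only while the bit is clear (a later trap would flip the decision on recompile). A standalone model of the decision, with hypothetical types rather than the real ci classes:

#include <cstdio>

struct BitData { bool null_seen; };  // sticky bit kept by the interpreter

static bool seems_never_null_model(const BitData* data) {
  if (data == NULL) return true;  // edge case: no mature data, be optimistic
  return !data->null_seen;        // never saw a null: assume it won't happen
}

int main() {
  BitData fresh   = { false };
  BitData tainted = { true };
  std::printf("%d %d %d\n",
              seems_never_null_model(NULL),       // 1
              seems_never_null_model(&fresh),     // 1
              seems_never_null_model(&tainted));  // 0
  return 0;
}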
2634 //------------------------maybe_cast_profiled_receiver-------------------------
2635 // If the profile has seen exactly one type, narrow to exactly that type.
2636 // Subsequent type checks will always fold up.
2637 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
2638                                              ciProfileData* data,
2639                                              ciKlass* require_klass) {

2640   if (!UseTypeProfile || !TypeProfileCasts) return NULL;
2641   if (data == NULL)  return NULL;
2642 
2643   // Make sure we haven't already deoptimized from this tactic.
2644   if (too_many_traps(Deoptimization::Reason_class_check))
2645     return NULL;
2646 
2647   // (No, this isn't a call, but it's enough like a virtual call
2648   // to use the same ciMethod accessor to get the profile info...)
2649   ciCallProfile profile = method()->call_profile_at_bci(bci());
2650   if (profile.count() >= 0 &&         // no cast failures here
2651       profile.has_receiver(0) &&
2652       profile.morphism() == 1) {
2653     ciKlass* exact_kls = profile.receiver(0);
2654     if (require_klass == NULL ||
2655         static_subtype_check(require_klass, exact_kls) == SSC_always_true) {
2656       // If we narrow the type to match what the type profile sees,
2657       // we can then remove the rest of the cast.

2658       // This is a win, even if the exact_kls is very specific,
2659       // because downstream operations, such as method calls,
2660       // will often benefit from the sharper type.
2661       Node* exact_obj = not_null_obj; // will get updated in place...
2662       Node* slow_ctl  = type_check_receiver(exact_obj, exact_kls, 1.0,
2663                                             &exact_obj);
2664       { PreserveJVMState pjvms(this);
2665         set_control(slow_ctl);
2666         uncommon_trap(Deoptimization::Reason_class_check,
2667                       Deoptimization::Action_maybe_recompile);
2668       }

2669       replace_in_map(not_null_obj, exact_obj);

2670       return exact_obj;
2671     }
2672     // assert(ssc == SSC_always_true)... except maybe the profile lied to us.
2673   }
2674 
2675   return NULL;
2676 }
2677 
2678 
2679 //-------------------------------gen_instanceof--------------------------------
2680 // Generate an instance-of idiom.  Used by both the instance-of bytecode
2681 // and the reflective instance-of call.
2682 Node* GraphKit::gen_instanceof(Node* obj, Node* superklass) {
2683   kill_dead_locals();           // Benefit all the uncommon traps
2684   assert( !stopped(), "dead parse path should be checked in callers" );
2685   assert(!TypePtr::NULL_PTR->higher_equal(_gvn.type(superklass)->is_klassptr()),
2686          "must check for not-null not-dead klass in callers");
2687 
2688   // Make the merge point
2689   enum { _obj_path = 1, _fail_path, _null_path, PATH_LIMIT };
2690   RegionNode* region = new(C) RegionNode(PATH_LIMIT);
2691   Node*       phi    = new(C) PhiNode(region, TypeInt::BOOL);
2692   C->set_has_split_ifs(true); // Has chance for split-if optimization
2693 
2694   ciProfileData* data = NULL;
2695   bool safe_for_replace = false;
2696   if (java_bc() == Bytecodes::_instanceof) {  // Only for the bytecode
2697     data = method()->method_data()->bci_to_data(bci());
2698     safe_for_replace = true;
2699   }
2700   bool never_see_null = (ProfileDynamicTypes  // aggressive use of profile
2701                          && seems_never_null(obj, data));
2702 
2703   // Null check; get casted pointer; set region slot 3
2704   Node* null_ctl = top();
2705   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace);
2706 
2707   // If not_null_obj is dead, only null-path is taken
2708   if (stopped()) {              // Doing instance-of on a NULL?
2709     set_control(null_ctl);
2710     return intcon(0);
2711   }
2712   region->init_req(_null_path, null_ctl);
2713   phi   ->init_req(_null_path, intcon(0)); // Set null path value
2714   if (null_ctl == top()) {
2715     // Do this eagerly, so that pattern matches like is_diamond_phi
2716     // will work even during parsing.
2717     assert(_null_path == PATH_LIMIT-1, "delete last");
2718     region->del_req(_null_path);
2719     phi   ->del_req(_null_path);
2720   }
2721 
2722   if (ProfileDynamicTypes && data != NULL) {
2723     Node* cast_obj = maybe_cast_profiled_receiver(not_null_obj, data, NULL);
2724     if (stopped()) {            // Profile disagrees with this path.
2725       set_control(null_ctl);    // Null is the only remaining possibility.
2726       return intcon(0);
2727     }
2728     if (cast_obj != NULL)
2729       not_null_obj = cast_obj;
2730   }
2731 
2732   // Load the object's klass
2733   Node* obj_klass = load_object_klass(not_null_obj);
2734 
2735   // Generate the subtype check
2736   Node* not_subtype_ctrl = gen_subtype_check(obj_klass, superklass);
2737 
2738   // Plug in the success path to the general merge in slot 1.
2739   region->init_req(_obj_path, control());
2740   phi   ->init_req(_obj_path, intcon(1));
2741 
2742   // Plug in the failing path to the general merge in slot 2.
2743   region->init_req(_fail_path, not_subtype_ctrl);
2744   phi   ->init_req(_fail_path, intcon(0));
2745 
2746   // Return final merged results
2747   set_control( _gvn.transform(region) );
2748   record_for_igvn(region);
2749   return _gvn.transform(phi);
2750 }


2756 // If failure_control is supplied and not null, it is filled in with
2757 // the control edge for the cast failure.  Otherwise, an appropriate
2758 // uncommon trap or exception is thrown.
2759 Node* GraphKit::gen_checkcast(Node *obj, Node* superklass,
2760                               Node* *failure_control) {
2761   kill_dead_locals();           // Benefit all the uncommon traps
2762   const TypeKlassPtr *tk = _gvn.type(superklass)->is_klassptr();
2763   const Type *toop = TypeOopPtr::make_from_klass(tk->klass());
2764 
2765   // Fast cutout:  Check the case that the cast is vacuously true.
2766   // This detects the common cases where the test will short-circuit
2767   // away completely.  We do this before we perform the null check,
2768   // because if the test is going to turn into zero code, we don't
2769   // want a residual null check left around.  (Causes a slowdown,
2770   // for example, in some objArray manipulations, such as a[i]=a[j].)
2771   if (tk->singleton()) {
2772     const TypeOopPtr* objtp = _gvn.type(obj)->isa_oopptr();
2773     if (objtp != NULL && objtp->klass() != NULL) {
2774       switch (static_subtype_check(tk->klass(), objtp->klass())) {
2775       case SSC_always_true:
2776         return obj;
2777       case SSC_always_false:
2778         // It needs a null check because a null will *pass* the cast check.
2779         // A non-null value will always produce an exception.
2780         return null_assert(obj);
2781       }
2782     }
2783   }
2784 
2785   ciProfileData* data = NULL;
2786   bool safe_for_replace = false;
2787   if (failure_control == NULL) {        // use MDO in regular case only
2788     assert(java_bc() == Bytecodes::_aastore ||
2789            java_bc() == Bytecodes::_checkcast,
2790            "interpreter profiles type checks only for these BCs");
2791     data = method()->method_data()->bci_to_data(bci());
2792     safe_for_replace = true;
2793   }
2794 
2795   // Make the merge point
2796   enum { _obj_path = 1, _null_path, PATH_LIMIT };


2805   // Null check; get casted pointer; set region slot 3
2806   Node* null_ctl = top();
2807   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace);
2808 
2809   // If not_null_obj is dead, only null-path is taken
2810   if (stopped()) {              // Doing instance-of on a NULL?
2811     set_control(null_ctl);
2812     return null();
2813   }
2814   region->init_req(_null_path, null_ctl);
2815   phi   ->init_req(_null_path, null());  // Set null path value
2816   if (null_ctl == top()) {
2817     // Do this eagerly, so that pattern matches like is_diamond_phi
2818     // will work even during parsing.
2819     assert(_null_path == PATH_LIMIT-1, "delete last");
2820     region->del_req(_null_path);
2821     phi   ->del_req(_null_path);
2822   }
2823 
2824   Node* cast_obj = NULL;
2825   if (data != NULL &&
2826       // Counter has never been decremented (due to cast failure).
2827       // ...This is a reasonable thing to expect.  It is true of
2828       // all casts inserted by javac to implement generic types.
2829       data->as_CounterData()->count() >= 0) {
2830     cast_obj = maybe_cast_profiled_receiver(not_null_obj, data, tk->klass());
2831     if (cast_obj != NULL) {
2832       if (failure_control != NULL) // failure is now impossible
2833         (*failure_control) = top();
2834       // adjust the type of the phi to the exact klass:
2835       phi->raise_bottom_type(_gvn.type(cast_obj)->meet(TypePtr::NULL_PTR));
2836     }
2837   }
2838 
2839   if (cast_obj == NULL) {
2840     // Load the object's klass
2841     Node* obj_klass = load_object_klass(not_null_obj);
2842 
2843     // Generate the subtype check
2844     Node* not_subtype_ctrl = gen_subtype_check( obj_klass, superklass );
2845 
2846     // Plug in success path into the merge
2847     cast_obj = _gvn.transform(new (C) CheckCastPPNode(control(),
2848                                                          not_null_obj, toop));
2849     // Failure path ends in uncommon trap (or may be dead - failure impossible)
2850     if (failure_control == NULL) {

2081   return NULL;
2082 }
2083 
2084 
2085 void GraphKit::round_double_arguments(ciMethod* dest_method) {
2086   // (Note:  TypeFunc::make has a cache that makes this fast.)
2087   const TypeFunc* tf    = TypeFunc::make(dest_method);
2088   int             nargs = tf->_domain->_cnt - TypeFunc::Parms;
2089   for (int j = 0; j < nargs; j++) {
2090     const Type *targ = tf->_domain->field_at(j + TypeFunc::Parms);
2091     if( targ->basic_type() == T_DOUBLE ) {
2092       // If any parameters are doubles, they must be rounded before
2093       // the call, dstore_rounding does gvn.transform
2094       Node *arg = argument(j);
2095       arg = dstore_rounding(arg);
2096       set_argument(j, arg);
2097     }
2098   }
2099 }
2100 
2101 // Record profiling data from argument profiling at an invoke with the
2102 // type system so that it can propagate it (speculation)
2103 void GraphKit::record_profiled_arguments_for_speculation(ciMethod* dest_method, Bytecodes::Code bc) {
2104   if (!UseTypeSpeculation) {
2105     return;
2106   }
2107   const TypeFunc* tf    = TypeFunc::make(dest_method);
2108   int             nargs = tf->_domain->_cnt - TypeFunc::Parms;
2109   int skip = Bytecodes::has_receiver(bc) ? 1 : 0;
2110   for (int j = skip, i = 0; j < nargs && i < TypeProfileArgsLimit; j++) {
2111     const Type *targ = tf->_domain->field_at(j + TypeFunc::Parms);
2112     if (targ->basic_type() == T_OBJECT || targ->basic_type() == T_ARRAY) {
2113       ciKlass* better_type = NULL;
2114       if (method()->argument_profiled_type(bci(), i, better_type)) {
2115         record_profile_for_speculation(argument(j), better_type);
2116       }
2117       i++;
2118     }
2119   }
2120 }
2121 
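
The two cursors in the loop above are deliberate: j walks every declared argument (starting past the receiver when the bytecode has one), while i advances only on object and array arguments, because those are the only slots the MDO profiles, capped at TypeProfileArgsLimit. A standalone model of that bookkeeping, using a hypothetical one-letter signature encoding:

#include <cstdio>

int main() {
  // 'L' = object/array argument, 'P' = primitive; receiver already skipped.
  const char sig[] = "LPLLP";
  const int  type_profile_args_limit = 2;
  for (int j = 0, i = 0; sig[j] != '\0' && i < type_profile_args_limit; j++) {
    if (sig[j] == 'L') {
      std::printf("argument(%d) -> profile slot %d\n", j, i);
      i++;  // only profiled (object) arguments consume a slot
    }
  }
  return 0;  // prints slots for arguments 0 and 2, then hits the limit
}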
2122 // Record profiling data from parameter profiling at an invoke with
2123 // the type system so that it can propagate it (speculation)
2124 void GraphKit::record_profiled_parameters_for_speculation() {
2125   if (!UseTypeSpeculation) {
2126     return;
2127   }
2128   for (int i = 0, j = 0; i < method()->arg_size() ; i++) {
2129     if (_gvn.type(local(i))->isa_oopptr()) {
2130       ciKlass* better_type = NULL;
2131       if (method()->parameter_profiled_type(j, better_type)) {
2132         record_profile_for_speculation(local(i), better_type);
2133       }
2134       j++;
2135     }
2136   }
2137 }
2138 
2139 void GraphKit::round_double_result(ciMethod* dest_method) {
2140   // A non-strict method may return a double value which has an extended
2141   // exponent, but this must not be visible in a caller which is 'strict'
2142   // If a strict caller invokes a non-strict callee, round a double result
2143 
2144   BasicType result_type = dest_method->return_type()->basic_type();
2145   assert( method() != NULL, "must have caller context");
2146   if( result_type == T_DOUBLE && method()->is_strict() && !dest_method->is_strict() ) {
2147     // Destination method's return value is on top of stack
2148     // dstore_rounding() does gvn.transform
2149     Node *result = pop_pair();
2150     result = dstore_rounding(result);
2151     push_pair(result);
2152   }
2153 }
2154 
2155 // rounding for strict float precision conformance
2156 Node* GraphKit::precision_rounding(Node* n) {
2157   return UseStrictFP && _method->flags().is_strict()
2158     && UseSSE == 0 && Matcher::strict_fp_requires_explicit_rounding


2656   if (UncommonNullCast               // Cutout for this technique
2657       && obj != null()               // And not the -Xcomp stupid case?
2658       && !too_many_traps(Deoptimization::Reason_null_check)
2659       ) {
2660     if (data == NULL)
2661       // Edge case:  no mature data.  Be optimistic here.
2662       return true;
2663     // If the profile has not seen a null, assume it won't happen.
2664     assert(java_bc() == Bytecodes::_checkcast ||
2665            java_bc() == Bytecodes::_instanceof ||
2666            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
2667     return !data->as_BitData()->null_seen();
2668   }
2669   return false;
2670 }
2671 
2672 //------------------------maybe_cast_profiled_receiver-------------------------
2673 // If the profile has seen exactly one type, narrow to exactly that type.
2674 // Subsequent type checks will always fold up.
2675 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
2676                                              ciKlass* require_klass,
2677                                              ciKlass* spec_klass,
2678                                              bool safe_for_replace) {
2679   if (!UseTypeProfile || !TypeProfileCasts) return NULL;

2680 
2681   // Make sure we haven't already deoptimized from this tactic.
2682   if (too_many_traps(Deoptimization::Reason_class_check))
2683     return NULL;
2684 
2685   // (No, this isn't a call, but it's enough like a virtual call
2686   // to use the same ciMethod accessor to get the profile info...)
2687   // If we have a speculative type use it instead of profiling (which
2688   // may not help us)
2689   ciKlass* exact_kls = spec_klass == NULL ? profile_has_unique_klass() : spec_klass;
2690   if (exact_kls != NULL) { // no cast failures here

2691     if (require_klass == NULL ||
2692         static_subtype_check(require_klass, exact_kls) == SSC_always_true) {
2693       // If we narrow the type to match what the type profile sees or
2694       // the speculative type, we can then remove the rest of the
2695       // cast.
2696       // This is a win, even if the exact_kls is very specific,
2697       // because downstream operations, such as method calls,
2698       // will often benefit from the sharper type.
2699       Node* exact_obj = not_null_obj; // will get updated in place...
2700       Node* slow_ctl  = type_check_receiver(exact_obj, exact_kls, 1.0,
2701                                             &exact_obj);
2702       { PreserveJVMState pjvms(this);
2703         set_control(slow_ctl);
2704         uncommon_trap(Deoptimization::Reason_class_check,
2705                       Deoptimization::Action_maybe_recompile);
2706       }
2707       if (safe_for_replace) {
2708         replace_in_map(not_null_obj, exact_obj);
2709       }
2710       return exact_obj;
2711     }
2712     // assert(ssc == SSC_always_true)... except maybe the profile lied to us.
2713   }
2714 
2715   return NULL;
2716 }
2717 
2718 // Cast obj to type and emit guard unless we had too many traps here
2719 // already
2720 Node* GraphKit::maybe_cast_profiled_obj(Node* obj,
2721                                         ciKlass* type,
2722                                         bool not_null) {
2723   // type == NULL if profiling tells us this object is always null
2724   if (type != NULL) {
2725     if (!too_many_traps(Deoptimization::Reason_null_check) &&
2726         !too_many_traps(Deoptimization::Reason_class_check)) {
2727       Node* not_null_obj = NULL;
2728       // not_null is true if we know the object is not null and
2729       // there's no need for a null check
2730       if (!not_null) {
2731         Node* null_ctl = top();
2732         not_null_obj = null_check_oop(obj, &null_ctl, true, true);
2733         assert(null_ctl->is_top(), "no null control here");
2734       } else {
2735         not_null_obj = obj;
2736       }
2737 
2738       Node* exact_obj = not_null_obj;
2739       ciKlass* exact_kls = type;
2740       Node* slow_ctl  = type_check_receiver(exact_obj, exact_kls, 1.0,
2741                                             &exact_obj);
2742       {
2743         PreserveJVMState pjvms(this);
2744         set_control(slow_ctl);
2745         uncommon_trap(Deoptimization::Reason_class_check,
2746                       Deoptimization::Action_maybe_recompile);
2747       }
2748       replace_in_map(not_null_obj, exact_obj);
2749       obj = exact_obj;
2750     }
2751   } else {
2752     if (!too_many_traps(Deoptimization::Reason_null_assert)) {
2753       Node* exact_obj = null_assert(obj);
2754       replace_in_map(obj, exact_obj);
2755       obj = exact_obj;
2756     }
2757   }
2758   return obj;
2759 }
2760 
2761 // Record profiling data exact_kls for Node n with the type system so
2762 // that it can propagate it (speculation)
2763 Node* GraphKit::record_profile_for_speculation(Node* n, ciKlass* exact_kls) {
2764   assert(UseTypeSpeculation, "type speculation must be on");
2765   if (exact_kls != NULL) {
2766     const TypeKlassPtr* tklass = TypeKlassPtr::make(exact_kls);
2767     const TypeOopPtr* xtype = tklass->as_instance_type();
2768     assert(xtype->klass_is_exact(), "Should be exact");
2769     
2770     // Build a type with a speculative type (what we think we know
2771     // about the type but will need a guard when we use it)
2772     const TypeOopPtr* spec_type = TypeOopPtr::make(TypePtr::BotPTR, Type::OffsetBot, TypeOopPtr::InstanceBot, xtype);
2773     // We're changing the type, we need a new cast node to carry the
2774     // new type. The new type depends on the control: what profiling
2775     // tells us is only valid from here as far as we can tell.
2776     Node* cast = new(C) CastPPNode(n, spec_type);
2777     cast->init_req(0, control());
2778     cast = _gvn.transform(cast);
2779     replace_in_map(n, cast);
2780     n = cast;
2781   }
2782   return n;
2783 }
2784 
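
record_profile_for_speculation above is the producer side of the new machinery: it pins a CastPP at the current control and gives it a type whose speculative component remembers what profiling suggested. The consumer side is code like gen_checkcast below, which asks the type for speculative_type() and, when one is present, trades a full subtype check for a single exact-class guard. In miniature (hypothetical names, flat model, not Ideal-graph code):

#include <cstdio>

struct Klass { const char* name; };

struct ValueType {
  const Klass* static_klass;       // what the type system can prove
  const Klass* speculative_klass;  // what profiling suggested, or NULL
};

static const Klass ObjectK = { "Object" };
static const Klass StringK = { "String" };

static void gen_cast_model(const ValueType& t, const Klass* target) {
  if (t.speculative_klass != NULL) {
    // One pointer-compare guard against the speculative class; a miss
    // deoptimizes (Reason_class_check) instead of taking a slow path.
    std::printf("guard klass == %s, then cast to %s\n",
                t.speculative_klass->name, target->name);
  } else {
    std::printf("full subtype check against %s\n", target->name);
  }
}

int main() {
  ValueType plain      = { &ObjectK, NULL };
  ValueType speculated = { &ObjectK, &StringK };  // after record_profile_for_speculation
  gen_cast_model(plain, &StringK);
  gen_cast_model(speculated, &StringK);
  return 0;
}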
2785 // Record profiling data from receiver profiling at an invoke with the
2786 // type system so that it can propagate it (speculation)
2787 Node* GraphKit::record_profiled_receiver_for_speculation(Node* n) {
2788   if (!UseTypeSpeculation) {
2789     return n;
2790   }
2791   ciKlass* exact_kls = profile_has_unique_klass();
2792   return record_profile_for_speculation(n, exact_kls);
2793 }
2794 
2795 
2796 //-------------------------------gen_instanceof--------------------------------
2797 // Generate an instance-of idiom.  Used by both the instance-of bytecode
2798 // and the reflective instance-of call.
2799 Node* GraphKit::gen_instanceof(Node* obj, Node* superklass, bool safe_for_replace) {
2800   kill_dead_locals();           // Benefit all the uncommon traps
2801   assert( !stopped(), "dead parse path should be checked in callers" );
2802   assert(!TypePtr::NULL_PTR->higher_equal(_gvn.type(superklass)->is_klassptr()),
2803          "must check for not-null not-dead klass in callers");
2804 
2805   // Make the merge point
2806   enum { _obj_path = 1, _fail_path, _null_path, PATH_LIMIT };
2807   RegionNode* region = new(C) RegionNode(PATH_LIMIT);
2808   Node*       phi    = new(C) PhiNode(region, TypeInt::BOOL);
2809   C->set_has_split_ifs(true); // Has chance for split-if optimization
2810 
2811   ciProfileData* data = NULL;

2812   if (java_bc() == Bytecodes::_instanceof) {  // Only for the bytecode
2813     data = method()->method_data()->bci_to_data(bci());

2814   }
2815   bool never_see_null = (ProfileDynamicTypes  // aggressive use of profile
2816                          && seems_never_null(obj, data));
2817 
2818   // Null check; get casted pointer; set region slot 3
2819   Node* null_ctl = top();
2820   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace);
2821 
2822   // If not_null_obj is dead, only null-path is taken
2823   if (stopped()) {              // Doing instance-of on a NULL?
2824     set_control(null_ctl);
2825     return intcon(0);
2826   }
2827   region->init_req(_null_path, null_ctl);
2828   phi   ->init_req(_null_path, intcon(0)); // Set null path value
2829   if (null_ctl == top()) {
2830     // Do this eagerly, so that pattern matches like is_diamond_phi
2831     // will work even during parsing.
2832     assert(_null_path == PATH_LIMIT-1, "delete last");
2833     region->del_req(_null_path);
2834     phi   ->del_req(_null_path);
2835   }
2836 
2837   // Do we know the type check always succeeds?
2838   bool known_statically = false;
2839   if (_gvn.type(superklass)->singleton()) {
2840     ciKlass* superk = _gvn.type(superklass)->is_klassptr()->klass();
2841     ciKlass* subk = _gvn.type(obj)->is_oopptr()->klass();
2842     if (subk != NULL && subk->is_loaded()) {
2843       int static_res = static_subtype_check(superk, subk);
2844       known_statically = (static_res == SSC_always_true || static_res == SSC_always_false);
2845     }
2846   }
2847 
2848   if (known_statically && UseTypeSpeculation) {
2849     // If we know the type check always succeeds then we don't use the
2850     // profiling data at this bytecode. Don't lose it, feed it to the
2851     // type system as a speculative type.
2852     not_null_obj = record_profiled_receiver_for_speculation(not_null_obj);
2853   } else {
2854     const TypeOopPtr* obj_type = _gvn.type(obj)->is_oopptr();
2855     // We may not have profiling here or it may not help us. If we
2856     // have a speculative type use it to perform an exact cast.
2857     ciKlass* spec_obj_type = obj_type->speculative_type();
2858     if (spec_obj_type != NULL || (ProfileDynamicTypes && data != NULL)) {
2859       Node* cast_obj = maybe_cast_profiled_receiver(not_null_obj, NULL, spec_obj_type, safe_for_replace);
2860       if (stopped()) {            // Profile disagrees with this path.
2861         set_control(null_ctl);    // Null is the only remaining possibility.
2862         return intcon(0);
2863       }
2864       if (cast_obj != NULL) {
2865         not_null_obj = cast_obj;
2866       }
2867     }
2868   }
2869 
2870   // Load the object's klass
2871   Node* obj_klass = load_object_klass(not_null_obj);
2872 
2873   // Generate the subtype check
2874   Node* not_subtype_ctrl = gen_subtype_check(obj_klass, superklass);
2875 
2876   // Plug in the success path to the general merge in slot 1.
2877   region->init_req(_obj_path, control());
2878   phi   ->init_req(_obj_path, intcon(1));
2879 
2880   // Plug in the failing path to the general merge in slot 2.
2881   region->init_req(_fail_path, not_subtype_ctrl);
2882   phi   ->init_req(_fail_path, intcon(0));
2883 
2884   // Return final merged results
2885   set_control( _gvn.transform(region) );
2886   record_for_igvn(region);
2887   return _gvn.transform(phi);
2888 }
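
Stripped of the region/phi plumbing, the three merge paths above implement ordinary instanceof semantics. A flattened model (sketch, not Ideal-graph code):

#include <cstdio>

struct Klass  { const Klass* super; };
struct Object { const Klass* klass; };

static bool is_subtype(const Klass* sub, const Klass* super) {
  for (const Klass* k = sub; k != NULL; k = k->super)
    if (k == super) return true;
  return false;
}

static int gen_instanceof_model(const Object* obj, const Klass* superk) {
  if (obj == NULL) return 0;                     // _null_path: phi sees 0
  if (is_subtype(obj->klass, superk)) return 1;  // _obj_path:  phi sees 1
  return 0;                                      // _fail_path: phi sees 0
}

int main() {
  Klass  base = { NULL }, derived = { &base };
  Object d = { &derived };
  std::printf("%d %d %d\n",
              gen_instanceof_model(NULL, &base),      // 0
              gen_instanceof_model(&d,   &base),      // 1
              gen_instanceof_model(&d,   &derived));  // 1
  return 0;
}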


2894 // If failure_control is supplied and not null, it is filled in with
2895 // the control edge for the cast failure.  Otherwise, an appropriate
2896 // uncommon trap or exception is thrown.
2897 Node* GraphKit::gen_checkcast(Node *obj, Node* superklass,
2898                               Node* *failure_control) {
2899   kill_dead_locals();           // Benefit all the uncommon traps
2900   const TypeKlassPtr *tk = _gvn.type(superklass)->is_klassptr();
2901   const Type *toop = TypeOopPtr::make_from_klass(tk->klass());
2902 
2903   // Fast cutout:  Check the case that the cast is vacuously true.
2904   // This detects the common cases where the test will short-circuit
2905   // away completely.  We do this before we perform the null check,
2906   // because if the test is going to turn into zero code, we don't
2907   // want a residual null check left around.  (Causes a slowdown,
2908   // for example, in some objArray manipulations, such as a[i]=a[j].)
2909   if (tk->singleton()) {
2910     const TypeOopPtr* objtp = _gvn.type(obj)->isa_oopptr();
2911     if (objtp != NULL && objtp->klass() != NULL) {
2912       switch (static_subtype_check(tk->klass(), objtp->klass())) {
2913       case SSC_always_true:
2914         // If we know the type check always succeeds then we don't use
2915         // the profiling data at this bytecode. Don't lose it, feed it
2916         // to the type system as a speculative type.
2917         return record_profiled_receiver_for_speculation(obj);
2918       case SSC_always_false:
2919         // It needs a null check because a null will *pass* the cast check.
2920         // A non-null value will always produce an exception.
2921         return null_assert(obj);
2922       }
2923     }
2924   }
2925 
2926   ciProfileData* data = NULL;
2927   bool safe_for_replace = false;
2928   if (failure_control == NULL) {        // use MDO in regular case only
2929     assert(java_bc() == Bytecodes::_aastore ||
2930            java_bc() == Bytecodes::_checkcast,
2931            "interpreter profiles type checks only for these BCs");
2932     data = method()->method_data()->bci_to_data(bci());
2933     safe_for_replace = true;
2934   }
2935 
2936   // Make the merge point
2937   enum { _obj_path = 1, _null_path, PATH_LIMIT };


2946   // Null check; get casted pointer; set region slot 3
2947   Node* null_ctl = top();
2948   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace);
2949 
2950   // If not_null_obj is dead, only null-path is taken
2951   if (stopped()) {              // Doing instance-of on a NULL?
2952     set_control(null_ctl);
2953     return null();
2954   }
2955   region->init_req(_null_path, null_ctl);
2956   phi   ->init_req(_null_path, null());  // Set null path value
2957   if (null_ctl == top()) {
2958     // Do this eagerly, so that pattern matches like is_diamond_phi
2959     // will work even during parsing.
2960     assert(_null_path == PATH_LIMIT-1, "delete last");
2961     region->del_req(_null_path);
2962     phi   ->del_req(_null_path);
2963   }
2964 
2965   Node* cast_obj = NULL;
2966   const TypeOopPtr* obj_type = _gvn.type(obj)->is_oopptr();
2967   // We may not have profiling here or it may not help us. If we have
2968   // a speculative type use it to perform an exact cast.
2969   ciKlass* spec_obj_type = obj_type->speculative_type();
2970   if (spec_obj_type != NULL ||
2971       (data != NULL &&
2972        // Counter has never been decremented (due to cast failure).
2973        // ...This is a reasonable thing to expect.  It is true of
2974        // all casts inserted by javac to implement generic types.
2975        data->as_CounterData()->count() >= 0)) {
2976     cast_obj = maybe_cast_profiled_receiver(not_null_obj, tk->klass(), spec_obj_type, safe_for_replace);
2977     if (cast_obj != NULL) {
2978       if (failure_control != NULL) // failure is now impossible
2979         (*failure_control) = top();
2980       // adjust the type of the phi to the exact klass:
2981       phi->raise_bottom_type(_gvn.type(cast_obj)->meet(TypePtr::NULL_PTR));
2982     }
2983   }
2984 
2985   if (cast_obj == NULL) {
2986     // Load the object's klass
2987     Node* obj_klass = load_object_klass(not_null_obj);
2988 
2989     // Generate the subtype check
2990     Node* not_subtype_ctrl = gen_subtype_check( obj_klass, superklass );
2991 
2992     // Plug in success path into the merge
2993     cast_obj = _gvn.transform(new (C) CheckCastPPNode(control(),
2994                                                          not_null_obj, toop));
2995     // Failure path ends in uncommon trap (or may be dead - failure impossible)
2996     if (failure_control == NULL) {

