src/share/vm/opto/graphKit.cpp

rev 6139 : 8031755: Type speculation should be used to optimize explicit null checks
Summary: feed profiling data about reference nullness to type speculation.
Reviewed-by:


 595       const TypePtr* adr_typ = ex_con->add_offset(offset);
 596 
 597       Node *adr = basic_plus_adr(ex_node, ex_node, offset);
 598       const TypeOopPtr* val_type = TypeOopPtr::make_from_klass(env()->String_klass());
 599       // Conservatively release stores of object references.
 600       Node *store = store_oop_to_object(control(), ex_node, adr, adr_typ, null(), val_type, T_OBJECT, MemNode::release);
 601 
 602       add_exception_state(make_exception_state(ex_node));
 603       return;
 604     }
 605   }
 606 
 607   // %%% Maybe add entry to OptoRuntime which directly throws the exc.?
 608   // It won't be much cheaper than bailing to the interp., since we'll
 609   // have to pass up all the debug-info, and the runtime will have to
 610   // create the stack trace.
 611 
 612   // Usual case:  Bail to interpreter.
 613   // Reserve the right to recompile if we haven't seen anything yet.
 614 
 615   assert(!Deoptimization::reason_is_speculate(reason), "unsupported");
 616   Deoptimization::DeoptAction action = Deoptimization::Action_maybe_recompile;
 617   if (treat_throw_as_hot
 618       && (method()->method_data()->trap_recompiled_at(bci(), NULL)
 619           || C->too_many_traps(reason))) {
 620     // We cannot afford to take more traps here.  Suffer in the interpreter.
 621     if (C->log() != NULL)
 622       C->log()->elem("hot_throw preallocated='0' reason='%s' mcount='%d'",
 623                      Deoptimization::trap_reason_name(reason),
 624                      C->trap_count(reason));
 625     action = Deoptimization::Action_none;
 626   }
 627 
 628   // "must_throw" prunes the JVM state to include only the stack, if there
 629   // are no local exception handlers.  This should cut down on register
 630   // allocation time and code size, by drastically reducing the number
 631   // of in-edges on the call to the uncommon trap.
 632 
 633   uncommon_trap(reason, action, (ciKlass*)NULL, (char*)NULL, must_throw);
 634 }
 635 
 636 
 637 //----------------------------PreserveJVMState---------------------------------
 638 PreserveJVMState::PreserveJVMState(GraphKit* kit, bool clone_map) {


1164     alen = _gvn.transform( new (C) LoadRangeNode(0, immutable_memory(), r_adr, TypeInt::POS));
1165   } else {
1166     alen = alloc->Ideal_length();
1167     Node* ccast = alloc->make_ideal_length(_gvn.type(array)->is_oopptr(), &_gvn);
1168     if (ccast != alen) {
1169       alen = _gvn.transform(ccast);
1170     }
1171   }
1172   return alen;
1173 }
1174 
1175 //------------------------------do_null_check----------------------------------
1176 // Helper function to do a NULL pointer check.  Returned value is
1177 // the incoming address with NULL cast away.  You are allowed to use the
1178 // not-null value only if you are control dependent on the test.
1179 extern int explicit_null_checks_inserted,
1180            explicit_null_checks_elided;
1181 Node* GraphKit::null_check_common(Node* value, BasicType type,
1182                                   // optional arguments for variations:
1183                                   bool assert_null,
1184                                   Node* *null_control) {

1185   assert(!assert_null || null_control == NULL, "not both at once");
1186   if (stopped())  return top();
1187   if (!GenerateCompilerNullChecks && !assert_null && null_control == NULL) {
1188     // For some performance testing, we may wish to suppress null checking.
1189     value = cast_not_null(value);   // Make it appear to be non-null (4962416).
1190     return value;
1191   }
1192   explicit_null_checks_inserted++;
1193 
1194   // Construct NULL check
1195   Node *chk = NULL;
1196   switch(type) {
1197     case T_LONG   : chk = new (C) CmpLNode(value, _gvn.zerocon(T_LONG)); break;
1198     case T_INT    : chk = new (C) CmpINode(value, _gvn.intcon(0)); break;
1199     case T_ARRAY  : // fall through
1200       type = T_OBJECT;  // simplify further tests
1201     case T_OBJECT : {
1202       const Type *t = _gvn.type( value );
1203 
1204       const TypeOopPtr* tp = t->isa_oopptr();


1274           replace_in_map(value, null());
1275           return null();  // do not issue the redundant test
1276         }
1277         Node *oldcontrol = control();
1278         set_control(cfg);
1279         Node *res = cast_not_null(value);
1280         set_control(oldcontrol);
1281         explicit_null_checks_elided++;
1282         return res;
1283       }
1284       cfg = IfNode::up_one_dom(cfg, /*linear_only=*/ true);
1285       if (cfg == NULL)  break;  // Quit at region nodes
1286       depth++;
1287     }
1288   }
1289 
1290   //-----------
1291   // Branch to failure if null
1292   float ok_prob = PROB_MAX;  // a priori estimate:  nulls never happen
1293   Deoptimization::DeoptReason reason;
1294   if (assert_null)
1295     reason = Deoptimization::Reason_null_assert;
1296   else if (type == T_OBJECT)
1297     reason = Deoptimization::Reason_null_check;
1298   else
1299     reason = Deoptimization::Reason_div0_check;
1300 
1301   // %%% Since Reason_unhandled is not recorded on a per-bytecode basis,
1302   // ciMethodData::has_trap_at will return a conservative -1 if any
1303   // must-be-null assertion has failed.  This could cause performance
1304   // problems for a method after its first do_null_assert failure.
1305   // Consider using 'Reason_class_check' instead?
1306 
1307   // To cause an implicit null check, we set the not-null probability
1308   // to the maximum (PROB_MAX).  For an explicit check the probability
1309   // is set to a smaller value.
1310   if (null_control != NULL || too_many_traps(reason)) {
1311     // probability is less likely
1312     ok_prob =  PROB_LIKELY_MAG(3);
1313   } else if (!assert_null &&
1314              (ImplicitNullCheckThreshold > 0) &&
1315              method() != NULL &&
1316              (method()->method_data()->trap_count(reason)
1317               >= (uint)ImplicitNullCheckThreshold)) {
1318     ok_prob =  PROB_LIKELY_MAG(3);
1319   }
1320 


2103   const TypeFunc* tf    = TypeFunc::make(dest_method);
2104   int             nargs = tf->_domain->_cnt - TypeFunc::Parms;
2105   for (int j = 0; j < nargs; j++) {
2106     const Type *targ = tf->_domain->field_at(j + TypeFunc::Parms);
2107     if( targ->basic_type() == T_DOUBLE ) {
2108       // If any parameters are doubles, they must be rounded before
2109       // the call; dstore_rounding() does the gvn.transform.
2110       Node *arg = argument(j);
2111       arg = dstore_rounding(arg);
2112       set_argument(j, arg);
2113     }
2114   }
2115 }
2116 
2117 /**
2118  * Record profiling data exact_kls for Node n with the type system so
2119  * that it can propagate it (speculation)
2120  *
2121  * @param n          node that the type applies to
2122  * @param exact_kls  type from profiling

2123  *
2124  * @return           node with improved type
2125  */
2126 Node* GraphKit::record_profile_for_speculation(Node* n, ciKlass* exact_kls) {
2127   const Type* current_type = _gvn.type(n);
2128   assert(UseTypeSpeculation, "type speculation must be on");
2129 
2130   const TypeOopPtr* speculative = current_type->speculative();
2131 

2132   if (current_type->would_improve_type(exact_kls, jvms()->depth())) {
2133     const TypeKlassPtr* tklass = TypeKlassPtr::make(exact_kls);
2134     const TypeOopPtr* xtype = tklass->as_instance_type();
2135     assert(xtype->klass_is_exact(), "Should be exact");


2136     // record the new speculative type's depth
2137     speculative = xtype->with_inline_depth(jvms()->depth());











2138   }
2139 
2140   if (speculative != current_type->speculative()) {
2141     // Build a type with a speculative type (what we think we know
2142     // about the type but will need a guard when we use it)
2143     const TypeOopPtr* spec_type = TypeOopPtr::make(TypePtr::BotPTR, Type::OffsetBot, TypeOopPtr::InstanceBot, speculative);
2144     // We're changing the type, we need a new CheckCast node to carry
2145     // the new type. The new type depends on the control: what
2146     // profiling tells us is only valid from here as far as we can
2147     // tell.
2148     Node* cast = new(C) CheckCastPPNode(control(), n, current_type->remove_speculative()->join_speculative(spec_type));
2149     cast = _gvn.transform(cast);
2150     replace_in_map(n, cast);
2151     n = cast;
2152   }
2153 
2154   return n;
2155 }
2156 
2157 /**
2158  * Record profiling data from receiver profiling at an invoke with the
2159  * type system so that it can propagate it (speculation)
2160  *
2161  * @param n  receiver node
2162  *
2163  * @return   node with improved type
2164  */
2165 Node* GraphKit::record_profiled_receiver_for_speculation(Node* n) {
2166   if (!UseTypeSpeculation) {
2167     return n;
2168   }
2169   ciKlass* exact_kls = profile_has_unique_klass();
2170   return record_profile_for_speculation(n, exact_kls);








2171 }
2172 
2173 /**
2174  * Record profiling data from argument profiling at an invoke with the
2175  * type system so that it can propagate it (speculation)
2176  *
2177  * @param dest_method  target method for the call
2178  * @param bc           what invoke bytecode is this?
2179  */
2180 void GraphKit::record_profiled_arguments_for_speculation(ciMethod* dest_method, Bytecodes::Code bc) {
2181   if (!UseTypeSpeculation) {
2182     return;
2183   }
2184   const TypeFunc* tf    = TypeFunc::make(dest_method);
2185   int             nargs = tf->_domain->_cnt - TypeFunc::Parms;
2186   int skip = Bytecodes::has_receiver(bc) ? 1 : 0;
2187   for (int j = skip, i = 0; j < nargs && i < TypeProfileArgsLimit; j++) {
2188     const Type *targ = tf->_domain->field_at(j + TypeFunc::Parms);
2189     if (targ->basic_type() == T_OBJECT || targ->basic_type() == T_ARRAY) {
2190       ciKlass* better_type = method()->argument_profiled_type(bci(), i);
2191       if (better_type != NULL) {
2192         record_profile_for_speculation(argument(j), better_type);

2193       }
2194       i++;
2195     }
2196   }
2197 }
2198 
2199 /**
2200  * Record profiling data from parameter profiling at an invoke with
2201  * the type system so that it can propagate it (speculation)
2202  */
2203 void GraphKit::record_profiled_parameters_for_speculation() {
2204   if (!UseTypeSpeculation) {
2205     return;
2206   }
2207   for (int i = 0, j = 0; i < method()->arg_size() ; i++) {
2208     if (_gvn.type(local(i))->isa_oopptr()) {
2209       ciKlass* better_type = method()->parameter_profiled_type(j);
2210       if (better_type != NULL) {
2211         record_profile_for_speculation(local(i), better_type);

2212       }
2213       j++;
2214     }
2215   }
2216 }
2217 


















2218 void GraphKit::round_double_result(ciMethod* dest_method) {
2219   // A non-strict method may return a double value which has an extended
 2220   // exponent, but this must not be visible in a caller which is 'strict'.
 2221   // If a strict caller invokes a non-strict callee, round the double result.
2222 
2223   BasicType result_type = dest_method->return_type()->basic_type();
2224   assert( method() != NULL, "must have caller context");
2225   if( result_type == T_DOUBLE && method()->is_strict() && !dest_method->is_strict() ) {
2226     // Destination method's return value is on top of stack
2227     // dstore_rounding() does gvn.transform
2228     Node *result = pop_pair();
2229     result = dstore_rounding(result);
2230     push_pair(result);
2231   }
2232 }
2233 
2234 // rounding for strict float precision conformance
2235 Node* GraphKit::precision_rounding(Node* n) {
2236   return UseStrictFP && _method->flags().is_strict()
2237     && UseSSE == 0 && Matcher::strict_fp_requires_explicit_rounding


2277 //   [slow_call]     \[fast_result]
2278 //    Ctl   Val       \      \
2279 //     |               \      \
2280 //    Catch       <1>   \      \
2281 //   /    \        ^     \      \
2282 //  Ex    No_Ex    |      \      \
2283 //  |       \   \  |       \ <2>  \
2284 //  ...      \  [slow_res] |  |    \   [null_result]
2285 //            \         \--+--+---  |  |
2286 //             \           | /    \ | /
2287 //              --------Region     Phi
2288 //
2289 //=============================================================================
2290 // Code is structured as a series of driver functions all called 'do_XXX' that
2291 // call a set of helper functions.  Helper functions first, then drivers.
2292 
2293 //------------------------------null_check_oop---------------------------------
2294 // Null check oop.  Set null-path control into Region in slot 3.
 2295 // Make a cast-not-null use the other not-null control.  Return cast.
2296 Node* GraphKit::null_check_oop(Node* value, Node* *null_control,
2297                                bool never_see_null, bool safe_for_replace) {


2298   // Initial NULL check taken path
2299   (*null_control) = top();
2300   Node* cast = null_check_common(value, T_OBJECT, false, null_control);
2301 
2302   // Generate uncommon_trap:
2303   if (never_see_null && (*null_control) != top()) {
2304     // If we see an unexpected null at a check-cast we record it and force a
2305     // recompile; the offending check-cast will be compiled to handle NULLs.
2306     // If we see more than one offending BCI, then all checkcasts in the
2307     // method will be compiled to handle NULLs.
2308     PreserveJVMState pjvms(this);
2309     set_control(*null_control);
2310     replace_in_map(value, null());
2311     uncommon_trap(Deoptimization::Reason_null_check,

2312                   Deoptimization::Action_make_not_entrant);
2313     (*null_control) = top();    // NULL path is dead
2314   }
2315   if ((*null_control) == top() && safe_for_replace) {
2316     replace_in_map(value, cast);
2317   }
2318 
2319   // Cast away null-ness on the result
2320   return cast;
2321 }
2322 
2323 //------------------------------opt_iff----------------------------------------
2324 // Optimize the fast-check IfNode.  Set the fast-path region slot 2.
2325 // Return slow-path control.
2326 Node* GraphKit::opt_iff(Node* region, Node* iff) {
2327   IfNode *opt_iff = _gvn.transform(iff)->as_If();
2328 
2329   // Fast path taken; set region slot 2
2330   Node *fast_taken = _gvn.transform( new (C) IfFalseNode(opt_iff) );
2331   region->init_req(2,fast_taken); // Capture fast-control


2715 
2716   const TypeOopPtr* recv_xtype = tklass->as_instance_type();
2717   assert(recv_xtype->klass_is_exact(), "");
2718 
2719   // Subsume downstream occurrences of receiver with a cast to
2720   // recv_xtype, since now we know what the type will be.
2721   Node* cast = new(C) CheckCastPPNode(control(), receiver, recv_xtype);
2722   (*casted_receiver) = _gvn.transform(cast);
2723   // (User must make the replace_in_map call.)
2724 
2725   return fail;
2726 }
2727 
2728 
2729 //------------------------------seems_never_null-------------------------------
2730 // Use null_seen information if it is available from the profile.
2731 // If we see an unexpected null at a type check we record it and force a
2732 // recompile; the offending check will be recompiled to handle NULLs.
2733 // If we see several offending BCIs, then all checks in the
2734 // method will be recompiled.
2735 bool GraphKit::seems_never_null(Node* obj, ciProfileData* data) {


2736   if (UncommonNullCast               // Cutout for this technique
2737       && obj != null()               // And not the -Xcomp stupid case?
2738       && !too_many_traps(Deoptimization::Reason_null_check)
2739       ) {



2740     if (data == NULL)
2741       // Edge case:  no mature data.  Be optimistic here.
2742       return true;
2743     // If the profile has not seen a null, assume it won't happen.
2744     assert(java_bc() == Bytecodes::_checkcast ||
2745            java_bc() == Bytecodes::_instanceof ||
2746            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
2747     return !data->as_BitData()->null_seen();
2748   }

2749   return false;
2750 }
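
For orientation, the null_seen bit consulted above comes from the method's MDO; a minimal sketch of the accessor chain (the same calls appear at the instanceof/checkcast sites later in this file):

    // Fetch the profile entry for the current bytecode and read its
    // null_seen bit; with no mature data, conservatively assume null was seen.
    ciProfileData* data = method()->method_data()->bci_to_data(bci());
    bool saw_null = (data == NULL) ? true : data->as_BitData()->null_seen();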
2751 
2752 //------------------------maybe_cast_profiled_receiver-------------------------
2753 // If the profile has seen exactly one type, narrow to exactly that type.
2754 // Subsequent type checks will always fold up.
2755 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
2756                                              ciKlass* require_klass,
2757                                              ciKlass* spec_klass,
2758                                              bool safe_for_replace) {
2759   if (!UseTypeProfile || !TypeProfileCasts) return NULL;
2760 
2761   Deoptimization::DeoptReason reason = spec_klass == NULL ? Deoptimization::Reason_class_check : Deoptimization::Reason_speculate_class_check;
2762 
2763   // Make sure we haven't already deoptimized from this tactic.
2764   if (too_many_traps(reason))
2765     return NULL;
2766 
2767   // (No, this isn't a call, but it's enough like a virtual call
2768   // to use the same ciMethod accessor to get the profile info...)
2769   // If we have a speculative type use it instead of profiling (which
2770   // may not help us)
2771   ciKlass* exact_kls = spec_klass == NULL ? profile_has_unique_klass() : spec_klass;
2772   if (exact_kls != NULL) {// no cast failures here
2773     if (require_klass == NULL ||
2774         static_subtype_check(require_klass, exact_kls) == SSC_always_true) {
2775       // If we narrow the type to match what the type profile sees or
2776       // the speculative type, we can then remove the rest of the
2777       // cast.
2778       // This is a win, even if the exact_kls is very specific,
2779       // because downstream operations, such as method calls,
2780       // will often benefit from the sharper type.
2781       Node* exact_obj = not_null_obj; // will get updated in place...


2794     // assert(ssc == SSC_always_true)... except maybe the profile lied to us.
2795   }
2796 
2797   return NULL;
2798 }
2799 
2800 /**
2801  * Cast obj to type and emit guard unless we had too many traps here
2802  * already
2803  *
 2804  * @param obj       node being cast
2805  * @param type      type to cast the node to
2806  * @param not_null  true if we know node cannot be null
2807  */
2808 Node* GraphKit::maybe_cast_profiled_obj(Node* obj,
2809                                         ciKlass* type,
2810                                         bool not_null) {
2811   // type == NULL if profiling tells us this object is always null
2812   if (type != NULL) {
2813     Deoptimization::DeoptReason class_reason = Deoptimization::Reason_speculate_class_check;
2814     Deoptimization::DeoptReason null_reason = Deoptimization::Reason_null_check;
2815     if (!too_many_traps(null_reason) &&
2816         !too_many_traps(class_reason)) {
2817       Node* not_null_obj = NULL;
2818       // not_null is true if we know the object is not null and
2819       // there's no need for a null check
2820       if (!not_null) {
2821         Node* null_ctl = top();
2822         not_null_obj = null_check_oop(obj, &null_ctl, true, true);
2823         assert(null_ctl->is_top(), "no null control here");
2824       } else {
2825         not_null_obj = obj;
2826       }
2827 
2828       Node* exact_obj = not_null_obj;
2829       ciKlass* exact_kls = type;
2830       Node* slow_ctl  = type_check_receiver(exact_obj, exact_kls, 1.0,
2831                                             &exact_obj);
2832       {
2833         PreserveJVMState pjvms(this);
2834         set_control(slow_ctl);
2835         uncommon_trap(class_reason,
2836                       Deoptimization::Action_maybe_recompile);
2837       }
2838       replace_in_map(not_null_obj, exact_obj);
2839       obj = exact_obj;
2840     }
2841   } else {
2842     if (!too_many_traps(Deoptimization::Reason_null_assert)) {


2850 
2851 //-------------------------------gen_instanceof--------------------------------
2852 // Generate an instance-of idiom.  Used by both the instance-of bytecode
2853 // and the reflective instance-of call.
2854 Node* GraphKit::gen_instanceof(Node* obj, Node* superklass, bool safe_for_replace) {
2855   kill_dead_locals();           // Benefit all the uncommon traps
2856   assert( !stopped(), "dead parse path should be checked in callers" );
2857   assert(!TypePtr::NULL_PTR->higher_equal(_gvn.type(superklass)->is_klassptr()),
2858          "must check for not-null not-dead klass in callers");
2859 
2860   // Make the merge point
2861   enum { _obj_path = 1, _fail_path, _null_path, PATH_LIMIT };
2862   RegionNode* region = new(C) RegionNode(PATH_LIMIT);
2863   Node*       phi    = new(C) PhiNode(region, TypeInt::BOOL);
2864   C->set_has_split_ifs(true); // Has chance for split-if optimization
2865 
2866   ciProfileData* data = NULL;
2867   if (java_bc() == Bytecodes::_instanceof) {  // Only for the bytecode
2868     data = method()->method_data()->bci_to_data(bci());
2869   }

2870   bool never_see_null = (ProfileDynamicTypes  // aggressive use of profile
2871                          && seems_never_null(obj, data));
2872 
2873   // Null check; get casted pointer; set region slot 3
2874   Node* null_ctl = top();
2875   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace);
2876 
2877   // If not_null_obj is dead, only null-path is taken
2878   if (stopped()) {              // Doing instance-of on a NULL?
2879     set_control(null_ctl);
2880     return intcon(0);
2881   }
2882   region->init_req(_null_path, null_ctl);
2883   phi   ->init_req(_null_path, intcon(0)); // Set null path value
2884   if (null_ctl == top()) {
2885     // Do this eagerly, so that pattern matches like is_diamond_phi
2886     // will work even during parsing.
2887     assert(_null_path == PATH_LIMIT-1, "delete last");
2888     region->del_req(_null_path);
2889     phi   ->del_req(_null_path);
2890   }
2891 
 2892   // Do we know the type check always succeeds?
2893   bool known_statically = false;
2894   if (_gvn.type(superklass)->singleton()) {
2895     ciKlass* superk = _gvn.type(superklass)->is_klassptr()->klass();


2978     }
2979   }
2980 
2981   ciProfileData* data = NULL;
2982   bool safe_for_replace = false;
2983   if (failure_control == NULL) {        // use MDO in regular case only
2984     assert(java_bc() == Bytecodes::_aastore ||
2985            java_bc() == Bytecodes::_checkcast,
2986            "interpreter profiles type checks only for these BCs");
2987     data = method()->method_data()->bci_to_data(bci());
2988     safe_for_replace = true;
2989   }
2990 
2991   // Make the merge point
2992   enum { _obj_path = 1, _null_path, PATH_LIMIT };
2993   RegionNode* region = new (C) RegionNode(PATH_LIMIT);
2994   Node*       phi    = new (C) PhiNode(region, toop);
2995   C->set_has_split_ifs(true); // Has chance for split-if optimization
2996 
2997   // Use null-cast information if it is available

2998   bool never_see_null = ((failure_control == NULL)  // regular case only
2999                          && seems_never_null(obj, data));
3000 
3001   // Null check; get casted pointer; set region slot 3
3002   Node* null_ctl = top();
3003   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace);
3004 
3005   // If not_null_obj is dead, only null-path is taken
 3006   if (stopped()) {              // Doing checkcast on a NULL?
3007     set_control(null_ctl);
3008     return null();
3009   }
3010   region->init_req(_null_path, null_ctl);
3011   phi   ->init_req(_null_path, null());  // Set null path value
3012   if (null_ctl == top()) {
3013     // Do this eagerly, so that pattern matches like is_diamond_phi
3014     // will work even during parsing.
3015     assert(_null_path == PATH_LIMIT-1, "delete last");
3016     region->del_req(_null_path);
3017     phi   ->del_req(_null_path);
3018   }
3019 
3020   Node* cast_obj = NULL;
3021   if (tk->klass_is_exact()) {
3022     // The following optimization tries to statically cast the speculative type of the object
3023     // (for example obtained during profiling) to the type of the superklass and then do a




 595       const TypePtr* adr_typ = ex_con->add_offset(offset);
 596 
 597       Node *adr = basic_plus_adr(ex_node, ex_node, offset);
 598       const TypeOopPtr* val_type = TypeOopPtr::make_from_klass(env()->String_klass());
 599       // Conservatively release stores of object references.
 600       Node *store = store_oop_to_object(control(), ex_node, adr, adr_typ, null(), val_type, T_OBJECT, MemNode::release);
 601 
 602       add_exception_state(make_exception_state(ex_node));
 603       return;
 604     }
 605   }
 606 
 607   // %%% Maybe add entry to OptoRuntime which directly throws the exc.?
 608   // It won't be much cheaper than bailing to the interp., since we'll
 609   // have to pass up all the debug-info, and the runtime will have to
 610   // create the stack trace.
 611 
 612   // Usual case:  Bail to interpreter.
 613   // Reserve the right to recompile if we haven't seen anything yet.
 614 
 615   ciMethod* m = Deoptimization::reason_is_speculate(reason) ? C->method() : NULL;
 616   Deoptimization::DeoptAction action = Deoptimization::Action_maybe_recompile;
 617   if (treat_throw_as_hot
 618       && (method()->method_data()->trap_recompiled_at(bci(), m)
 619           || C->too_many_traps(reason))) {
 620     // We cannot afford to take more traps here.  Suffer in the interpreter.
 621     if (C->log() != NULL)
 622       C->log()->elem("hot_throw preallocated='0' reason='%s' mcount='%d'",
 623                      Deoptimization::trap_reason_name(reason),
 624                      C->trap_count(reason));
 625     action = Deoptimization::Action_none;
 626   }
 627 
 628   // "must_throw" prunes the JVM state to include only the stack, if there
 629   // are no local exception handlers.  This should cut down on register
 630   // allocation time and code size, by drastically reducing the number
 631   // of in-edges on the call to the uncommon trap.
 632 
 633   uncommon_trap(reason, action, (ciKlass*)NULL, (char*)NULL, must_throw);
 634 }
 635 
 636 
 637 //----------------------------PreserveJVMState---------------------------------
 638 PreserveJVMState::PreserveJVMState(GraphKit* kit, bool clone_map) {


1164     alen = _gvn.transform( new (C) LoadRangeNode(0, immutable_memory(), r_adr, TypeInt::POS));
1165   } else {
1166     alen = alloc->Ideal_length();
1167     Node* ccast = alloc->make_ideal_length(_gvn.type(array)->is_oopptr(), &_gvn);
1168     if (ccast != alen) {
1169       alen = _gvn.transform(ccast);
1170     }
1171   }
1172   return alen;
1173 }
1174 
1175 //------------------------------do_null_check----------------------------------
1176 // Helper function to do a NULL pointer check.  Returned value is
1177 // the incoming address with NULL cast away.  You are allowed to use the
1178 // not-null value only if you are control dependent on the test.
1179 extern int explicit_null_checks_inserted,
1180            explicit_null_checks_elided;
1181 Node* GraphKit::null_check_common(Node* value, BasicType type,
1182                                   // optional arguments for variations:
1183                                   bool assert_null,
1184                                   Node* *null_control,
1185                                   bool speculative) {
1186   assert(!assert_null || null_control == NULL, "not both at once");
1187   if (stopped())  return top();
1188   if (!GenerateCompilerNullChecks && !assert_null && null_control == NULL) {
1189     // For some performance testing, we may wish to suppress null checking.
1190     value = cast_not_null(value);   // Make it appear to be non-null (4962416).
1191     return value;
1192   }
1193   explicit_null_checks_inserted++;
1194 
1195   // Construct NULL check
1196   Node *chk = NULL;
1197   switch(type) {
1198     case T_LONG   : chk = new (C) CmpLNode(value, _gvn.zerocon(T_LONG)); break;
1199     case T_INT    : chk = new (C) CmpINode(value, _gvn.intcon(0)); break;
1200     case T_ARRAY  : // fall through
1201       type = T_OBJECT;  // simplify further tests
1202     case T_OBJECT : {
1203       const Type *t = _gvn.type( value );
1204 
1205       const TypeOopPtr* tp = t->isa_oopptr();


1275           replace_in_map(value, null());
1276           return null();  // do not issue the redundant test
1277         }
1278         Node *oldcontrol = control();
1279         set_control(cfg);
1280         Node *res = cast_not_null(value);
1281         set_control(oldcontrol);
1282         explicit_null_checks_elided++;
1283         return res;
1284       }
1285       cfg = IfNode::up_one_dom(cfg, /*linear_only=*/ true);
1286       if (cfg == NULL)  break;  // Quit at region nodes
1287       depth++;
1288     }
1289   }
1290 
1291   //-----------
1292   // Branch to failure if null
1293   float ok_prob = PROB_MAX;  // a priori estimate:  nulls never happen
1294   Deoptimization::DeoptReason reason;
1295   if (assert_null) {
1296     reason = Deoptimization::Reason_null_assert;
1297   } else if (type == T_OBJECT) {
1298     reason = Deoptimization::reason_null_check(speculative);
1299   } else {
1300     reason = Deoptimization::Reason_div0_check;
1301   }
1302   // %%% Since Reason_unhandled is not recorded on a per-bytecode basis,
1303   // ciMethodData::has_trap_at will return a conservative -1 if any
1304   // must-be-null assertion has failed.  This could cause performance
1305   // problems for a method after its first do_null_assert failure.
1306   // Consider using 'Reason_class_check' instead?
1307 
1308   // To cause an implicit null check, we set the not-null probability
1309   // to the maximum (PROB_MAX).  For an explicit check the probability
1310   // is set to a smaller value.
1311   if (null_control != NULL || too_many_traps(reason)) {
1312     // probability is less likely
1313     ok_prob =  PROB_LIKELY_MAG(3);
1314   } else if (!assert_null &&
1315              (ImplicitNullCheckThreshold > 0) &&
1316              method() != NULL &&
1317              (method()->method_data()->trap_count(reason)
1318               >= (uint)ImplicitNullCheckThreshold)) {
1319     ok_prob =  PROB_LIKELY_MAG(3);
1320   }
1321 


2104   const TypeFunc* tf    = TypeFunc::make(dest_method);
2105   int             nargs = tf->_domain->_cnt - TypeFunc::Parms;
2106   for (int j = 0; j < nargs; j++) {
2107     const Type *targ = tf->_domain->field_at(j + TypeFunc::Parms);
2108     if( targ->basic_type() == T_DOUBLE ) {
2109       // If any parameters are doubles, they must be rounded before
2110       // the call; dstore_rounding() does the gvn.transform.
2111       Node *arg = argument(j);
2112       arg = dstore_rounding(arg);
2113       set_argument(j, arg);
2114     }
2115   }
2116 }
2117 
2118 /**
2119  * Record profiling data exact_kls for Node n with the type system so
2120  * that it can propagate it (speculation)
2121  *
2122  * @param n          node that the type applies to
2123  * @param exact_kls  type from profiling
2124  * @param maybe_null did profiling see null?
2125  *
2126  * @return           node with improved type
2127  */
2128 Node* GraphKit::record_profile_for_speculation(Node* n, ciKlass* exact_kls, bool maybe_null) {
2129   const Type* current_type = _gvn.type(n);
2130   assert(UseTypeSpeculation, "type speculation must be on");
2131 
2132   const TypePtr* speculative = current_type->speculative();
2133 
2134   // Should the klass from the profile be recorded in the speculative type?
2135   if (current_type->would_improve_type(exact_kls, jvms()->depth())) {
2136     const TypeKlassPtr* tklass = TypeKlassPtr::make(exact_kls);
2137     const TypeOopPtr* xtype = tklass->as_instance_type();
2138     assert(xtype->klass_is_exact(), "Should be exact");
2139     // Any reason to believe n is not null (from this profiling or a previous one)?
2140     const TypePtr* ptr = (maybe_null && current_type->speculative_maybe_null()) ? TypePtr::BOTTOM : TypePtr::NOTNULL;
2141     // record the new speculative type's depth
2142     speculative = xtype->cast_to_ptr_type(ptr->ptr())->is_ptr();
2143     speculative = speculative->with_inline_depth(jvms()->depth());
2144   } else if (current_type->would_improve_ptr(maybe_null)) {
2145     // Profiling reports that null was never seen, so we can change the
2146     // speculative type to a non-null ptr.
2147     assert(!maybe_null, "nothing to improve");
2148     if (speculative == NULL) {
2149       speculative = TypePtr::NOTNULL;
2150     } else {
2151       const TypePtr* ptr = TypePtr::NOTNULL;
2152       speculative = speculative->cast_to_ptr_type(ptr->ptr())->is_ptr();
2153     }
2154   }
2155 
2156   if (speculative != current_type->speculative()) {
2157     // Build a type with a speculative type (what we think we know
2158     // about the type but will need a guard when we use it)
2159     const TypeOopPtr* spec_type = TypeOopPtr::make(TypePtr::BotPTR, Type::OffsetBot, TypeOopPtr::InstanceBot, speculative);
2160     // We're changing the type, we need a new CheckCast node to carry
2161     // the new type. The new type depends on the control: what
2162     // profiling tells us is only valid from here as far as we can
2163     // tell.
2164     Node* cast = new(C) CheckCastPPNode(control(), n, current_type->remove_speculative()->join_speculative(spec_type));
2165     cast = _gvn.transform(cast);
2166     replace_in_map(n, cast);
2167     n = cast;
2168   }
2169 
2170   return n;
2171 }
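
The second branch above relies on Type::would_improve_ptr. A rough sketch of the semantics this caller assumes (not the actual implementation):

    // Assumed semantics: profiling proved non-null and the current
    // speculative type does not already record that fact.
    bool would_improve_ptr_sketch(const TypePtr* speculative, bool maybe_null) {
      if (maybe_null)           return false;  // profiling saw a null: nothing to learn
      if (speculative == NULL)  return true;   // any non-null fact is new information
      return speculative->ptr() != TypePtr::NotNull;  // tighten BotPTR down to NotNull
    }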
2172 
2173 /**
2174  * Record profiling data from receiver profiling at an invoke with the
2175  * type system so that it can propagate it (speculation)
2176  *
2177  * @param n  receiver node
2178  *
2179  * @return   node with improved type
2180  */
2181 Node* GraphKit::record_profiled_receiver_for_speculation(Node* n) {
2182   if (!UseTypeSpeculation) {
2183     return n;
2184   }
2185   ciKlass* exact_kls = profile_has_unique_klass();
2186   bool maybe_null = true;
2187   if (java_bc() == Bytecodes::_checkcast ||
2188       java_bc() == Bytecodes::_instanceof ||
2189       java_bc() == Bytecodes::_aastore) {
2190     ciProfileData* data = method()->method_data()->bci_to_data(bci());
2191     maybe_null = data == NULL ? true : data->as_BitData()->null_seen();
2192   }
2193   return record_profile_for_speculation(n, exact_kls, maybe_null);
2195 }
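
A hedged sketch of the intended call-site shape (illustrative, not the exact invoke-parsing code):

    // Sharpen the receiver of a virtual call before its type influences
    // inlining and devirtualization decisions.
    Node* receiver = argument(0);
    receiver = record_profiled_receiver_for_speculation(receiver);
    set_argument(0, receiver);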
2196 
2197 /**
2198  * Record profiling data from argument profiling at an invoke with the
2199  * type system so that it can propagate it (speculation)
2200  *
2201  * @param dest_method  target method for the call
2202  * @param bc           what invoke bytecode is this?
2203  */
2204 void GraphKit::record_profiled_arguments_for_speculation(ciMethod* dest_method, Bytecodes::Code bc) {
2205   if (!UseTypeSpeculation) {
2206     return;
2207   }
2208   const TypeFunc* tf    = TypeFunc::make(dest_method);
2209   int             nargs = tf->_domain->_cnt - TypeFunc::Parms;
2210   int skip = Bytecodes::has_receiver(bc) ? 1 : 0;
2211   for (int j = skip, i = 0; j < nargs && i < TypeProfileArgsLimit; j++) {
2212     const Type *targ = tf->_domain->field_at(j + TypeFunc::Parms);
2213     if (targ->basic_type() == T_OBJECT || targ->basic_type() == T_ARRAY) {
2214       bool maybe_null = true;
2215       ciKlass* better_type = NULL;
2216       if (method()->argument_profiled_type(bci(), i, better_type, maybe_null)) {
2217         record_profile_for_speculation(argument(j), better_type, maybe_null);
2218       }
2219       i++;
2220     }
2221   }
2222 }
2223 
2224 /**
2225  * Record profiling data from parameter profiling at an invoke with
2226  * the type system so that it can propagate it (speculation)
2227  */
2228 void GraphKit::record_profiled_parameters_for_speculation() {
2229   if (!UseTypeSpeculation) {
2230     return;
2231   }
2232   for (int i = 0, j = 0; i < method()->arg_size() ; i++) {
2233     if (_gvn.type(local(i))->isa_oopptr()) {
2234       bool maybe_null = true;
2235       ciKlass* better_type = NULL;
2236       if (method()->parameter_profiled_type(j, better_type, maybe_null)) {
2237         record_profile_for_speculation(local(i), better_type, maybe_null);
2238       }
2239       j++;
2240     }
2241   }
2242 }
2243 
2244 /**
2245  * Record profiling data from return value profiling at an invoke with
2246  * the type system so that it can propagate it (speculation)
2247  */
2248 void GraphKit::record_profiled_return_for_speculation() {
2249   if (!UseTypeSpeculation) {
2250     return;
2251   }
2252   bool maybe_null = true;
2253   ciKlass* better_type = NULL;
2254   if (method()->return_profiled_type(bci(), better_type, maybe_null)) {
2255     // If profiling reports a single type for the return value,
2256     // feed it to the type system so it can propagate it as a
2257     // speculative type
2258     record_profile_for_speculation(stack(sp()-1), better_type, maybe_null);
2259   }
2260 }
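
A sketch of where this is meant to run (assumed shape, not the exact call site): after the callee's result has been pushed, so that stack(sp()-1) is the return value.

    if (!stopped()) {
      // let profiling sharpen the type seen by the consuming bytecodes
      record_profiled_return_for_speculation();
    }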
2261 
2262 void GraphKit::round_double_result(ciMethod* dest_method) {
2263   // A non-strict method may return a double value which has an extended
 2264   // exponent, but this must not be visible in a caller which is 'strict'.
 2265   // If a strict caller invokes a non-strict callee, round the double result.
2266 
2267   BasicType result_type = dest_method->return_type()->basic_type();
2268   assert( method() != NULL, "must have caller context");
2269   if( result_type == T_DOUBLE && method()->is_strict() && !dest_method->is_strict() ) {
2270     // Destination method's return value is on top of stack
2271     // dstore_rounding() does gvn.transform
2272     Node *result = pop_pair();
2273     result = dstore_rounding(result);
2274     push_pair(result);
2275   }
2276 }
2277 
2278 // rounding for strict float precision conformance
2279 Node* GraphKit::precision_rounding(Node* n) {
2280   return UseStrictFP && _method->flags().is_strict()
2281     && UseSSE == 0 && Matcher::strict_fp_requires_explicit_rounding


2321 //   [slow_call]     \[fast_result]
2322 //    Ctl   Val       \      \
2323 //     |               \      \
2324 //    Catch       <1>   \      \
2325 //   /    \        ^     \      \
2326 //  Ex    No_Ex    |      \      \
2327 //  |       \   \  |       \ <2>  \
2328 //  ...      \  [slow_res] |  |    \   [null_result]
2329 //            \         \--+--+---  |  |
2330 //             \           | /    \ | /
2331 //              --------Region     Phi
2332 //
2333 //=============================================================================
2334 // Code is structured as a series of driver functions all called 'do_XXX' that
2335 // call a set of helper functions.  Helper functions first, then drivers.
2336 
2337 //------------------------------null_check_oop---------------------------------
2338 // Null check oop.  Set null-path control into Region in slot 3.
 2339 // Make a cast-not-null use the other not-null control.  Return cast.
2340 Node* GraphKit::null_check_oop(Node* value, Node* *null_control,
2341                                bool never_see_null,
2342                                bool safe_for_replace,
2343                                bool speculative) {
2344   // Initial NULL check taken path
2345   (*null_control) = top();
2346   Node* cast = null_check_common(value, T_OBJECT, false, null_control, speculative);
2347 
2348   // Generate uncommon_trap:
2349   if (never_see_null && (*null_control) != top()) {
2350     // If we see an unexpected null at a check-cast we record it and force a
2351     // recompile; the offending check-cast will be compiled to handle NULLs.
2352     // If we see more than one offending BCI, then all checkcasts in the
2353     // method will be compiled to handle NULLs.
2354     PreserveJVMState pjvms(this);
2355     set_control(*null_control);
2356     replace_in_map(value, null());
2357     Deoptimization::DeoptReason reason = Deoptimization::reason_null_check(speculative);
2358     uncommon_trap(reason,
2359                   Deoptimization::Action_make_not_entrant);
2360     (*null_control) = top();    // NULL path is dead
2361   }
2362   if ((*null_control) == top() && safe_for_replace) {
2363     replace_in_map(value, cast);
2364   }
2365 
2366   // Cast away null-ness on the result
2367   return cast;
2368 }
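
The caller pattern, as used by gen_instanceof and gen_checkcast below:

    Node* null_ctl = top();                  // receives the null-path control
    Node* not_null_obj = null_check_oop(obj, &null_ctl,
                                        never_see_null, safe_for_replace,
                                        speculative_not_null);
    region->init_req(_null_path, null_ctl);  // wire the null branch into the merge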
2369 
2370 //------------------------------opt_iff----------------------------------------
2371 // Optimize the fast-check IfNode.  Set the fast-path region slot 2.
2372 // Return slow-path control.
2373 Node* GraphKit::opt_iff(Node* region, Node* iff) {
2374   IfNode *opt_iff = _gvn.transform(iff)->as_If();
2375 
2376   // Fast path taken; set region slot 2
2377   Node *fast_taken = _gvn.transform( new (C) IfFalseNode(opt_iff) );
2378   region->init_req(2,fast_taken); // Capture fast-control


2762 
2763   const TypeOopPtr* recv_xtype = tklass->as_instance_type();
2764   assert(recv_xtype->klass_is_exact(), "");
2765 
2766   // Subsume downstream occurrences of receiver with a cast to
2767   // recv_xtype, since now we know what the type will be.
2768   Node* cast = new(C) CheckCastPPNode(control(), receiver, recv_xtype);
2769   (*casted_receiver) = _gvn.transform(cast);
2770   // (User must make the replace_in_map call.)
2771 
2772   return fail;
2773 }
2774 
2775 
2776 //------------------------------seems_never_null-------------------------------
2777 // Use null_seen information if it is available from the profile.
2778 // If we see an unexpected null at a type check we record it and force a
2779 // recompile; the offending check will be recompiled to handle NULLs.
2780 // If we see several offending BCIs, then all checks in the
2781 // method will be recompiled.
2782 bool GraphKit::seems_never_null(Node* obj, ciProfileData* data, bool& speculating) {
2783   speculating = !_gvn.type(obj)->speculative_maybe_null();
2784   Deoptimization::DeoptReason reason = Deoptimization::reason_null_check(speculating);
2785   if (UncommonNullCast               // Cutout for this technique
2786       && obj != null()               // And not the -Xcomp stupid case?
2787       && !too_many_traps(reason)
2788       ) {
2789     if (speculating) {
2790       return true;
2791     }
2792     if (data == NULL)
2793       // Edge case:  no mature data.  Be optimistic here.
2794       return true;
2795     // If the profile has not seen a null, assume it won't happen.
2796     assert(java_bc() == Bytecodes::_checkcast ||
2797            java_bc() == Bytecodes::_instanceof ||
2798            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
2799     return !data->as_BitData()->null_seen();
2800   }
2801   speculating = false;
2802   return false;
2803 }
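
The reason selection above comes from Deoptimization::reason_null_check; a sketch of the assumed mapping:

    // A speculative source gets its own deopt reason, so a failed speculation
    // only invalidates the speculative type instead of the whole technique.
    Deoptimization::DeoptReason reason =
        speculating ? Deoptimization::Reason_speculate_null_check
                    : Deoptimization::Reason_null_check;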
2804 
2805 //------------------------maybe_cast_profiled_receiver-------------------------
2806 // If the profile has seen exactly one type, narrow to exactly that type.
2807 // Subsequent type checks will always fold up.
2808 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
2809                                              ciKlass* require_klass,
2810                                              ciKlass* spec_klass,
2811                                              bool safe_for_replace) {
2812   if (!UseTypeProfile || !TypeProfileCasts) return NULL;
2813 
2814   Deoptimization::DeoptReason reason = Deoptimization::reason_class_check(spec_klass != NULL);
2815 
2816   // Make sure we haven't already deoptimized from this tactic.
2817   if (too_many_traps(reason))
2818     return NULL;
2819 
2820   // (No, this isn't a call, but it's enough like a virtual call
2821   // to use the same ciMethod accessor to get the profile info...)
2822   // If we have a speculative type use it instead of profiling (which
2823   // may not help us)
2824   ciKlass* exact_kls = spec_klass == NULL ? profile_has_unique_klass() : spec_klass;
2825   if (exact_kls != NULL) {// no cast failures here
2826     if (require_klass == NULL ||
2827         static_subtype_check(require_klass, exact_kls) == SSC_always_true) {
2828       // If we narrow the type to match what the type profile sees or
2829       // the speculative type, we can then remove the rest of the
2830       // cast.
2831       // This is a win, even if the exact_kls is very specific,
2832       // because downstream operations, such as method calls,
2833       // will often benefit from the sharper type.
2834       Node* exact_obj = not_null_obj; // will get updated in place...


2847     // assert(ssc == SSC_always_true)... except maybe the profile lied to us.
2848   }
2849 
2850   return NULL;
2851 }
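
Typical use (a minimal sketch): attempt the profile-based narrowing first, and fall back to the full dynamic check only when it fails:

    Node* cast_obj = maybe_cast_profiled_receiver(not_null_obj, require_klass,
                                                  spec_klass, safe_for_replace);
    if (cast_obj != NULL) {
      not_null_obj = cast_obj;  // sharper, exact type from profiling
    }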
2852 
2853 /**
2854  * Cast obj to type and emit guard unless we had too many traps here
2855  * already
2856  *
 2857  * @param obj       node being cast
2858  * @param type      type to cast the node to
2859  * @param not_null  true if we know node cannot be null
2860  */
2861 Node* GraphKit::maybe_cast_profiled_obj(Node* obj,
2862                                         ciKlass* type,
2863                                         bool not_null) {
2864   // type == NULL if profiling tells us this object is always null
2865   if (type != NULL) {
2866     Deoptimization::DeoptReason class_reason = Deoptimization::Reason_speculate_class_check;
2867     Deoptimization::DeoptReason null_reason = Deoptimization::Reason_speculate_null_check;
2868     if (!too_many_traps(null_reason) &&
2869         !too_many_traps(class_reason)) {
2870       Node* not_null_obj = NULL;
2871       // not_null is true if we know the object is not null and
2872       // there's no need for a null check
2873       if (!not_null) {
2874         Node* null_ctl = top();
2875         not_null_obj = null_check_oop(obj, &null_ctl, true, true, true);
2876         assert(null_ctl->is_top(), "no null control here");
2877       } else {
2878         not_null_obj = obj;
2879       }
2880 
2881       Node* exact_obj = not_null_obj;
2882       ciKlass* exact_kls = type;
2883       Node* slow_ctl  = type_check_receiver(exact_obj, exact_kls, 1.0,
2884                                             &exact_obj);
2885       {
2886         PreserveJVMState pjvms(this);
2887         set_control(slow_ctl);
2888         uncommon_trap(class_reason,
2889                       Deoptimization::Action_maybe_recompile);
2890       }
2891       replace_in_map(not_null_obj, exact_obj);
2892       obj = exact_obj;
2893     }
2894   } else {
2895     if (!too_many_traps(Deoptimization::Reason_null_assert)) {


2903 
2904 //-------------------------------gen_instanceof--------------------------------
2905 // Generate an instance-of idiom.  Used by both the instance-of bytecode
2906 // and the reflective instance-of call.
2907 Node* GraphKit::gen_instanceof(Node* obj, Node* superklass, bool safe_for_replace) {
2908   kill_dead_locals();           // Benefit all the uncommon traps
2909   assert( !stopped(), "dead parse path should be checked in callers" );
2910   assert(!TypePtr::NULL_PTR->higher_equal(_gvn.type(superklass)->is_klassptr()),
2911          "must check for not-null not-dead klass in callers");
2912 
2913   // Make the merge point
2914   enum { _obj_path = 1, _fail_path, _null_path, PATH_LIMIT };
2915   RegionNode* region = new(C) RegionNode(PATH_LIMIT);
2916   Node*       phi    = new(C) PhiNode(region, TypeInt::BOOL);
2917   C->set_has_split_ifs(true); // Has chance for split-if optimization
2918 
2919   ciProfileData* data = NULL;
2920   if (java_bc() == Bytecodes::_instanceof) {  // Only for the bytecode
2921     data = method()->method_data()->bci_to_data(bci());
2922   }
2923   bool speculative_not_null = false;
2924   bool never_see_null = (ProfileDynamicTypes  // aggressive use of profile
2925                          && seems_never_null(obj, data, speculative_not_null));
2926 
2927   // Null check; get casted pointer; set region slot 3
2928   Node* null_ctl = top();
2929   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace, speculative_not_null);
2930 
2931   // If not_null_obj is dead, only null-path is taken
2932   if (stopped()) {              // Doing instance-of on a NULL?
2933     set_control(null_ctl);
2934     return intcon(0);
2935   }
2936   region->init_req(_null_path, null_ctl);
2937   phi   ->init_req(_null_path, intcon(0)); // Set null path value
2938   if (null_ctl == top()) {
2939     // Do this eagerly, so that pattern matches like is_diamond_phi
2940     // will work even during parsing.
2941     assert(_null_path == PATH_LIMIT-1, "delete last");
2942     region->del_req(_null_path);
2943     phi   ->del_req(_null_path);
2944   }
2945 
 2946   // Do we know the type check always succeeds?
2947   bool known_statically = false;
2948   if (_gvn.type(superklass)->singleton()) {
2949     ciKlass* superk = _gvn.type(superklass)->is_klassptr()->klass();


3032     }
3033   }
3034 
3035   ciProfileData* data = NULL;
3036   bool safe_for_replace = false;
3037   if (failure_control == NULL) {        // use MDO in regular case only
3038     assert(java_bc() == Bytecodes::_aastore ||
3039            java_bc() == Bytecodes::_checkcast,
3040            "interpreter profiles type checks only for these BCs");
3041     data = method()->method_data()->bci_to_data(bci());
3042     safe_for_replace = true;
3043   }
3044 
3045   // Make the merge point
3046   enum { _obj_path = 1, _null_path, PATH_LIMIT };
3047   RegionNode* region = new (C) RegionNode(PATH_LIMIT);
3048   Node*       phi    = new (C) PhiNode(region, toop);
3049   C->set_has_split_ifs(true); // Has chance for split-if optimization
3050 
3051   // Use null-cast information if it is available
3052   bool speculative_not_null = false;
3053   bool never_see_null = ((failure_control == NULL)  // regular case only
3054                          && seems_never_null(obj, data, speculative_not_null));
3055 
3056   // Null check; get casted pointer; set region slot 3
3057   Node* null_ctl = top();
3058   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace, speculative_not_null);
3059 
3060   // If not_null_obj is dead, only null-path is taken
 3061   if (stopped()) {              // Doing checkcast on a NULL?
3062     set_control(null_ctl);
3063     return null();
3064   }
3065   region->init_req(_null_path, null_ctl);
3066   phi   ->init_req(_null_path, null());  // Set null path value
3067   if (null_ctl == top()) {
3068     // Do this eagerly, so that pattern matches like is_diamond_phi
3069     // will work even during parsing.
3070     assert(_null_path == PATH_LIMIT-1, "delete last");
3071     region->del_req(_null_path);
3072     phi   ->del_req(_null_path);
3073   }
3074 
3075   Node* cast_obj = NULL;
3076   if (tk->klass_is_exact()) {
3077     // The following optimization tries to statically cast the speculative type of the object
3078     // (for example obtained during profiling) to the type of the superklass and then do a

