src/share/vm/opto/graphKit.cpp


rev 5100 : 8024067: Missing replace_in_map() calls following null checks
Summary: add replace_in_map() calls following some null checks in type checks
Reviewed-by:
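Context for the change: replace_in_map() substitutes the freshly cast, not-null value for the original value throughout the parser's JVMState map (locals and stack), so later bytecodes see the sharpened value instead of re-emitting a null check. The sketch below is a minimal standalone model of that idea, not HotSpot code: Kit, Value, and the vector-based map are invented stand-ins for GraphKit, ideal-graph nodes, and the JVMState, and safe_for_replace mirrors the new parameter introduced by this change (the real null_check_oop additionally requires the null path to be dead before replacing).

// Minimal standalone model (NOT HotSpot code).  Names and types are
// invented for illustration only.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct Value {
  std::string desc;      // e.g. "obj" or "CastPP(obj, NotNull)"
  bool known_not_null;   // what later checks can rely on
};

using ValuePtr = std::shared_ptr<Value>;

struct Kit {
  std::vector<ValuePtr> map;   // stands in for the JVMState locals/stack

  // Replace every occurrence of 'oldv' in the map with 'newv', so all
  // later uses of that local/stack slot see the sharper value.
  void replace_in_map(const ValuePtr& oldv, const ValuePtr& newv) {
    for (auto& slot : map)
      if (slot == oldv) slot = newv;
  }

  // Model of a null check that yields a cast-not-null result.  With the
  // fix, the casted value is pushed back into the map when it is safe to
  // do so, i.e. when the checked value really is a local/stack value.
  ValuePtr null_check_oop(const ValuePtr& value, bool safe_for_replace) {
    auto cast = std::make_shared<Value>(
        Value{"CastPP(" + value->desc + ", NotNull)", true});
    if (safe_for_replace)
      replace_in_map(value, cast);   // the kind of call this change adds
    return cast;
  }
};

int main() {
  Kit kit;
  auto obj = std::make_shared<Value>(Value{"obj", false});
  kit.map = {obj, obj};              // same object held in two slots

  kit.null_check_oop(obj, /*safe_for_replace=*/true);

  // After the check, every map slot carries the not-null cast, so a later
  // type check on the same local would not need another null check.
  for (const auto& slot : kit.map)
    std::cout << slot->desc << " not_null=" << slot->known_not_null << "\n";
}

The safe_for_replace guard matters because the replacement is only sound when the checked value actually lives in the map at the current bci; in the diff below the callers set it only for the instanceof, checkcast, and aastore bytecodes.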


2086 //   [slow_call]     \[fast_result]
2087 //    Ctl   Val       \      \
2088 //     |               \      \
2089 //    Catch       <1>   \      \
2090 //   /    \        ^     \      \
2091 //  Ex    No_Ex    |      \      \
2092 //  |       \   \  |       \ <2>  \
2093 //  ...      \  [slow_res] |  |    \   [null_result]
2094 //            \         \--+--+---  |  |
2095 //             \           | /    \ | /
2096 //              --------Region     Phi
2097 //
2098 //=============================================================================
2099 // Code is structured as a series of driver functions all called 'do_XXX' that
2100 // call a set of helper functions.  Helper functions first, then drivers.
2101 
2102 //------------------------------null_check_oop---------------------------------
2103 // Null check oop.  Set null-path control into Region in slot 3.
2104 // Make a cast-not-nullness use the other not-null control.  Return cast.
2105 Node* GraphKit::null_check_oop(Node* value, Node* *null_control,
2106                                bool never_see_null) {
2107   // Initial NULL check taken path
2108   (*null_control) = top();
2109   Node* cast = null_check_common(value, T_OBJECT, false, null_control);
2110 
2111   // Generate uncommon_trap:
2112   if (never_see_null && (*null_control) != top()) {
2113     // If we see an unexpected null at a check-cast we record it and force a
2114     // recompile; the offending check-cast will be compiled to handle NULLs.
2115     // If we see more than one offending BCI, then all checkcasts in the
2116     // method will be compiled to handle NULLs.
2117     PreserveJVMState pjvms(this);
2118     set_control(*null_control);
2119     replace_in_map(value, null());
2120     uncommon_trap(Deoptimization::Reason_null_check,
2121                   Deoptimization::Action_make_not_entrant);
2122     (*null_control) = top();    // NULL path is dead
2123   }



2124 
2125   // Cast away null-ness on the result
2126   return cast;
2127 }
2128 
2129 //------------------------------opt_iff----------------------------------------
2130 // Optimize the fast-check IfNode.  Set the fast-path region slot 2.
2131 // Return slow-path control.
2132 Node* GraphKit::opt_iff(Node* region, Node* iff) {
2133   IfNode *opt_iff = _gvn.transform(iff)->as_If();
2134 
2135   // Fast path taken; set region slot 2
2136   Node *fast_taken = _gvn.transform( new (C) IfFalseNode(opt_iff) );
2137   region->init_req(2,fast_taken); // Capture fast-control
2138 
2139   // Fast path not-taken, i.e. slow path
2140   Node *slow_taken = _gvn.transform( new (C) IfTrueNode(opt_iff) );
2141   return slow_taken;
2142 }
2143 


2598   return NULL;
2599 }
2600 
2601 
2602 //-------------------------------gen_instanceof--------------------------------
2603 // Generate an instance-of idiom.  Used by both the instance-of bytecode
2604 // and the reflective instance-of call.
2605 Node* GraphKit::gen_instanceof(Node* obj, Node* superklass) {
2606   kill_dead_locals();           // Benefit all the uncommon traps
2607   assert( !stopped(), "dead parse path should be checked in callers" );
2608   assert(!TypePtr::NULL_PTR->higher_equal(_gvn.type(superklass)->is_klassptr()),
2609          "must check for not-null not-dead klass in callers");
2610 
2611   // Make the merge point
2612   enum { _obj_path = 1, _fail_path, _null_path, PATH_LIMIT };
2613   RegionNode* region = new(C) RegionNode(PATH_LIMIT);
2614   Node*       phi    = new(C) PhiNode(region, TypeInt::BOOL);
2615   C->set_has_split_ifs(true); // Has chance for split-if optimization
2616 
2617   ciProfileData* data = NULL;

2618   if (java_bc() == Bytecodes::_instanceof) {  // Only for the bytecode
2619     data = method()->method_data()->bci_to_data(bci());

2620   }
2621   bool never_see_null = (ProfileDynamicTypes  // aggressive use of profile
2622                          && seems_never_null(obj, data));
2623 
2624   // Null check; get casted pointer; set region slot 3
2625   Node* null_ctl = top();
2626   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null);
2627 
2628   // If not_null_obj is dead, only null-path is taken
2629   if (stopped()) {              // Doing instance-of on a NULL?
2630     set_control(null_ctl);
2631     return intcon(0);
2632   }
2633   region->init_req(_null_path, null_ctl);
2634   phi   ->init_req(_null_path, intcon(0)); // Set null path value
2635   if (null_ctl == top()) {
2636     // Do this eagerly, so that pattern matches like is_diamond_phi
2637     // will work even during parsing.
2638     assert(_null_path == PATH_LIMIT-1, "delete last");
2639     region->del_req(_null_path);
2640     phi   ->del_req(_null_path);
2641   }
2642 
2643   if (ProfileDynamicTypes && data != NULL) {
2644     Node* cast_obj = maybe_cast_profiled_receiver(not_null_obj, data, NULL);
2645     if (stopped()) {            // Profile disagrees with this path.
2646       set_control(null_ctl);    // Null is the only remaining possibility.


2687   // This detects the common cases where the test will short-circuit
2688   // away completely.  We do this before we perform the null check,
2689   // because if the test is going to turn into zero code, we don't
2690   // want a residual null check left around.  (Causes a slowdown,
2691   // for example, in some objArray manipulations, such as a[i]=a[j].)
2692   if (tk->singleton()) {
2693     const TypeOopPtr* objtp = _gvn.type(obj)->isa_oopptr();
2694     if (objtp != NULL && objtp->klass() != NULL) {
2695       switch (static_subtype_check(tk->klass(), objtp->klass())) {
2696       case SSC_always_true:
2697         return obj;
2698       case SSC_always_false:
2699         // It needs a null check because a null will *pass* the cast check.
2700         // A non-null value will always produce an exception.
2701         return null_assert(obj);
2702       }
2703     }
2704   }
2705 
2706   ciProfileData* data = NULL;

2707   if (failure_control == NULL) {        // use MDO in regular case only
2708     assert(java_bc() == Bytecodes::_aastore ||
2709            java_bc() == Bytecodes::_checkcast,
2710            "interpreter profiles type checks only for these BCs");
2711     data = method()->method_data()->bci_to_data(bci());

2712   }
2713 
2714   // Make the merge point
2715   enum { _obj_path = 1, _null_path, PATH_LIMIT };
2716   RegionNode* region = new (C) RegionNode(PATH_LIMIT);
2717   Node*       phi    = new (C) PhiNode(region, toop);
2718   C->set_has_split_ifs(true); // Has chance for split-if optimization
2719 
2720   // Use null-cast information if it is available
2721   bool never_see_null = ((failure_control == NULL)  // regular case only
2722                          && seems_never_null(obj, data));
2723 
2724   // Null check; get casted pointer; set region slot 3
2725   Node* null_ctl = top();
2726   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null);
2727 
2728   // If not_null_obj is dead, only null-path is taken
2729   if (stopped()) {              // Doing instance-of on a NULL?
2730     set_control(null_ctl);
2731     return null();
2732   }
2733   region->init_req(_null_path, null_ctl);
2734   phi   ->init_req(_null_path, null());  // Set null path value
2735   if (null_ctl == top()) {
2736     // Do this eagerly, so that pattern matches like is_diamond_phi
2737     // will work even during parsing.
2738     assert(_null_path == PATH_LIMIT-1, "delete last");
2739     region->del_req(_null_path);
2740     phi   ->del_req(_null_path);
2741   }
2742 
2743   Node* cast_obj = NULL;
2744   if (data != NULL &&
2745       // Counter has never been decremented (due to cast failure).
2746       // ...This is a reasonable thing to expect.  It is true of




2086 //   [slow_call]     \[fast_result]
2087 //    Ctl   Val       \      \
2088 //     |               \      \
2089 //    Catch       <1>   \      \
2090 //   /    \        ^     \      \
2091 //  Ex    No_Ex    |      \      \
2092 //  |       \   \  |       \ <2>  \
2093 //  ...      \  [slow_res] |  |    \   [null_result]
2094 //            \         \--+--+---  |  |
2095 //             \           | /    \ | /
2096 //              --------Region     Phi
2097 //
2098 //=============================================================================
2099 // Code is structured as a series of driver functions all called 'do_XXX' that
2100 // call a set of helper functions.  Helper functions first, then drivers.
2101 
2102 //------------------------------null_check_oop---------------------------------
2103 // Null check oop.  Set null-path control into Region in slot 3.
2104 // Make a cast-not-nullness use the other not-null control.  Return cast.
2105 Node* GraphKit::null_check_oop(Node* value, Node* *null_control,
2106                                bool never_see_null, bool safe_for_replace) {
2107   // Initial NULL check taken path
2108   (*null_control) = top();
2109   Node* cast = null_check_common(value, T_OBJECT, false, null_control);
2110 
2111   // Generate uncommon_trap:
2112   if (never_see_null && (*null_control) != top()) {
2113     // If we see an unexpected null at a check-cast we record it and force a
2114     // recompile; the offending check-cast will be compiled to handle NULLs.
2115     // If we see more than one offending BCI, then all checkcasts in the
2116     // method will be compiled to handle NULLs.
2117     PreserveJVMState pjvms(this);
2118     set_control(*null_control);
2119     replace_in_map(value, null());
2120     uncommon_trap(Deoptimization::Reason_null_check,
2121                   Deoptimization::Action_make_not_entrant);
2122     (*null_control) = top();    // NULL path is dead
2123   }
2124   if ((*null_control) == top() && safe_for_replace) {
2125     replace_in_map(value, cast);
2126   }
2127 
2128   // Cast away null-ness on the result
2129   return cast;
2130 }
2131 
2132 //------------------------------opt_iff----------------------------------------
2133 // Optimize the fast-check IfNode.  Set the fast-path region slot 2.
2134 // Return slow-path control.
2135 Node* GraphKit::opt_iff(Node* region, Node* iff) {
2136   IfNode *opt_iff = _gvn.transform(iff)->as_If();
2137 
2138   // Fast path taken; set region slot 2
2139   Node *fast_taken = _gvn.transform( new (C) IfFalseNode(opt_iff) );
2140   region->init_req(2,fast_taken); // Capture fast-control
2141 
2142   // Fast path not-taken, i.e. slow path
2143   Node *slow_taken = _gvn.transform( new (C) IfTrueNode(opt_iff) );
2144   return slow_taken;
2145 }
2146 


2601   return NULL;
2602 }
2603 
2604 
2605 //-------------------------------gen_instanceof--------------------------------
2606 // Generate an instance-of idiom.  Used by both the instance-of bytecode
2607 // and the reflective instance-of call.
2608 Node* GraphKit::gen_instanceof(Node* obj, Node* superklass) {
2609   kill_dead_locals();           // Benefit all the uncommon traps
2610   assert( !stopped(), "dead parse path should be checked in callers" );
2611   assert(!TypePtr::NULL_PTR->higher_equal(_gvn.type(superklass)->is_klassptr()),
2612          "must check for not-null not-dead klass in callers");
2613 
2614   // Make the merge point
2615   enum { _obj_path = 1, _fail_path, _null_path, PATH_LIMIT };
2616   RegionNode* region = new(C) RegionNode(PATH_LIMIT);
2617   Node*       phi    = new(C) PhiNode(region, TypeInt::BOOL);
2618   C->set_has_split_ifs(true); // Has chance for split-if optimization
2619 
2620   ciProfileData* data = NULL;
2621   bool safe_for_replace = false;
2622   if (java_bc() == Bytecodes::_instanceof) {  // Only for the bytecode
2623     data = method()->method_data()->bci_to_data(bci());
2624     safe_for_replace = true;
2625   }
2626   bool never_see_null = (ProfileDynamicTypes  // aggressive use of profile
2627                          && seems_never_null(obj, data));
2628 
2629   // Null check; get casted pointer; set region slot 3
2630   Node* null_ctl = top();
2631   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace);
2632 
2633   // If not_null_obj is dead, only null-path is taken
2634   if (stopped()) {              // Doing instance-of on a NULL?
2635     set_control(null_ctl);
2636     return intcon(0);
2637   }
2638   region->init_req(_null_path, null_ctl);
2639   phi   ->init_req(_null_path, intcon(0)); // Set null path value
2640   if (null_ctl == top()) {
2641     // Do this eagerly, so that pattern matches like is_diamond_phi
2642     // will work even during parsing.
2643     assert(_null_path == PATH_LIMIT-1, "delete last");
2644     region->del_req(_null_path);
2645     phi   ->del_req(_null_path);
2646   }
2647 
2648   if (ProfileDynamicTypes && data != NULL) {
2649     Node* cast_obj = maybe_cast_profiled_receiver(not_null_obj, data, NULL);
2650     if (stopped()) {            // Profile disagrees with this path.
2651       set_control(null_ctl);    // Null is the only remaining possibility.


2692   // This detects the common cases where the test will short-circuit
2693   // away completely.  We do this before we perform the null check,
2694   // because if the test is going to turn into zero code, we don't
2695   // want a residual null check left around.  (Causes a slowdown,
2696   // for example, in some objArray manipulations, such as a[i]=a[j].)
2697   if (tk->singleton()) {
2698     const TypeOopPtr* objtp = _gvn.type(obj)->isa_oopptr();
2699     if (objtp != NULL && objtp->klass() != NULL) {
2700       switch (static_subtype_check(tk->klass(), objtp->klass())) {
2701       case SSC_always_true:
2702         return obj;
2703       case SSC_always_false:
2704         // It needs a null check because a null will *pass* the cast check.
2705         // A non-null value will always produce an exception.
2706         return null_assert(obj);
2707       }
2708     }
2709   }
2710 
2711   ciProfileData* data = NULL;
2712   bool safe_for_replace = false;
2713   if (failure_control == NULL) {        // use MDO in regular case only
2714     assert(java_bc() == Bytecodes::_aastore ||
2715            java_bc() == Bytecodes::_checkcast,
2716            "interpreter profiles type checks only for these BCs");
2717     data = method()->method_data()->bci_to_data(bci());
2718     safe_for_replace = true;
2719   }
2720 
2721   // Make the merge point
2722   enum { _obj_path = 1, _null_path, PATH_LIMIT };
2723   RegionNode* region = new (C) RegionNode(PATH_LIMIT);
2724   Node*       phi    = new (C) PhiNode(region, toop);
2725   C->set_has_split_ifs(true); // Has chance for split-if optimization
2726 
2727   // Use null-cast information if it is available
2728   bool never_see_null = ((failure_control == NULL)  // regular case only
2729                          && seems_never_null(obj, data));
2730 
2731   // Null check; get casted pointer; set region slot 3
2732   Node* null_ctl = top();
2733   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace);
2734 
2735   // If not_null_obj is dead, only null-path is taken
2736   if (stopped()) {              // Doing instance-of on a NULL?
2737     set_control(null_ctl);
2738     return null();
2739   }
2740   region->init_req(_null_path, null_ctl);
2741   phi   ->init_req(_null_path, null());  // Set null path value
2742   if (null_ctl == top()) {
2743     // Do this eagerly, so that pattern matches like is_diamond_phi
2744     // will work even during parsing.
2745     assert(_null_path == PATH_LIMIT-1, "delete last");
2746     region->del_req(_null_path);
2747     phi   ->del_req(_null_path);
2748   }
2749 
2750   Node* cast_obj = NULL;
2751   if (data != NULL &&
2752       // Counter has never been decremented (due to cast failure).
2753       // ...This is a reasonable thing to expect.  It is true of

