
src/share/vm/opto/parse1.cpp

rev 5783 : 8024069: replace_in_map() should operate on parent maps
Summary: type information gets lost because replace_in_map() doesn't update parent maps
Reviewed-by: kvn, twisti
rev 5784 : 8026796: Make replace_in_map() on parent maps generic
Summary: propagate node replacements along control flow edges to callers
Reviewed-by: kvn, vlivanov
rev 5786 : 8174164: SafePointNode::_replaced_nodes breaks with irreducible loops
Reviewed-by: kvn
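
Taken together, the three changesets above hang off one piece of bookkeeping: a SafePointNode carries a list of (initial, improved) node pairs recorded when replace_in_map() learns a better type, and that list can be transferred to a caller's map, intersected at control-flow merges, and applied to rewrite a map. Below is a minimal, self-contained sketch of that idea; the real class is ReplacedNodes in opto/replacednodes.*, and its exact fields and signatures are assumptions here, not something this page shows.

// Hypothetical, simplified model of SafePointNode's replaced-nodes
// list; not the actual HotSpot ReplacedNodes class.
#include <algorithm>
#include <vector>

struct Node { unsigned _idx; };          // stand-in for HotSpot's Node

struct ReplacedNodes {
  struct Pair {
    Node* initial;                       // node as the caller knows it
    Node* improved;                      // better-typed replacement
    bool operator==(const Pair& o) const {
      return initial == o.initial && improved == o.improved;
    }
  };
  std::vector<Pair> _entries;

  void record(Node* initial, Node* improved) {
    _entries.push_back(Pair{initial, improved});
  }
  bool is_empty() const { return _entries.empty(); }

  // At a merge, keep only the pairs both incoming paths agree on.
  void intersect_with(const ReplacedNodes& other) {
    _entries.erase(
        std::remove_if(_entries.begin(), _entries.end(),
                       [&](const Pair& p) {
                         return std::find(other._entries.begin(),
                                          other._entries.end(),
                                          p) == other._entries.end();
                       }),
        _entries.end());
  }
};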


 378     set_map(types_are_good);
 379   }
 380 }
 381 
 382 //------------------------------Parse------------------------------------------
 383 // Main parser constructor.
 384 Parse::Parse(JVMState* caller, ciMethod* parse_method, float expected_uses)
 385   : _exits(caller)
 386 {
 387   // Init some variables
 388   _caller = caller;
 389   _method = parse_method;
 390   _expected_uses = expected_uses;
 391   _depth = 1 + (caller->has_method() ? caller->depth() : 0);
 392   _wrote_final = false;
 393   // Add MemBarRelease for constructors which write a volatile field (PPC64).
 394   PPC64_ONLY(_wrote_volatile = false;)
 395   _entry_bci = InvocationEntryBci;
 396   _tf = NULL;
 397   _block = NULL;
 398   debug_only(_block_count = -1);
 399   debug_only(_blocks = (Block*)-1);
 400 #ifndef PRODUCT
 401   if (PrintCompilation || PrintOpto) {
 402     // Make sure I have an inline tree, so I can print messages about it.
 403     JVMState* ilt_caller = is_osr_parse() ? caller->caller() : caller;
 404     InlineTree::find_subtree_from_root(C->ilt(), ilt_caller, parse_method);
 405   }
 406   _max_switch_depth = 0;
 407   _est_switch_depth = 0;
 408 #endif
 409 
 410   _tf = TypeFunc::make(method());
 411   _iter.reset_to_method(method());
 412   _flow = method()->get_flow_analysis();
 413   if (_flow->failing()) {
 414     C->record_method_not_compilable_all_tiers(_flow->failure_reason());
 415   }
 416 
 417 #ifndef PRODUCT


 901     }
 902   }
 903 
 904   // We now return to our regularly scheduled program:
 905 }
 906 
 907 //---------------------------throw_to_exit-------------------------------------
 908 // Merge the given map into an exception exit from this method.
 909 // The exception exit will handle any unlocking of the receiver.
 910 // The ex_oop must be saved within the ex_map, unlike merge_exception.
 911 void Parse::throw_to_exit(SafePointNode* ex_map) {
 912   // Pop the JVMS to (a copy of) the caller.
 913   GraphKit caller;
 914   caller.set_map_clone(_caller->map());
 915   caller.set_bci(_caller->bci());
 916   caller.set_sp(_caller->sp());
 917   // Copy out the standard machine state:
 918   for (uint i = 0; i < TypeFunc::Parms; i++) {
 919     caller.map()->set_req(i, ex_map->in(i));
 920   }
 921   // ...and the exception:
 922   Node*          ex_oop        = saved_ex_oop(ex_map);
 923   SafePointNode* caller_ex_map = caller.make_exception_state(ex_oop);
 924   // Finally, collect the new exception state in my exits:
 925   _exits.add_exception_state(caller_ex_map);
 926 }
 927 
 928 //------------------------------do_exits---------------------------------------
 929 void Parse::do_exits() {
 930   set_parse_bci(InvocationEntryBci);
 931 
 932   // Now peephole on the return bits
 933   Node* region = _exits.control();
 934   _exits.set_control(gvn().transform(region));
 935 
 936   Node* iophi = _exits.i_o();
 937   _exits.set_i_o(gvn().transform(iophi));
 938 
 939   // Add MemBarRelease for constructors which write a volatile field (PPC64).
 940   // The intent is to keep other threads from observing initial values even though the


 970   if (tf()->range()->cnt() > TypeFunc::Parms) {
 971     const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
 972     Node*       ret_phi  = _gvn.transform( _exits.argument(0) );
 973     assert(_exits.control()->is_top() || !_gvn.type(ret_phi)->empty(), "return value must be well defined");
 974     if (ret_type->isa_int()) {
 975       BasicType ret_bt = method()->return_type()->basic_type();
 976       ret_phi = mask_int_value(ret_phi, ret_bt, &_gvn);
 977     }
 978     _exits.push_node(ret_type->basic_type(), ret_phi);
 979   }
 980 
 981   // Note:  Logic for creating and optimizing the ReturnNode is in Compile.
 982 
 983   // Unlock along the exceptional paths.
 984   // This is done late so that we can common up equivalent exceptions
 985   // (e.g., null checks) arising from multiple points within this method.
 986   // See GraphKit::add_exception_state, which performs the commoning.
 987   bool do_synch = method()->is_synchronized() && GenerateSynchronizationCode;
 988 
 989   // Record exit from a method if compiled while DTrace is turned on.
 990   if (do_synch || C->env()->dtrace_method_probes()) {
 991     // First move the exception list out of _exits:
 992     GraphKit kit(_exits.transfer_exceptions_into_jvms());
 993     SafePointNode* normal_map = kit.map();  // keep this guy safe
 994     // Now re-collect the exceptions into _exits:
 995     SafePointNode* ex_map;
 996     while ((ex_map = kit.pop_exception_state()) != NULL) {
 997       Node* ex_oop = kit.use_exception_state(ex_map);
 998       // Force the exiting JVM state to have this method at InvocationEntryBci.
 999       // The exiting JVM state is otherwise a copy of the calling JVMS.
1000       JVMState* caller = kit.jvms();
1001       JVMState* ex_jvms = caller->clone_shallow(C);
1002       ex_jvms->set_map(kit.clone_map());
1003       ex_jvms->map()->set_jvms(ex_jvms);
1004       ex_jvms->set_bci(   InvocationEntryBci);
1005       kit.set_jvms(ex_jvms);
1006       if (do_synch) {
1007         // Add on the synchronized-method box/object combo
1008         kit.map()->push_monitor(_synch_lock);
1009         // Unlock!
1010         kit.shared_unlock(_synch_lock->box_node(), _synch_lock->obj_node());
1011       }
1012       if (C->env()->dtrace_method_probes()) {
1013         kit.make_dtrace_method_exit(method());
1014       }
1015       // Done with exception-path processing.
1016       ex_map = kit.make_exception_state(ex_oop);
1017       assert(ex_jvms->same_calls_as(ex_map->jvms()), "sanity");
1018       // Pop the last vestige of this method:
1019       ex_map->set_jvms(caller->clone_shallow(C));
1020       ex_map->jvms()->set_map(ex_map);
1021       _exits.push_exception_state(ex_map);
1022     }
1023     assert(_exits.map() == normal_map, "keep the same return state");
1024   }
1025 
1026   {
1027     // Capture very early exceptions (receiver null checks) from caller JVMS
1028     GraphKit caller(_caller);
1029     SafePointNode* ex_map;
1030     while ((ex_map = caller.pop_exception_state()) != NULL) {
1031       _exits.add_exception_state(ex_map);
1032     }
1033   }
1034 }
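
The "common up equivalent exceptions" remark above (and the pointer to GraphKit::add_exception_state) is the reason unlocking is done this late. A rough sketch of commoning keyed on the exception oop, with the map-merging elided; this is a hypothetical simplification, not the actual GraphKit code:

#include <vector>

struct Node;                          // exception oop, opaque here
struct ExState { Node* ex_oop; };     // plus a JVM state map, elided

struct ExitList {
  std::vector<ExState> _states;

  // If a queued state already carries the same exception oop, fold the
  // new one into it instead of keeping another exit path; otherwise
  // append a fresh state.
  void add_exception_state(const ExState& s) {
    for (ExState& q : _states) {
      if (q.ex_oop == s.ex_oop) {
        // ... merge the two JVM states here ...
        return;
      }
    }
    _states.push_back(s);
  }
};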
1035 
1036 //-----------------------------create_entry_map-------------------------------
1037 // Initialize our parser map to contain the types at method entry.
1038 // For OSR, the map contains a single RawPtr parameter.
1039 // Initial monitor locking for sync. methods is performed by do_method_entry.
1040 SafePointNode* Parse::create_entry_map() {
1041   // Check for really stupid bail-out cases.
1042   uint len = TypeFunc::Parms + method()->max_locals() + method()->max_stack();
1043   if (len >= 32760) {
1044     C->record_method_not_compilable_all_tiers("too many local variables");
1045     return NULL;
1046   }
1047 
1048   // If this is an inlined method, we may have to do a receiver null check.
1049   if (_caller->has_method() && is_normal_parse() && !method()->is_static()) {
1050     GraphKit kit(_caller);
1051     kit.null_check_receiver_before_call(method());
1052     _caller = kit.transfer_exceptions_into_jvms();
1053     if (kit.stopped()) {
1054       _exits.add_exception_states_from(_caller);
1055       _exits.set_jvms(_caller);
1056       return NULL;
1057     }
1058   }
1059 
1060   assert(method() != NULL, "parser must have a method");
1061 
1062   // Create an initial safepoint to hold JVM state during parsing
1063   JVMState* jvms = new (C) JVMState(method(), _caller->has_method() ? _caller : NULL);
1064   set_map(new (C) SafePointNode(len, jvms));
1065   jvms->set_map(map());
1066   record_for_igvn(map());
1067   assert(jvms->endoff() == len, "correct jvms sizing");
1068 
1069   SafePointNode* inmap = _caller->map();
1070   assert(inmap != NULL, "must have inmap");
1071 
1072   uint i;
1073 
1074   // Pass thru the predefined input parameters.
1075   for (i = 0; i < TypeFunc::Parms; i++) {
1076     map()->init_req(i, inmap->in(i));
1077   }
1078 
1079   if (depth() == 1) {
1080     assert(map()->memory()->Opcode() == Op_Parm, "");
1081     // Insert the memory aliasing node
1082     set_all_memory(reset_memory());
1083   }
1084   assert(merged_memory(), "");
1085 
1086   // Now add the locals which are initially bound to arguments:
1087   uint arg_size = tf()->domain()->cnt();
1088   ensure_stack(arg_size - TypeFunc::Parms);  // OSR methods have funny args
1089   for (i = TypeFunc::Parms; i < arg_size; i++) {
1090     map()->init_req(i, inmap->argument(_caller, i - TypeFunc::Parms));
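
The len computed at the top of create_entry_map is the whole JVMS frame: the fixed TypeFunc::Parms slots, then max_locals locals, then max_stack stack slots, and the assert above checks that jvms->endoff() lands exactly on it. A toy version of that arithmetic; the value 5 for TypeFunc::Parms is an assumption for illustration, not something shown on this page:

#include <cstdio>

int main() {
  // Assumed values, for illustration only.
  const unsigned parms      = 5;   // TypeFunc::Parms (Control, I_O,
                                   // Memory, FramePtr, ReturnAdr)
  const unsigned max_locals = 4;   // some small method
  const unsigned max_stack  = 3;
  const unsigned len = parms + max_locals + max_stack;

  // Oversized maps are bailed out before any parsing happens.
  std::printf("entry map len = %u, bail out = %s\n",
              len, len >= 32760 ? "yes" : "no");
  return 0;
}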


1676           // So far, Phis have had a reasonable type from ciTypeFlow.
1677           // Now _gvn will join that with the meet of current inputs.
1678           // BOTTOM is never permissible here, 'cause pessimistically
1679           // Phis of pointers cannot lose the basic pointer type.
1680           debug_only(const Type* bt1 = phi->bottom_type());
1681           assert(bt1 != Type::BOTTOM, "should not be building conflict phis");
1682           map()->set_req(j, _gvn.transform_no_reclaim(phi));
1683           debug_only(const Type* bt2 = phi->bottom_type());
1684           assert(bt2->higher_equal(bt1), "must be consistent with type-flow");
1685           record_for_igvn(phi);
1686         }
1687       }
1688     } // End of for all values to be merged
1689 
1690     if (pnum == PhiNode::Input &&
1691         !r->in(0)) {         // The occasional useless Region
1692       assert(control() == r, "");
1693       set_control(r->nonnull_req());
1694     }
1695 
1696     // newin has been subsumed into the lazy merge, and is now dead.
1697     set_block(save_block);
1698 
1699     stop();                     // done with this guy, for now
1700   }
1701 
1702   if (TraceOptoParse) {
1703     tty->print_cr(" on path %d", pnum);
1704   }
1705 
1706   // Done with this parser state.
1707   assert(stopped(), "");
1708 }
1709 
1710 
1711 //--------------------------merge_memory_edges---------------------------------
1712 void Parse::merge_memory_edges(MergeMemNode* n, int pnum, bool nophi) {
1713   // (nophi means we must not create phis, because we already parsed here)
1714   assert(n != NULL, "");
1715   // Merge the inputs to the MergeMems


2042 
2043   // frame pointer is always same, already captured
2044   if (value != NULL) {
2045     // If returning oops to an interface-return, there is a silent free
2046     // cast from oop to interface allowed by the Verifier.  Make it explicit
2047     // here.
2048     Node* phi = _exits.argument(0);
2049     const TypeInstPtr *tr = phi->bottom_type()->isa_instptr();
2050     if( tr && tr->klass()->is_loaded() &&
2051         tr->klass()->is_interface() ) {
2052       const TypeInstPtr *tp = value->bottom_type()->isa_instptr();
2053       if (tp && tp->klass()->is_loaded() &&
2054           !tp->klass()->is_interface()) {
2055         // sharpen the type eagerly; this eases certain assert checking
2056         if (tp->higher_equal(TypeInstPtr::NOTNULL))
2057           tr = tr->join(TypeInstPtr::NOTNULL)->is_instptr();
2058         value = _gvn.transform(new (C) CheckCastPPNode(0,value,tr));
2059       }
2060     }
2061     phi->add_req(value);
2062   }
2063 
2064   stop_and_kill_map();          // This CFG path dies here
2065 }
2066 
2067 
2068 //------------------------------add_safepoint----------------------------------
2069 void Parse::add_safepoint() {
2070   // See if we can avoid this safepoint.  No need for a SafePoint immediately
2071   // after a Call (except Leaf Call) or another SafePoint.
2072   Node *proj = control();
2073   bool add_poll_param = SafePointNode::needs_polling_address_input();
2074   uint parms = add_poll_param ? TypeFunc::Parms+1 : TypeFunc::Parms;
2075   if( proj->is_Proj() ) {
2076     Node *n0 = proj->in(0);
2077     if( n0->is_Catch() ) {
2078       n0 = n0->in(0)->in(0);
2079       assert( n0->is_Call(), "expect a call here" );
2080     }
2081     if( n0->is_Call() ) {




 378     set_map(types_are_good);
 379   }
 380 }
 381 
 382 //------------------------------Parse------------------------------------------
 383 // Main parser constructor.
 384 Parse::Parse(JVMState* caller, ciMethod* parse_method, float expected_uses)
 385   : _exits(caller)
 386 {
 387   // Init some variables
 388   _caller = caller;
 389   _method = parse_method;
 390   _expected_uses = expected_uses;
 391   _depth = 1 + (caller->has_method() ? caller->depth() : 0);
 392   _wrote_final = false;
 393   // Add MemBarRelease for constructors which write a volatile field (PPC64).
 394   PPC64_ONLY(_wrote_volatile = false;)
 395   _entry_bci = InvocationEntryBci;
 396   _tf = NULL;
 397   _block = NULL;
 398   _first_return = true;
 399   _replaced_nodes_for_exceptions = false;
 400   _new_idx = C->unique();
 401   debug_only(_block_count = -1);
 402   debug_only(_blocks = (Block*)-1);
 403 #ifndef PRODUCT
 404   if (PrintCompilation || PrintOpto) {
 405     // Make sure I have an inline tree, so I can print messages about it.
 406     JVMState* ilt_caller = is_osr_parse() ? caller->caller() : caller;
 407     InlineTree::find_subtree_from_root(C->ilt(), ilt_caller, parse_method);
 408   }
 409   _max_switch_depth = 0;
 410   _est_switch_depth = 0;
 411 #endif
 412 
 413   _tf = TypeFunc::make(method());
 414   _iter.reset_to_method(method());
 415   _flow = method()->get_flow_analysis();
 416   if (_flow->failing()) {
 417     C->record_method_not_compilable_all_tiers(_flow->failure_reason());
 418   }
 419 
 420 #ifndef PRODUCT
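
Relative to the old constructor, three fields back the new machinery: _first_return says whether a return path has reached _exits yet, _replaced_nodes_for_exceptions records that some exception path carried replacements, and _new_idx snapshots C->unique(). The snapshot is a watermark: only nodes that already existed when this parse began can mean anything to the caller's maps. A sketch of that test, as a hypothetical helper:

struct Node { unsigned _idx; };

// Nodes minted during this parse (index at or above the C->unique()
// snapshot) are unknown to the caller and must not leak into its maps.
bool predates_parse(const Node* n, unsigned new_idx) {
  return n->_idx < new_idx;
}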


 904     }
 905   }
 906 
 907   // We now return to our regularly scheduled program:
 908 }
 909 
 910 //---------------------------throw_to_exit-------------------------------------
 911 // Merge the given map into an exception exit from this method.
 912 // The exception exit will handle any unlocking of the receiver.
 913 // The ex_oop must be saved within the ex_map, unlike merge_exception.
 914 void Parse::throw_to_exit(SafePointNode* ex_map) {
 915   // Pop the JVMS to (a copy of) the caller.
 916   GraphKit caller;
 917   caller.set_map_clone(_caller->map());
 918   caller.set_bci(_caller->bci());
 919   caller.set_sp(_caller->sp());
 920   // Copy out the standard machine state:
 921   for (uint i = 0; i < TypeFunc::Parms; i++) {
 922     caller.map()->set_req(i, ex_map->in(i));
 923   }
 924   if (ex_map->has_replaced_nodes()) {
 925     _replaced_nodes_for_exceptions = true;
 926   }
 927   caller.map()->transfer_replaced_nodes_from(ex_map, _new_idx);
 928   // ...and the exception:
 929   Node*          ex_oop        = saved_ex_oop(ex_map);
 930   SafePointNode* caller_ex_map = caller.make_exception_state(ex_oop);
 931   // Finally, collect the new exception state in my exits:
 932   _exits.add_exception_state(caller_ex_map);
 933 }
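
New in this version of throw_to_exit: replacement pairs sitting on ex_map are moved onto the caller-level map before the exception state is collected, and _replaced_nodes_for_exceptions is latched so do_exits knows it has rewriting to do later. A hypothetical reading of transfer_replaced_nodes_from, reusing the simplified pair model from the first sketch; the watermark filter is an assumption:

#include <vector>

struct Node { unsigned _idx; };
struct Pair { Node* initial; Node* improved; };

// Move src's pairs to dst, keeping only pairs whose nodes are below
// the new_idx watermark, i.e. nodes dst's owner can know about.
void transfer_replaced_nodes_from(std::vector<Pair>& dst,
                                  std::vector<Pair>& src,
                                  unsigned new_idx) {
  for (const Pair& p : src) {
    if (p.initial->_idx < new_idx && p.improved->_idx < new_idx) {
      dst.push_back(p);
    }
  }
  src.clear();   // the source map no longer owns the pairs
}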
 934 
 935 //------------------------------do_exits---------------------------------------
 936 void Parse::do_exits() {
 937   set_parse_bci(InvocationEntryBci);
 938 
 939   // Now peephole on the return bits
 940   Node* region = _exits.control();
 941   _exits.set_control(gvn().transform(region));
 942 
 943   Node* iophi = _exits.i_o();
 944   _exits.set_i_o(gvn().transform(iophi));
 945 
 946   // Add MemBarRelease for constructors which write a volatile field (PPC64).
 947   // The intent is to keep other threads from observing initial values even though the


 977   if (tf()->range()->cnt() > TypeFunc::Parms) {
 978     const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
 979     Node*       ret_phi  = _gvn.transform( _exits.argument(0) );
 980     assert(_exits.control()->is_top() || !_gvn.type(ret_phi)->empty(), "return value must be well defined");
 981     if (ret_type->isa_int()) {
 982       BasicType ret_bt = method()->return_type()->basic_type();
 983       ret_phi = mask_int_value(ret_phi, ret_bt, &_gvn);
 984     }
 985     _exits.push_node(ret_type->basic_type(), ret_phi);
 986   }
 987 
 988   // Note:  Logic for creating and optimizing the ReturnNode is in Compile.
 989 
 990   // Unlock along the exceptional paths.
 991   // This is done late so that we can common up equivalent exceptions
 992   // (e.g., null checks) arising from multiple points within this method.
 993   // See GraphKit::add_exception_state, which performs the commoning.
 994   bool do_synch = method()->is_synchronized() && GenerateSynchronizationCode;
 995 
 996   // Record exit from a method if compiled while DTrace is turned on.
 997   if (do_synch || C->env()->dtrace_method_probes() || _replaced_nodes_for_exceptions) {
 998     // First move the exception list out of _exits:
 999     GraphKit kit(_exits.transfer_exceptions_into_jvms());
1000     SafePointNode* normal_map = kit.map();  // keep this guy safe
1001     // Now re-collect the exceptions into _exits:
1002     SafePointNode* ex_map;
1003     while ((ex_map = kit.pop_exception_state()) != NULL) {
1004       Node* ex_oop = kit.use_exception_state(ex_map);
1005       // Force the exiting JVM state to have this method at InvocationEntryBci.
1006       // The exiting JVM state is otherwise a copy of the calling JVMS.
1007       JVMState* caller = kit.jvms();
1008       JVMState* ex_jvms = caller->clone_shallow(C);
1009       ex_jvms->set_map(kit.clone_map());
1010       ex_jvms->map()->set_jvms(ex_jvms);
1011       ex_jvms->set_bci(   InvocationEntryBci);
1012       kit.set_jvms(ex_jvms);
1013       if (do_synch) {
1014         // Add on the synchronized-method box/object combo
1015         kit.map()->push_monitor(_synch_lock);
1016         // Unlock!
1017         kit.shared_unlock(_synch_lock->box_node(), _synch_lock->obj_node());
1018       }
1019       if (C->env()->dtrace_method_probes()) {
1020         kit.make_dtrace_method_exit(method());
1021       }
1022       if (_replaced_nodes_for_exceptions) {
1023         kit.map()->apply_replaced_nodes(_new_idx);
1024       }
1025       // Done with exception-path processing.
1026       ex_map = kit.make_exception_state(ex_oop);
1027       assert(ex_jvms->same_calls_as(ex_map->jvms()), "sanity");
1028       // Pop the last vestige of this method:
1029       ex_map->set_jvms(caller->clone_shallow(C));
1030       ex_map->jvms()->set_map(ex_map);
1031       _exits.push_exception_state(ex_map);
1032     }
1033     assert(_exits.map() == normal_map, "keep the same return state");
1034   }
1035 
1036   {
1037     // Capture very early exceptions (receiver null checks) from caller JVMS
1038     GraphKit caller(_caller);
1039     SafePointNode* ex_map;
1040     while ((ex_map = caller.pop_exception_state()) != NULL) {
1041       _exits.add_exception_state(ex_map);
1042     }
1043   }
1044   _exits.map()->apply_replaced_nodes(_new_idx);
1045 }
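
The other new pieces in do_exits: when _replaced_nodes_for_exceptions is set, every rebuilt exception map gets apply_replaced_nodes(_new_idx) before being pushed, and the normal-return map gets the same call at the very end, so callers resume with the improved nodes actually substituted into the map. A hypothetical reading of apply, same simplified model:

#include <vector>

struct Node { unsigned _idx; };
struct Pair { Node* initial; Node* improved; };

struct Map {
  std::vector<Node*> _in;         // the map's inputs
  std::vector<Pair>  _replaced;   // pending replacements

  // Substitute every recorded initial node with its improvement, then
  // drop the bookkeeping: the map itself now embodies it.
  void apply_replaced_nodes() {
    for (Node*& slot : _in) {
      for (const Pair& p : _replaced) {
        if (slot == p.initial) slot = p.improved;
      }
    }
    _replaced.clear();
  }
};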
1046 
1047 //-----------------------------create_entry_map-------------------------------
1048 // Initialize our parser map to contain the types at method entry.
1049 // For OSR, the map contains a single RawPtr parameter.
1050 // Initial monitor locking for sync. methods is performed by do_method_entry.
1051 SafePointNode* Parse::create_entry_map() {
1052   // Check for really stupid bail-out cases.
1053   uint len = TypeFunc::Parms + method()->max_locals() + method()->max_stack();
1054   if (len >= 32760) {
1055     C->record_method_not_compilable_all_tiers("too many local variables");
1056     return NULL;
1057   }
1058 
1059   // Clear current replaced nodes that are of no use from here on (the map was cloned in build_exits).
1060   _caller->map()->delete_replaced_nodes();
1061 
1062   // If this is an inlined method, we may have to do a receiver null check.
1063   if (_caller->has_method() && is_normal_parse() && !method()->is_static()) {
1064     GraphKit kit(_caller);
1065     kit.null_check_receiver_before_call(method());
1066     _caller = kit.transfer_exceptions_into_jvms();
1067     if (kit.stopped()) {
1068       _exits.add_exception_states_from(_caller);
1069       _exits.set_jvms(_caller);
1070       return NULL;
1071     }
1072   }
1073 
1074   assert(method() != NULL, "parser must have a method");
1075 
1076   // Create an initial safepoint to hold JVM state during parsing
1077   JVMState* jvms = new (C) JVMState(method(), _caller->has_method() ? _caller : NULL);
1078   set_map(new (C) SafePointNode(len, jvms));
1079   jvms->set_map(map());
1080   record_for_igvn(map());
1081   assert(jvms->endoff() == len, "correct jvms sizing");
1082 
1083   SafePointNode* inmap = _caller->map();
1084   assert(inmap != NULL, "must have inmap");
1085   // In case of a null check on the receiver above
1086   map()->transfer_replaced_nodes_from(inmap, _new_idx);
1087 
1088   uint i;
1089 
1090   // Pass thru the predefined input parameters.
1091   for (i = 0; i < TypeFunc::Parms; i++) {
1092     map()->init_req(i, inmap->in(i));
1093   }
1094 
1095   if (depth() == 1) {
1096     assert(map()->memory()->Opcode() == Op_Parm, "");
1097     // Insert the memory aliasing node
1098     set_all_memory(reset_memory());
1099   }
1100   assert(merged_memory(), "");
1101 
1102   // Now add the locals which are initially bound to arguments:
1103   uint arg_size = tf()->domain()->cnt();
1104   ensure_stack(arg_size - TypeFunc::Parms);  // OSR methods have funny args
1105   for (i = TypeFunc::Parms; i < arg_size; i++) {
1106     map()->init_req(i, inmap->argument(_caller, i - TypeFunc::Parms));
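
create_entry_map gets two replaced-nodes touches: the caller map's leftover pairs are deleted up front (the comment notes the map was already cloned in build_exits, so they are dead weight), and pairs recorded by the receiver null check are transferred into the fresh entry map. The ordering is the point; a condensed, hypothetical view:

#include <vector>

struct Node { unsigned _idx; };
struct Pair { Node* initial; Node* improved; };
struct Map  { std::vector<Pair> replaced; };

int main() {
  Map caller_map, entry_map;
  caller_map.replaced.clear();                   // (1) drop stale pairs
  // (2) the receiver null check may record fresh pairs:
  caller_map.replaced.push_back(Pair{nullptr, nullptr});
  // (3) carry them into the parser's own map:
  entry_map.replaced.swap(caller_map.replaced);
  return static_cast<int>(entry_map.replaced.size());   // 1
}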


1692           // So far, Phis have had a reasonable type from ciTypeFlow.
1693           // Now _gvn will join that with the meet of current inputs.
1694           // BOTTOM is never permissible here, 'cause pessimistically
1695           // Phis of pointers cannot lose the basic pointer type.
1696           debug_only(const Type* bt1 = phi->bottom_type());
1697           assert(bt1 != Type::BOTTOM, "should not be building conflict phis");
1698           map()->set_req(j, _gvn.transform_no_reclaim(phi));
1699           debug_only(const Type* bt2 = phi->bottom_type());
1700           assert(bt2->higher_equal(bt1), "must be consistent with type-flow");
1701           record_for_igvn(phi);
1702         }
1703       }
1704     } // End of for all values to be merged
1705 
1706     if (pnum == PhiNode::Input &&
1707         !r->in(0)) {         // The occasional useless Region
1708       assert(control() == r, "");
1709       set_control(r->nonnull_req());
1710     }
1711 
1712     map()->merge_replaced_nodes_with(newin);
1713 
1714     // newin has been subsumed into the lazy merge, and is now dead.
1715     set_block(save_block);
1716 
1717     stop();                     // done with this guy, for now
1718   }
1719 
1720   if (TraceOptoParse) {
1721     tty->print_cr(" on path %d", pnum);
1722   }
1723 
1724   // Done with this parser state.
1725   assert(stopped(), "");
1726 }
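
merge_replaced_nodes_with(newin) has to leave only the replacements every path into this block agrees on, and an intersection is the natural reading. That is presumably also why irreducible loops were a hazard (the 8174164 fix above): a block can be entered along an edge carrying pairs that another entry never saw. A hypothetical intersection, same simplified model:

#include <algorithm>
#include <vector>

struct Node { unsigned _idx; };
struct Pair {
  Node* initial; Node* improved;
  bool operator==(const Pair& o) const {
    return initial == o.initial && improved == o.improved;
  }
};

// Keep only the pairs present on both incoming paths.
void merge_replaced_nodes_with(std::vector<Pair>& mine,
                               const std::vector<Pair>& theirs) {
  mine.erase(std::remove_if(
                 mine.begin(), mine.end(),
                 [&](const Pair& p) {
                   return std::find(theirs.begin(), theirs.end(), p)
                          == theirs.end();
                 }),
             mine.end());
}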
1727 
1728 
1729 //--------------------------merge_memory_edges---------------------------------
1730 void Parse::merge_memory_edges(MergeMemNode* n, int pnum, bool nophi) {
1731   // (nophi means we must not create phis, because we already parsed here)
1732   assert(n != NULL, "");
1733   // Merge the inputs to the MergeMems


2060 
2061   // frame pointer is always same, already captured
2062   if (value != NULL) {
2063     // If returning oops to an interface-return, there is a silent free
2064     // cast from oop to interface allowed by the Verifier.  Make it explicit
2065     // here.
2066     Node* phi = _exits.argument(0);
2067     const TypeInstPtr *tr = phi->bottom_type()->isa_instptr();
2068     if( tr && tr->klass()->is_loaded() &&
2069         tr->klass()->is_interface() ) {
2070       const TypeInstPtr *tp = value->bottom_type()->isa_instptr();
2071       if (tp && tp->klass()->is_loaded() &&
2072           !tp->klass()->is_interface()) {
2073         // sharpen the type eagerly; this eases certain assert checking
2074         if (tp->higher_equal(TypeInstPtr::NOTNULL))
2075           tr = tr->join(TypeInstPtr::NOTNULL)->is_instptr();
2076         value = _gvn.transform(new (C) CheckCastPPNode(0,value,tr));
2077       }
2078     }
2079     phi->add_req(value);
2080   }
2081 
2082   if (_first_return) {
2083     _exits.map()->transfer_replaced_nodes_from(map(), _new_idx);
2084     _first_return = false;
2085   } else {
2086     _exits.map()->merge_replaced_nodes_with(map());
2087   }
2088 
2089   stop_and_kill_map();          // This CFG path dies here
2090 }
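
Return paths feed _exits much like a merge, with one twist: the first return donates its replacement list wholesale (transfer), and each later return intersects with what has accumulated (merge), so only replacements valid on every normal exit survive to the caller. Usage under the simplified model; the helper below is hypothetical:

#include <vector>

struct Node { unsigned _idx; };
struct Pair { Node* initial; Node* improved; };

struct Exits {
  std::vector<Pair> replaced;
  bool first_return = true;

  void take_return(std::vector<Pair>& from_map) {
    if (first_return) {
      replaced.swap(from_map);      // transfer: adopt wholesale
      first_return = false;
    } else {
      std::vector<Pair> kept;       // merge: intersect with this path
      for (const Pair& p : replaced) {
        for (const Pair& q : from_map) {
          if (p.initial == q.initial && p.improved == q.improved) {
            kept.push_back(p);
            break;
          }
        }
      }
      replaced = kept;
    }
  }
};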
2091 
2092 
2093 //------------------------------add_safepoint----------------------------------
2094 void Parse::add_safepoint() {
2095   // See if we can avoid this safepoint.  No need for a SafePoint immediately
2096   // after a Call (except Leaf Call) or another SafePoint.
2097   Node *proj = control();
2098   bool add_poll_param = SafePointNode::needs_polling_address_input();
2099   uint parms = add_poll_param ? TypeFunc::Parms+1 : TypeFunc::Parms;
2100   if( proj->is_Proj() ) {
2101     Node *n0 = proj->in(0);
2102     if( n0->is_Catch() ) {
2103       n0 = n0->in(0)->in(0);
2104       assert( n0->is_Call(), "expect a call here" );
2105     }
2106     if( n0->is_Call() ) {

