  if (bad_type_exit->control()->req() > 1) {
    // Build an uncommon trap here, if any inputs can be unexpected.
    bad_type_exit->set_control(_gvn.transform( bad_type_exit->control() ));
    record_for_igvn(bad_type_exit->control());
    SafePointNode* types_are_good = map();
    set_map(bad_type_exit);
    // The unexpected type happens because a new edge is active
    // in the CFG, which typeflow had previously ignored.
    // E.g., Object x = coldAtFirst() && notReached() ? "str" : new Integer(123).
    // This x will be typed as Integer if notReached is not yet linked.
    // It could also happen due to a problem in ciTypeFlow analysis.
    uncommon_trap(Deoptimization::Reason_constraint,
                  Deoptimization::Action_reinterpret);
    set_map(types_are_good);
  }
}

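// --- Editorial sketch (illustrative only; not part of parse1.cpp) ------------
// The trap above pairs a deoptimization *reason* (why the compiled code cannot
// proceed) with an *action* (what the runtime should do about it).
// Reason_constraint plus Action_reinterpret means "a typeflow assumption no
// longer holds; discard the compiled state and resume in the interpreter".
// The real request word is built by Deoptimization::make_trap_request; the
// packing below is an invented stand-in.
struct DeoptSketch {
  enum Reason { reason_constraint, reason_unreached, reason_null_check };
  enum Action { action_none, action_reinterpret, action_make_not_entrant };
  static int make_trap_request(Reason r, Action a) {
    return (int(r) << 8) | int(a);           // hypothetical bit layout
  }
};
// ------------------------------------------------------------------------------
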
//------------------------------Parse------------------------------------------
// Main parser constructor.
Parse::Parse(JVMState* caller, ciMethod* parse_method, float expected_uses)
  : _exits(caller)
{
  // Init some variables
  _caller = caller;
  _method = parse_method;
  _expected_uses = expected_uses;
  _depth = 1 + (caller->has_method() ? caller->depth() : 0);
  _wrote_final = false;
  _wrote_volatile = false;
  _wrote_stable = false;
  _wrote_fields = false;
  _alloc_with_final = NULL;
  _entry_bci = InvocationEntryBci;
  _tf = NULL;
  _block = NULL;
  _first_return = true;
  _replaced_nodes_for_exceptions = false;
  _new_idx = C->unique();
  debug_only(_block_count = -1);
  debug_only(_blocks = (Block*)-1);
#ifndef PRODUCT
  if (PrintCompilation || PrintOpto) {
    // Make sure I have an inline tree, so I can print messages about it.
    JVMState* ilt_caller = is_osr_parse() ? caller->caller() : caller;
    InlineTree::find_subtree_from_root(C->ilt(), ilt_caller, parse_method);
  }
  _max_switch_depth = 0;
  _est_switch_depth = 0;
#endif

  _tf = TypeFunc::make(method());
  _iter.reset_to_method(method());
  _flow = method()->get_flow_analysis();
  if (_flow->failing()) {
    C->record_method_not_compilable_all_tiers(_flow->failure_reason());
  }

#ifndef PRODUCT
// ...
  }
}

// We now return to our regularly scheduled program:
}

//---------------------------throw_to_exit-------------------------------------
// Merge the given map into an exception exit from this method.
// The exception exit will handle any unlocking of receiver.
// The ex_oop must be saved within the ex_map, unlike merge_exception.
void Parse::throw_to_exit(SafePointNode* ex_map) {
  // Pop the JVMS to (a copy of) the caller.
  GraphKit caller;
  caller.set_map_clone(_caller->map());
  caller.set_bci(_caller->bci());
  caller.set_sp(_caller->sp());
  // Copy out the standard machine state:
  for (uint i = 0; i < TypeFunc::Parms; i++) {
    caller.map()->set_req(i, ex_map->in(i));
  }
  if (ex_map->has_replaced_nodes()) {
    _replaced_nodes_for_exceptions = true;
  }
  caller.map()->transfer_replaced_nodes_from(ex_map, _new_idx);
  // ...and the exception:
  Node* ex_oop = saved_ex_oop(ex_map);
  SafePointNode* caller_ex_map = caller.make_exception_state(ex_oop);
  // Finally, collect the new exception state in my exits:
  _exits.add_exception_state(caller_ex_map);
}

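// --- Editorial sketch (illustrative only; not part of parse1.cpp) ------------
// The loop above copies only the fixed inputs that precede the real arguments
// in every SafePointNode: Control, I_O, Memory, FramePtr and ReturnAdr, with
// TypeFunc::Parms marking the first argument slot. A minimal model of that
// copy, with Node* stood in by int:
struct MachineStateSketch {
  enum { kParms = 5 };                       // stands in for TypeFunc::Parms
  static void copy(const int (&src)[kParms], int (&dst)[kParms]) {
    for (unsigned i = 0; i < kParms; i++) {
      dst[i] = src[i];     // mirrors caller.map()->set_req(i, ex_map->in(i))
    }
  }
};
// ------------------------------------------------------------------------------
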
//------------------------------do_exits---------------------------------------
void Parse::do_exits() {
  set_parse_bci(InvocationEntryBci);

  // Now peephole on the return bits
  Node* region = _exits.control();
  _exits.set_control(gvn().transform(region));

  Node* iophi = _exits.i_o();
  _exits.set_i_o(gvn().transform(iophi));

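  // --- Editorial sketch (illustrative only; not part of parse1.cpp) ----------
  // gvn().transform() value-numbers a node: it folds what it can and returns
  // a canonical, possibly pre-existing node, so identical expressions share a
  // single node. Toy hash-consing model over (opcode, in1, in2) keys:
  {
    struct GvnSketch {
      int opcode[16], in1[16], in2[16], id[16], n;
      int transform(int op, int a, int b, int fresh_id) {
        for (int i = 0; i < n; i++) {
          if (opcode[i] == op && in1[i] == a && in2[i] == b) {
            return id[i];                    // reuse the existing node
          }
        }
        opcode[n] = op; in1[n] = a; in2[n] = b; id[n] = fresh_id;
        return id[n++];                      // register a new node
      }
    };
  }
  // ----------------------------------------------------------------------------
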
  // Figure out if we need to emit the trailing barrier. The barrier is only
  // needed in the constructors, and only in three cases:
// ...
    // transform each slice of the original memphi:
    mms.set_memory(_gvn.transform(mms.memory()));
  }

  if (tf()->range()->cnt() > TypeFunc::Parms) {
    const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
    Node* ret_phi = _gvn.transform( _exits.argument(0) );
    assert(_exits.control()->is_top() || !_gvn.type(ret_phi)->empty(), "return value must be well defined");
    _exits.push_node(ret_type->basic_type(), ret_phi);
  }

  // Note: Logic for creating and optimizing the ReturnNode is in Compile.

  // Unlock along the exceptional paths.
  // This is done late so that we can common up equivalent exceptions
  // (e.g., null checks) arising from multiple points within this method.
  // See GraphKit::add_exception_state, which performs the commoning.
  bool do_synch = method()->is_synchronized() && GenerateSynchronizationCode;

  // Record exit from a method if compiled while DTrace is turned on.
  if (do_synch || C->env()->dtrace_method_probes() || _replaced_nodes_for_exceptions) {
    // First move the exception list out of _exits:
    GraphKit kit(_exits.transfer_exceptions_into_jvms());
    SafePointNode* normal_map = kit.map();  // keep this guy safe
    // Now re-collect the exceptions into _exits:
    SafePointNode* ex_map;
    while ((ex_map = kit.pop_exception_state()) != NULL) {
      Node* ex_oop = kit.use_exception_state(ex_map);
      // Force the exiting JVM state to have this method at InvocationEntryBci.
      // The exiting JVM state is otherwise a copy of the calling JVMS.
      JVMState* caller = kit.jvms();
      JVMState* ex_jvms = caller->clone_shallow(C);
      ex_jvms->set_map(kit.clone_map());
      ex_jvms->map()->set_jvms(ex_jvms);
      ex_jvms->set_bci(InvocationEntryBci);
      kit.set_jvms(ex_jvms);
      if (do_synch) {
        // Add on the synchronized-method box/object combo
        kit.map()->push_monitor(_synch_lock);
        // Unlock!
        kit.shared_unlock(_synch_lock->box_node(), _synch_lock->obj_node());
      }
      if (C->env()->dtrace_method_probes()) {
        kit.make_dtrace_method_exit(method());
      }
      if (_replaced_nodes_for_exceptions) {
        kit.map()->apply_replaced_nodes();
      }
      // Done with exception-path processing.
      ex_map = kit.make_exception_state(ex_oop);
      assert(ex_jvms->same_calls_as(ex_map->jvms()), "sanity");
      // Pop the last vestige of this method:
      ex_map->set_jvms(caller->clone_shallow(C));
      ex_map->jvms()->set_map(ex_map);
      _exits.push_exception_state(ex_map);
    }
    assert(_exits.map() == normal_map, "keep the same return state");
  }

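  // --- Editorial sketch (illustrative only; not part of parse1.cpp) ----------
  // The loop above re-parents each exception state so it appears to leave
  // this method at InvocationEntryBci. Modeling a JVMS as a chain of frames,
  // a shallow clone copies just the innermost frame, whose bci is then forced:
  {
    struct FrameSketch {
      int bci;
      const FrameSketch* caller;             // next-outer frame, not owned
      static FrameSketch exit_state(const FrameSketch& top, int entry_bci) {
        FrameSketch ex = top;                // "clone_shallow": top frame only
        ex.bci = entry_bci;                  // mirrors ex_jvms->set_bci(...)
        return ex;
      }
    };
  }
  // ----------------------------------------------------------------------------
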
  {
    // Capture very early exceptions (receiver null checks) from caller JVMS
    GraphKit caller(_caller);
    SafePointNode* ex_map;
    while ((ex_map = caller.pop_exception_state()) != NULL) {
      _exits.add_exception_state(ex_map);
    }
  }
  _exits.map()->apply_replaced_nodes();
}

//-----------------------------create_entry_map-------------------------------
// Initialize our parser map to contain the types at method entry.
// For OSR, the map contains a single RawPtr parameter.
// Initial monitor locking for sync. methods is performed by do_method_entry.
SafePointNode* Parse::create_entry_map() {
  // Check for really stupid bail-out cases.
  uint len = TypeFunc::Parms + method()->max_locals() + method()->max_stack();
  if (len >= 32760) {
    C->record_method_not_compilable_all_tiers("too many local variables");
    return NULL;
  }

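  // --- Editorial sketch (illustrative only; not part of parse1.cpp) ----------
  // The entry map is laid out as [fixed inputs | locals | expression stack],
  // so 'len' above is TypeFunc::Parms + max_locals + max_stack, and the
  // bail-out keeps the node's input count under an internal size limit.
  // Layout arithmetic, with kParms standing in for TypeFunc::Parms:
  {
    struct EntryMapSketch {
      enum { kParms = 5 };
      static unsigned map_len(unsigned locals, unsigned stack) {
        return kParms + locals + stack;      // 'len' above
      }
      static unsigned local_slot(unsigned i) { return kParms + i; }
      static unsigned stack_slot(unsigned locals, unsigned j) {
        return kParms + locals + j;          // stack follows the locals
      }
    };
  }
  // ----------------------------------------------------------------------------
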
  // Clear current replaced nodes that are of no use from here on
  // (the map was cloned in build_exits).
  _caller->map()->delete_replaced_nodes();

  // If this is an inlined method, we may have to do a receiver null check.
  if (_caller->has_method() && is_normal_parse() && !method()->is_static()) {
    GraphKit kit(_caller);
    kit.null_check_receiver_before_call(method());
    _caller = kit.transfer_exceptions_into_jvms();
    if (kit.stopped()) {
      _exits.add_exception_states_from(_caller);
      _exits.set_jvms(_caller);
      return NULL;
    }
  }

  assert(method() != NULL, "parser must have a method");

  // Create an initial safepoint to hold JVM state during parsing
  JVMState* jvms = new (C) JVMState(method(), _caller->has_method() ? _caller : NULL);
  set_map(new SafePointNode(len, jvms));
  jvms->set_map(map());
  record_for_igvn(map());
  assert(jvms->endoff() == len, "correct jvms sizing");

  SafePointNode* inmap = _caller->map();
  assert(inmap != NULL, "must have inmap");
  // In case of null check on receiver above
  map()->transfer_replaced_nodes_from(inmap, _new_idx);

  uint i;

  // Pass thru the predefined input parameters.
  for (i = 0; i < TypeFunc::Parms; i++) {
    map()->init_req(i, inmap->in(i));
  }

  if (depth() == 1) {
    assert(map()->memory()->Opcode() == Op_Parm, "");
    // Insert the memory aliasing node
    set_all_memory(reset_memory());
  }
  assert(merged_memory(), "");

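  // --- Editorial sketch (illustrative only; not part of parse1.cpp) ----------
  // At depth 1 the single incoming memory Parm is split into a MergeMem so
  // later loads and stores can be tracked per alias class (one "slice" per
  // disjoint memory region). Toy model: one base state fanned out to slices
  // that may then diverge independently:
  {
    struct MergeMemSketch {
      enum { kSlices = 8 };                  // number of alias classes (toy)
      int slice[kSlices];                    // one memory state id per slice
      explicit MergeMemSketch(int base_state) {
        for (int s = 0; s < kSlices; s++) {
          slice[s] = base_state;             // all slices start at the Parm
        }
      }
    };
  }
  // ----------------------------------------------------------------------------
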
  // Now add the locals which are initially bound to arguments:
  uint arg_size = tf()->domain()->cnt();
  ensure_stack(arg_size - TypeFunc::Parms);  // OSR methods have funny args
  for (i = TypeFunc::Parms; i < arg_size; i++) {
    map()->init_req(i, inmap->argument(_caller, i - TypeFunc::Parms));
// ...
          // So far, Phis have had a reasonable type from ciTypeFlow.
          // Now _gvn will join that with the meet of current inputs.
          // BOTTOM is never permissible here, 'cause pessimistically
          // Phis of pointers cannot lose the basic pointer type.
          debug_only(const Type* bt1 = phi->bottom_type());
          assert(bt1 != Type::BOTTOM, "should not be building conflict phis");
          map()->set_req(j, _gvn.transform_no_reclaim(phi));
          debug_only(const Type* bt2 = phi->bottom_type());
          assert(bt2->higher_equal_speculative(bt1), "must be consistent with type-flow");
          record_for_igvn(phi);
        }
      }
    } // End of for all values to be merged

    if (pnum == PhiNode::Input &&
        !r->in(0)) {  // The occasional useless Region
      assert(control() == r, "");
      set_control(r->nonnull_req());
    }

    map()->merge_replaced_nodes_with(newin);

    // newin has been subsumed into the lazy merge, and is now dead.
    set_block(save_block);

    stop();  // done with this guy, for now
  }

  if (TraceOptoParse) {
    tty->print_cr(" on path %d", pnum);
  }

  // Done with this parser state.
  assert(stopped(), "");
}

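// --- Editorial sketch (illustrative only; not part of parse1.cpp) ------------
// 'higher_equal' in C2's type lattice means "at least as precise". The assert
// above checks that re-transforming a Phi never loses precision relative to
// what ciTypeFlow predicted. A toy three-point lattice with the same check:
struct LatticeSketch {
  enum Lat { lat_bottom = 0, lat_instptr = 1, lat_top = 2 };  // coarse..precise
  static bool higher_equal(Lat a, Lat b) { return a >= b; }
  // e.g. higher_equal(lat_instptr, lat_bottom) holds: an instance pointer
  // type is at least as precise as the bottom of the lattice.
};
// ------------------------------------------------------------------------------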

//--------------------------merge_memory_edges---------------------------------
void Parse::merge_memory_edges(MergeMemNode* n, int pnum, bool nophi) {
  // (nophi means we must not create phis, because we already parsed here)
  assert(n != NULL, "");
  // Merge the inputs to the MergeMems
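  // --- Editorial sketch (illustrative only; not part of parse1.cpp) ----------
  // Memory is merged slice-by-slice: for each alias class, if the incoming
  // edge differs from the current one, that slice needs (or reuses) a Phi.
  // Toy model of the per-slice decision, with -1 marking "needs a Phi":
  {
    struct MemMergeSketch {
      enum { kSlices = 8 };
      static void merge(const int (&cur)[kSlices], const int (&in)[kSlices],
                        int (&out)[kSlices]) {
        for (int s = 0; s < kSlices; s++) {
          out[s] = (cur[s] == in[s]) ? cur[s] : -1;
        }
      }
    };
  }
  // ----------------------------------------------------------------------------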
// ...

  // frame pointer is always same, already captured
  if (value != NULL) {
    // If returning oops to an interface-return, there is a silent free
    // cast from oop to interface allowed by the Verifier. Make it explicit
    // here.
    Node* phi = _exits.argument(0);
    const TypeInstPtr* tr = phi->bottom_type()->isa_instptr();
    if (tr && tr->klass()->is_loaded() &&
        tr->klass()->is_interface()) {
      const TypeInstPtr* tp = value->bottom_type()->isa_instptr();
      if (tp && tp->klass()->is_loaded() &&
          !tp->klass()->is_interface()) {
        // sharpen the type eagerly; this eases certain assert checking
        if (tp->higher_equal(TypeInstPtr::NOTNULL))
          tr = tr->join_speculative(TypeInstPtr::NOTNULL)->is_instptr();
        value = _gvn.transform(new CheckCastPPNode(0, value, tr));
      }
    }
    phi->add_req(value);
  }

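  // --- Editorial sketch (illustrative only; not part of parse1.cpp) ----------
  // The verifier lets a method whose declared return type is an interface
  // return any object without a checkcast, so the value's type may be a
  // concrete class while the return phi's type is the interface. The parser
  // makes the conversion explicit with a CheckCastPP; the trigger is simply:
  {
    struct RetCastSketch {
      struct Klass { bool loaded; bool is_interface; };
      static bool needs_explicit_cast(const Klass& ret, const Klass& val) {
        // mirrors the tr/tp tests above: interface return, concrete value
        return ret.loaded && ret.is_interface && val.loaded && !val.is_interface;
      }
    };
  }
  // ----------------------------------------------------------------------------
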
  if (_first_return) {
    _exits.map()->transfer_replaced_nodes_from(map(), _new_idx);
    _first_return = false;
  } else {
    _exits.map()->merge_replaced_nodes_with(map());
  }

  stop_and_kill_map();  // This CFG path dies here
}

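// --- Editorial sketch (illustrative only; not part of parse1.cpp) ------------
// Replaced-nodes bookkeeping records "node A was improved to node B" pairs so
// a caller can reuse the improvements after inlining. The first return above
// transfers the whole list; later returns merge, which (on this sketch's
// reading) keeps only replacements that are valid on every returning path:
struct ReplacedSketch {
  enum { kMax = 16 };
  int initial[kMax], improved[kMax], n;
  // Keep only the (initial, improved) pairs present in both this and other.
  void merge_with(const ReplacedSketch& other) {
    int m = 0;
    for (int i = 0; i < n; i++) {
      for (int j = 0; j < other.n; j++) {
        if (initial[i] == other.initial[j] && improved[i] == other.improved[j]) {
          initial[m] = initial[i]; improved[m] = improved[i]; m++;
          break;
        }
      }
    }
    n = m;
  }
};
// ------------------------------------------------------------------------------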

//------------------------------add_safepoint----------------------------------
void Parse::add_safepoint() {
  // See if we can avoid this safepoint. No need for a SafePoint immediately
  // after a Call (except Leaf Call) or another SafePoint.
  Node* proj = control();
  bool add_poll_param = SafePointNode::needs_polling_address_input();
  uint parms = add_poll_param ? TypeFunc::Parms + 1 : TypeFunc::Parms;
  if (proj->is_Proj()) {
    Node* n0 = proj->in(0);
    if (n0->is_Catch()) {
      n0 = n0->in(0)->in(0);
      assert(n0->is_Call(), "expect a call here");
    }
    if (n0->is_Call()) {