
src/share/vm/opto/parse1.cpp

rev 9165 : 8141551: C2 can not handle returns with incompatible interface arrays

Old version:

 974 #ifndef PRODUCT
 975     if (PrintOpto && (Verbose || WizardMode)) {
 976       method()->print_name();
 977       tty->print_cr(" writes @Stable and needs a memory barrier");
 978     }
 979 #endif
 980   }
 981 
 982   for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) {
 983     // transform each slice of the original memphi:
 984     mms.set_memory(_gvn.transform(mms.memory()));
 985   }
 986 
 987   if (tf()->range()->cnt() > TypeFunc::Parms) {
 988     const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
 989     Node*       ret_phi  = _gvn.transform( _exits.argument(0) );
 990     if (!_exits.control()->is_top() && _gvn.type(ret_phi)->empty()) {
 991       // In case of concurrent class loading, the type we set for the
 992       // ret_phi in build_exits() may have been too optimistic and the
 993       // ret_phi may be top now.
 994 #ifdef ASSERT
 995       {
 996         MutexLockerEx ml(Compile_lock, Mutex::_no_safepoint_check_flag);
 997         assert(ret_type->isa_ptr() && C->env()->system_dictionary_modification_counter_changed(), "return value must be well defined");
 998       }
 999 #endif
1000       C->record_failure(C2Compiler::retry_class_loading_during_parsing());
1001     }
1002     _exits.push_node(ret_type->basic_type(), ret_phi);
1003   }
1004 
1005   // Note:  Logic for creating and optimizing the ReturnNode is in Compile.
1006 
1007   // Unlock along the exceptional paths.
1008   // This is done late so that we can common up equivalent exceptions
1009   // (e.g., null checks) arising from multiple points within this method.
1010   // See GraphKit::add_exception_state, which performs the commoning.
1011   bool do_synch = method()->is_synchronized() && GenerateSynchronizationCode;
1012 
1013   // record exit from a method if compiled while Dtrace is turned on.
1014   if (do_synch || C->env()->dtrace_method_probes() || _replaced_nodes_for_exceptions) {
1015     // First move the exception list out of _exits:
1016     GraphKit kit(_exits.transfer_exceptions_into_jvms());
1017     SafePointNode* normal_map = kit.map();  // keep this guy safe
1018     // Now re-collect the exceptions into _exits:
1019     SafePointNode* ex_map;
1020     while ((ex_map = kit.pop_exception_state()) != NULL) {

New version:

 974 #ifndef PRODUCT
 975     if (PrintOpto && (Verbose || WizardMode)) {
 976       method()->print_name();
 977       tty->print_cr(" writes @Stable and needs a memory barrier");
 978     }
 979 #endif
 980   }
 981 
 982   for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) {
 983     // transform each slice of the original memphi:
 984     mms.set_memory(_gvn.transform(mms.memory()));
 985   }
 986 
 987   if (tf()->range()->cnt() > TypeFunc::Parms) {
 988     const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
 989     Node*       ret_phi  = _gvn.transform( _exits.argument(0) );
 990     if (!_exits.control()->is_top() && _gvn.type(ret_phi)->empty()) {
 991       // In case of concurrent class loading, the type we set for the
 992       // ret_phi in build_exits() may have been too optimistic and the
 993       // ret_phi may be top now.
 994       // Otherwise, we've encountered an error and have to mark the method as
 995       // not compilable. Just using an assertion instead would be dangerous
 996       // as this could lead to an infinite compile loop in non-debug builds.
 997       {
 998         MutexLockerEx ml(Compile_lock, Mutex::_no_safepoint_check_flag);
 999         if (C->env()->system_dictionary_modification_counter_changed()) {
1000           C->record_failure(C2Compiler::retry_class_loading_during_parsing());
1001         } else {
1002           C->record_method_not_compilable("Can't determine return type.");
1003         }
1004       }
1005       return;
1006     }
1007     _exits.push_node(ret_type->basic_type(), ret_phi);
1008   }
1009 
1010   // Note:  Logic for creating and optimizing the ReturnNode is in Compile.
1011 
1012   // Unlock along the exceptional paths.
1013   // This is done late so that we can common up equivalent exceptions
1014   // (e.g., null checks) arising from multiple points within this method.
1015   // See GraphKit::add_exception_state, which performs the commoning.
1016   bool do_synch = method()->is_synchronized() && GenerateSynchronizationCode;
1017 
1018   // record exit from a method if compiled while Dtrace is turned on.
1019   if (do_synch || C->env()->dtrace_method_probes() || _replaced_nodes_for_exceptions) {
1020     // First move the exception list out of _exits:
1021     GraphKit kit(_exits.transfer_exceptions_into_jvms());
1022     SafePointNode* normal_map = kit.map();  // keep this guy safe
1023     // Now re-collect the exceptions into _exits:
1024     SafePointNode* ex_map;
1025     while ((ex_map = kit.pop_exception_state()) != NULL) {
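
The heart of the change is the new branch at lines 997-1004: when the transformed return phi has degenerated to top, the compiler now distinguishes a benign race with concurrent class loading (retry the compilation) from a genuine type error (mark the method not compilable). Below is a minimal standalone sketch of that decision, not HotSpot code; all names in it are made up for illustration and do not correspond to the real Compile/ciEnv API.

    #include <cstdio>

    enum class CompileAction { Proceed, RetryCompilation, MarkNotCompilable };

    // ret_phi_is_top models _gvn.type(ret_phi)->empty();
    // dictionary_changed models system_dictionary_modification_counter_changed().
    CompileAction classify_top_return(bool ret_phi_is_top, bool dictionary_changed) {
      if (!ret_phi_is_top) {
        return CompileAction::Proceed;            // return type is well defined
      }
      if (dictionary_changed) {
        return CompileAction::RetryCompilation;   // class loading raced with
                                                  // parsing; a retry can succeed
      }
      return CompileAction::MarkNotCompilable;    // real error; never resubmit
    }

    int main() {
      // A top phi without a concurrent class load is the newly handled error case.
      std::printf("%d\n", static_cast<int>(classify_top_return(true, false)));  // prints 2
      return 0;
    }

Recording the failure instead of merely asserting matters because asserts vanish in product builds: the same method would be resubmitted, hit the same top phi again, and loop in the compiler forever, which is exactly what the new comment at lines 994-996 warns about.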