src/share/vm/opto/parse1.cpp

rev 7602 : 8055530: assert(_exits.control()->is_top() || !_gvn.type(ret_phi)->empty()) failed: return value must be well defined
Summary: concurrent class loading causes return phi to become top
Reviewed-by:
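The hunk below shows the exit-merging code in parse1.cpp, with the original version first and the patched version after it. The assert on old line 989 requires the transformed return phi to have a well-defined (non-empty) type, but, as the patch's comment explains, the type chosen for ret_phi in build_exits() may have been too optimistic, and concurrent class loading can leave the phi's computed type as top. As a rough standalone analogy (not HotSpot code; the toy types and class names are hypothetical), intersecting an optimistic declared type with the type actually flowing out of the return paths can produce the empty type:

#include <cstdio>
#include <string>

// Toy type: "empty" plays the role of Type::empty() (top), and klass is an
// optional class constraint ("" means any object).
struct ToyType {
  bool        empty;
  std::string klass;
};

// Intersection of two toy types: keep only values that satisfy both.
static ToyType intersect(const ToyType& a, const ToyType& b) {
  if (a.empty || b.empty)  return ToyType{true, std::string()};
  if (a.klass.empty())     return b;                 // a imposes no constraint
  if (b.klass.empty())     return a;                 // b imposes no constraint
  if (a.klass == b.klass)  return a;                 // same class: compatible
  return ToyType{true, std::string()};               // unrelated: nothing fits
}

int main() {
  // Type recorded for the return phi when the exits were built (optimistic).
  ToyType declared{false, "com/example/Foo"};        // hypothetical class
  // Type of the value the parsed paths actually return after a different
  // class was resolved because of concurrent class loading.
  ToyType observed{false, "com/example/Bar"};        // hypothetical class

  ToyType phi_type = intersect(declared, observed);
  std::printf("return phi type is %s\n",
              phi_type.empty ? "empty (top)" : "well defined");
  // The unpatched assert demanded a well-defined type unconditionally; the
  // patch tolerates this case when classes were loaded during compilation.
  return 0;
}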


 969   // those also. If there is a predecessor allocation node, bind the
 970   // barrier there.
 971   if (wrote_stable()) {
 972     _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final());
 973 #ifndef PRODUCT
 974     if (PrintOpto && (Verbose || WizardMode)) {
 975       method()->print_name();
 976       tty->print_cr(" writes @Stable and needs a memory barrier");
 977     }
 978 #endif
 979   }
 980 
 981   for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) {
 982     // transform each slice of the original memphi:
 983     mms.set_memory(_gvn.transform(mms.memory()));
 984   }
 985 
 986   if (tf()->range()->cnt() > TypeFunc::Parms) {
 987     const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
 988     Node*       ret_phi  = _gvn.transform( _exits.argument(0) );
 989     assert(_exits.control()->is_top() || !_gvn.type(ret_phi)->empty(), "return value must be well defined");
 990     _exits.push_node(ret_type->basic_type(), ret_phi);
 991   }
 992 
 993   // Note:  Logic for creating and optimizing the ReturnNode is in Compile.
 994 
 995   // Unlock along the exceptional paths.
 996   // This is done late so that we can common up equivalent exceptions
 997   // (e.g., null checks) arising from multiple points within this method.
 998   // See GraphKit::add_exception_state, which performs the commoning.
 999   bool do_synch = method()->is_synchronized() && GenerateSynchronizationCode;
1000 
1001   // record exit from a method if compiled while Dtrace is turned on.
1002   if (do_synch || C->env()->dtrace_method_probes() || _replaced_nodes_for_exceptions) {
1003     // First move the exception list out of _exits:
1004     GraphKit kit(_exits.transfer_exceptions_into_jvms());
1005     SafePointNode* normal_map = kit.map();  // keep this guy safe
1006     // Now re-collect the exceptions into _exits:
1007     SafePointNode* ex_map;
1008     while ((ex_map = kit.pop_exception_state()) != NULL) {
1009       Node* ex_oop = kit.use_exception_state(ex_map);




 969   // those also. If there is a predecessor allocation node, bind the
 970   // barrier there.
 971   if (wrote_stable()) {
 972     _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final());
 973 #ifndef PRODUCT
 974     if (PrintOpto && (Verbose || WizardMode)) {
 975       method()->print_name();
 976       tty->print_cr(" writes @Stable and needs a memory barrier");
 977     }
 978 #endif
 979   }
 980 
 981   for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) {
 982     // transform each slice of the original memphi:
 983     mms.set_memory(_gvn.transform(mms.memory()));
 984   }
 985 
 986   if (tf()->range()->cnt() > TypeFunc::Parms) {
 987     const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
 988     Node*       ret_phi  = _gvn.transform( _exits.argument(0) );
 989 #ifdef ASSERT
 990     {
 991       // In case of concurrent class loading, the type we set for the
 992       // ret_phi in build_exits() may have been too optimistic and the
 993       // ret_phi may be top now.
 994       MutexLockerEx ml(Compile_lock, Mutex::_no_safepoint_check_flag);
 995       assert(_exits.control()->is_top() || !_gvn.type(ret_phi)->empty() ||
 996              (ret_type->isa_ptr() && C->env()->system_dictionary_modification_counter_changed()), "return value must be well defined");
 997     }
 998 #endif
 999     _exits.push_node(ret_type->basic_type(), ret_phi);
1000   }
1001 
1002   // Note:  Logic for creating and optimizing the ReturnNode is in Compile.
1003 
1004   // Unlock along the exceptional paths.
1005   // This is done late so that we can common up equivalent exceptions
1006   // (e.g., null checks) arising from multiple points within this method.
1007   // See GraphKit::add_exception_state, which performs the commoning.
1008   bool do_synch = method()->is_synchronized() && GenerateSynchronizationCode;
1009 
1010   // record exit from a method if compiled while Dtrace is turned on.
1011   if (do_synch || C->env()->dtrace_method_probes() || _replaced_nodes_for_exceptions) {
1012     // First move the exception list out of _exits:
1013     GraphKit kit(_exits.transfer_exceptions_into_jvms());
1014     SafePointNode* normal_map = kit.map();  // keep this guy safe
1015     // Now re-collect the exceptions into _exits:
1016     SafePointNode* ex_map;
1017     while ((ex_map = kit.pop_exception_state()) != NULL) {
1018       Node* ex_oop = kit.use_exception_state(ex_map);
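The patched version wraps the check in #ifdef ASSERT, takes Compile_lock via a no-safepoint-check MutexLockerEx, and tolerates an empty ret_phi type when the return type is a pointer and C->env()->system_dictionary_modification_counter_changed() reports that classes were loaded while this method was compiling. A minimal standalone sketch of that shape, assuming a hypothetical modification counter and lock rather than HotSpot's real ones (ToyEnv, ToyCompilation, dictionary_mod_count and check_return_value are invented names):

#include <cassert>
#include <mutex>

// Hypothetical stand-ins: a shared "dictionary" with a modification counter
// (bumped whenever a class is loaded) and a compilation that snapshotted the
// counter when it started.
struct ToyEnv {
  std::mutex dictionary_lock;          // stands in for Compile_lock
  long       dictionary_mod_count = 0; // bumped on every class load
};

struct ToyCompilation {
  ToyEnv* env;
  long    mod_count_at_start;          // snapshot taken when parsing began

  // In spirit: system_dictionary_modification_counter_changed().
  bool dictionary_changed() const {
    return env->dictionary_mod_count != mod_count_at_start;
  }

  void check_return_value(bool control_is_top, bool phi_type_is_empty,
                          bool ret_type_is_pointer) const {
    std::lock_guard<std::mutex> ml(env->dictionary_lock);
    // Old check: control_is_top || !phi_type_is_empty.
    // New check also accepts an empty phi type for a pointer return when
    // classes were loaded concurrently during this compilation.
    assert((control_is_top || !phi_type_is_empty ||
            (ret_type_is_pointer && dictionary_changed())) &&
           "return value must be well defined");
  }
};

int main() {
  ToyEnv env;
  ToyCompilation c{&env, env.dictionary_mod_count};

  // A class gets loaded concurrently while the method is being compiled.
  env.dictionary_mod_count++;

  // The phi's type collapsed to empty, but the relaxed check accepts it
  // because the dictionary changed and the return value is a pointer.
  c.check_return_value(/*control_is_top=*/false,
                       /*phi_type_is_empty=*/true,
                       /*ret_type_is_pointer=*/true);
  return 0;
}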

