950 // 951 // "All bets are off" unless the first publication occurs after a 952 // normal return from the constructor. We do not attempt to detect 953 // such unusual early publications. But no barrier is needed on 954 // exceptional returns, since they cannot publish normally. 955 // 956 if (method()->is_initializer() && 957 (wrote_final() || 958 PPC64_ONLY(wrote_volatile() ||) 959 (AlwaysSafeConstructors && wrote_fields()))) { 960 _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final()); 961 #ifndef PRODUCT 962 if (PrintOpto && (Verbose || WizardMode)) { 963 method()->print_name(); 964 tty->print_cr(" writes finals and needs a memory barrier"); 965 } 966 #endif 967 } 968 969 // Any method can write a @Stable field; insert memory barriers after 970 // those also. If there is a predecessor allocation node, bind the 971 // barrier there. 972 if (wrote_stable()) { 973 _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final()); 974 #ifndef PRODUCT 975 if (PrintOpto && (Verbose || WizardMode)) { 976 method()->print_name(); 977 tty->print_cr(" writes @Stable and needs a memory barrier"); 978 } 979 #endif 980 } 981 982 for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) { 983 // transform each slice of the original memphi: 984 mms.set_memory(_gvn.transform(mms.memory())); 985 } 986 987 if (tf()->range()->cnt() > TypeFunc::Parms) { 988 const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms); 989 Node* ret_phi = _gvn.transform( _exits.argument(0) ); 990 if (!_exits.control()->is_top() && _gvn.type(ret_phi)->empty()) { 991 // In case of concurrent class loading, the type we set for the 992 // ret_phi in build_exits() may have been too optimistic and the 993 // ret_phi may be top now. | 950 // 951 // "All bets are off" unless the first publication occurs after a 952 // normal return from the constructor. We do not attempt to detect 953 // such unusual early publications. 
But no barrier is needed on 954 // exceptional returns, since they cannot publish normally. 955 // 956 if (method()->is_initializer() && 957 (wrote_final() || 958 PPC64_ONLY(wrote_volatile() ||) 959 (AlwaysSafeConstructors && wrote_fields()))) { 960 _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final()); 961 #ifndef PRODUCT 962 if (PrintOpto && (Verbose || WizardMode)) { 963 method()->print_name(); 964 tty->print_cr(" writes finals and needs a memory barrier"); 965 } 966 #endif 967 } 968 969 // Any method can write a @Stable field; insert memory barriers after 970 // those also. We can't bind the barrier to a predecessor allocation node, 971 // because the allocation doesn't always dominate the MemBarRelease. Example: 972 // foo(StableFieldObject s) { 973 // if (s != null) return s; 974 // s = new StableFieldObject(); 975 // s.stable = 1; 976 // } 977 // The MemBarRelease inserted at the exit is not dominated by the allocation 978 // (the early-return path might be taken) and would trigger an assertion in 979 // verify_dominance, because the use (MemBarRelease) is not dominated by the 980 // def (allocation). 981 if (wrote_stable()) { 982 _exits.insert_mem_bar(Op_MemBarRelease); 983 #ifndef PRODUCT 984 if (PrintOpto && (Verbose || WizardMode)) { 985 method()->print_name(); 986 tty->print_cr(" writes @Stable and needs a memory barrier"); 987 } 988 #endif 989 } 990 991 for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) { 992 // transform each slice of the original memphi: 993 mms.set_memory(_gvn.transform(mms.memory())); 994 } 995 996 if (tf()->range()->cnt() > TypeFunc::Parms) { 997 const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms); 998 Node* ret_phi = _gvn.transform( _exits.argument(0) ); 999 if (!_exits.control()->is_top() && _gvn.type(ret_phi)->empty()) { 1000 // In case of concurrent class loading, the type we set for the 1001 // ret_phi in build_exits() may have been too optimistic and the 1002 // ret_phi may be top now. |