--- old/src/hotspot/share/opto/bytecodeInfo.cpp 2019-05-01 16:26:51.000000000 -0700
+++ new/src/hotspot/share/opto/bytecodeInfo.cpp 2019-05-01 16:26:50.000000000 -0700
@@ -206,7 +206,7 @@
     fail_msg = "abstract method"; // // note: we allow ik->is_abstract()
   } else if (!callee_method->holder()->is_initialized() &&
              // access allowed in the context of static initializer
-             !C->is_compiling_clinit_for(callee_method->holder())) {
+             C->needs_clinit_barrier(callee_method, caller_method)) {
     fail_msg = "method holder not initialized";
   } else if ( callee_method->is_native()) {
     fail_msg = "native method";
@@ -481,7 +481,7 @@
   if( !callee_holder->is_loaded() )      return false;
   if( !callee_holder->is_initialized() &&
       // access allowed in the context of static initializer
-      !C->is_compiling_clinit_for(callee_holder)) {
+      C->needs_clinit_barrier(callee_method, caller_method)) {
    return false;
  }
  if( !UseInterpreter ) /* running Xcomp */ {
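The two hunks above relax C2's inlining restriction: instead of allowing methods of a not-yet-initialized holder only while compiling that holder's own <clinit> (is_compiling_clinit_for), inlining is now permitted whenever no class-initialization barrier would be required at the call site. A minimal standalone model of the two predicates (plain C++, not HotSpot code; needs_clinit_barrier itself is added in compile.cpp below, and all names here are illustrative):

    // Old rule: inline only while compiling the holder's own <clinit>.
    // New rule: inline whenever the callee needs no initialization barrier
    // in the caller's context (see Compile::needs_clinit_barrier below).
    struct InlineSite {
      bool holder_initialized;    // callee_method->holder()->is_initialized()
      bool root_is_holder_clinit; // old test: C->is_compiling_clinit_for(holder)
      bool barrier_needed;        // new test: C->needs_clinit_barrier(callee, caller)
    };

    bool old_inline_ok(const InlineSite& s) {
      return s.holder_initialized || s.root_is_holder_clinit;
    }

    bool new_inline_ok(const InlineSite& s) {
      return s.holder_initialized || !s.barrier_needed;
    }

The new predicate is typically more permissive, e.g. for a static callee whose holder is currently being initialized by the <clinit> under compilation.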
--- old/src/hotspot/share/opto/compile.cpp 2019-05-01 16:26:51.000000000 -0700
+++ new/src/hotspot/share/opto/compile.cpp 2019-05-01 16:26:51.000000000 -0700
@@ -3827,9 +3827,42 @@
   }
 }
 
-bool Compile::is_compiling_clinit_for(ciKlass* k) {
-  ciMethod* root = method(); // the root method of compilation
-  return root->is_static_initializer() && root->holder() == k; // access in the context of clinit
+bool Compile::needs_clinit_barrier(ciMethod* method, ciMethod* accessing_method) {
+  return method->is_static() && needs_clinit_barrier(method->holder(), accessing_method);
+}
+
+bool Compile::needs_clinit_barrier(ciField* field, ciMethod* accessing_method) {
+  return field->is_static() && needs_clinit_barrier(field->holder(), accessing_method);
+}
+
+bool Compile::needs_clinit_barrier(ciInstanceKlass* holder, ciMethod* accessing_method) {
+  if (holder->is_initialized()) {
+    return false;
+  }
+  if (holder->is_being_initialized()) {
+    if (accessing_method->holder() == holder) {
+      // Access inside a class. The barrier can be elided when access happens in <clinit>,
+      // <init>, or a static method. In all those cases, an initialization barrier on the
+      // holder klass has already been passed.
+      if (accessing_method->is_static_initializer() ||
+          accessing_method->is_object_initializer() ||
+          accessing_method->is_static()) {
+        return false;
+      }
+    } else if (accessing_method->holder()->is_subclass_of(holder)) {
+      // Access from a subclass. The barrier can be elided only when access happens in <clinit>.
+      // In case of <init> or a static method, a barrier on the subclass is not enough: the
+      // child class can become fully initialized while its parent class is still being initialized.
+      if (accessing_method->is_static_initializer()) {
+        return false;
+      }
+    }
+    ciMethod* root = method(); // the root method of compilation
+    if (root != accessing_method) {
+      return needs_clinit_barrier(holder, root); // check access in the context of compilation root
+    }
+  }
+  return true;
 }
 
 #ifndef PRODUCT
--- old/src/hotspot/share/opto/compile.hpp 2019-05-01 16:26:52.000000000 -0700
+++ new/src/hotspot/share/opto/compile.hpp 2019-05-01 16:26:52.000000000 -0700
@@ -1378,7 +1378,9 @@
   CloneMap&     clone_map();
   void          set_clone_map(Dict* d);
 
-  bool is_compiling_clinit_for(ciKlass* k);
+  bool needs_clinit_barrier(ciField* ik, ciMethod* accessing_method);
+  bool needs_clinit_barrier(ciMethod* ik, ciMethod* accessing_method);
+  bool needs_clinit_barrier(ciInstanceKlass* ik, ciMethod* accessing_method);
 };
 
 #endif // SHARE_OPTO_COMPILE_HPP
--- old/src/hotspot/share/opto/graphKit.cpp 2019-05-01 16:26:52.000000000 -0700
+++ new/src/hotspot/share/opto/graphKit.cpp 2019-05-01 16:26:52.000000000 -0700
@@ -2847,6 +2847,60 @@
   return false;
 }
 
+void GraphKit::guard_klass_being_initialized(Node* klass) {
+  int init_state_off = in_bytes(InstanceKlass::init_state_offset());
+  Node* adr = basic_plus_adr(top(), klass, init_state_off);
+  Node* init_state = LoadNode::make(_gvn, NULL, immutable_memory(), adr,
+                                    adr->bottom_type()->is_ptr(), TypeInt::BYTE,
+                                    T_BYTE, MemNode::unordered);
+  init_state = _gvn.transform(init_state);
+
+  Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
+
+  Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
+  Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
+
+  { BuildCutout unless(this, tst, PROB_MAX);
+    uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
+  }
+}
+
+void GraphKit::guard_init_thread(Node* klass) {
+  int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
+  Node* adr = basic_plus_adr(top(), klass, init_thread_off);
+
+  Node* init_thread = LoadNode::make(_gvn, NULL, immutable_memory(), adr,
+                                     adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
+                                     T_ADDRESS, MemNode::unordered);
+  init_thread = _gvn.transform(init_thread);
+
+  Node* cur_thread = _gvn.transform(new ThreadLocalNode());
+
+  Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
+  Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
+
+  { BuildCutout unless(this, tst, PROB_MAX);
+    uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
+  }
+}
+
+void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
+  if (ik->is_being_initialized()) {
+    if (C->needs_clinit_barrier(ik, context)) {
+      Node* klass = makecon(TypeKlassPtr::make(ik));
+      guard_klass_being_initialized(klass);
+      guard_init_thread(klass);
+      insert_mem_bar(Op_MemBarCPUOrder);
+    }
+  } else if (ik->is_initialized()) {
+    return; // no barrier needed
+  } else {
+    uncommon_trap(Deoptimization::Reason_uninitialized,
+                  Deoptimization::Action_reinterpret,
+                  NULL);
+  }
+}
+
 //------------------------maybe_cast_profiled_receiver-------------------------
 // If the profile has seen exactly one type, narrow to exactly that type.
 // Subsequent type checks will always fold up.
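Compile::needs_clinit_barrier() above is the central elision predicate. The standalone sketch below restates its decision table in plain C++ (not HotSpot code; the types and the explicit root parameter are invented for illustration):

    #include <cassert>

    enum InitState { not_started, being_initialized, fully_initialized };

    struct Klass {
      InitState state;
      const Klass* super; // nullptr at the root of the hierarchy
    };

    struct Method {
      const Klass* holder;
      bool is_static_initializer; // <clinit>
      bool is_object_initializer; // <init>
      bool is_static;
    };

    static bool is_subclass_of(const Klass* k, const Klass* other) {
      for (; k != nullptr; k = k->super) {
        if (k == other) return true;
      }
      return false;
    }

    // Mirrors Compile::needs_clinit_barrier(ciInstanceKlass*, ciMethod*); the
    // compilation root, implicit in the real code, is passed explicitly here.
    bool needs_clinit_barrier(const Klass* holder, const Method* accessing, const Method* root) {
      if (holder->state == fully_initialized) return false;
      if (holder->state == being_initialized) {
        if (accessing->holder == holder) {
          // Inside the class itself: <clinit>, <init>, and static methods
          // have already passed an initialization barrier on the holder.
          if (accessing->is_static_initializer ||
              accessing->is_object_initializer ||
              accessing->is_static) return false;
        } else if (is_subclass_of(accessing->holder, holder)) {
          // From a subclass only <clinit> is safe: the subclass may finish
          // initializing while the parent is still in progress.
          if (accessing->is_static_initializer) return false;
        }
        if (root != accessing) {
          return needs_clinit_barrier(holder, root, root); // context of the compilation root
        }
      }
      return true;
    }

    int main() {
      Klass parent = { being_initialized, nullptr };
      Klass child  = { fully_initialized, &parent };
      Method child_ctor = { &child, false, true, false }; // the subclass <init>
      // A subclass <init> still needs a barrier on the parent; only <clinit> elides it.
      assert(needs_clinit_barrier(&parent, &child_ctor, &child_ctor));
      return 0;
    }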
--- old/src/hotspot/share/opto/graphKit.hpp 2019-05-01 16:26:53.000000000 -0700
+++ new/src/hotspot/share/opto/graphKit.hpp 2019-05-01 16:26:53.000000000 -0700
@@ -386,6 +386,11 @@
   // Check the null_seen bit.
   bool seems_never_null(Node* obj, ciProfileData* data, bool& speculating);
 
+  void guard_klass_being_initialized(Node* klass);
+  void guard_init_thread(Node* klass);
+
+  void clinit_barrier(ciInstanceKlass* ik, ciMethod* context);
+
   // Check for unique class for receiver at call
   ciKlass* profile_has_unique_klass() {
     ciCallProfile profile = method()->call_profile_at_bci(bci());
--- old/src/hotspot/share/opto/parse.hpp 2019-05-01 16:26:54.000000000 -0700
+++ new/src/hotspot/share/opto/parse.hpp 2019-05-01 16:26:53.000000000 -0700
@@ -532,14 +532,12 @@
 
   // common code for making initial checks and forming addresses
   void do_field_access(bool is_get, bool is_field);
-  bool static_field_ok_in_clinit(ciField *field, ciMethod *method);
 
   // common code for actually performing the load or store
   void do_get_xxx(Node* obj, ciField* field, bool is_field);
   void do_put_xxx(Node* obj, ciField* field, bool is_field);
 
   // implementation of object creation bytecodes
-  void emit_guard_for_new(ciInstanceKlass* klass);
   void do_new();
   void do_newarray(BasicType elemtype);
   void do_anewarray();
--- old/src/hotspot/share/opto/parse1.cpp 2019-05-01 16:26:54.000000000 -0700
+++ new/src/hotspot/share/opto/parse1.cpp 2019-05-01 16:26:54.000000000 -0700
@@ -2118,19 +2118,7 @@
     set_parse_bci(0);
 
     Node* holder = makecon(TypeKlassPtr::make(method()->holder()));
-    int init_state_off = in_bytes(InstanceKlass::init_state_offset());
-    const Type* t = TypeKlassPtr::make(method()->holder(), init_state_off);
-    Node* adr = basic_plus_adr(top(), holder, init_state_off);
-    Node* init_state = make_load(control(), adr, TypeInt::BYTE, T_BYTE, MemNode::unordered);
-
-    Node* fully_initialized_state = makecon(TypeInt::make(InstanceKlass::fully_initialized));
-
-    Node* chk = gvn().transform(new CmpINode(init_state, fully_initialized_state));
-    Node* tst = gvn().transform(new BoolNode(chk, BoolTest::ne));
-
-    { BuildCutout unless(this, tst, PROB_MAX);
-      uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
-    }
+    guard_klass_being_initialized(holder);
   }
 
   // Add check to deoptimize if RTM state is not ProfileRTM
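parse1.cpp above now reuses GraphKit::guard_klass_being_initialized() for the guard it plants at the start of a <clinit> compilation. At runtime, the pair of guards emitted by clinit_barrier() reduces to the test below (plain C++ model with invented names; the real checks are compiled IR followed by a CPU-order membar, and each failing branch is an uncommon trap):

    #include <thread>

    enum InitState { not_started, being_initialized, fully_initialized };

    struct KlassModel {
      InitState       init_state;  // models InstanceKlass::_init_state
      std::thread::id init_thread; // models InstanceKlass::_init_thread
    };

    // True when execution may stay in compiled code. The first test models
    // guard_klass_being_initialized() (Reason_initialized trap on failure),
    // the second guard_init_thread() (Reason_uninitialized trap on failure).
    bool clinit_guards_pass(const KlassModel& k) {
      if (k.init_state != being_initialized) return false;
      if (k.init_thread != std::this_thread::get_id()) return false;
      return true;
    }

Both guards use PROB_MAX cutouts, so the compiled fast path assumes the class is still being initialized by the executing thread and deoptimizes otherwise.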
--- old/src/hotspot/share/opto/parse3.cpp 2019-05-01 16:26:55.000000000 -0700
+++ new/src/hotspot/share/opto/parse3.cpp 2019-05-01 16:26:54.000000000 -0700
@@ -40,39 +40,6 @@
 //=============================================================================
 // Helper methods for _get* and _put* bytecodes
 //=============================================================================
-bool Parse::static_field_ok_in_clinit(ciField *field, ciMethod *method) {
-  // Could be the field_holder's <clinit> method, or <clinit> for a subklass.
-  // Better to check now than to Deoptimize as soon as we execute
-  assert( field->is_static(), "Only check if field is static");
-  // is_being_initialized() is too generous. It allows access to statics
-  // by threads that are not running the <clinit> before the <clinit> finishes.
-  // return field->holder()->is_being_initialized();
-
-  // The following restriction is correct but conservative.
-  // It is also desirable to allow compilation of methods called from <clinit>,
-  // but this generated code will need to be made safe for execution by
-  // other threads, or the transition from interpreted to compiled code would
-  // need to be guarded.
-  ciInstanceKlass *field_holder = field->holder();
-
-  if (method->holder()->is_subclass_of(field_holder)) {
-    if (method->is_static_initializer()) {
-      // OK to access static fields inside initializer
-      return true;
-    } else if (method->is_object_initializer()) {
-      // It's also OK to access static fields inside a constructor,
-      // because any thread calling the constructor must first have
-      // synchronized on the class by executing a '_new' bytecode.
-      return true;
-    }
-  }
-  if (C->is_compiling_clinit_for(field_holder)) {
-    return true; // access in the context of static initializer
-  }
-  return false;
-}
-
-
 void Parse::do_field_access(bool is_get, bool is_field) {
   bool will_link;
   ciField* field = iter().get_field(will_link);
@@ -88,15 +55,6 @@
     return;
   }
 
-  if (!is_field && !field_holder->is_initialized()) {
-    if (!static_field_ok_in_clinit(field, method())) {
-      uncommon_trap(Deoptimization::Reason_uninitialized,
-                    Deoptimization::Action_reinterpret,
-                    NULL, "!static_field_ok_in_clinit");
-      return;
-    }
-  }
-
   // Deoptimize on putfield writes to call site target field.
   if (!is_get && field->is_call_site_target()) {
     uncommon_trap(Deoptimization::Reason_unhandled,
@@ -105,6 +63,11 @@
     return;
   }
 
+  if (C->needs_clinit_barrier(field, method())) {
+    clinit_barrier(field_holder, method());
+    if (stopped())  return;
+  }
+
   assert(field->will_link(method(), bc()), "getfield: typeflow responsibility");
 
   // Note:  We do not check for an unloaded field type here any more.
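With static_field_ok_in_clinit() gone, static field accesses funnel through the same needs_clinit_barrier() predicate, and a required barrier is now emitted inline by clinit_barrier() rather than always deoptimizing. A condensed model of the resulting control flow in do_field_access() (plain C++, invented names):

    enum InitState { not_started, being_initialized, fully_initialized };
    enum FieldPath { plain_access, guarded_access, deopt };

    // barrier_needed stands for C->needs_clinit_barrier(field, method()).
    FieldPath static_field_path(InitState holder_state, bool barrier_needed) {
      if (!barrier_needed) {
        return plain_access;   // initialized holder, or access elided by the predicate
      }
      if (holder_state == being_initialized) {
        return guarded_access; // clinit_barrier() emits the two guards + membar
      }
      return deopt;            // not started: Reason_uninitialized uncommon trap
    }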
--- old/src/hotspot/share/opto/parseHelper.cpp 2019-05-01 16:26:55.000000000 -0700
+++ new/src/hotspot/share/opto/parseHelper.cpp 2019-05-01 16:26:55.000000000 -0700
@@ -235,45 +235,6 @@
 }
 
-
-void Parse::emit_guard_for_new(ciInstanceKlass* klass) {
-  // Emit guarded new
-  //   if (klass->_init_thread != current_thread ||
-  //       klass->_init_state != being_initialized)
-  //      uncommon_trap
-  Node* cur_thread = _gvn.transform( new ThreadLocalNode() );
-  Node* merge = new RegionNode(3);
-  _gvn.set_type(merge, Type::CONTROL);
-  Node* kls = makecon(TypeKlassPtr::make(klass));
-
-  Node* init_thread_offset = _gvn.MakeConX(in_bytes(InstanceKlass::init_thread_offset()));
-  Node* adr_node = basic_plus_adr(kls, kls, init_thread_offset);
-  Node* init_thread = make_load(NULL, adr_node, TypeRawPtr::BOTTOM, T_ADDRESS, MemNode::unordered);
-  Node *tst = Bool( CmpP( init_thread, cur_thread), BoolTest::eq);
-  IfNode* iff = create_and_map_if(control(), tst, PROB_ALWAYS, COUNT_UNKNOWN);
-  set_control(IfTrue(iff));
-  merge->set_req(1, IfFalse(iff));
-
-  Node* init_state_offset = _gvn.MakeConX(in_bytes(InstanceKlass::init_state_offset()));
-  adr_node = basic_plus_adr(kls, kls, init_state_offset);
-  // Use T_BOOLEAN for InstanceKlass::_init_state so the compiler
-  // can generate code to load it as unsigned byte.
-  Node* init_state = make_load(NULL, adr_node, TypeInt::UBYTE, T_BOOLEAN, MemNode::unordered);
-  Node* being_init = _gvn.intcon(InstanceKlass::being_initialized);
-  tst = Bool( CmpI( init_state, being_init), BoolTest::eq);
-  iff = create_and_map_if(control(), tst, PROB_ALWAYS, COUNT_UNKNOWN);
-  set_control(IfTrue(iff));
-  merge->set_req(2, IfFalse(iff));
-
-  PreserveJVMState pjvms(this);
-  record_for_igvn(merge);
-  set_control(merge);
-
-  uncommon_trap(Deoptimization::Reason_uninitialized,
-                Deoptimization::Action_reinterpret,
-                klass);
-}
-
-
 //------------------------------do_new-----------------------------------------
 void Parse::do_new() {
   kill_dead_locals();
@@ -282,18 +243,19 @@
 
   bool will_link;
   ciInstanceKlass* klass = iter().get_klass(will_link)->as_instance_klass();
   assert(will_link, "_new: typeflow responsibility");
 
-  // Should initialize, or throw an InstantiationError?
-  if ((!klass->is_initialized() && !klass->is_being_initialized()) ||
-      klass->is_abstract() || klass->is_interface() ||
+  // Should throw an InstantiationError?
+  if (klass->is_abstract() || klass->is_interface() ||
       klass->name() == ciSymbol::java_lang_Class() ||
       iter().is_unresolved_klass()) {
-    uncommon_trap(Deoptimization::Reason_uninitialized,
-                  Deoptimization::Action_reinterpret,
+    uncommon_trap(Deoptimization::Reason_unhandled,
+                  Deoptimization::Action_none,
                   klass);
     return;
   }
 
-  if (klass->is_being_initialized()) {
-    emit_guard_for_new(klass);
+
+  if (C->needs_clinit_barrier(klass, method())) {
+    clinit_barrier(klass, method());
+    if (stopped())  return;
   }
 
   Node* kls = makecon(TypeKlassPtr::make(klass));
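do_new() follows the same pattern: the hand-rolled emit_guard_for_new() is removed, the InstantiationError cases now trap with Reason_unhandled/Action_none, and initialization handling is delegated to clinit_barrier(). Summarizing what GraphKit::clinit_barrier() emits per holder state (plain C++ model, invented names):

    enum InitState { not_started, being_initialized, fully_initialized };
    enum Emitted { nothing, guards_and_membar, uninitialized_trap };

    // context_needs_barrier stands for C->needs_clinit_barrier(ik, context).
    Emitted clinit_barrier_emits(InitState holder_state, bool context_needs_barrier) {
      if (holder_state == being_initialized) {
        // needs_clinit_barrier() may still elide the guards, e.g. when the
        // access happens inside the initializing <clinit> itself.
        return context_needs_barrier ? guards_and_membar : nothing;
      }
      if (holder_state == fully_initialized) {
        return nothing;
      }
      return uninitialized_trap; // Reason_uninitialized, Action_reinterpret
    }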