< prev index next >
src/hotspot/share/opto/graphKit.cpp
Print this page
*** 2845,2854 ****
--- 2845,2908 ----
}
speculating = false;
return false;
}
+ // Emit a runtime guard that 'klass' is still in the being_initialized state.
+ // Loads InstanceKlass::_init_state (a byte) from the klass and compares it
+ // against InstanceKlass::being_initialized; if the state differs, the code
+ // deoptimizes with Reason_initialized / Action_reinterpret so the method is
+ // recompiled without the barrier once initialization has completed.
+ // NOTE(review): the load uses immutable_memory() — presumably safe because a
+ // state transition invalidates this compiled code via deopt; confirm against
+ // the clinit-barrier design (JDK-8223213).
+ void GraphKit::guard_klass_being_initialized(Node* klass) {
+ int init_state_off = in_bytes(InstanceKlass::init_state_offset());
+ // Address of the _init_state field inside the InstanceKlass.
+ Node* adr = basic_plus_adr(top(), klass, init_state_off);
+ Node* init_state = LoadNode::make(_gvn, NULL, immutable_memory(), adr,
+ adr->bottom_type()->is_ptr(), TypeInt::BYTE,
+ T_BYTE, MemNode::unordered);
+ init_state = _gvn.transform(init_state);
+
+ // Constant node for the expected state value.
+ Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
+
+ // Guard condition: _init_state == being_initialized.
+ Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
+ Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
+
+ // Fall through on the expected (being_initialized) path; trap otherwise.
+ { BuildCutout unless(this, tst, PROB_MAX);
+ uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
+ }
+ }
+
+ // Emit a runtime guard that the current thread is the one initializing
+ // 'klass'. Loads InstanceKlass::_init_thread and compares it against the
+ // executing thread (ThreadLocalNode); if they differ, deoptimize with
+ // Reason_uninitialized / Action_none — another thread must wait for class
+ // initialization to finish (handled in the interpreter after the trap).
+ void GraphKit::guard_init_thread(Node* klass) {
+ int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
+ // Address of the _init_thread field inside the InstanceKlass.
+ Node* adr = basic_plus_adr(top(), klass, init_thread_off);
+
+ // Raw pointer-sized load of the initializer thread.
+ // NOTE(review): immutable_memory() here presumably relies on deopt when the
+ // field changes, as in guard_klass_being_initialized — confirm.
+ Node* init_thread = LoadNode::make(_gvn, NULL, immutable_memory(), adr,
+ adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
+ T_ADDRESS, MemNode::unordered);
+ init_thread = _gvn.transform(init_thread);
+
+ // The currently executing thread.
+ Node* cur_thread = _gvn.transform(new ThreadLocalNode());
+
+ // Guard condition: current thread == _init_thread.
+ Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
+ Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
+
+ // Fall through when this thread owns the initialization; trap otherwise.
+ { BuildCutout unless(this, tst, PROB_MAX);
+ uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
+ }
+ }
+
+ // Emit a class-initialization barrier for 'ik' as observed at compile time,
+ // on behalf of compiled method 'context':
+ //  - being_initialized: if this compilation needs a barrier (i.e. 'context'
+ //    is not the initializer itself), guard both the init state and the
+ //    initializing thread, followed by a CPU-order membar so subsequent
+ //    accesses are not reordered above the guards;
+ //  - initialized: no barrier needed;
+ //  - otherwise (not yet started / erroneous): unconditionally trap with
+ //    Reason_uninitialized so the interpreter performs initialization.
+ void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
+ if (ik->is_being_initialized()) {
+ if (C->needs_clinit_barrier(ik, context)) {
+ // Constant klass pointer for the guards.
+ Node* klass = makecon(TypeKlassPtr::make(ik));
+ guard_klass_being_initialized(klass);
+ guard_init_thread(klass);
+ insert_mem_bar(Op_MemBarCPUOrder);
+ }
+ } else if (ik->is_initialized()) {
+ return; // no barrier needed
+ } else {
+ uncommon_trap(Deoptimization::Reason_uninitialized,
+ Deoptimization::Action_reinterpret,
+ NULL);
+ }
+ }
+
//------------------------maybe_cast_profiled_receiver-------------------------
// If the profile has seen exactly one type, narrow to exactly that type.
// Subsequent type checks will always fold up.
Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
ciKlass* require_klass,
< prev index next >