// First, check mark word for the biased lock pattern.
Node* mark_node = make_load(ctrl, mem, obj, oopDesc::mark_offset_in_bytes(), TypeX_X, TypeX_X->basic_type());
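// As a reminder (recalled from markOop.hpp's layout comments, not new
// code), the low bits of the mark word in this era are
// [... age | biased_lock:1 | lock:2]:
//   biased:   [JavaThread* | epoch | age | 1 | 01]
//   unlocked: [object hash         | age | 0 | 01]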

// Get fast path - mark word has the biased lock pattern.
ctrl = opt_bits_test(ctrl, fast_lock_region, 1, mark_node,
                     markOopDesc::biased_lock_mask_in_place,
                     markOopDesc::biased_lock_pattern, true);
// fast_lock_region->in(1) is set to slow path.
fast_lock_mem_phi->init_req(1, mem);
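//
// A minimal sketch (not HotSpot code) of the predicate the IR above
// encodes; has_biased_pattern is a hypothetical name, and the constants
// assume this era's markOopDesc layout (mask 0x7, pattern 0b101):
//
//   bool has_biased_pattern(intptr_t mark) {
//     return (mark & markOopDesc::biased_lock_mask_in_place) ==
//            markOopDesc::biased_lock_pattern;
//   }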

// Now check that the lock is biased to the current thread and has
// the same epoch and bias as Klass::_prototype_header.

// Special-case a fresh allocation to avoid building nodes:
Node* klass_node = AllocateNode::Ideal_klass(obj, &_igvn);
if (klass_node == NULL) {
  Node* k_adr = basic_plus_adr(obj, oopDesc::klass_offset_in_bytes());
  klass_node = transform_later(LoadKlassNode::make(_igvn, mem, k_adr, _igvn.type(k_adr)->is_ptr()));
#ifdef _LP64
  if (UseCompressedClassPointers && klass_node->is_DecodeNKlass()) {
    assert(klass_node->in(1)->Opcode() == Op_LoadNKlass, "sanity");
    klass_node->in(1)->init_req(0, ctrl);
  } else
#endif
  klass_node->init_req(0, ctrl);
}
Node* proto_node = make_load(ctrl, mem, klass_node, in_bytes(Klass::prototype_header_offset()), TypeX_X, TypeX_X->basic_type());
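// Klass::_prototype_header holds the biasable pattern plus the class's
// current bias epoch (bulk rebias bumps the epoch there) but no owner
// thread, so OR-ing the current thread into it below produces exactly the
// mark word we expect when the object is biased to this thread.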

Node* thread = transform_later(new (C) ThreadLocalNode());
Node* cast_thread = transform_later(new (C) CastP2XNode(ctrl, thread));
Node* o_node = transform_later(new (C) OrXNode(cast_thread, proto_node));
Node* x_node = transform_later(new (C) XorXNode(o_node, mark_node));

// Get slow path - mark word does NOT match the value.
Node* not_biased_ctrl = opt_bits_test(ctrl, region, 3, x_node,
                                      (~markOopDesc::age_mask_in_place), 0);
// region->in(3) is set to fast path - the object is biased to the current thread.
mem_phi->init_req(3, mem);
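//
// A minimal sketch (not HotSpot code) of the test the Or/Xor chain above
// builds; biased_to_current_thread is a hypothetical name for illustration:
//
//   bool biased_to_current_thread(intptr_t mark, intptr_t proto, intptr_t self) {
//     // XOR cancels every field that matches the expected mark word;
//     // only the age bits are allowed to differ.
//     return (((self | proto) ^ mark) & ~markOopDesc::age_mask_in_place) == 0;
//   }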