src/hotspot/share/opto/library_call.cpp
@@ -183,11 +183,10 @@
}
Node* generate_access_flags_guard(Node* kls,
int modifier_mask, int modifier_bits,
RegionNode* region);
Node* generate_interface_guard(Node* kls, RegionNode* region);
- Node* generate_value_guard(Node* kls, RegionNode* region);
enum ArrayKind {
AnyArray,
NonArray,
ObjectArray,
@@ -3413,14 +3412,10 @@
}
Node* LibraryCallKit::generate_interface_guard(Node* kls, RegionNode* region) {
return generate_access_flags_guard(kls, JVM_ACC_INTERFACE, 0, region);
}
-Node* LibraryCallKit::generate_value_guard(Node* kls, RegionNode* region) {
- return generate_access_flags_guard(kls, JVM_ACC_VALUE, 0, region);
-}
-
//-------------------------inline_native_Class_query-------------------
bool LibraryCallKit::inline_native_Class_query(vmIntrinsics::ID id) {
const Type* return_type = TypeInt::BOOL;
Node* prim_return_value = top(); // what happens if it's a primitive class?
bool never_see_null = !too_many_traps(Deoptimization::Reason_null_check);
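For context on the removed generate_value_guard above: it was simply generate_access_flags_guard specialized to JVM_ACC_VALUE, i.e. a klass whose access flags carry the value bit is diverted into the slow-path region. Below is a minimal stand-alone sketch of that test, assuming the guard takes the region path when (access_flags & modifier_mask) != modifier_bits, which matches how generate_interface_guard is used in this file; the flag value and helper names are illustrative, not HotSpot code.

#include <cstdint>
#include <cstdio>

// Illustrative flag value only; the real JVM_ACC_VALUE constant is defined
// by the Valhalla sources and may differ.
constexpr uint32_t kAccValue = 0x0100;

// Models generate_access_flags_guard(kls, mask, bits, region): returns true
// when the klass should be routed into the slow-path region.
static bool access_flags_guard(uint32_t access_flags,
                               uint32_t modifier_mask,
                               uint32_t modifier_bits) {
  return (access_flags & modifier_mask) != modifier_bits;
}

int main() {
  // The removed generate_value_guard corresponds to mask = kAccValue, bits = 0.
  printf("plain class -> slow path? %d\n", access_flags_guard(0, kAccValue, 0));         // 0
  printf("value class -> slow path? %d\n", access_flags_guard(kAccValue, kAccValue, 0)); // 1
  return 0;
}

The hashCode intrinsic no longer needs this klass-flag check because, as the comment added in the last hunk notes, the mark-word "is it unlocked" test already sends value types down the slow path.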
@@ -4345,17 +4340,10 @@
// We only go to the fast case code if we pass a number of guards. The
// paths which do not pass are accumulated in the slow_region.
RegionNode* slow_region = new RegionNode(1);
record_for_igvn(slow_region);
- const TypeOopPtr* obj_type = _gvn.type(obj)->is_oopptr();
- assert(!obj_type->isa_valuetype() || !obj_type->is_valuetypeptr(), "no value type here");
- if (is_static && obj_type->can_be_value_type()) {
- Node* obj_klass = load_object_klass(obj);
- generate_value_guard(obj_klass, slow_region);
- }
-
// If this is a virtual call, we generate a funny guard. We pull out
// the vtable entry corresponding to hashCode() from the target object.
// If the target method which we are calling happens to be the native
// Object hashCode() method, we pass the guard. We do not need this
// guard for non-virtual calls -- the caller is known to be the native
@@ -4372,10 +4360,11 @@
// the null check after castPP removal.
Node* no_ctrl = NULL;
Node* header = make_load(no_ctrl, header_addr, TypeX_X, TypeX_X->basic_type(), MemNode::unordered);
// Test the header to see if it is unlocked.
+ // This also serves as a guard against value types (they have the always_locked_pattern set).
Node *lock_mask = _gvn.MakeConX(markWord::biased_lock_mask_in_place);
Node *lmasked_header = _gvn.transform(new AndXNode(header, lock_mask));
Node *unlocked_val = _gvn.MakeConX(markWord::unlocked_value);
Node *chk_unlocked = _gvn.transform(new CmpXNode( lmasked_header, unlocked_val));
Node *test_unlocked = _gvn.transform(new BoolNode( chk_unlocked, BoolTest::ne));
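To make the added comment concrete: the generated test masks the object's header with biased_lock_mask_in_place and compares the result against unlocked_value, so anything whose low mark-word bits do not read as "unlocked" (locked, biased, or a value type carrying the always_locked_pattern) is routed to the slow path. Below is a stand-alone sketch of that bit test, not the generated Ideal graph; the mask and unlocked constants follow the classic markWord layout, while the always-locked pattern is an assumed stand-in for whatever Valhalla stamps into an inline type's header.

#include <cstdint>
#include <cstdio>

namespace mark {
  constexpr uintptr_t biased_lock_mask_in_place = 0x7; // low lock/bias bits
  constexpr uintptr_t unlocked_value            = 0x1; // normal, unlocked object
  constexpr uintptr_t always_locked_pattern     = 0x2; // assumed stand-in for a value type's header bits
}

// Mirrors the generated IR: AndX(header, lock_mask) != unlocked_value -> slow path.
static bool takes_slow_path(uintptr_t header) {
  return (header & mark::biased_lock_mask_in_place) != mark::unlocked_value;
}

int main() {
  uintptr_t plain_header = 0x12345600 | mark::unlocked_value;
  uintptr_t value_header = 0x12345600 | mark::always_locked_pattern;
  printf("plain object -> slow path? %d\n", takes_slow_path(plain_header)); // 0: passes the unlocked check
  printf("value object -> slow path? %d\n", takes_slow_path(value_header)); // 1: fails it, as the comment says
  return 0;
}

This is why the explicit generate_value_guard could be dropped: the existing unlocked-header check subsumes it on the hashCode intrinsic's fast path.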