src/share/vm/opto/parse3.cpp

*** 178,188 ****
    // Build the resultant type of the load
    const Type *type;

    bool must_assert_null = false;

!   if( bt == T_OBJECT ) {
      if (!field->type()->is_loaded()) {
        type = TypeInstPtr::BOTTOM;
        must_assert_null = true;
      } else if (field->is_constant() && field->is_static()) {
        // This can happen if the constant oop is non-perm.
--- 178,188 ----
    // Build the resultant type of the load
    const Type *type;

    bool must_assert_null = false;

!   if (bt == T_OBJECT || bt == T_VALUETYPE) {
      if (!field->type()->is_loaded()) {
        type = TypeInstPtr::BOTTOM;
        must_assert_null = true;
      } else if (field->is_constant() && field->is_static()) {
        // This can happen if the constant oop is non-perm.
*** 198,207 ****
--- 198,215 ----
      type = Type::get_const_basic_type(bt);
    }
    if (support_IRIW_for_not_multiple_copy_atomic_cpu && field->is_volatile()) {
      insert_mem_bar(Op_MemBarVolatile);   // StoreLoad barrier
    }
+ 
+   if (type->isa_valuetypeptr()) {
+     // Load value type from flattened field
+     Node* vt = ValueTypeNode::make(_gvn, field_klass->as_value_klass(), map()->memory(), field->holder(), obj, offset);
+     push_node(bt, vt);
+     return;
+   }
+ 
    // Build the load.
    //
    MemNode::MemOrd mo = is_vol ? MemNode::acquire : MemNode::unordered;
    bool needs_atomic_access = is_vol || AlwaysAtomicAccesses;
    Node* ld = make_load(NULL, adr, type, bt, adr_type, mo, LoadNode::DependsOnlyOnTest, needs_atomic_access);
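
Note on the getfield hunk above: when the resultant type is a value type pointer, the parser no longer emits a single memory load; it builds a ValueTypeNode from the holder's memory at the field offset and pushes that node instead. Below is a minimal, standalone C++ sketch (not HotSpot code; Point, HolderReference, and HolderFlattened are hypothetical types) of what "flattened" means here: the value's fields are embedded directly in the holder's layout, so reading the field becomes field-wise loads at fixed offsets rather than following an object pointer.

    #include <cstdio>

    struct Point {            // stands in for a value type with two int fields
      int x;
      int y;
    };

    struct HolderReference {  // T_OBJECT field: one pointer, data lives elsewhere
      Point* p;
    };

    struct HolderFlattened {  // flattened T_VALUETYPE field: data embedded inline
      Point p;                // a "load" gathers x and y at fixed offsets
    };

    int main() {
      HolderFlattened h{{1, 2}};
      // Conceptually analogous to what ValueTypeNode::make does in the parser:
      // collect the value's fields from the holder's memory at the field offset.
      std::printf("x=%d y=%d\n", h.p.x, h.p.y);
      return 0;
    }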
*** 280,289 ****
--- 288,300 ----
        field_type = TypeInstPtr::BOTTOM;
      } else {
        field_type = TypeOopPtr::make_from_klass(field->type()->as_klass());
      }
      store = store_oop_to_object(control(), obj, adr, adr_type, val, field_type, bt, mo);
+   } else if (bt == T_VALUETYPE) {
+     // Store value type to flattened field
+     val->as_ValueType()->store_to_field(this, field->holder(), obj, offset);
    } else {
      bool needs_atomic_access = is_vol || AlwaysAtomicAccesses;
      store = store_to_memory(control(), adr, val, bt, adr_type, mo, needs_atomic_access);
    }

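
Note on the putfield hunk above: the store side mirrors the load path. For a T_VALUETYPE field, store_to_field writes the value's individual fields into the holder at the field's offset instead of emitting a single oop store. A minimal sketch under the same hypothetical Point/HolderFlattened layout as before:

    #include <cstdio>

    struct Point { int x; int y; };
    struct HolderFlattened { Point p; };

    // Conceptually analogous to ValueTypeNode::store_to_field:
    // one store per field of the value, into the holder's own layout.
    void store_point_to_field(HolderFlattened& holder, const Point& val) {
      holder.p.x = val.x;
      holder.p.y = val.y;
    }

    int main() {
      HolderFlattened h{{0, 0}};
      store_point_to_field(h, Point{3, 4});
      std::printf("x=%d y=%d\n", h.p.x, h.p.y);
      return 0;
    }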